@context-engine-bridge/context-engine-mcp-bridge 0.0.22 → 0.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/cli.js +8 -1
- package/src/connectCli.js +440 -0
- package/src/mcpServer.js +77 -4
- package/src/resultPathMapping.js +28 -8
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@context-engine-bridge/context-engine-mcp-bridge",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.24",
|
|
4
4
|
"description": "Context Engine MCP bridge (http/stdio proxy combining indexer + memory servers)",
|
|
5
5
|
"bin": {
|
|
6
6
|
"ctxce": "bin/ctxce.js",
|
|
@@ -29,4 +29,4 @@
|
|
|
29
29
|
"engines": {
|
|
30
30
|
"node": ">=18.0.0"
|
|
31
31
|
}
|
|
32
|
-
}
|
|
32
|
+
}
|
package/src/cli.js
CHANGED
|
@@ -5,11 +5,18 @@ import path from "node:path";
|
|
|
5
5
|
import { fileURLToPath } from "node:url";
|
|
6
6
|
import { runMcpServer, runHttpMcpServer } from "./mcpServer.js";
|
|
7
7
|
import { runAuthCommand } from "./authCli.js";
|
|
8
|
+
import { runConnectCommand } from "./connectCli.js";
|
|
8
9
|
|
|
9
10
|
export async function runCli() {
|
|
10
11
|
const argv = process.argv.slice(2);
|
|
11
12
|
const cmd = argv[0];
|
|
12
13
|
|
|
14
|
+
if (cmd === "connect") {
|
|
15
|
+
const args = argv.slice(1);
|
|
16
|
+
await runConnectCommand(args);
|
|
17
|
+
return;
|
|
18
|
+
}
|
|
19
|
+
|
|
13
20
|
if (cmd === "auth") {
|
|
14
21
|
const sub = argv[1] || "";
|
|
15
22
|
const args = argv.slice(2);
|
|
@@ -134,7 +141,7 @@ export async function runCli() {
|
|
|
134
141
|
|
|
135
142
|
// eslint-disable-next-line no-console
|
|
136
143
|
console.error(
|
|
137
|
-
`Usage: ${binName} mcp-serve [--workspace <path>] [--indexer-url <url>] [--memory-url <url>] [--collection <name>] | ${binName} mcp-http-serve [--workspace <path>] [--indexer-url <url>] [--memory-url <url>] [--port <port>] [--collection <name>] | ${binName} auth <login|status|logout> [--backend-url <url>] [--token <token>] [--username <name> --password <pass>]`,
|
|
144
|
+
`Usage: ${binName} connect <api-key> [--workspace <path>] [--interval <sec>] [--no-watch] | ${binName} mcp-serve [--workspace <path>] [--indexer-url <url>] [--memory-url <url>] [--collection <name>] | ${binName} mcp-http-serve [--workspace <path>] [--indexer-url <url>] [--memory-url <url>] [--port <port>] [--collection <name>] | ${binName} auth <login|status|logout> [--backend-url <url>] [--token <token>] [--username <name> --password <pass>]`,
|
|
138
145
|
);
|
|
139
146
|
process.exit(1);
|
|
140
147
|
}
|
|
@@ -0,0 +1,440 @@
|
|
|
1
|
+
import process from "node:process";
import path from "node:path";
import fs from "node:fs";
import { execSync, spawn } from "node:child_process";
import { fileURLToPath } from "node:url";

import { saveAuthEntry } from "./authConfig.js";
|
|
6
|
+
|
|
7
|
+
// Fixed endpoints for the hosted Context Engine SaaS deployment.
// uploadEndpoint/authBackendUrl are used directly by the connect flow below;
// runUploadClient also exports them to the Python child via environment
// variables (REMOTE_UPLOAD_ENDPOINT, CTXCE_AUTH_BACKEND_URL).
// NOTE(review): mcpIndexerUrl/mcpMemoryUrl are not referenced in this file —
// presumably consumed by the MCP bridge; verify before removing.
const SAAS_ENDPOINTS = {
  uploadEndpoint: "https://dev.context-engine.ai/upload",
  authBackendUrl: "https://dev.context-engine.ai",
  mcpIndexerUrl: "https://dev.context-engine.ai/indexer/mcp",
  mcpMemoryUrl: "https://dev.context-engine.ai/memory/mcp",
};

// Default polling period for the file watcher (ms); overridable via --interval.
const DEFAULT_WATCH_INTERVAL_MS = 30000;
// Delay before a queued follow-up sync starts after the previous one finishes (ms).
const DEFAULT_DEBOUNCE_MS = 2000;
|
|
16
|
+
|
|
17
|
+
/**
 * Parse CLI arguments for the `ctxce connect` subcommand.
 *
 * Accepted forms:
 *   - first bare (non-flag) argument: the API key
 *   - --api-key / --key / -k <key>
 *   - --workspace / --path / -w <path>  (default: process.cwd())
 *   - --skip-index / --auth-only        authenticate only
 *   - --no-watch / --once               index once, do not watch
 *   - --interval <seconds>              watch sync period; non-positive or
 *                                       malformed values keep the default
 *
 * @param {string[]} args argv slice after the `connect` subcommand.
 * @returns {{apiKey: string, workspace: string, skipIndex: boolean,
 *            noWatch: boolean, watchInterval: number}} parsed options
 *          (watchInterval in milliseconds).
 */
function parseConnectArgs(args) {
  let apiKey = "";
  let workspace = process.cwd();
  let skipIndex = false;
  let noWatch = false;
  let watchInterval = DEFAULT_WATCH_INTERVAL_MS;

  for (let i = 0; i < args.length; i += 1) {
    const a = args[i];

    // First bare argument is treated as the API key.
    if (!a.startsWith("-") && !apiKey) {
      apiKey = a;
      continue;
    }

    if ((a === "--api-key" || a === "--key" || a === "-k") && i + 1 < args.length) {
      apiKey = args[i + 1];
      i += 1;
      continue;
    }

    if ((a === "--workspace" || a === "--path" || a === "-w") && i + 1 < args.length) {
      workspace = args[i + 1];
      i += 1;
      continue;
    }

    if (a === "--skip-index" || a === "--auth-only") {
      skipIndex = true;
      continue;
    }

    if (a === "--no-watch" || a === "--once") {
      noWatch = true;
      continue;
    }

    if (a === "--interval" && i + 1 < args.length) {
      // Non-coercing Number.parseInt/Number.isNaN (the global isNaN coerces
      // its argument and can misclassify); invalid values keep the default.
      const parsed = Number.parseInt(args[i + 1], 10);
      if (!Number.isNaN(parsed) && parsed > 0) {
        watchInterval = parsed * 1000;
      }
      i += 1;
      continue;
    }
  }

  return { apiKey, workspace, skipIndex, noWatch, watchInterval };
}
|
|
66
|
+
|
|
67
|
+
/**
 * Exchange an API key for a SaaS session via POST /auth/login and persist
 * the resulting credentials with saveAuthEntry.
 *
 * Accepts both snake_case and camelCase field names in the response body.
 *
 * @param {string} apiKey Context Engine API key.
 * @returns {Promise<object|null>} the saved auth entry, or null on any failure
 *          (network error, non-OK status, or missing session id).
 */
async function authenticateWithApiKey(apiKey) {
  const authUrl = `${SAAS_ENDPOINTS.authBackendUrl}/auth/login`;

  console.error("[ctxce] Authenticating with Context Engine SaaS...");

  const payload = {
    client: "ctxce-cli",
    token: apiKey,
    workspace: process.cwd(),
  };

  let resp;
  try {
    resp = await fetch(authUrl, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(payload),
    });
  } catch (err) {
    console.error("[ctxce] Failed to connect to SaaS backend:", String(err));
    return null;
  }

  if (!resp || !resp.ok) {
    // Prefer the backend's `detail` message when the error body is JSON.
    let errorMsg = `HTTP ${resp ? resp.status : "<no-response>"}`;
    try {
      const errorData = await resp.json();
      if (errorData.detail) {
        errorMsg = errorData.detail;
      }
    } catch {
      // non-JSON error body: keep the HTTP-status message
    }
    console.error("[ctxce] Authentication failed:", errorMsg);
    return null;
  }

  let data;
  try {
    data = await resp.json();
  } catch {
    data = {};
  }

  const sessionId = data.session_id || data.sessionId || null;
  const userId = data.user_id || data.userId || null;
  const expiresAt = data.expires_at || data.expiresAt || null;
  const orgId = data.org_id || data.orgId || null;
  const orgSlug = data.org_slug || data.orgSlug || null;

  if (!sessionId) {
    console.error("[ctxce] Authentication response missing session ID.");
    return null;
  }

  const entry = {
    sessionId,
    userId,
    expiresAt,
    org_id: orgId,
    org_slug: orgSlug,
    apiKey,
  };

  saveAuthEntry(SAAS_ENDPOINTS.authBackendUrl, entry);

  console.error("[ctxce] Authenticated successfully!");
  if (userId) {
    console.error(`[ctxce] User: ${userId}`);
  }
  if (orgSlug) {
    console.error(`[ctxce] Organization: ${orgSlug}`);
  }

  return entry;
}
|
|
143
|
+
|
|
144
|
+
/**
 * Locate the Python standalone upload client script.
 *
 * Searches, in order: ./scripts under the cwd, ../scripts under the cwd,
 * and ../../scripts relative to this module (for a packaged install).
 *
 * @returns {string|null} absolute path to the first existing candidate,
 *          or null when none exist.
 */
function findUploadClient() {
  // BUG FIX: the original referenced `__dirname`, which does not exist in an
  // ES module — every call threw a ReferenceError before checking any
  // candidate. Derive this module's directory from import.meta.url instead.
  const moduleDir = path.dirname(fileURLToPath(import.meta.url));

  const candidates = [
    path.resolve(process.cwd(), "scripts", "standalone_upload_client.py"),
    path.resolve(process.cwd(), "..", "scripts", "standalone_upload_client.py"),
    path.resolve(moduleDir, "..", "..", "scripts", "standalone_upload_client.py"),
  ];

  for (const candidate of candidates) {
    if (fs.existsSync(candidate)) {
      return candidate;
    }
  }

  return null;
}
|
|
159
|
+
|
|
160
|
+
/**
 * Find a working Python interpreter on PATH.
 *
 * Tries `python3` first, then `python`, by executing `<cmd> --version`
 * with all stdio suppressed.
 *
 * @returns {string|null} the interpreter command name, or null if neither works.
 */
function detectPython() {
  const candidates = ["python3", "python"];
  for (const candidate of candidates) {
    try {
      // execSync throws when the command is missing or exits non-zero.
      execSync(`${candidate} --version`, { stdio: "ignore" });
      return candidate;
    } catch {
      // not available — try the next candidate
    }
  }
  return null;
}
|
|
170
|
+
|
|
171
|
+
/**
 * Run the Python upload client as a child process and stream its output.
 *
 * The child inherits the current environment plus the SaaS upload endpoint,
 * auth backend URL, session id, and host/container root mapping; it runs
 * with the workspace as its cwd and `--force` always set.
 *
 * @param {string} workspace absolute workspace path (also used as cwd).
 * @param {string} sessionId SaaS session id (exported as CTXCE_SESSION_ID).
 * @param {string} uploadClient path to standalone_upload_client.py.
 * @param {string} python python interpreter command.
 * @returns {Promise<boolean>} resolves true iff the child exited with code 0;
 *          never rejects (spawn errors resolve false).
 */
async function runUploadClient(workspace, sessionId, uploadClient, python) {
  const childEnv = {
    ...process.env,
    REMOTE_UPLOAD_ENDPOINT: SAAS_ENDPOINTS.uploadEndpoint,
    CTXCE_AUTH_BACKEND_URL: SAAS_ENDPOINTS.authBackendUrl,
    CTXCE_SESSION_ID: sessionId,
    HOST_ROOT: workspace,
    CONTAINER_ROOT: "/work",
  };

  const childArgs = [
    uploadClient,
    "--path", workspace,
    "--endpoint", SAAS_ENDPOINTS.uploadEndpoint,
    "--force",
  ];

  return new Promise((resolve) => {
    const child = spawn(python, childArgs, {
      env: childEnv,
      stdio: ["ignore", "pipe", "pipe"],
      cwd: workspace,
    });

    // Mirror both stdout and stderr to our stderr with an [upload] prefix,
    // skipping whitespace-only chunks.
    const forward = (chunk) => {
      const line = chunk.toString().trim();
      if (line) {
        console.error(`[upload] ${line}`);
      }
    };
    child.stdout.on("data", forward);
    child.stderr.on("data", forward);

    child.on("close", (code) => {
      resolve(code === 0);
    });

    child.on("error", (err) => {
      console.error(`[ctxce] Upload process error: ${err}`);
      resolve(false);
    });
  });
}
|
|
219
|
+
|
|
220
|
+
/**
 * Perform a one-shot index/upload of the workspace via the Python client.
 *
 * Requires both the standalone upload script and a Python interpreter to be
 * discoverable; otherwise logs guidance and reports failure.
 *
 * @param {string} workspace resolved workspace path.
 * @param {string} sessionId SaaS session id.
 * @returns {Promise<boolean>} true iff the upload completed successfully.
 */
async function triggerIndexing(workspace, sessionId) {
  console.error("[ctxce] Starting workspace indexing...");
  console.error(`[ctxce] Workspace: ${workspace}`);
  console.error(`[ctxce] Endpoint: ${SAAS_ENDPOINTS.uploadEndpoint}`);

  const python = detectPython();
  const uploadClient = findUploadClient();

  if (!python || !uploadClient) {
    console.error("[ctxce] Python upload client not available.");
    console.error("[ctxce] Install context-engine Python package or use VS Code extension.");
    return false;
  }

  return runUploadClient(workspace, sessionId, uploadClient, python);
}
|
|
236
|
+
|
|
237
|
+
/**
 * Start a polling file watcher that re-runs the upload client whenever the
 * workspace changes.
 *
 * Implementation notes (all visible below):
 *  - Change detection is mtime+size based (no content hashing), polled every
 *    `intervalMs` via setInterval.
 *  - Dotfiles and common build/vendor directories are skipped entirely.
 *  - Syncs never overlap: a change arriving mid-sync sets `pendingSync` and a
 *    follow-up sync is scheduled DEFAULT_DEBOUNCE_MS after the current one ends.
 *  - SIGINT/SIGTERM stop the interval and exit the process.
 *
 * @param {string} workspace resolved workspace path to watch.
 * @param {string} sessionId SaaS session id, forwarded to the upload client.
 * @param {number} intervalMs polling period in milliseconds.
 * @returns {{intervalId: object, cleanup: Function}|null} handle for the
 *          watcher, or null when the Python upload client is unavailable.
 */
function startWatcher(workspace, sessionId, intervalMs) {
  const uploadClient = findUploadClient();
  const python = detectPython();

  if (!uploadClient || !python) {
    console.error("[ctxce] Cannot start watcher: Python upload client not available.");
    return null;
  }

  console.error(`[ctxce] Starting file watcher (sync every ${intervalMs / 1000}s)...`);
  console.error("[ctxce] Press Ctrl+C to stop.");

  // Sync-state flags shared by the closures below.
  let isRunning = false;     // a sync is currently in flight
  let pendingSync = false;   // changes arrived while a sync was running
  let lastSyncTime = Date.now(); // NOTE(review): written but never read — candidate for removal

  // file path -> "mtimeMs-size" fingerprint of the last seen state
  const fileHashes = new Map();

  // Cheap change fingerprint: modification time + size (not a content hash).
  function getFileHash(filePath) {
    try {
      const stat = fs.statSync(filePath);
      return `${stat.mtime.getTime()}-${stat.size}`;
    } catch {
      return null;
    }
  }

  // Recursively collect regular files, skipping hidden entries and common
  // build/vendor directories. Unreadable directories are silently ignored.
  function scanDirectory(dir, files = []) {
    try {
      const entries = fs.readdirSync(dir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);

        if (entry.name.startsWith(".") ||
            entry.name === "node_modules" ||
            entry.name === "__pycache__" ||
            entry.name === "venv" ||
            entry.name === ".venv" ||
            entry.name === "dist" ||
            entry.name === "build") {
          continue;
        }

        if (entry.isDirectory()) {
          scanDirectory(fullPath, files);
        } else if (entry.isFile()) {
          files.push(fullPath);
        }
      }
    } catch {
    }
    return files;
  }

  // Rescan the workspace and update fileHashes in place; returns true when
  // any file was added, modified, or deleted since the previous scan.
  function detectChanges() {
    const currentFiles = scanDirectory(workspace);
    let hasChanges = false;

    const currentPaths = new Set(currentFiles);

    // Additions and modifications.
    for (const filePath of currentFiles) {
      const newHash = getFileHash(filePath);
      const oldHash = fileHashes.get(filePath);

      if (newHash !== oldHash) {
        hasChanges = true;
        fileHashes.set(filePath, newHash);
      }
    }

    // Deletions (Map tolerates delete-during-iteration).
    for (const oldPath of fileHashes.keys()) {
      if (!currentPaths.has(oldPath)) {
        hasChanges = true;
        fileHashes.delete(oldPath);
      }
    }

    return hasChanges;
  }

  // Run one upload; if another sync is already running, just queue a
  // follow-up which fires DEFAULT_DEBOUNCE_MS after this one completes.
  async function doSync() {
    if (isRunning) {
      pendingSync = true;
      return;
    }

    isRunning = true;
    const now = new Date().toLocaleTimeString();
    console.error(`[ctxce] [${now}] Syncing changes...`);

    try {
      const success = await runUploadClient(workspace, sessionId, uploadClient, python);
      if (success) {
        console.error(`[ctxce] [${now}] Sync complete.`);
      } else {
        console.error(`[ctxce] [${now}] Sync failed.`);
      }
    } catch (err) {
      console.error(`[ctxce] [${now}] Sync error: ${err}`);
    }

    isRunning = false;
    lastSyncTime = Date.now();

    if (pendingSync) {
      pendingSync = false;
      setTimeout(doSync, DEFAULT_DEBOUNCE_MS);
    }
  }

  // Seed the fingerprint map so the first poll only reports real changes.
  scanDirectory(workspace).forEach(f => {
    fileHashes.set(f, getFileHash(f));
  });

  const intervalId = setInterval(() => {
    if (detectChanges()) {
      doSync();
    }
  }, intervalMs);

  // NOTE(review): cleanup stops the interval but does not remove the signal
  // handlers registered below — harmless here since both paths exit.
  const cleanup = () => {
    clearInterval(intervalId);
    console.error("\n[ctxce] Watcher stopped.");
  };

  process.on("SIGINT", () => {
    cleanup();
    process.exit(0);
  });

  process.on("SIGTERM", () => {
    cleanup();
    process.exit(0);
  });

  return { intervalId, cleanup };
}
|
|
374
|
+
|
|
375
|
+
/** Print the post-connect success banner to stderr. */
function printSuccess() {
  const rule = "=".repeat(60);
  const lines = [
    "",
    rule,
    "  Context Engine connected!",
    rule,
    "",
    "Indexing complete. Watching for file changes...",
    "Press Ctrl+C to stop.",
    "",
  ];
  for (const line of lines) {
    console.error(line);
  }
}
|
|
385
|
+
|
|
386
|
+
/** Print usage help for `ctxce connect` to stderr. */
function printUsage() {
  const lines = [
    "Usage: ctxce connect <api-key> [options]",
    "",
    "Connect to Context Engine SaaS, index workspace, and watch for changes.",
    "",
    "Arguments:",
    "  <api-key>              Your Context Engine API key",
    "",
    "Options:",
    "  --workspace, -w <path>  Workspace path (default: current directory)",
    "  --interval <seconds>    Sync interval in seconds (default: 30)",
    "  --no-watch, --once      Index once and exit (don't watch for changes)",
    "  --skip-index            Only authenticate, skip initial indexing",
    "",
    "Examples:",
    "  ctxce connect sk_abc123xyz",
    "  ctxce connect sk_abc123xyz -w /path/to/repo",
    "  ctxce connect sk_abc123xyz --once",
    "",
  ];
  for (const line of lines) {
    console.error(line);
  }
}
|
|
406
|
+
|
|
407
|
+
/**
 * Entry point for `ctxce connect`: authenticate with an API key, optionally
 * index the workspace, then (unless --no-watch) start the polling watcher.
 *
 * Exits the process with code 1 on a missing API key, a nonexistent
 * workspace path, or authentication failure.
 *
 * @param {string[]} args argv slice after the `connect` subcommand.
 * @returns {Promise<void>}
 */
export async function runConnectCommand(args) {
  const opts = parseConnectArgs(args || []);

  if (!opts.apiKey) {
    printUsage();
    process.exit(1);
  }

  const resolvedWorkspace = path.resolve(opts.workspace);
  if (!fs.existsSync(resolvedWorkspace)) {
    console.error(`[ctxce] Workspace path does not exist: ${resolvedWorkspace}`);
    process.exit(1);
  }

  const authEntry = await authenticateWithApiKey(opts.apiKey);
  if (!authEntry) {
    process.exit(1);
  }

  if (!opts.skipIndex) {
    const indexed = await triggerIndexing(resolvedWorkspace, authEntry.sessionId);
    if (!indexed) {
      // Best-effort: a failed initial index should not kill the watcher.
      console.error("[ctxce] Initial indexing failed, but will continue.");
    }
  }

  if (opts.noWatch) {
    console.error("[ctxce] Done.");
    return;
  }

  printSuccess();
  startWatcher(resolvedWorkspace, authEntry.sessionId, opts.watchInterval);
}
|
package/src/mcpServer.js
CHANGED
|
@@ -282,6 +282,16 @@ function isTransientToolError(error) {
|
|
|
282
282
|
return true;
|
|
283
283
|
}
|
|
284
284
|
|
|
285
|
+
// StreamableHTTP transport errors after server restart
|
|
286
|
+
if (
|
|
287
|
+
lower.includes("failed to fetch") ||
|
|
288
|
+
lower.includes("fetch failed") ||
|
|
289
|
+
lower.includes("socket hang up") ||
|
|
290
|
+
lower.includes("aborted")
|
|
291
|
+
) {
|
|
292
|
+
return true;
|
|
293
|
+
}
|
|
294
|
+
|
|
285
295
|
if (typeof error.code === "number" && error.code === -32001 && !isSessionError(error)) {
|
|
286
296
|
return true;
|
|
287
297
|
}
|
|
@@ -298,6 +308,35 @@ function isTransientToolError(error) {
|
|
|
298
308
|
return false;
|
|
299
309
|
}
|
|
300
310
|
}
|
|
311
|
+
|
|
312
|
+
/**
 * Detect connection-level errors that indicate the underlying transport is
 * dead and the client must be fully recreated (not just retried on the same
 * socket). Matches Node/undici network error messages case-insensitively.
 *
 * @param {*} error an Error, a string, or anything stringifiable.
 * @returns {boolean} true when the message looks like a dead-connection error.
 */
function isConnectionDeadError(error) {
  try {
    let msg;
    if (error && typeof error.message === "string" && error.message) {
      msg = error.message;
    } else if (typeof error === "string") {
      msg = error;
    } else {
      msg = String(error || "");
    }
    if (!msg) {
      return false;
    }
    const lower = msg.toLowerCase();
    const needles = [
      "econnrefused",
      "econnreset",
      "socket hang up",
      "fetch failed",
      "failed to fetch",
      "ehostunreach",
      "enetunreach",
      "aborted",
    ];
    return needles.some((needle) => lower.includes(needle));
  } catch {
    return false;
  }
}
|
|
339
|
+
|
|
301
340
|
// MCP stdio server implemented using the official MCP TypeScript SDK.
|
|
302
341
|
// Acts as a low-level proxy for tools, forwarding tools/list and tools/call
|
|
303
342
|
// to the remote qdrant-indexer MCP server while adding a local `ping` tool.
|
|
@@ -749,10 +788,27 @@ async function createBridgeServer(options) {
|
|
|
749
788
|
}
|
|
750
789
|
remote = await indexerClient.listTools();
|
|
751
790
|
} catch (err) {
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
791
|
+
// If the transport is dead (server restarted), recreate clients and retry
|
|
792
|
+
// once before falling back to memory-only tools.
|
|
793
|
+
if (isConnectionDeadError(err) || isSessionError(err)) {
|
|
794
|
+
debugLog("[ctxce] tools/list: connection/session error, recreating clients and retrying: " + String(err));
|
|
795
|
+
try {
|
|
796
|
+
await initializeRemoteClients(true);
|
|
797
|
+
if (indexerClient) {
|
|
798
|
+
remote = await indexerClient.listTools();
|
|
799
|
+
}
|
|
800
|
+
} catch (retryErr) {
|
|
801
|
+
debugLog("[ctxce] tools/list: retry after reconnect also failed: " + String(retryErr));
|
|
802
|
+
}
|
|
803
|
+
}
|
|
804
|
+
|
|
805
|
+
// If we still don't have remote tools, fall back to memory-only
|
|
806
|
+
if (!remote) {
|
|
807
|
+
debugLog("[ctxce] Error calling remote tools/list: " + String(err));
|
|
808
|
+
const memoryToolsFallback = await listMemoryTools(memoryClient);
|
|
809
|
+
const toolsFallback = dedupeTools([...memoryToolsFallback]);
|
|
810
|
+
return { tools: toolsFallback };
|
|
811
|
+
}
|
|
756
812
|
}
|
|
757
813
|
|
|
758
814
|
try {
|
|
@@ -795,6 +851,9 @@ async function createBridgeServer(options) {
|
|
|
795
851
|
if (!Object.prototype.hasOwnProperty.call(obj, "session")) {
|
|
796
852
|
obj.session = sessionId;
|
|
797
853
|
}
|
|
854
|
+
if (defaultsPayload.collection && !Object.prototype.hasOwnProperty.call(obj, "collection")) {
|
|
855
|
+
obj.collection = defaultsPayload.collection;
|
|
856
|
+
}
|
|
798
857
|
args = obj;
|
|
799
858
|
}
|
|
800
859
|
|
|
@@ -818,6 +877,7 @@ async function createBridgeServer(options) {
|
|
|
818
877
|
const maxAttempts = getBridgeRetryAttempts();
|
|
819
878
|
const retryDelayMs = getBridgeRetryDelayMs();
|
|
820
879
|
let sessionRetried = false;
|
|
880
|
+
let connectionRetried = false;
|
|
821
881
|
let lastError;
|
|
822
882
|
|
|
823
883
|
for (let attempt = 0; attempt < maxAttempts; attempt += 1) {
|
|
@@ -843,6 +903,19 @@ async function createBridgeServer(options) {
|
|
|
843
903
|
} catch (err) {
|
|
844
904
|
lastError = err;
|
|
845
905
|
|
|
906
|
+
// Connection-level error (ECONNREFUSED, ECONNRESET, etc.) means the
|
|
907
|
+
// transport is dead (e.g. server restarted). Recreate clients so the
|
|
908
|
+
// next attempt uses a fresh connection.
|
|
909
|
+
if (isConnectionDeadError(err) && !connectionRetried) {
|
|
910
|
+
debugLog(
|
|
911
|
+
"[ctxce] tools/call: connection dead (server may have restarted); recreating clients and retrying: " +
|
|
912
|
+
String(err),
|
|
913
|
+
);
|
|
914
|
+
await initializeRemoteClients(true);
|
|
915
|
+
connectionRetried = true;
|
|
916
|
+
continue;
|
|
917
|
+
}
|
|
918
|
+
|
|
846
919
|
if (isSessionError(err) && !sessionRetried) {
|
|
847
920
|
debugLog(
|
|
848
921
|
"[ctxce] tools/call: detected remote MCP session error; reinitializing clients and retrying once: " +
|
package/src/resultPathMapping.js
CHANGED
|
@@ -226,7 +226,6 @@ function remapRelatedPathToClient(p, workspaceRoot) {
|
|
|
226
226
|
return path.join(root, relNative);
|
|
227
227
|
}
|
|
228
228
|
|
|
229
|
-
// If it's already a relative path, join it to the workspace root.
|
|
230
229
|
if (!s.startsWith("/") && !s.includes(":") && !s.includes("\\")) {
|
|
231
230
|
const relPosix = s.trim();
|
|
232
231
|
if (relPosix && relPosix !== "." && !relPosix.startsWith("../") && relPosix !== "..") {
|
|
@@ -234,7 +233,13 @@ function remapRelatedPathToClient(p, workspaceRoot) {
|
|
|
234
233
|
const joined = path.join(root, relNative);
|
|
235
234
|
const relCheck = path.relative(root, joined);
|
|
236
235
|
if (relCheck && !relCheck.startsWith(`..${path.sep}`) && relCheck !== "..") {
|
|
237
|
-
|
|
236
|
+
try {
|
|
237
|
+
if (fs.existsSync(joined)) {
|
|
238
|
+
return joined;
|
|
239
|
+
}
|
|
240
|
+
} catch {
|
|
241
|
+
// ignore
|
|
242
|
+
}
|
|
238
243
|
}
|
|
239
244
|
}
|
|
240
245
|
}
|
|
@@ -291,27 +296,37 @@ function remapHitPaths(hit, workspaceRoot) {
|
|
|
291
296
|
out.client_path_source = "workspace_join";
|
|
292
297
|
}
|
|
293
298
|
} else {
|
|
294
|
-
// Prefer
|
|
295
|
-
//
|
|
296
|
-
//
|
|
299
|
+
// Prefer host_path if it is an absolute path that exists on disk,
|
|
300
|
+
// regardless of whether it falls under workspaceRoot. This handles
|
|
301
|
+
// the common case where the bridge workspace (cwd) differs from the
|
|
302
|
+
// actual repo location (e.g. workspace=/home/user but files live at
|
|
303
|
+
// /media/datadrive/project/).
|
|
297
304
|
const hp = typeof hostPath === "string" ? hostPath : "";
|
|
298
305
|
const hpNorm = hp ? hp.replace(/\\/g, path.sep) : "";
|
|
299
306
|
if (
|
|
300
307
|
hpNorm &&
|
|
301
|
-
|
|
302
|
-
|
|
308
|
+
path.isAbsolute(hpNorm) &&
|
|
309
|
+
fs.existsSync(hpNorm)
|
|
303
310
|
) {
|
|
304
311
|
out.client_path = hpNorm;
|
|
305
312
|
if (diagnostics) {
|
|
306
313
|
out.client_path_joined = candidate;
|
|
307
314
|
out.client_path_source = "host_path";
|
|
308
315
|
}
|
|
309
|
-
} else {
|
|
316
|
+
} else if (fs.existsSync(candidate)) {
|
|
310
317
|
out.client_path = candidate;
|
|
311
318
|
if (diagnostics) {
|
|
312
319
|
out.client_path_joined = candidate;
|
|
313
320
|
out.client_path_source = "workspace_join";
|
|
314
321
|
}
|
|
322
|
+
} else {
|
|
323
|
+
// Neither host_path nor the joined candidate exist on disk.
|
|
324
|
+
// Do NOT produce a wrong absolute path; leave client_path unset
|
|
325
|
+
// so the override logic below falls back to the relative path.
|
|
326
|
+
if (diagnostics) {
|
|
327
|
+
out.client_path_joined = candidate;
|
|
328
|
+
out.client_path_source = "fallback_relative";
|
|
329
|
+
}
|
|
315
330
|
}
|
|
316
331
|
}
|
|
317
332
|
} catch {
|
|
@@ -448,8 +463,13 @@ export function maybeRemapToolResult(name, result, workspaceRoot) {
|
|
|
448
463
|
const lower = String(name).toLowerCase();
|
|
449
464
|
const shouldMap = (
|
|
450
465
|
lower === "repo_search" ||
|
|
466
|
+
lower === "code_search" ||
|
|
451
467
|
lower === "context_search" ||
|
|
452
468
|
lower === "context_answer" ||
|
|
469
|
+
lower === "info_request" ||
|
|
470
|
+
lower === "symbol_graph" ||
|
|
471
|
+
lower === "pattern_search" ||
|
|
472
|
+
lower === "cross_repo_search" ||
|
|
453
473
|
lower.endsWith("search_tests_for") ||
|
|
454
474
|
lower.endsWith("search_config_for") ||
|
|
455
475
|
lower.endsWith("search_callers_for") ||
|