mock-mcp 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter/index.cjs +465 -302
- package/dist/adapter/index.d.cts +93 -6
- package/dist/adapter/index.d.ts +93 -6
- package/dist/adapter/index.js +465 -302
- package/dist/client/connect.cjs +83 -5
- package/dist/client/connect.d.cts +10 -3
- package/dist/client/connect.d.ts +10 -3
- package/dist/client/connect.js +82 -4
- package/dist/client/index.cjs +83 -5
- package/dist/client/index.d.cts +1 -2
- package/dist/client/index.d.ts +1 -2
- package/dist/client/index.js +82 -4
- package/dist/daemon/index.cjs +55 -5
- package/dist/daemon/index.js +54 -4
- package/dist/index.cjs +559 -89
- package/dist/index.d.cts +137 -7
- package/dist/index.d.ts +137 -7
- package/dist/index.js +556 -90
- package/dist/shared/index.cjs +121 -1
- package/dist/shared/index.d.cts +240 -3
- package/dist/shared/index.d.ts +240 -3
- package/dist/shared/index.js +115 -2
- package/dist/{discovery-Dc2LdF8q.d.cts → types-bEGXLBF0.d.cts} +86 -1
- package/dist/{discovery-Dc2LdF8q.d.ts → types-bEGXLBF0.d.ts} +86 -1
- package/package.json +2 -1
- package/dist/protocol-CiwaQFOt.d.ts +0 -239
- package/dist/protocol-xZu-wb0n.d.cts +0 -239
- package/dist/types-BKREdsyr.d.cts +0 -32
- package/dist/types-BKREdsyr.d.ts +0 -32
package/dist/index.js
CHANGED
```diff
@@ -27,22 +27,50 @@ var __export = (target, all) => {
 // src/shared/discovery.ts
 var discovery_exports = {};
 __export(discovery_exports, {
+  cleanupGlobalIndex: () => cleanupGlobalIndex,
   computeProjectId: () => computeProjectId,
+  discoverAllDaemons: () => discoverAllDaemons,
   ensureDaemonRunning: () => ensureDaemonRunning,
   getCacheDir: () => getCacheDir,
   getDaemonEntryPath: () => getDaemonEntryPath,
+  getGlobalIndexPath: () => getGlobalIndexPath,
   getPaths: () => getPaths,
   healthCheck: () => healthCheck,
   randomToken: () => randomToken,
+  readGlobalIndex: () => readGlobalIndex,
   readRegistry: () => readRegistry,
+  registerDaemonGlobally: () => registerDaemonGlobally,
   releaseLock: () => releaseLock,
   resolveProjectRoot: () => resolveProjectRoot,
   sleep: () => sleep,
   tryAcquireLock: () => tryAcquireLock,
+  unregisterDaemonGlobally: () => unregisterDaemonGlobally,
+  writeGlobalIndex: () => writeGlobalIndex,
   writeRegistry: () => writeRegistry
 });
 function debugLog(_msg) {
 }
+function hasValidProjectMarker(dir) {
+  try {
+    const gitPath = path.join(dir, ".git");
+    try {
+      const stat = fssync.statSync(gitPath);
+      if (stat.isDirectory() || stat.isFile()) {
+        return true;
+      }
+    } catch {
+    }
+    const pkgPath = path.join(dir, "package.json");
+    try {
+      fssync.accessSync(pkgPath, fssync.constants.F_OK);
+      return true;
+    } catch {
+    }
+    return false;
+  } catch {
+    return false;
+  }
+}
 function resolveProjectRoot(startDir = process.cwd()) {
   let current = path.resolve(startDir);
   const root = path.parse(current).root;
```
```diff
@@ -177,7 +205,28 @@ function getDaemonEntryPath() {
   return path.join(process.cwd(), "dist", "index.js");
 }
 async function ensureDaemonRunning(opts = {}) {
-
+  let projectRoot = opts.projectRoot ?? resolveProjectRoot();
+  if (!hasValidProjectMarker(projectRoot)) {
+    const resolved = resolveProjectRoot(projectRoot);
+    if (resolved !== projectRoot && hasValidProjectMarker(resolved)) {
+      console.error(`[mock-mcp] Warning: projectRoot "${projectRoot}" doesn't look like a project root`);
+      console.error(`[mock-mcp] Found .git/package.json at: "${resolved}"`);
+      projectRoot = resolved;
+    } else {
+      console.error(`[mock-mcp] \u26A0\uFE0F WARNING: Could not find a valid project root!`);
+      console.error(`[mock-mcp] Current path: "${projectRoot}"`);
+      console.error(`[mock-mcp] This path doesn't contain .git or package.json.`);
+      console.error(`[mock-mcp] This may cause project mismatch issues.`);
+      console.error(`[mock-mcp] `);
+      console.error(`[mock-mcp] For MCP adapters, please specify --project-root explicitly:`);
+      console.error(`[mock-mcp] mock-mcp adapter --project-root /path/to/your/project`);
+      console.error(`[mock-mcp] `);
+      console.error(`[mock-mcp] In your MCP client config (Cursor, Claude Desktop, etc.):`);
+      console.error(`[mock-mcp] {`);
+      console.error(`[mock-mcp] "args": ["-y", "mock-mcp", "adapter", "--project-root", "/path/to/your/project"]`);
+      console.error(`[mock-mcp] }`);
+    }
+  }
   const projectId = computeProjectId(projectRoot);
   const { base, registryPath, lockPath, ipcPath } = getPaths(
     projectId,
```
```diff
@@ -285,6 +334,77 @@ ${daemonStderr}`);
 function sleep(ms) {
   return new Promise((resolve) => setTimeout(resolve, ms));
 }
+function getGlobalIndexPath(cacheDir) {
+  const base = path.join(getCacheDir(cacheDir), "mock-mcp");
+  return path.join(base, "active-daemons.json");
+}
+async function readGlobalIndex(cacheDir) {
+  const indexPath = getGlobalIndexPath(cacheDir);
+  try {
+    const txt = await fs.readFile(indexPath, "utf-8");
+    return JSON.parse(txt);
+  } catch {
+    return { daemons: [], updatedAt: (/* @__PURE__ */ new Date()).toISOString() };
+  }
+}
+async function writeGlobalIndex(index, cacheDir) {
+  const indexPath = getGlobalIndexPath(cacheDir);
+  const base = path.dirname(indexPath);
+  await fs.mkdir(base, { recursive: true });
+  await fs.writeFile(indexPath, JSON.stringify(index, null, 2), {
+    encoding: "utf-8",
+    mode: 384
+  });
+}
+async function registerDaemonGlobally(entry, cacheDir) {
+  const index = await readGlobalIndex(cacheDir);
+  index.daemons = index.daemons.filter((d) => d.projectId !== entry.projectId);
+  index.daemons.push(entry);
+  index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
+  await writeGlobalIndex(index, cacheDir);
+  debugLog(`Registered daemon ${entry.projectId} in global index`);
+}
+async function unregisterDaemonGlobally(projectId, cacheDir) {
+  const index = await readGlobalIndex(cacheDir);
+  index.daemons = index.daemons.filter((d) => d.projectId !== projectId);
+  index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
+  await writeGlobalIndex(index, cacheDir);
+}
+async function cleanupGlobalIndex(cacheDir) {
+  const index = await readGlobalIndex(cacheDir);
+  const validDaemons = [];
+  for (const entry of index.daemons) {
+    try {
+      process.kill(entry.pid, 0);
+      const healthy = await healthCheck(entry.ipcPath, 1e3);
+      if (healthy) {
+        validDaemons.push(entry);
+      } else {
+        debugLog(`Removing unhealthy daemon ${entry.projectId} (pid ${entry.pid})`);
+      }
+    } catch {
+      debugLog(`Removing dead daemon ${entry.projectId} (pid ${entry.pid})`);
+    }
+  }
+  if (validDaemons.length !== index.daemons.length) {
+    index.daemons = validDaemons;
+    index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
+    await writeGlobalIndex(index, cacheDir);
+  }
+}
+async function discoverAllDaemons(cacheDir) {
+  await cleanupGlobalIndex(cacheDir);
+  const index = await readGlobalIndex(cacheDir);
+  const results = [];
+  for (const entry of index.daemons) {
+    const registry = await readRegistry(entry.registryPath);
+    if (registry) {
+      const healthy = await healthCheck(entry.ipcPath, 2e3);
+      results.push({ registry, healthy });
+    }
+  }
+  return results;
+}
 var __curDirname;
 var init_discovery = __esm({
   "src/shared/discovery.ts"() {
```
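The hunk above adds a machine-wide index of running daemons, persisted next to the per-project registries; `cleanupGlobalIndex()` prunes entries whose pid is gone (`process.kill(pid, 0)` throws) or whose health check fails, so `discoverAllDaemons()` only returns live daemons. A minimal sketch of the persisted shape, inferred from `registerDaemonGlobally`/`readGlobalIndex` above (the interface names here are illustrative, not the package's actual declarations):

```ts
// Sketch only: field names come from the bundled code above; the interface
// names are assumptions (the real declarations live in the regenerated
// dist/types-*.d.ts files, which this diff does not reproduce in full).
interface GlobalDaemonEntry {
  projectId: string;
  projectRoot: string;
  ipcPath: string;      // socket path serving the daemon's /control endpoint
  registryPath: string; // per-project registry JSON written by writeRegistry()
  pid: number;
  startedAt: string;    // ISO timestamp copied from the project registry
  version: string;
}

interface GlobalDaemonIndex {
  daemons: GlobalDaemonEntry[];
  updatedAt: string;    // refreshed on every register/unregister/cleanup
}

// Persisted as JSON at <cacheDir>/mock-mcp/active-daemons.json with file
// mode 384 (0o600), as writeGlobalIndex() shows.
```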
```diff
@@ -428,6 +548,16 @@ var init_daemon = __esm({
         version: this.opts.version
       };
       await writeRegistry(registryPath, registry);
+      const globalEntry = {
+        projectId,
+        projectRoot: this.opts.projectRoot,
+        ipcPath,
+        registryPath,
+        pid: process.pid,
+        startedAt: registry.startedAt,
+        version: this.opts.version
+      };
+      await registerDaemonGlobally(globalEntry, this.opts.cacheDir);
       this.sweepTimer = setInterval(() => this.sweepExpiredClaims(), this.opts.sweepIntervalMs);
       this.sweepTimer.unref?.();
       this.resetIdleTimer();
@@ -456,6 +586,8 @@ var init_daemon = __esm({
       });
       this.batches.clear();
       this.pendingQueue.length = 0;
+      const projectId = computeProjectId(this.opts.projectRoot);
+      await unregisterDaemonGlobally(projectId, this.opts.cacheDir);
       this.logger.error("\u{1F44B} Daemon stopped");
     }
     // ===========================================================================
```
```diff
@@ -881,54 +1013,209 @@ var init_daemon = __esm({
     };
   }
 });
-var
-var
-  "src/adapter/daemon-client.ts"() {
-
-
-
-
-
+var MultiDaemonClient;
+var init_multi_daemon_client = __esm({
+  "src/adapter/multi-daemon-client.ts"() {
+    init_discovery();
+    MultiDaemonClient = class {
+      logger;
+      cacheDir;
+      adapterId;
+      constructor(opts = {}) {
+        this.logger = opts.logger ?? console;
+        this.cacheDir = opts.cacheDir;
+        this.adapterId = crypto.randomUUID();
       }
       // ===========================================================================
-      //
+      // Discovery
       // ===========================================================================
-
-
+      /**
+       * Discover all active and healthy daemons.
+       */
+      async discoverDaemons() {
+        return discoverAllDaemons(this.cacheDir);
       }
-
-
+      // ===========================================================================
+      // Aggregated RPC Methods
+      // ===========================================================================
+      /**
+       * Get aggregated status from all daemons.
+       */
+      async getAggregatedStatus() {
+        const daemons = await this.discoverDaemons();
+        const statuses = [];
+        let totalRuns = 0;
+        let totalPending = 0;
+        let totalClaimed = 0;
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) {
+            statuses.push({
+              version: registry.version,
+              projectId: registry.projectId,
+              projectRoot: registry.projectRoot,
+              pid: registry.pid,
+              uptime: 0,
+              runs: 0,
+              pending: 0,
+              claimed: 0,
+              totalBatches: 0,
+              healthy: false
+            });
+            continue;
+          }
+          try {
+            const status = await this.rpc(registry, "getStatus", {});
+            statuses.push({ ...status, healthy: true });
+            totalRuns += status.runs;
+            totalPending += status.pending;
+            totalClaimed += status.claimed;
+          } catch (error) {
+            this.logger.warn(`Failed to get status from daemon ${registry.projectId}: ${error}`);
+            statuses.push({
+              version: registry.version,
+              projectId: registry.projectId,
+              projectRoot: registry.projectRoot,
+              pid: registry.pid,
+              uptime: 0,
+              runs: 0,
+              pending: 0,
+              claimed: 0,
+              totalBatches: 0,
+              healthy: false
+            });
+          }
+        }
+        return { daemons: statuses, totalRuns, totalPending, totalClaimed };
       }
+      /**
+       * List all runs across all daemons.
+       */
+      async listAllRuns() {
+        const daemons = await this.discoverDaemons();
+        const allRuns = [];
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "listRuns", {});
+            for (const run of result.runs) {
+              allRuns.push({
+                ...run,
+                projectId: registry.projectId,
+                projectRoot: registry.projectRoot
+              });
+            }
+          } catch (error) {
+            this.logger.warn(`Failed to list runs from daemon ${registry.projectId}: ${error}`);
+          }
+        }
+        return allRuns;
+      }
+      /**
+       * Claim the next available batch from any daemon.
+       * Searches through all daemons in order until finding one with a pending batch.
+       */
       async claimNextBatch(args) {
-
-
-
-
-
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "claimNextBatch", {
+              adapterId: this.adapterId,
+              runId: args.runId,
+              leaseMs: args.leaseMs
+            });
+            if (result) {
+              return {
+                ...result,
+                projectId: registry.projectId,
+                projectRoot: registry.projectRoot
+              };
+            }
+          } catch (error) {
+            this.logger.warn(`Failed to claim batch from daemon ${registry.projectId}: ${error}`);
+          }
+        }
+        return null;
       }
+      /**
+       * Provide mock data for a batch.
+       * Automatically routes to the correct daemon based on batchId.
+       */
       async provideBatch(args) {
-
-
-          batchId: args.batchId
-
-
-        })
+        const parts = args.batchId.split(":");
+        if (parts.length < 2) {
+          return { ok: false, message: `Invalid batchId format: ${args.batchId}` };
+        }
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "provideBatch", {
+              adapterId: this.adapterId,
+              batchId: args.batchId,
+              claimToken: args.claimToken,
+              mocks: args.mocks
+            });
+            return result;
+          } catch (error) {
+            const msg = error instanceof Error ? error.message : String(error);
+            if (msg.includes("not found") || msg.includes("Not found")) {
+              continue;
+            }
+            return { ok: false, message: msg };
+          }
+        }
+        return { ok: false, message: `Batch not found: ${args.batchId}` };
       }
+      /**
+       * Release a batch.
+       */
       async releaseBatch(args) {
-
-
-
-
-
-
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "releaseBatch", {
+              adapterId: this.adapterId,
+              batchId: args.batchId,
+              claimToken: args.claimToken,
+              reason: args.reason
+            });
+            return result;
+          } catch (error) {
+            const msg = error instanceof Error ? error.message : String(error);
+            if (msg.includes("not found") || msg.includes("Not found")) {
+              continue;
+            }
+            return { ok: false, message: msg };
+          }
+        }
+        return { ok: false, message: `Batch not found: ${args.batchId}` };
       }
+      /**
+       * Get a specific batch by ID.
+       */
       async getBatch(batchId) {
-
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "getBatch", { batchId });
+            return result;
+          } catch (error) {
+            const msg = error instanceof Error ? error.message : String(error);
+            if (msg.includes("not found") || msg.includes("Not found")) {
+              continue;
+            }
+            throw error;
+          }
+        }
+        return null;
       }
       // ===========================================================================
-      // Internal
+      // Internal RPC
       // ===========================================================================
-      rpc(method, params) {
+      rpc(registry, method, params) {
         const payload = {
           jsonrpc: "2.0",
           id: crypto.randomUUID(),
@@ -939,11 +1226,11 @@ var init_daemon_client = __esm({
         const req = http.request(
           {
             method: "POST",
-            socketPath:
+            socketPath: registry.ipcPath,
             path: "/control",
             headers: {
               "content-type": "application/json",
-              "x-mock-mcp-token":
+              "x-mock-mcp-token": registry.token
             },
             timeout: 3e4
           },
```
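The new `MultiDaemonClient` replaces the single-daemon client inside the adapter: it reads the global index, fans status and run queries out across every healthy daemon, and routes batch operations by trying each daemon until one stops answering "not found". A hedged usage sketch of the surface visible in this bundle (result shapes are approximate and the mock payload is elided, since the full type declarations are not reproduced here):

```ts
// Usage sketch based on the methods visible above; only fields actually
// referenced in the bundled code are used.
import { MultiDaemonClient } from "mock-mcp";

async function pickUpNextBatch(): Promise<void> {
  const client = new MultiDaemonClient({ logger: console });

  // Aggregates getStatus() across every healthy daemon in the global index.
  const status = await client.getAggregatedStatus();
  console.log(`daemons: ${status.daemons.length}, pending: ${status.totalPending}`);

  // Walks daemons in discovery order and returns the first claimable batch,
  // annotated with the owning daemon's projectId/projectRoot.
  const claimed = await client.claimNextBatch({ leaseMs: 30_000 });
  if (!claimed) return;

  // provideBatch() routes by batchId; daemons answering "not found" are skipped.
  await client.provideBatch({
    batchId: claimed.batchId,
    claimToken: claimed.claimToken,
    mocks: {} // the mock payload shape depends on the test run; elided here
  });
}
```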
```diff
@@ -985,12 +1272,19 @@ __export(adapter_exports, {
 });
 async function runAdapter(opts = {}) {
   const logger = opts.logger ?? console;
-  const version = opts.version ?? "0.
-  logger.error("\u{1F50D}
-  const
-  const
-
-
+  const version = opts.version ?? "0.5.0";
+  logger.error("\u{1F50D} Initializing mock-mcp adapter (multi-daemon mode)...");
+  const multiDaemon = new MultiDaemonClient({ logger });
+  const daemons = await multiDaemon.discoverDaemons();
+  if (daemons.length > 0) {
+    logger.error(`\u2705 Found ${daemons.length} active daemon(s):`);
+    for (const d of daemons) {
+      const status = d.healthy ? "healthy" : "unhealthy";
+      logger.error(` - ${d.registry.projectId}: ${d.registry.projectRoot} (${status})`);
+    }
+  } else {
+    logger.error("\u2139\uFE0F No active daemons found. Waiting for test processes to start...");
+  }
   const server = new Server(
     {
       name: "mock-mcp-adapter",
@@ -1008,15 +1302,15 @@ async function runAdapter(opts = {}) {
     try {
       switch (name) {
         case "get_status": {
-          const result = await
-          return buildToolResponse(
+          const result = await multiDaemon.getAggregatedStatus();
+          return buildToolResponse(formatAggregatedStatus(result));
         }
         case "list_runs": {
-          const result = await
-          return buildToolResponse(
+          const result = await multiDaemon.listAllRuns();
+          return buildToolResponse(formatExtendedRuns(result));
         }
         case "claim_next_batch": {
-          const result = await
+          const result = await multiDaemon.claimNextBatch({
             runId: args?.runId,
             leaseMs: args?.leaseMs
           });
@@ -1026,14 +1320,17 @@ async function runAdapter(opts = {}) {
           if (!args?.batchId) {
             throw new Error("batchId is required");
           }
-          const result = await
+          const result = await multiDaemon.getBatch(args.batchId);
+          if (!result) {
+            throw new Error(`Batch not found: ${args.batchId}`);
+          }
           return buildToolResponse(formatBatch(result));
         }
         case "provide_batch_mock_data": {
           if (!args?.batchId || !args?.claimToken || !args?.mocks) {
             throw new Error("batchId, claimToken, and mocks are required");
           }
-          const result = await
+          const result = await multiDaemon.provideBatch({
             batchId: args.batchId,
             claimToken: args.claimToken,
             mocks: args.mocks
@@ -1044,7 +1341,7 @@ async function runAdapter(opts = {}) {
           if (!args?.batchId || !args?.claimToken) {
             throw new Error("batchId and claimToken are required");
           }
-          const result = await
+          const result = await multiDaemon.releaseBatch({
             batchId: args.batchId,
             claimToken: args.claimToken,
             reason: args?.reason
@@ -1056,7 +1353,7 @@ async function runAdapter(opts = {}) {
       }
     } catch (error) {
       const message = error instanceof Error ? error.message : String(error);
-      logger.error(`Tool error (${name})
+      logger.error(`Tool error (${name}): ${message}`);
       return buildToolResponse(`Error: ${message}`, true);
     }
   });
```
```diff
@@ -1070,53 +1367,86 @@ function buildToolResponse(text, isError = false) {
     isError
   };
 }
-function
-
+function formatAggregatedStatus(status) {
+  if (status.daemons.length === 0) {
+    return `# Mock MCP Status
 
-
-- **Project ID**: ${status.projectId}
-- **Project Root**: ${status.projectRoot}
-- **PID**: ${status.pid}
-- **Uptime**: ${Math.round(status.uptime / 1e3)}s
-
-## Batches
-- **Pending**: ${status.pending}
-- **Claimed**: ${status.claimed}
-- **Active Runs**: ${status.runs}
+No active daemons found. Start a test with \`MOCK_MCP=1\` to begin.
 `;
+  }
+  const lines = [
+    "# Mock MCP Status\n",
+    "## Summary",
+    `- **Active Daemons**: ${status.daemons.filter((d) => d.healthy).length}`,
+    `- **Total Active Runs**: ${status.totalRuns}`,
+    `- **Total Pending Batches**: ${status.totalPending}`,
+    `- **Total Claimed Batches**: ${status.totalClaimed}`,
+    "",
+    "## Daemons\n"
+  ];
+  for (const daemon of status.daemons) {
+    const healthIcon = daemon.healthy ? "\u2705" : "\u274C";
+    lines.push(`### ${healthIcon} ${daemon.projectRoot}`);
+    lines.push(`- **Project ID**: ${daemon.projectId}`);
+    lines.push(`- **Version**: ${daemon.version}`);
+    lines.push(`- **PID**: ${daemon.pid}`);
+    if (daemon.healthy) {
+      lines.push(`- **Uptime**: ${Math.round(daemon.uptime / 1e3)}s`);
+      lines.push(`- **Runs**: ${daemon.runs}`);
+      lines.push(`- **Pending**: ${daemon.pending}`);
+      lines.push(`- **Claimed**: ${daemon.claimed}`);
+    } else {
+      lines.push(`- **Status**: Not responding`);
+    }
+    lines.push("");
+  }
+  return lines.join("\n");
 }
-function
-  if (
-    return "No active test runs.";
+function formatExtendedRuns(runs) {
+  if (runs.length === 0) {
+    return "No active test runs.\n\nStart a test with `MOCK_MCP=1` to begin.";
   }
   const lines = ["# Active Test Runs\n"];
-
-
-
-
-
-
-
-
-
-
-
-
+  const byProject = /* @__PURE__ */ new Map();
+  for (const run of runs) {
+    const key = run.projectRoot;
+    if (!byProject.has(key)) {
+      byProject.set(key, []);
+    }
+    byProject.get(key).push(run);
+  }
+  for (const [projectRoot, projectRuns] of byProject) {
+    lines.push(`## Project: ${projectRoot}
+`);
+    for (const run of projectRuns) {
+      lines.push(`### Run: ${run.runId}`);
+      lines.push(`- **PID**: ${run.pid}`);
+      lines.push(`- **CWD**: ${run.cwd}`);
+      lines.push(`- **Started**: ${run.startedAt}`);
+      lines.push(`- **Pending Batches**: ${run.pendingBatches}`);
+      if (run.testMeta) {
+        if (run.testMeta.testFile) {
+          lines.push(`- **Test File**: ${run.testMeta.testFile}`);
+        }
+        if (run.testMeta.testName) {
+          lines.push(`- **Test Name**: ${run.testMeta.testName}`);
+        }
       }
+      lines.push("");
     }
-    lines.push("");
   }
   return lines.join("\n");
 }
 function formatClaimResult(result) {
   if (!result) {
-    return "No pending batches available to claim.";
+    return "No pending batches available to claim.\n\nMake sure a test is running with `MOCK_MCP=1` and has pending mock requests.";
   }
   const lines = [
     "# Batch Claimed Successfully\n",
     `**Batch ID**: \`${result.batchId}\``,
     `**Claim Token**: \`${result.claimToken}\``,
     `**Run ID**: ${result.runId}`,
+    `**Project**: ${result.projectRoot}`,
     `**Lease Until**: ${new Date(result.leaseUntil).toISOString()}`,
     "",
     "## Requests\n"
@@ -1175,8 +1505,7 @@ function formatProvideResult(result) {
 var TOOLS;
 var init_adapter = __esm({
   "src/adapter/adapter.ts"() {
-
-    init_discovery();
+    init_multi_daemon_client();
     TOOLS = [
       {
         name: "get_status",
```
```diff
@@ -1312,7 +1641,100 @@ init_daemon();
 
 // src/adapter/index.ts
 init_adapter();
-
+var DaemonClient = class {
+  constructor(ipcPath, token, adapterId) {
+    this.ipcPath = ipcPath;
+    this.token = token;
+    this.adapterId = adapterId;
+  }
+  // ===========================================================================
+  // RPC Methods
+  // ===========================================================================
+  async getStatus() {
+    return this.rpc("getStatus", {});
+  }
+  async listRuns() {
+    return this.rpc("listRuns", {});
+  }
+  async claimNextBatch(args) {
+    return this.rpc("claimNextBatch", {
+      adapterId: this.adapterId,
+      runId: args.runId,
+      leaseMs: args.leaseMs
+    });
+  }
+  async provideBatch(args) {
+    return this.rpc("provideBatch", {
+      adapterId: this.adapterId,
+      batchId: args.batchId,
+      claimToken: args.claimToken,
+      mocks: args.mocks
+    });
+  }
+  async releaseBatch(args) {
+    return this.rpc("releaseBatch", {
+      adapterId: this.adapterId,
+      batchId: args.batchId,
+      claimToken: args.claimToken,
+      reason: args.reason
+    });
+  }
+  async getBatch(batchId) {
+    return this.rpc("getBatch", { batchId });
+  }
+  // ===========================================================================
+  // Internal
+  // ===========================================================================
+  rpc(method, params) {
+    const payload = {
+      jsonrpc: "2.0",
+      id: crypto.randomUUID(),
+      method,
+      params
+    };
+    return new Promise((resolve, reject) => {
+      const req = http.request(
+        {
+          method: "POST",
+          socketPath: this.ipcPath,
+          path: "/control",
+          headers: {
+            "content-type": "application/json",
+            "x-mock-mcp-token": this.token
+          },
+          timeout: 3e4
+        },
+        (res) => {
+          let buf = "";
+          res.on("data", (chunk) => buf += chunk);
+          res.on("end", () => {
+            try {
+              const response = JSON.parse(buf);
+              if (response.error) {
+                reject(new Error(response.error.message));
+              } else {
+                resolve(response.result);
+              }
+            } catch (e) {
+              reject(e);
+            }
+          });
+        }
+      );
+      req.on("error", (err) => {
+        reject(new Error(`Daemon connection failed: ${err.message}`));
+      });
+      req.on("timeout", () => {
+        req.destroy();
+        reject(new Error("Daemon request timeout"));
+      });
+      req.end(JSON.stringify(payload));
+    });
+  }
+};
+
+// src/adapter/index.ts
+init_multi_daemon_client();
 
 // src/client/batch-mock-collector.ts
 init_discovery();
```
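`DaemonClient` is kept as a direct, single-daemon counterpart to `MultiDaemonClient`; its constructor takes the same `ipcPath`/`token` pair that the multi-daemon path reads from a daemon's registry. A sketch of wiring the two together (the pairing with `discoverAllDaemons` is illustrative, and calling it without an argument assumes the package's default cache directory is used):

```ts
// Illustrative wiring only: the constructor arguments mirror the registry
// fields used by MultiDaemonClient.rpc() above (socketPath + x-mock-mcp-token).
import crypto from "node:crypto";
import { DaemonClient, discoverAllDaemons } from "mock-mcp";

async function statusOfFirstHealthyDaemon(): Promise<void> {
  const daemons = await discoverAllDaemons(); // default cache dir assumed
  const first = daemons.find((d) => d.healthy);
  if (!first) return;

  const client = new DaemonClient(
    first.registry.ipcPath, // socket path serving the daemon's /control endpoint
    first.registry.token,   // sent as the x-mock-mcp-token header
    crypto.randomUUID()     // adapterId identifying this caller
  );
  console.log(await client.getStatus());
}
```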
```diff
@@ -1378,11 +1800,19 @@ var BatchMockCollector = class {
   }
   /**
    * Resolve projectRoot from options.
-   * Priority: projectRoot > filePath > undefined (auto-detect)
+   * Priority: projectRoot (if valid) > filePath > projectRoot (fallback) > undefined (auto-detect)
+   *
+   * A projectRoot is "valid" if it contains .git or package.json. This prevents
+   * accidentally using a wrong directory (e.g., user's home directory) when the
+   * caller mistakenly passes process.cwd() as projectRoot.
    */
   resolveProjectRootFromOptions(options) {
     if (options.projectRoot) {
-
+      const hasGit = this.hasGitOrPackageJson(options.projectRoot);
+      if (hasGit) {
+        return options.projectRoot;
+      }
+      this.logger.warn(`[mock-mcp] Warning: projectRoot "${options.projectRoot}" doesn't contain .git or package.json`);
     }
     if (options.filePath) {
       let filePath = options.filePath;
@@ -1401,8 +1831,36 @@ var BatchMockCollector = class {
       this.logger.log(`[mock-mcp] projectRoot: ${resolved}`);
       return resolved;
     }
+    if (options.projectRoot) {
+      this.logger.warn(`[mock-mcp] Warning: Using projectRoot "${options.projectRoot}" despite missing .git/package.json`);
+      return options.projectRoot;
+    }
     return void 0;
   }
+  /**
+   * Check if a directory contains .git or package.json
+   */
+  hasGitOrPackageJson(dir) {
+    try {
+      const gitPath = path.join(dir, ".git");
+      const pkgPath = path.join(dir, "package.json");
+      try {
+        const stat = fssync.statSync(gitPath);
+        if (stat.isDirectory() || stat.isFile()) {
+          return true;
+        }
+      } catch {
+      }
+      try {
+        fssync.accessSync(pkgPath, fssync.constants.F_OK);
+        return true;
+      } catch {
+      }
+      return false;
+    } catch {
+      return false;
+    }
+  }
   /**
   * Ensures the underlying connection is ready for use.
   */
```
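The two hunks above change how `BatchMockCollector` resolves a project root: an explicit `projectRoot` now only wins when it actually contains `.git` or `package.json`; otherwise the test file's location is preferred and the explicit value becomes a warned-about fallback. A pure-function restatement of that priority, with the two predicates passed in as placeholders so the sketch stays self-contained (they are not package APIs):

```ts
// Restates the documented priority:
//   projectRoot (if valid) > filePath > projectRoot (fallback) > undefined (auto-detect)
function resolveRootSketch(
  opts: { projectRoot?: string; filePath?: string },
  isValidProjectRoot: (dir: string) => boolean, // e.g. ".git or package.json present"
  rootFromFilePath: (file: string) => string    // e.g. walk up from the test file
): string | undefined {
  if (opts.projectRoot && isValidProjectRoot(opts.projectRoot)) {
    return opts.projectRoot;                // 1. explicit root that really looks like a project
  }
  if (opts.filePath) {
    return rootFromFilePath(opts.filePath); // 2. derive the root from the test file
  }
  if (opts.projectRoot) {
    return opts.projectRoot;                // 3. fall back to the explicit root (with a warning)
  }
  return undefined;                         // 4. let auto-detection from cwd take over
}
```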
```diff
@@ -2004,7 +2462,7 @@ async function runStatusCommand(_args) {
     resolveProjectRoot: resolveProjectRoot2,
     computeProjectId: computeProjectId2,
     getPaths: getPaths2,
-    readRegistry:
+    readRegistry: readRegistry3
   } = await Promise.resolve().then(() => (init_discovery(), discovery_exports));
   const projectRoot = resolveProjectRoot2();
   const projectId = computeProjectId2(projectRoot);
@@ -2013,7 +2471,7 @@ async function runStatusCommand(_args) {
   console.log(`Project ID: ${projectId}`);
   console.log(`IPC Path: ${ipcPath}`);
   console.log("");
-  const registry = await
+  const registry = await readRegistry3(registryPath);
   if (!registry) {
     console.log("\u274C Daemon is not running (no registry found)");
     return;
@@ -2083,12 +2541,12 @@ async function runStopCommand(_args) {
     resolveProjectRoot: resolveProjectRoot2,
     computeProjectId: computeProjectId2,
     getPaths: getPaths2,
-    readRegistry:
+    readRegistry: readRegistry3
   } = await Promise.resolve().then(() => (init_discovery(), discovery_exports));
   const projectRoot = resolveProjectRoot2();
   const projectId = computeProjectId2(projectRoot);
   const { registryPath, ipcPath } = getPaths2(projectId);
-  const registry = await
+  const registry = await readRegistry3(registryPath);
   if (!registry) {
     console.log("Daemon is not running.");
     return;
@@ -2130,6 +2588,8 @@ USAGE:
 COMMANDS:
   adapter      Start the MCP adapter (default)
                This is what you configure in your MCP client.
+               The adapter automatically discovers ALL active daemons
+               across all projects - no configuration needed!
 
   daemon       Start the daemon process
                Usually auto-started by adapter/test code.
@@ -2144,6 +2604,7 @@ COMMANDS:
 
 EXAMPLES:
   # In your MCP client configuration (Cursor, Claude Desktop, etc.):
+  # Simple configuration - works across all projects automatically!
   {
     "mcpServers": {
       "mock-mcp": {
@@ -2159,6 +2620,11 @@ EXAMPLES:
   # Stop daemon:
   mock-mcp stop
 
+HOW IT WORKS:
+  1. Run your tests with MOCK_MCP=1 to start a daemon and make mock requests
+  2. The MCP adapter discovers all active daemons automatically
+  3. Use list_runs/claim_next_batch tools from any MCP client to provide mocks
+
 ENVIRONMENT:
   MOCK_MCP=1           Enable mock generation in test code
   MOCK_MCP_CACHE_DIR   Override cache directory for daemon files
@@ -2187,4 +2653,4 @@ if (isCliExecution) {
   });
 }
 
-export { BatchMockCollector, DaemonClient, MockMcpDaemon, computeProjectId, connect, ensureDaemonRunning, resolveProjectRoot, runAdapter };
+export { BatchMockCollector, DaemonClient, MockMcpDaemon, MultiDaemonClient, cleanupGlobalIndex, computeProjectId, connect, discoverAllDaemons, ensureDaemonRunning, readGlobalIndex, resolveProjectRoot, runAdapter };
```
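Finally, the widened export list makes the global-index helpers part of the public entry point, so they can be consumed without going through the adapter. An illustrative read-only check (assuming the default cache directory when no argument is passed):

```ts
// Read-only sketch using only names present in the new export statement above.
import { cleanupGlobalIndex, readGlobalIndex } from "mock-mcp";

async function printActiveDaemons(): Promise<void> {
  await cleanupGlobalIndex();        // drop entries for dead or unhealthy daemons
  const index = await readGlobalIndex();
  for (const d of index.daemons) {
    console.log(`${d.projectId} (pid ${d.pid}) -> ${d.projectRoot}`);
  }
  console.log(`index last updated: ${index.updatedAt}`);
}
```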