mock-mcp 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter/index.cjs +465 -302
- package/dist/adapter/index.d.cts +93 -6
- package/dist/adapter/index.d.ts +93 -6
- package/dist/adapter/index.js +465 -302
- package/dist/client/connect.cjs +83 -5
- package/dist/client/connect.d.cts +10 -3
- package/dist/client/connect.d.ts +10 -3
- package/dist/client/connect.js +82 -4
- package/dist/client/index.cjs +83 -5
- package/dist/client/index.d.cts +1 -2
- package/dist/client/index.d.ts +1 -2
- package/dist/client/index.js +82 -4
- package/dist/daemon/index.cjs +55 -5
- package/dist/daemon/index.js +54 -4
- package/dist/index.cjs +559 -89
- package/dist/index.d.cts +137 -7
- package/dist/index.d.ts +137 -7
- package/dist/index.js +556 -90
- package/dist/shared/index.cjs +121 -1
- package/dist/shared/index.d.cts +240 -3
- package/dist/shared/index.d.ts +240 -3
- package/dist/shared/index.js +115 -2
- package/dist/{discovery-Dc2LdF8q.d.cts → types-bEGXLBF0.d.cts} +86 -1
- package/dist/{discovery-Dc2LdF8q.d.ts → types-bEGXLBF0.d.ts} +86 -1
- package/package.json +2 -1
- package/dist/protocol-CiwaQFOt.d.ts +0 -239
- package/dist/protocol-xZu-wb0n.d.cts +0 -239
- package/dist/types-BKREdsyr.d.cts +0 -32
- package/dist/types-BKREdsyr.d.ts +0 -32
package/dist/index.cjs
CHANGED
@@ -70,22 +70,50 @@ var __export = (target, all) => {
 // src/shared/discovery.ts
 var discovery_exports = {};
 __export(discovery_exports, {
+  cleanupGlobalIndex: () => cleanupGlobalIndex,
   computeProjectId: () => computeProjectId,
+  discoverAllDaemons: () => discoverAllDaemons,
   ensureDaemonRunning: () => ensureDaemonRunning,
   getCacheDir: () => getCacheDir,
   getDaemonEntryPath: () => getDaemonEntryPath,
+  getGlobalIndexPath: () => getGlobalIndexPath,
   getPaths: () => getPaths,
   healthCheck: () => healthCheck,
   randomToken: () => randomToken,
+  readGlobalIndex: () => readGlobalIndex,
   readRegistry: () => readRegistry,
+  registerDaemonGlobally: () => registerDaemonGlobally,
   releaseLock: () => releaseLock,
   resolveProjectRoot: () => resolveProjectRoot,
   sleep: () => sleep,
   tryAcquireLock: () => tryAcquireLock,
+  unregisterDaemonGlobally: () => unregisterDaemonGlobally,
+  writeGlobalIndex: () => writeGlobalIndex,
   writeRegistry: () => writeRegistry
 });
 function debugLog(_msg) {
 }
+function hasValidProjectMarker(dir) {
+  try {
+    const gitPath = path__default.default.join(dir, ".git");
+    try {
+      const stat = fssync__default.default.statSync(gitPath);
+      if (stat.isDirectory() || stat.isFile()) {
+        return true;
+      }
+    } catch {
+    }
+    const pkgPath = path__default.default.join(dir, "package.json");
+    try {
+      fssync__default.default.accessSync(pkgPath, fssync__default.default.constants.F_OK);
+      return true;
+    } catch {
+    }
+    return false;
+  } catch {
+    return false;
+  }
+}
 function resolveProjectRoot(startDir = process.cwd()) {
   let current = path__default.default.resolve(startDir);
   const root = path__default.default.parse(current).root;
@@ -220,7 +248,28 @@ function getDaemonEntryPath() {
   return path__default.default.join(process.cwd(), "dist", "index.js");
 }
 async function ensureDaemonRunning(opts = {}) {
-
+  let projectRoot = opts.projectRoot ?? resolveProjectRoot();
+  if (!hasValidProjectMarker(projectRoot)) {
+    const resolved = resolveProjectRoot(projectRoot);
+    if (resolved !== projectRoot && hasValidProjectMarker(resolved)) {
+      console.error(`[mock-mcp] Warning: projectRoot "${projectRoot}" doesn't look like a project root`);
+      console.error(`[mock-mcp] Found .git/package.json at: "${resolved}"`);
+      projectRoot = resolved;
+    } else {
+      console.error(`[mock-mcp] \u26A0\uFE0F WARNING: Could not find a valid project root!`);
+      console.error(`[mock-mcp] Current path: "${projectRoot}"`);
+      console.error(`[mock-mcp] This path doesn't contain .git or package.json.`);
+      console.error(`[mock-mcp] This may cause project mismatch issues.`);
+      console.error(`[mock-mcp] `);
+      console.error(`[mock-mcp] For MCP adapters, please specify --project-root explicitly:`);
+      console.error(`[mock-mcp] mock-mcp adapter --project-root /path/to/your/project`);
+      console.error(`[mock-mcp] `);
+      console.error(`[mock-mcp] In your MCP client config (Cursor, Claude Desktop, etc.):`);
+      console.error(`[mock-mcp] {`);
+      console.error(`[mock-mcp] "args": ["-y", "mock-mcp", "adapter", "--project-root", "/path/to/your/project"]`);
+      console.error(`[mock-mcp] }`);
+    }
+  }
   const projectId = computeProjectId(projectRoot);
   const { base, registryPath, lockPath, ipcPath } = getPaths(
     projectId,
@@ -328,6 +377,77 @@ ${daemonStderr}`);
 function sleep(ms) {
   return new Promise((resolve) => setTimeout(resolve, ms));
 }
+function getGlobalIndexPath(cacheDir) {
+  const base = path__default.default.join(getCacheDir(cacheDir), "mock-mcp");
+  return path__default.default.join(base, "active-daemons.json");
+}
+async function readGlobalIndex(cacheDir) {
+  const indexPath = getGlobalIndexPath(cacheDir);
+  try {
+    const txt = await fs__default.default.readFile(indexPath, "utf-8");
+    return JSON.parse(txt);
+  } catch {
+    return { daemons: [], updatedAt: (/* @__PURE__ */ new Date()).toISOString() };
+  }
+}
+async function writeGlobalIndex(index, cacheDir) {
+  const indexPath = getGlobalIndexPath(cacheDir);
+  const base = path__default.default.dirname(indexPath);
+  await fs__default.default.mkdir(base, { recursive: true });
+  await fs__default.default.writeFile(indexPath, JSON.stringify(index, null, 2), {
+    encoding: "utf-8",
+    mode: 384
+  });
+}
+async function registerDaemonGlobally(entry, cacheDir) {
+  const index = await readGlobalIndex(cacheDir);
+  index.daemons = index.daemons.filter((d) => d.projectId !== entry.projectId);
+  index.daemons.push(entry);
+  index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
+  await writeGlobalIndex(index, cacheDir);
+  debugLog(`Registered daemon ${entry.projectId} in global index`);
+}
+async function unregisterDaemonGlobally(projectId, cacheDir) {
+  const index = await readGlobalIndex(cacheDir);
+  index.daemons = index.daemons.filter((d) => d.projectId !== projectId);
+  index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
+  await writeGlobalIndex(index, cacheDir);
+}
+async function cleanupGlobalIndex(cacheDir) {
+  const index = await readGlobalIndex(cacheDir);
+  const validDaemons = [];
+  for (const entry of index.daemons) {
+    try {
+      process.kill(entry.pid, 0);
+      const healthy = await healthCheck(entry.ipcPath, 1e3);
+      if (healthy) {
+        validDaemons.push(entry);
+      } else {
+        debugLog(`Removing unhealthy daemon ${entry.projectId} (pid ${entry.pid})`);
+      }
+    } catch {
+      debugLog(`Removing dead daemon ${entry.projectId} (pid ${entry.pid})`);
+    }
+  }
+  if (validDaemons.length !== index.daemons.length) {
+    index.daemons = validDaemons;
+    index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
+    await writeGlobalIndex(index, cacheDir);
+  }
+}
+async function discoverAllDaemons(cacheDir) {
+  await cleanupGlobalIndex(cacheDir);
+  const index = await readGlobalIndex(cacheDir);
+  const results = [];
+  for (const entry of index.daemons) {
+    const registry = await readRegistry(entry.registryPath);
+    if (registry) {
+      const healthy = await healthCheck(entry.ipcPath, 2e3);
+      results.push({ registry, healthy });
+    }
+  }
+  return results;
+}
 var __curDirname;
 var init_discovery = __esm({
   "src/shared/discovery.ts"() {
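The hunk above introduces a machine-wide index of running daemons: each daemon is recorded in `active-daemons.json` under the cache directory, and `discoverAllDaemons` prunes dead or unhealthy entries before returning the survivors with a health flag. A minimal sketch of how these helpers could be driven from consumer code, assuming they are imported from the package root as the new `exports.*` assignments at the end of this diff suggest (the `cacheDir` argument is optional here, matching how the bundle passes it through to `getCacheDir(cacheDir)`):

```ts
// Sketch only: assumes the discovery helpers are importable from the package root,
// as the new exports.discoverAllDaemons / exports.readGlobalIndex assignments suggest.
import { discoverAllDaemons, readGlobalIndex } from "mock-mcp";

async function printActiveDaemons(): Promise<void> {
  // Falls back to { daemons: [], updatedAt } when active-daemons.json does not exist yet.
  const index = await readGlobalIndex();
  console.log(`Global index last updated: ${index.updatedAt}`);

  // Removes dead/unhealthy entries first, then health-checks each remaining daemon.
  const daemons = await discoverAllDaemons();
  for (const { registry, healthy } of daemons) {
    console.log(`${registry.projectId} (${registry.projectRoot}): ${healthy ? "healthy" : "unreachable"}`);
  }
}

printActiveDaemons().catch(console.error);
```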
@@ -471,6 +591,16 @@ var init_daemon = __esm({
         version: this.opts.version
       };
       await writeRegistry(registryPath, registry);
+      const globalEntry = {
+        projectId,
+        projectRoot: this.opts.projectRoot,
+        ipcPath,
+        registryPath,
+        pid: process.pid,
+        startedAt: registry.startedAt,
+        version: this.opts.version
+      };
+      await registerDaemonGlobally(globalEntry, this.opts.cacheDir);
       this.sweepTimer = setInterval(() => this.sweepExpiredClaims(), this.opts.sweepIntervalMs);
       this.sweepTimer.unref?.();
       this.resetIdleTimer();
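On startup the daemon now writes a global entry next to its per-project registry. The shape of that entry, inferred from the object literal above (the authoritative declarations live in the regenerated `types-bEGXLBF0.d.ts` files and may differ in detail):

```ts
// Inferred from the globalEntry literal above; treat as an approximation of the
// package's actual type declarations.
interface GlobalDaemonEntry {
  projectId: string;
  projectRoot: string;
  ipcPath: string;       // socket path used for the daemon's /control endpoint
  registryPath: string;  // per-project registry file read back by discoverAllDaemons
  pid: number;
  startedAt: string;     // copied from the registry written just before
  version: string;
}

interface GlobalDaemonIndex {
  daemons: GlobalDaemonEntry[];
  updatedAt: string;     // ISO timestamp refreshed on every write
}
```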
@@ -499,6 +629,8 @@ var init_daemon = __esm({
       });
       this.batches.clear();
       this.pendingQueue.length = 0;
+      const projectId = computeProjectId(this.opts.projectRoot);
+      await unregisterDaemonGlobally(projectId, this.opts.cacheDir);
       this.logger.error("\u{1F44B} Daemon stopped");
     }
     // ===========================================================================
@@ -924,54 +1056,209 @@ var init_daemon = __esm({
     };
   }
 });
-exports.
-var
-  "src/adapter/daemon-client.ts"() {
-
-
-
-
-
+exports.MultiDaemonClient = void 0;
+var init_multi_daemon_client = __esm({
+  "src/adapter/multi-daemon-client.ts"() {
+    init_discovery();
+    exports.MultiDaemonClient = class {
+      logger;
+      cacheDir;
+      adapterId;
+      constructor(opts = {}) {
+        this.logger = opts.logger ?? console;
+        this.cacheDir = opts.cacheDir;
+        this.adapterId = crypto__default.default.randomUUID();
       }
       // ===========================================================================
-      //
+      // Discovery
       // ===========================================================================
-
-
+      /**
+       * Discover all active and healthy daemons.
+       */
+      async discoverDaemons() {
+        return discoverAllDaemons(this.cacheDir);
       }
-
-
+      // ===========================================================================
+      // Aggregated RPC Methods
+      // ===========================================================================
+      /**
+       * Get aggregated status from all daemons.
+       */
+      async getAggregatedStatus() {
+        const daemons = await this.discoverDaemons();
+        const statuses = [];
+        let totalRuns = 0;
+        let totalPending = 0;
+        let totalClaimed = 0;
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) {
+            statuses.push({
+              version: registry.version,
+              projectId: registry.projectId,
+              projectRoot: registry.projectRoot,
+              pid: registry.pid,
+              uptime: 0,
+              runs: 0,
+              pending: 0,
+              claimed: 0,
+              totalBatches: 0,
+              healthy: false
+            });
+            continue;
+          }
+          try {
+            const status = await this.rpc(registry, "getStatus", {});
+            statuses.push({ ...status, healthy: true });
+            totalRuns += status.runs;
+            totalPending += status.pending;
+            totalClaimed += status.claimed;
+          } catch (error) {
+            this.logger.warn(`Failed to get status from daemon ${registry.projectId}: ${error}`);
+            statuses.push({
+              version: registry.version,
+              projectId: registry.projectId,
+              projectRoot: registry.projectRoot,
+              pid: registry.pid,
+              uptime: 0,
+              runs: 0,
+              pending: 0,
+              claimed: 0,
+              totalBatches: 0,
+              healthy: false
+            });
+          }
+        }
+        return { daemons: statuses, totalRuns, totalPending, totalClaimed };
       }
+      /**
+       * List all runs across all daemons.
+       */
+      async listAllRuns() {
+        const daemons = await this.discoverDaemons();
+        const allRuns = [];
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "listRuns", {});
+            for (const run of result.runs) {
+              allRuns.push({
+                ...run,
+                projectId: registry.projectId,
+                projectRoot: registry.projectRoot
+              });
+            }
+          } catch (error) {
+            this.logger.warn(`Failed to list runs from daemon ${registry.projectId}: ${error}`);
+          }
+        }
+        return allRuns;
+      }
+      /**
+       * Claim the next available batch from any daemon.
+       * Searches through all daemons in order until finding one with a pending batch.
+       */
       async claimNextBatch(args) {
-
-
-
-
-
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "claimNextBatch", {
+              adapterId: this.adapterId,
+              runId: args.runId,
+              leaseMs: args.leaseMs
+            });
+            if (result) {
+              return {
+                ...result,
+                projectId: registry.projectId,
+                projectRoot: registry.projectRoot
+              };
+            }
+          } catch (error) {
+            this.logger.warn(`Failed to claim batch from daemon ${registry.projectId}: ${error}`);
+          }
+        }
+        return null;
       }
+      /**
+       * Provide mock data for a batch.
+       * Automatically routes to the correct daemon based on batchId.
+       */
       async provideBatch(args) {
-
-
-          batchId: args.batchId
-
-
-        })
+        const parts = args.batchId.split(":");
+        if (parts.length < 2) {
+          return { ok: false, message: `Invalid batchId format: ${args.batchId}` };
+        }
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "provideBatch", {
+              adapterId: this.adapterId,
+              batchId: args.batchId,
+              claimToken: args.claimToken,
+              mocks: args.mocks
+            });
+            return result;
+          } catch (error) {
+            const msg = error instanceof Error ? error.message : String(error);
+            if (msg.includes("not found") || msg.includes("Not found")) {
+              continue;
+            }
+            return { ok: false, message: msg };
+          }
+        }
+        return { ok: false, message: `Batch not found: ${args.batchId}` };
       }
+      /**
+       * Release a batch.
+       */
       async releaseBatch(args) {
-
-
-
-
-
-
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "releaseBatch", {
+              adapterId: this.adapterId,
+              batchId: args.batchId,
+              claimToken: args.claimToken,
+              reason: args.reason
+            });
+            return result;
+          } catch (error) {
+            const msg = error instanceof Error ? error.message : String(error);
+            if (msg.includes("not found") || msg.includes("Not found")) {
+              continue;
+            }
+            return { ok: false, message: msg };
+          }
+        }
+        return { ok: false, message: `Batch not found: ${args.batchId}` };
       }
+      /**
+       * Get a specific batch by ID.
+       */
       async getBatch(batchId) {
-
+        const daemons = await this.discoverDaemons();
+        for (const { registry, healthy } of daemons) {
+          if (!healthy) continue;
+          try {
+            const result = await this.rpc(registry, "getBatch", { batchId });
+            return result;
+          } catch (error) {
+            const msg = error instanceof Error ? error.message : String(error);
+            if (msg.includes("not found") || msg.includes("Not found")) {
+              continue;
+            }
+            throw error;
+          }
+        }
+        return null;
       }
       // ===========================================================================
-      // Internal
+      // Internal RPC
       // ===========================================================================
-      rpc(method, params) {
+      rpc(registry, method, params) {
         const payload = {
           jsonrpc: "2.0",
           id: crypto__default.default.randomUUID(),
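MultiDaemonClient is the adapter-side counterpart of the global index: it aggregates status across every discovered daemon and routes batch operations by trying healthy daemons in order until one recognizes the `batchId`. A condensed claim-then-provide sketch; the import path is an assumption (this diff only shows the class inside the bundled CJS, alongside the reworked `dist/adapter` entries), and the `mocks` payload is a hypothetical placeholder:

```ts
// Sketch: import path and mocks payload are assumptions; method names and argument
// shapes mirror the MultiDaemonClient implementation shown above.
import { MultiDaemonClient } from "mock-mcp/adapter";

async function answerOnePendingBatch(): Promise<void> {
  const client = new MultiDaemonClient({ logger: console });

  // Walks healthy daemons in discovery order; resolves to null when nothing is pending.
  const claim = await client.claimNextBatch({ leaseMs: 60_000 });
  if (!claim) {
    console.log("No pending batches available to claim.");
    return;
  }

  // provideBatch routes by batchId: daemons that answer "not found" are skipped.
  const result = await client.provideBatch({
    batchId: claim.batchId,
    claimToken: claim.claimToken,
    mocks: {}, // hypothetical payload; real mocks answer the claimed requests
  });
  console.log(result.ok ? `Mocks delivered for ${claim.runId}` : `Failed: ${result.message}`);
}
```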
@@ -982,11 +1269,11 @@ var init_daemon_client = __esm({
         const req = http__default.default.request(
           {
             method: "POST",
-            socketPath:
+            socketPath: registry.ipcPath,
             path: "/control",
             headers: {
               "content-type": "application/json",
-              "x-mock-mcp-token":
+              "x-mock-mcp-token": registry.token
             },
             timeout: 3e4
           },
@@ -1028,12 +1315,19 @@ __export(adapter_exports, {
 });
 async function runAdapter(opts = {}) {
   const logger = opts.logger ?? console;
-  const version = opts.version ?? "0.
-  logger.error("\u{1F50D}
-  const
-  const
-
-
+  const version = opts.version ?? "0.5.0";
+  logger.error("\u{1F50D} Initializing mock-mcp adapter (multi-daemon mode)...");
+  const multiDaemon = new exports.MultiDaemonClient({ logger });
+  const daemons = await multiDaemon.discoverDaemons();
+  if (daemons.length > 0) {
+    logger.error(`\u2705 Found ${daemons.length} active daemon(s):`);
+    for (const d of daemons) {
+      const status = d.healthy ? "healthy" : "unhealthy";
+      logger.error(` - ${d.registry.projectId}: ${d.registry.projectRoot} (${status})`);
+    }
+  } else {
+    logger.error("\u2139\uFE0F No active daemons found. Waiting for test processes to start...");
+  }
   const server = new index_js.Server(
     {
       name: "mock-mcp-adapter",
@@ -1051,15 +1345,15 @@ async function runAdapter(opts = {}) {
     try {
       switch (name) {
         case "get_status": {
-          const result = await
-          return buildToolResponse(
+          const result = await multiDaemon.getAggregatedStatus();
+          return buildToolResponse(formatAggregatedStatus(result));
         }
         case "list_runs": {
-          const result = await
-          return buildToolResponse(
+          const result = await multiDaemon.listAllRuns();
+          return buildToolResponse(formatExtendedRuns(result));
         }
         case "claim_next_batch": {
-          const result = await
+          const result = await multiDaemon.claimNextBatch({
            runId: args?.runId,
            leaseMs: args?.leaseMs
          });
@@ -1069,14 +1363,17 @@ async function runAdapter(opts = {}) {
           if (!args?.batchId) {
             throw new Error("batchId is required");
           }
-          const result = await
+          const result = await multiDaemon.getBatch(args.batchId);
+          if (!result) {
+            throw new Error(`Batch not found: ${args.batchId}`);
+          }
           return buildToolResponse(formatBatch(result));
         }
         case "provide_batch_mock_data": {
           if (!args?.batchId || !args?.claimToken || !args?.mocks) {
             throw new Error("batchId, claimToken, and mocks are required");
           }
-          const result = await
+          const result = await multiDaemon.provideBatch({
             batchId: args.batchId,
             claimToken: args.claimToken,
             mocks: args.mocks
@@ -1087,7 +1384,7 @@ async function runAdapter(opts = {}) {
           if (!args?.batchId || !args?.claimToken) {
             throw new Error("batchId and claimToken are required");
           }
-          const result = await
+          const result = await multiDaemon.releaseBatch({
             batchId: args.batchId,
             claimToken: args.claimToken,
             reason: args?.reason
@@ -1099,7 +1396,7 @@ async function runAdapter(opts = {}) {
       }
     } catch (error) {
       const message = error instanceof Error ? error.message : String(error);
-      logger.error(`Tool error (${name})
+      logger.error(`Tool error (${name}): ${message}`);
       return buildToolResponse(`Error: ${message}`, true);
     }
   });
@@ -1113,53 +1410,86 @@ function buildToolResponse(text, isError = false) {
     isError
   };
 }
-function
-
+function formatAggregatedStatus(status) {
+  if (status.daemons.length === 0) {
+    return `# Mock MCP Status
 
-
-- **Project ID**: ${status.projectId}
-- **Project Root**: ${status.projectRoot}
-- **PID**: ${status.pid}
-- **Uptime**: ${Math.round(status.uptime / 1e3)}s
-
-## Batches
-- **Pending**: ${status.pending}
-- **Claimed**: ${status.claimed}
-- **Active Runs**: ${status.runs}
+No active daemons found. Start a test with \`MOCK_MCP=1\` to begin.
 `;
+  }
+  const lines = [
+    "# Mock MCP Status\n",
+    "## Summary",
+    `- **Active Daemons**: ${status.daemons.filter((d) => d.healthy).length}`,
+    `- **Total Active Runs**: ${status.totalRuns}`,
+    `- **Total Pending Batches**: ${status.totalPending}`,
+    `- **Total Claimed Batches**: ${status.totalClaimed}`,
+    "",
+    "## Daemons\n"
+  ];
+  for (const daemon of status.daemons) {
+    const healthIcon = daemon.healthy ? "\u2705" : "\u274C";
+    lines.push(`### ${healthIcon} ${daemon.projectRoot}`);
+    lines.push(`- **Project ID**: ${daemon.projectId}`);
+    lines.push(`- **Version**: ${daemon.version}`);
+    lines.push(`- **PID**: ${daemon.pid}`);
+    if (daemon.healthy) {
+      lines.push(`- **Uptime**: ${Math.round(daemon.uptime / 1e3)}s`);
+      lines.push(`- **Runs**: ${daemon.runs}`);
+      lines.push(`- **Pending**: ${daemon.pending}`);
+      lines.push(`- **Claimed**: ${daemon.claimed}`);
+    } else {
+      lines.push(`- **Status**: Not responding`);
+    }
+    lines.push("");
+  }
+  return lines.join("\n");
 }
-function
-  if (
-    return "No active test runs.";
+function formatExtendedRuns(runs) {
+  if (runs.length === 0) {
+    return "No active test runs.\n\nStart a test with `MOCK_MCP=1` to begin.";
   }
   const lines = ["# Active Test Runs\n"];
-
-
-
-
-
-
-
-
-
-
-
-
+  const byProject = /* @__PURE__ */ new Map();
+  for (const run of runs) {
+    const key = run.projectRoot;
+    if (!byProject.has(key)) {
+      byProject.set(key, []);
+    }
+    byProject.get(key).push(run);
+  }
+  for (const [projectRoot, projectRuns] of byProject) {
+    lines.push(`## Project: ${projectRoot}
+`);
+    for (const run of projectRuns) {
+      lines.push(`### Run: ${run.runId}`);
+      lines.push(`- **PID**: ${run.pid}`);
+      lines.push(`- **CWD**: ${run.cwd}`);
+      lines.push(`- **Started**: ${run.startedAt}`);
+      lines.push(`- **Pending Batches**: ${run.pendingBatches}`);
+      if (run.testMeta) {
+        if (run.testMeta.testFile) {
+          lines.push(`- **Test File**: ${run.testMeta.testFile}`);
+        }
+        if (run.testMeta.testName) {
+          lines.push(`- **Test Name**: ${run.testMeta.testName}`);
+        }
       }
+      lines.push("");
     }
-    lines.push("");
   }
   return lines.join("\n");
 }
 function formatClaimResult(result) {
   if (!result) {
-    return "No pending batches available to claim.";
+    return "No pending batches available to claim.\n\nMake sure a test is running with `MOCK_MCP=1` and has pending mock requests.";
   }
   const lines = [
     "# Batch Claimed Successfully\n",
     `**Batch ID**: \`${result.batchId}\``,
     `**Claim Token**: \`${result.claimToken}\``,
     `**Run ID**: ${result.runId}`,
+    `**Project**: ${result.projectRoot}`,
     `**Lease Until**: ${new Date(result.leaseUntil).toISOString()}`,
     "",
     "## Requests\n"
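formatAggregatedStatus and formatExtendedRuns render the tool responses as markdown grouped per project. Based purely on the template strings above, a `get_status` reply now looks roughly like this (all values are illustrative):

```markdown
# Mock MCP Status

## Summary
- **Active Daemons**: 1
- **Total Active Runs**: 1
- **Total Pending Batches**: 2
- **Total Claimed Batches**: 0

## Daemons

### ✅ /path/to/your/project
- **Project ID**: abc123def456
- **Version**: 0.5.1
- **PID**: 12345
- **Uptime**: 42s
- **Runs**: 1
- **Pending**: 2
- **Claimed**: 0
```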
@@ -1218,8 +1548,7 @@ function formatProvideResult(result) {
 var TOOLS;
 var init_adapter = __esm({
   "src/adapter/adapter.ts"() {
-
-    init_discovery();
+    init_multi_daemon_client();
     TOOLS = [
       {
         name: "get_status",
@@ -1355,7 +1684,100 @@ init_daemon();
 
 // src/adapter/index.ts
 init_adapter();
-
+var DaemonClient = class {
+  constructor(ipcPath, token, adapterId) {
+    this.ipcPath = ipcPath;
+    this.token = token;
+    this.adapterId = adapterId;
+  }
+  // ===========================================================================
+  // RPC Methods
+  // ===========================================================================
+  async getStatus() {
+    return this.rpc("getStatus", {});
+  }
+  async listRuns() {
+    return this.rpc("listRuns", {});
+  }
+  async claimNextBatch(args) {
+    return this.rpc("claimNextBatch", {
+      adapterId: this.adapterId,
+      runId: args.runId,
+      leaseMs: args.leaseMs
+    });
+  }
+  async provideBatch(args) {
+    return this.rpc("provideBatch", {
+      adapterId: this.adapterId,
+      batchId: args.batchId,
+      claimToken: args.claimToken,
+      mocks: args.mocks
+    });
+  }
+  async releaseBatch(args) {
+    return this.rpc("releaseBatch", {
+      adapterId: this.adapterId,
+      batchId: args.batchId,
+      claimToken: args.claimToken,
+      reason: args.reason
+    });
+  }
+  async getBatch(batchId) {
+    return this.rpc("getBatch", { batchId });
+  }
+  // ===========================================================================
+  // Internal
+  // ===========================================================================
+  rpc(method, params) {
+    const payload = {
+      jsonrpc: "2.0",
+      id: crypto__default.default.randomUUID(),
+      method,
+      params
+    };
+    return new Promise((resolve, reject) => {
+      const req = http__default.default.request(
+        {
+          method: "POST",
+          socketPath: this.ipcPath,
+          path: "/control",
+          headers: {
+            "content-type": "application/json",
+            "x-mock-mcp-token": this.token
+          },
+          timeout: 3e4
+        },
+        (res) => {
+          let buf = "";
+          res.on("data", (chunk) => buf += chunk);
+          res.on("end", () => {
+            try {
+              const response = JSON.parse(buf);
+              if (response.error) {
+                reject(new Error(response.error.message));
+              } else {
+                resolve(response.result);
+              }
+            } catch (e) {
+              reject(e);
+            }
+          });
+        }
+      );
+      req.on("error", (err) => {
+        reject(new Error(`Daemon connection failed: ${err.message}`));
+      });
+      req.on("timeout", () => {
+        req.destroy();
+        reject(new Error("Daemon request timeout"));
+      });
+      req.end(JSON.stringify(payload));
+    });
+  }
+};
+
+// src/adapter/index.ts
+init_multi_daemon_client();
 
 // src/client/batch-mock-collector.ts
 init_discovery();
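DaemonClient keeps the original single-daemon RPC surface (`getStatus`, `listRuns`, `claimNextBatch`, `provideBatch`, `releaseBatch`, `getBatch`) over the authenticated `/control` socket, and the final hunk of this diff exports it from the main entry. A sketch that feeds it the `ipcPath` and `token` of a discovered daemon's registry, pairing it with `discoverAllDaemons` from the same bundle:

```ts
// Sketch: constructs the newly exported DaemonClient from a discovered registry entry.
import crypto from "node:crypto";
import { DaemonClient, discoverAllDaemons } from "mock-mcp";

async function statusOfFirstDaemon(): Promise<void> {
  const [first] = await discoverAllDaemons();
  if (!first || !first.healthy) {
    console.log("No healthy daemon found.");
    return;
  }
  // The registry entry carries the socket path and token the /control endpoint checks.
  const client = new DaemonClient(first.registry.ipcPath, first.registry.token, crypto.randomUUID());
  console.log(await client.getStatus());
  console.log(await client.listRuns());
}

statusOfFirstDaemon().catch(console.error);
```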
@@ -1421,11 +1843,19 @@ var BatchMockCollector = class {
   }
   /**
    * Resolve projectRoot from options.
-   * Priority: projectRoot > filePath > undefined (auto-detect)
+   * Priority: projectRoot (if valid) > filePath > projectRoot (fallback) > undefined (auto-detect)
+   *
+   * A projectRoot is "valid" if it contains .git or package.json. This prevents
+   * accidentally using a wrong directory (e.g., user's home directory) when the
+   * caller mistakenly passes process.cwd() as projectRoot.
    */
   resolveProjectRootFromOptions(options) {
     if (options.projectRoot) {
-
+      const hasGit = this.hasGitOrPackageJson(options.projectRoot);
+      if (hasGit) {
+        return options.projectRoot;
+      }
+      this.logger.warn(`[mock-mcp] Warning: projectRoot "${options.projectRoot}" doesn't contain .git or package.json`);
     }
     if (options.filePath) {
       let filePath = options.filePath;
@@ -1444,8 +1874,36 @@ var BatchMockCollector = class {
       this.logger.log(`[mock-mcp] projectRoot: ${resolved}`);
       return resolved;
     }
+    if (options.projectRoot) {
+      this.logger.warn(`[mock-mcp] Warning: Using projectRoot "${options.projectRoot}" despite missing .git/package.json`);
+      return options.projectRoot;
+    }
     return void 0;
   }
+  /**
+   * Check if a directory contains .git or package.json
+   */
+  hasGitOrPackageJson(dir) {
+    try {
+      const gitPath = path__default.default.join(dir, ".git");
+      const pkgPath = path__default.default.join(dir, "package.json");
+      try {
+        const stat = fssync__default.default.statSync(gitPath);
+        if (stat.isDirectory() || stat.isFile()) {
+          return true;
+        }
+      } catch {
+      }
+      try {
+        fssync__default.default.accessSync(pkgPath, fssync__default.default.constants.F_OK);
+        return true;
+      } catch {
+      }
+      return false;
+    } catch {
+      return false;
+    }
+  }
   /**
    * Ensures the underlying connection is ready for use.
    */
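The collector-side change mirrors the daemon one: an explicit `options.projectRoot` is only trusted when it contains `.git` or `package.json`; otherwise the collector warns, tries `options.filePath`, and only then falls back to the unvalidated value. A sketch of the calling side under that priority order (the option names come from `resolveProjectRootFromOptions` above; that they are accepted by `connect()` is an assumption, not shown in this diff):

```ts
// Sketch: option names taken from resolveProjectRootFromOptions; passing them via
// connect() is an assumption about the public API, hedged accordingly.
import { connect } from "mock-mcp";

// Preferred: hand over the test file path so the collector can walk up to a directory
// that actually contains .git or package.json.
const collector = await connect({ filePath: "/path/to/your/project/tests/example.test.ts" });

// Also accepted: an explicit projectRoot. If it lacks .git/package.json the collector
// now logs a warning and only uses it as a last resort.
// const collector = await connect({ projectRoot: "/path/to/your/project" });
```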
@@ -2047,7 +2505,7 @@ async function runStatusCommand(_args) {
     resolveProjectRoot: resolveProjectRoot2,
     computeProjectId: computeProjectId2,
     getPaths: getPaths2,
-    readRegistry:
+    readRegistry: readRegistry3
   } = await Promise.resolve().then(() => (init_discovery(), discovery_exports));
   const projectRoot = resolveProjectRoot2();
   const projectId = computeProjectId2(projectRoot);
@@ -2056,7 +2514,7 @@ async function runStatusCommand(_args) {
   console.log(`Project ID: ${projectId}`);
   console.log(`IPC Path: ${ipcPath}`);
   console.log("");
-  const registry = await
+  const registry = await readRegistry3(registryPath);
   if (!registry) {
     console.log("\u274C Daemon is not running (no registry found)");
     return;
@@ -2126,12 +2584,12 @@ async function runStopCommand(_args) {
     resolveProjectRoot: resolveProjectRoot2,
     computeProjectId: computeProjectId2,
     getPaths: getPaths2,
-    readRegistry:
+    readRegistry: readRegistry3
   } = await Promise.resolve().then(() => (init_discovery(), discovery_exports));
   const projectRoot = resolveProjectRoot2();
   const projectId = computeProjectId2(projectRoot);
   const { registryPath, ipcPath } = getPaths2(projectId);
-  const registry = await
+  const registry = await readRegistry3(registryPath);
   if (!registry) {
     console.log("Daemon is not running.");
     return;
@@ -2173,6 +2631,8 @@ USAGE:
 COMMANDS:
 adapter Start the MCP adapter (default)
 This is what you configure in your MCP client.
+The adapter automatically discovers ALL active daemons
+across all projects - no configuration needed!
 
 daemon Start the daemon process
 Usually auto-started by adapter/test code.
@@ -2187,6 +2647,7 @@ COMMANDS:
 
 EXAMPLES:
 # In your MCP client configuration (Cursor, Claude Desktop, etc.):
+# Simple configuration - works across all projects automatically!
 {
 "mcpServers": {
 "mock-mcp": {
@@ -2202,6 +2663,11 @@ EXAMPLES:
 # Stop daemon:
 mock-mcp stop
 
+HOW IT WORKS:
+1. Run your tests with MOCK_MCP=1 to start a daemon and make mock requests
+2. The MCP adapter discovers all active daemons automatically
+3. Use list_runs/claim_next_batch tools from any MCP client to provide mocks
+
 ENVIRONMENT:
 MOCK_MCP=1 Enable mock generation in test code
 MOCK_MCP_CACHE_DIR Override cache directory for daemon files
@@ -2231,8 +2697,12 @@ if (isCliExecution) {
 }
 
 exports.BatchMockCollector = BatchMockCollector;
+exports.DaemonClient = DaemonClient;
+exports.cleanupGlobalIndex = cleanupGlobalIndex;
 exports.computeProjectId = computeProjectId;
 exports.connect = connect;
+exports.discoverAllDaemons = discoverAllDaemons;
 exports.ensureDaemonRunning = ensureDaemonRunning;
+exports.readGlobalIndex = readGlobalIndex;
 exports.resolveProjectRoot = resolveProjectRoot;
 exports.runAdapter = runAdapter;