mock-mcp 0.3.1 → 0.5.1
This diff shows the changes between publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- package/README.md +212 -124
- package/dist/adapter/index.cjs +875 -0
- package/dist/adapter/index.d.cts +142 -0
- package/dist/adapter/index.d.ts +142 -0
- package/dist/adapter/index.js +835 -0
- package/dist/client/connect.cjs +991 -0
- package/dist/client/connect.d.cts +218 -0
- package/dist/client/connect.d.ts +211 -7
- package/dist/client/connect.js +941 -20
- package/dist/client/index.cjs +992 -0
- package/dist/client/index.d.cts +3 -0
- package/dist/client/index.d.ts +3 -2
- package/dist/client/index.js +951 -2
- package/dist/daemon/index.cjs +717 -0
- package/dist/daemon/index.d.cts +62 -0
- package/dist/daemon/index.d.ts +62 -0
- package/dist/daemon/index.js +678 -0
- package/dist/index.cjs +2708 -0
- package/dist/index.d.cts +602 -0
- package/dist/index.d.ts +602 -11
- package/dist/index.js +2651 -53
- package/dist/shared/index.cjs +506 -0
- package/dist/shared/index.d.cts +241 -0
- package/dist/shared/index.d.ts +241 -0
- package/dist/shared/index.js +423 -0
- package/dist/types-bEGXLBF0.d.cts +190 -0
- package/dist/types-bEGXLBF0.d.ts +190 -0
- package/package.json +45 -4
- package/dist/client/batch-mock-collector.d.ts +0 -111
- package/dist/client/batch-mock-collector.js +0 -308
- package/dist/client/util.d.ts +0 -1
- package/dist/client/util.js +0 -3
- package/dist/connect.cjs +0 -400
- package/dist/connect.d.cts +0 -82
- package/dist/server/index.d.ts +0 -1
- package/dist/server/index.js +0 -1
- package/dist/server/test-mock-mcp-server.d.ts +0 -73
- package/dist/server/test-mock-mcp-server.js +0 -419
- package/dist/types.d.ts +0 -45
- package/dist/types.js +0 -2

package/dist/adapter/index.js
@@ -0,0 +1,835 @@
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { ListToolsRequestSchema, CallToolRequestSchema } from '@modelcontextprotocol/sdk/types.js';
import http from 'http';
import crypto2 from 'crypto';
import fs from 'fs/promises';
import 'fs';
import os from 'os';
import path from 'path';
import 'child_process';
import { fileURLToPath } from 'url';
import 'module';

// src/adapter/adapter.ts
function debugLog(_msg) {
}
// bundler remnant: computes the module directory, but the result is discarded
(() => {
  try {
    const metaUrl = import.meta.url;
    if (metaUrl && typeof metaUrl === "string" && metaUrl.startsWith("file://")) {
      return path.dirname(fileURLToPath(metaUrl));
    }
  } catch {
  }
  return process.cwd();
})();
function getCacheDir(override) {
  if (override) {
    return override;
  }
  const envCacheDir = process.env.MOCK_MCP_CACHE_DIR;
  if (envCacheDir) {
    return envCacheDir;
  }
  const xdg = process.env.XDG_CACHE_HOME;
  if (xdg) {
    return xdg;
  }
  if (process.platform === "win32" && process.env.LOCALAPPDATA) {
    return process.env.LOCALAPPDATA;
  }
  const home = os.homedir();
  if (home) {
    return path.join(home, ".cache");
  }
  return os.tmpdir();
}
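
The resolution order above is strict: an explicit override argument, then MOCK_MCP_CACHE_DIR, then XDG_CACHE_HOME, then LOCALAPPDATA (Windows only), then ~/.cache under the home directory, with os.tmpdir() as the last resort.
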
async function readRegistry(registryPath) {
  try {
    const txt = await fs.readFile(registryPath, "utf-8");
    return JSON.parse(txt);
  } catch (error) {
    debugLog(`readRegistry error for ${registryPath}: ${error instanceof Error ? error.message : String(error)}`);
    return null;
  }
}
async function healthCheck(ipcPath, timeoutMs = 2e3) {
  return new Promise((resolve) => {
    const req = http.request(
      {
        method: "GET",
        socketPath: ipcPath,
        path: "/health",
        timeout: timeoutMs
      },
      (res) => {
        resolve(res.statusCode === 200);
      }
    );
    req.on("error", () => resolve(false));
    req.on("timeout", () => {
      req.destroy();
      resolve(false);
    });
    req.end();
  });
}
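
A daemon counts as reachable only when GET /health over the daemon's IPC socket path answers 200 within the timeout. A minimal usage sketch for the helper above; the socket path is hypothetical:

const ok = await healthCheck("/tmp/mock-mcp/daemon.sock");
console.log(ok ? "daemon healthy" : "daemon unreachable");
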
function getGlobalIndexPath(cacheDir) {
  const base = path.join(getCacheDir(cacheDir), "mock-mcp");
  return path.join(base, "active-daemons.json");
}
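
With no override and no environment variables set, the index therefore resolves to ~/.cache/mock-mcp/active-daemons.json on Linux and macOS, and to %LOCALAPPDATA%\mock-mcp\active-daemons.json on Windows.
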
async function readGlobalIndex(cacheDir) {
  const indexPath = getGlobalIndexPath(cacheDir);
  try {
    const txt = await fs.readFile(indexPath, "utf-8");
    return JSON.parse(txt);
  } catch {
    return { daemons: [], updatedAt: (/* @__PURE__ */ new Date()).toISOString() };
  }
}
async function writeGlobalIndex(index, cacheDir) {
  const indexPath = getGlobalIndexPath(cacheDir);
  const base = path.dirname(indexPath);
  await fs.mkdir(base, { recursive: true });
  await fs.writeFile(indexPath, JSON.stringify(index, null, 2), {
    encoding: "utf-8",
    mode: 384 // 0o600: readable and writable by the owner only
  });
}
async function cleanupGlobalIndex(cacheDir) {
  const index = await readGlobalIndex(cacheDir);
  const validDaemons = [];
  for (const entry of index.daemons) {
    try {
      process.kill(entry.pid, 0); // signal 0: existence probe only, throws if the process is gone
      const healthy = await healthCheck(entry.ipcPath, 1e3);
      if (healthy) {
        validDaemons.push(entry);
      } else {
        debugLog(`Removing unhealthy daemon ${entry.projectId} (pid ${entry.pid})`);
      }
    } catch {
      debugLog(`Removing dead daemon ${entry.projectId} (pid ${entry.pid})`);
    }
  }
  if (validDaemons.length !== index.daemons.length) {
    index.daemons = validDaemons;
    index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
    await writeGlobalIndex(index, cacheDir);
  }
}
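
The fields the cleanup and discovery code reads (pid, ipcPath, projectId, registryPath, plus the top-level daemons and updatedAt) imply index entries shaped roughly like this sketch; all values are hypothetical:

const exampleIndex = {
  daemons: [{
    projectId: "a1b2c3",
    pid: 12345,
    ipcPath: "/tmp/mock-mcp/a1b2c3.sock",
    registryPath: "/home/alice/.cache/mock-mcp/a1b2c3/registry.json"
  }],
  updatedAt: new Date().toISOString()
};
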
async function discoverAllDaemons(cacheDir) {
  await cleanupGlobalIndex(cacheDir);
  const index = await readGlobalIndex(cacheDir);
  const results = [];
  for (const entry of index.daemons) {
    const registry = await readRegistry(entry.registryPath);
    if (registry) {
      const healthy = await healthCheck(entry.ipcPath, 2e3);
      results.push({ registry, healthy });
    }
  }
  return results;
}

// src/adapter/multi-daemon-client.ts
var MultiDaemonClient = class {
  logger;
  cacheDir;
  adapterId;
  constructor(opts = {}) {
    this.logger = opts.logger ?? console;
    this.cacheDir = opts.cacheDir;
    this.adapterId = crypto2.randomUUID();
  }
  // ===========================================================================
  // Discovery
  // ===========================================================================
  /**
   * Discover all active and healthy daemons.
   */
  async discoverDaemons() {
    return discoverAllDaemons(this.cacheDir);
  }
  // ===========================================================================
  // Aggregated RPC Methods
  // ===========================================================================
  /**
   * Get aggregated status from all daemons.
   */
  async getAggregatedStatus() {
    const daemons = await this.discoverDaemons();
    const statuses = [];
    let totalRuns = 0;
    let totalPending = 0;
    let totalClaimed = 0;
    for (const { registry, healthy } of daemons) {
      if (!healthy) {
        statuses.push({
          version: registry.version,
          projectId: registry.projectId,
          projectRoot: registry.projectRoot,
          pid: registry.pid,
          uptime: 0,
          runs: 0,
          pending: 0,
          claimed: 0,
          totalBatches: 0,
          healthy: false
        });
        continue;
      }
      try {
        const status = await this.rpc(registry, "getStatus", {});
        statuses.push({ ...status, healthy: true });
        totalRuns += status.runs;
        totalPending += status.pending;
        totalClaimed += status.claimed;
      } catch (error) {
        this.logger.warn(`Failed to get status from daemon ${registry.projectId}: ${error}`);
        statuses.push({
          version: registry.version,
          projectId: registry.projectId,
          projectRoot: registry.projectRoot,
          pid: registry.pid,
          uptime: 0,
          runs: 0,
          pending: 0,
          claimed: 0,
          totalBatches: 0,
          healthy: false
        });
      }
    }
    return { daemons: statuses, totalRuns, totalPending, totalClaimed };
  }
  /**
   * List all runs across all daemons.
   */
  async listAllRuns() {
    const daemons = await this.discoverDaemons();
    const allRuns = [];
    for (const { registry, healthy } of daemons) {
      if (!healthy) continue;
      try {
        const result = await this.rpc(registry, "listRuns", {});
        for (const run of result.runs) {
          allRuns.push({
            ...run,
            projectId: registry.projectId,
            projectRoot: registry.projectRoot
          });
        }
      } catch (error) {
        this.logger.warn(`Failed to list runs from daemon ${registry.projectId}: ${error}`);
      }
    }
    return allRuns;
  }
  /**
   * Claim the next available batch from any daemon.
   * Searches through all daemons in order until finding one with a pending batch.
   */
  async claimNextBatch(args) {
    const daemons = await this.discoverDaemons();
    for (const { registry, healthy } of daemons) {
      if (!healthy) continue;
      try {
        const result = await this.rpc(registry, "claimNextBatch", {
          adapterId: this.adapterId,
          runId: args.runId,
          leaseMs: args.leaseMs
        });
        if (result) {
          return {
            ...result,
            projectId: registry.projectId,
            projectRoot: registry.projectRoot
          };
        }
      } catch (error) {
        this.logger.warn(`Failed to claim batch from daemon ${registry.projectId}: ${error}`);
      }
    }
    return null;
  }
  /**
   * Provide mock data for a batch.
   * Automatically routes to the correct daemon based on batchId.
   */
  async provideBatch(args) {
    const parts = args.batchId.split(":");
    if (parts.length < 2) {
      return { ok: false, message: `Invalid batchId format: ${args.batchId}` };
    }
    const daemons = await this.discoverDaemons();
    for (const { registry, healthy } of daemons) {
      if (!healthy) continue;
      try {
        const result = await this.rpc(registry, "provideBatch", {
          adapterId: this.adapterId,
          batchId: args.batchId,
          claimToken: args.claimToken,
          mocks: args.mocks
        });
        return result;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        if (msg.includes("not found") || msg.includes("Not found")) {
          continue;
        }
        return { ok: false, message: msg };
      }
    }
    return { ok: false, message: `Batch not found: ${args.batchId}` };
  }
  /**
   * Release a batch.
   */
  async releaseBatch(args) {
    const daemons = await this.discoverDaemons();
    for (const { registry, healthy } of daemons) {
      if (!healthy) continue;
      try {
        const result = await this.rpc(registry, "releaseBatch", {
          adapterId: this.adapterId,
          batchId: args.batchId,
          claimToken: args.claimToken,
          reason: args.reason
        });
        return result;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        if (msg.includes("not found") || msg.includes("Not found")) {
          continue;
        }
        return { ok: false, message: msg };
      }
    }
    return { ok: false, message: `Batch not found: ${args.batchId}` };
  }
  /**
   * Get a specific batch by ID.
   */
  async getBatch(batchId) {
    const daemons = await this.discoverDaemons();
    for (const { registry, healthy } of daemons) {
      if (!healthy) continue;
      try {
        const result = await this.rpc(registry, "getBatch", { batchId });
        return result;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        if (msg.includes("not found") || msg.includes("Not found")) {
          continue;
        }
        throw error;
      }
    }
    return null;
  }
  // ===========================================================================
  // Internal RPC
  // ===========================================================================
  rpc(registry, method, params) {
    const payload = {
      jsonrpc: "2.0",
      id: crypto2.randomUUID(),
      method,
      params
    };
    return new Promise((resolve, reject) => {
      const req = http.request(
        {
          method: "POST",
          socketPath: registry.ipcPath,
          path: "/control",
          headers: {
            "content-type": "application/json",
            "x-mock-mcp-token": registry.token
          },
          timeout: 3e4
        },
        (res) => {
          let buf = "";
          res.on("data", (chunk) => buf += chunk);
          res.on("end", () => {
            try {
              const response = JSON.parse(buf);
              if (response.error) {
                reject(new Error(response.error.message));
              } else {
                resolve(response.result);
              }
            } catch (e) {
              reject(e);
            }
          });
        }
      );
      req.on("error", (err) => {
        reject(new Error(`Daemon connection failed: ${err.message}`));
      });
      req.on("timeout", () => {
        req.destroy();
        reject(new Error("Daemon request timeout"));
      });
      req.end(JSON.stringify(payload));
    });
  }
};
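
A minimal driving loop for the class above, assuming at least one daemon is registered; the mock payload is hypothetical and must cover every requestId in the claimed batch:

const client = new MultiDaemonClient();
const claim = await client.claimNextBatch({});
if (claim) {
  const outcome = await client.provideBatch({
    batchId: claim.batchId,
    claimToken: claim.claimToken,
    mocks: claim.requests.map((r) => ({ requestId: r.requestId, data: { ok: true } }))
  });
  console.log(outcome.ok ? "mocks delivered" : outcome.message);
}
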

// src/adapter/adapter.ts
var TOOLS = [
  {
    name: "get_status",
    description: "Get the current status of the mock-mcp daemon, including active test runs and pending batches.",
    inputSchema: {
      type: "object",
      properties: {},
      required: []
    }
  },
  {
    name: "list_runs",
    description: "List all active test runs connected to the daemon.",
    inputSchema: {
      type: "object",
      properties: {},
      required: []
    }
  },
  {
    name: "claim_next_batch",
    description: `Claim the next pending mock batch for processing. This acquires a lease on the batch.

You MUST call this before provide_batch_mock_data. The batch will be locked for 30 seconds (configurable via leaseMs).
If you don't provide mock data within the lease time, the batch will be released for another adapter to claim.`,
    inputSchema: {
      type: "object",
      properties: {
        runId: {
          type: "string",
          description: "Optional: Filter to only claim batches from a specific test run."
        },
        leaseMs: {
          type: "number",
          description: "Optional: Lease duration in milliseconds. Default: 30000 (30 seconds)."
        }
      },
      required: []
    }
  },
  {
    name: "get_batch",
    description: "Get details of a specific batch by ID (read-only, does not claim).",
    inputSchema: {
      type: "object",
      properties: {
        batchId: {
          type: "string",
          description: "The batch ID to retrieve."
        }
      },
      required: ["batchId"]
    }
  },
  {
    name: "provide_batch_mock_data",
    description: `Provide mock response data for a claimed batch.

You MUST first call claim_next_batch to get the batchId and claimToken.
The mocks array must contain exactly one mock for each request in the batch.`,
    inputSchema: {
      type: "object",
      properties: {
        batchId: {
          type: "string",
          description: "The batch ID (from claim_next_batch)."
        },
        claimToken: {
          type: "string",
          description: "The claim token (from claim_next_batch)."
        },
        mocks: {
          type: "array",
          description: "Array of mock responses, one for each request in the batch.",
          items: {
            type: "object",
            properties: {
              requestId: {
                type: "string",
                description: "The requestId from the original request."
              },
              data: {
                description: "The mock response data (any JSON value)."
              },
              status: {
                type: "number",
                description: "Optional HTTP status code. Default: 200."
              },
              headers: {
                type: "object",
                description: "Optional response headers."
              },
              delayMs: {
                type: "number",
                description: "Optional delay before returning the mock (ms)."
              }
            },
            required: ["requestId", "data"]
          }
        }
      },
      required: ["batchId", "claimToken", "mocks"]
    }
  },
  {
    name: "release_batch",
    description: "Release a claimed batch without providing mock data. Use this if you cannot generate appropriate mocks.",
    inputSchema: {
      type: "object",
      properties: {
        batchId: {
          type: "string",
          description: "The batch ID to release."
        },
        claimToken: {
          type: "string",
          description: "The claim token."
        },
        reason: {
          type: "string",
          description: "Optional reason for releasing."
        }
      },
      required: ["batchId", "claimToken"]
    }
  }
];
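
Together these tools define a claim-then-provide protocol: an MCP client calls claim_next_batch, then answers every request in the batch via provide_batch_mock_data, or backs out with release_batch before the lease expires. A sample argument object for provide_batch_mock_data; all identifiers are hypothetical:

const args = {
  batchId: "run-1:batch-1",  // returned by claim_next_batch
  claimToken: "tok-3f2a91",  // returned by claim_next_batch
  mocks: [
    { requestId: "req-1", data: { userId: 42 }, status: 200 }
  ]
};
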
async function runAdapter(opts = {}) {
  const logger = opts.logger ?? console;
  const version = opts.version ?? "0.5.0";
  logger.error("\u{1F50D} Initializing mock-mcp adapter (multi-daemon mode)...");
  const multiDaemon = new MultiDaemonClient({ logger });
  const daemons = await multiDaemon.discoverDaemons();
  if (daemons.length > 0) {
    logger.error(`\u2705 Found ${daemons.length} active daemon(s):`);
    for (const d of daemons) {
      const status = d.healthy ? "healthy" : "unhealthy";
      logger.error(` - ${d.registry.projectId}: ${d.registry.projectRoot} (${status})`);
    }
  } else {
    logger.error("\u2139\uFE0F No active daemons found. Waiting for test processes to start...");
  }
  const server = new Server(
    {
      name: "mock-mcp-adapter",
      version
    },
    {
      capabilities: { tools: {} }
    }
  );
  server.setRequestHandler(ListToolsRequestSchema, async () => ({
    tools: [...TOOLS]
  }));
  server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const { name, arguments: args } = request.params;
    try {
      switch (name) {
        case "get_status": {
          const result = await multiDaemon.getAggregatedStatus();
          return buildToolResponse(formatAggregatedStatus(result));
        }
        case "list_runs": {
          const result = await multiDaemon.listAllRuns();
          return buildToolResponse(formatExtendedRuns(result));
        }
        case "claim_next_batch": {
          const result = await multiDaemon.claimNextBatch({
            runId: args?.runId,
            leaseMs: args?.leaseMs
          });
          return buildToolResponse(formatClaimResult(result));
        }
        case "get_batch": {
          if (!args?.batchId) {
            throw new Error("batchId is required");
          }
          const result = await multiDaemon.getBatch(args.batchId);
          if (!result) {
            throw new Error(`Batch not found: ${args.batchId}`);
          }
          return buildToolResponse(formatBatch(result));
        }
        case "provide_batch_mock_data": {
          if (!args?.batchId || !args?.claimToken || !args?.mocks) {
            throw new Error("batchId, claimToken, and mocks are required");
          }
          const result = await multiDaemon.provideBatch({
            batchId: args.batchId,
            claimToken: args.claimToken,
            mocks: args.mocks
          });
          return buildToolResponse(formatProvideResult(result));
        }
        case "release_batch": {
          if (!args?.batchId || !args?.claimToken) {
            throw new Error("batchId and claimToken are required");
          }
          const result = await multiDaemon.releaseBatch({
            batchId: args.batchId,
            claimToken: args.claimToken,
            reason: args?.reason
          });
          return buildToolResponse(JSON.stringify(result, null, 2));
        }
        default:
          throw new Error(`Unknown tool: ${name}`);
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      logger.error(`Tool error (${name}): ${message}`);
      return buildToolResponse(`Error: ${message}`, true);
    }
  });
  const transport = new StdioServerTransport();
  await server.connect(transport);
  logger.error("\u2705 MCP adapter ready (stdio transport)");
}
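
runAdapter is the stdio entry point an MCP host would launch. A minimal launcher sketch; the import specifier is an assumption based on the dist/adapter build output, not something this diff confirms:

import { runAdapter } from "mock-mcp/adapter"; // assumed subpath export

runAdapter({ version: "0.5.1" }).catch((err) => {
  console.error(err);
  process.exit(1);
});
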
function buildToolResponse(text, isError = false) {
  return {
    content: [{ type: "text", text }],
    isError
  };
}
function formatAggregatedStatus(status) {
  if (status.daemons.length === 0) {
    return `# Mock MCP Status

No active daemons found. Start a test with \`MOCK_MCP=1\` to begin.
`;
  }
  const lines = [
    "# Mock MCP Status\n",
    "## Summary",
    `- **Active Daemons**: ${status.daemons.filter((d) => d.healthy).length}`,
    `- **Total Active Runs**: ${status.totalRuns}`,
    `- **Total Pending Batches**: ${status.totalPending}`,
    `- **Total Claimed Batches**: ${status.totalClaimed}`,
    "",
    "## Daemons\n"
  ];
  for (const daemon of status.daemons) {
    const healthIcon = daemon.healthy ? "\u2705" : "\u274C";
    lines.push(`### ${healthIcon} ${daemon.projectRoot}`);
    lines.push(`- **Project ID**: ${daemon.projectId}`);
    lines.push(`- **Version**: ${daemon.version}`);
    lines.push(`- **PID**: ${daemon.pid}`);
    if (daemon.healthy) {
      lines.push(`- **Uptime**: ${Math.round(daemon.uptime / 1e3)}s`);
      lines.push(`- **Runs**: ${daemon.runs}`);
      lines.push(`- **Pending**: ${daemon.pending}`);
      lines.push(`- **Claimed**: ${daemon.claimed}`);
    } else {
      lines.push(`- **Status**: Not responding`);
    }
    lines.push("");
  }
  return lines.join("\n");
}
function formatExtendedRuns(runs) {
  if (runs.length === 0) {
    return "No active test runs.\n\nStart a test with `MOCK_MCP=1` to begin.";
  }
  const lines = ["# Active Test Runs\n"];
  const byProject = /* @__PURE__ */ new Map();
  for (const run of runs) {
    const key = run.projectRoot;
    if (!byProject.has(key)) {
      byProject.set(key, []);
    }
    byProject.get(key).push(run);
  }
  for (const [projectRoot, projectRuns] of byProject) {
    lines.push(`## Project: ${projectRoot}
`);
    for (const run of projectRuns) {
      lines.push(`### Run: ${run.runId}`);
      lines.push(`- **PID**: ${run.pid}`);
      lines.push(`- **CWD**: ${run.cwd}`);
      lines.push(`- **Started**: ${run.startedAt}`);
      lines.push(`- **Pending Batches**: ${run.pendingBatches}`);
      if (run.testMeta) {
        if (run.testMeta.testFile) {
          lines.push(`- **Test File**: ${run.testMeta.testFile}`);
        }
        if (run.testMeta.testName) {
          lines.push(`- **Test Name**: ${run.testMeta.testName}`);
        }
      }
      lines.push("");
    }
  }
  return lines.join("\n");
}
function formatClaimResult(result) {
  if (!result) {
    return "No pending batches available to claim.\n\nMake sure a test is running with `MOCK_MCP=1` and has pending mock requests.";
  }
  const lines = [
    "# Batch Claimed Successfully\n",
    `**Batch ID**: \`${result.batchId}\``,
    `**Claim Token**: \`${result.claimToken}\``,
    `**Run ID**: ${result.runId}`,
    `**Project**: ${result.projectRoot}`,
    `**Lease Until**: ${new Date(result.leaseUntil).toISOString()}`,
    "",
    "## Requests\n"
  ];
  for (const req of result.requests) {
    lines.push(`### ${req.method} ${req.endpoint}`);
    lines.push(`- **Request ID**: \`${req.requestId}\``);
    if (req.body !== void 0) {
      lines.push(`- **Body**: \`\`\`json
${JSON.stringify(req.body, null, 2)}
\`\`\``);
    }
    if (req.headers) {
      lines.push(`- **Headers**: ${JSON.stringify(req.headers)}`);
    }
    if (req.metadata) {
      lines.push(`- **Metadata**: ${JSON.stringify(req.metadata)}`);
    }
    lines.push("");
  }
  lines.push("---");
  lines.push("**Next step**: Call `provide_batch_mock_data` with the batch ID, claim token, and mock data for each request.");
  return lines.join("\n");
}
function formatBatch(result) {
  const lines = [
    `# Batch: ${result.batchId}
`,
    `**Status**: ${result.status}`,
    `**Run ID**: ${result.runId}`,
    `**Created**: ${new Date(result.createdAt).toISOString()}`
  ];
  if (result.claim) {
    lines.push(`**Claimed by**: ${result.claim.adapterId}`);
    lines.push(`**Lease until**: ${new Date(result.claim.leaseUntil).toISOString()}`);
  }
  lines.push("", "## Requests\n");
  for (const req of result.requests) {
    lines.push(`### ${req.method} ${req.endpoint}`);
    lines.push(`- **Request ID**: \`${req.requestId}\``);
    if (req.body !== void 0) {
      lines.push(`- **Body**: \`\`\`json
${JSON.stringify(req.body, null, 2)}
\`\`\``);
    }
    lines.push("");
  }
  return lines.join("\n");
}
function formatProvideResult(result) {
  if (result.ok) {
    return `\u2705 ${result.message ?? "Mock data provided successfully."}`;
  }
  return `\u274C Failed to provide mock data: ${result.message}`;
}
var DaemonClient = class {
  constructor(ipcPath, token, adapterId) {
    this.ipcPath = ipcPath;
    this.token = token;
    this.adapterId = adapterId;
  }
  // ===========================================================================
  // RPC Methods
  // ===========================================================================
  async getStatus() {
    return this.rpc("getStatus", {});
  }
  async listRuns() {
    return this.rpc("listRuns", {});
  }
  async claimNextBatch(args) {
    return this.rpc("claimNextBatch", {
      adapterId: this.adapterId,
      runId: args.runId,
      leaseMs: args.leaseMs
    });
  }
  async provideBatch(args) {
    return this.rpc("provideBatch", {
      adapterId: this.adapterId,
      batchId: args.batchId,
      claimToken: args.claimToken,
      mocks: args.mocks
    });
  }
  async releaseBatch(args) {
    return this.rpc("releaseBatch", {
      adapterId: this.adapterId,
      batchId: args.batchId,
      claimToken: args.claimToken,
      reason: args.reason
    });
  }
  async getBatch(batchId) {
    return this.rpc("getBatch", { batchId });
  }
  // ===========================================================================
  // Internal
  // ===========================================================================
  rpc(method, params) {
    const payload = {
      jsonrpc: "2.0",
      id: crypto2.randomUUID(),
      method,
      params
    };
    return new Promise((resolve, reject) => {
      const req = http.request(
        {
          method: "POST",
          socketPath: this.ipcPath,
          path: "/control",
          headers: {
            "content-type": "application/json",
            "x-mock-mcp-token": this.token
          },
          timeout: 3e4
        },
        (res) => {
          let buf = "";
          res.on("data", (chunk) => buf += chunk);
          res.on("end", () => {
            try {
              const response = JSON.parse(buf);
              if (response.error) {
                reject(new Error(response.error.message));
              } else {
                resolve(response.result);
              }
            } catch (e) {
              reject(e);
            }
          });
        }
      );
      req.on("error", (err) => {
        reject(new Error(`Daemon connection failed: ${err.message}`));
      });
      req.on("timeout", () => {
        req.destroy();
        reject(new Error("Daemon request timeout"));
      });
      req.end(JSON.stringify(payload));
    });
  }
};
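
Unlike MultiDaemonClient, DaemonClient pins a single daemon: the caller supplies the ipcPath and token that the daemon's registry advertises, plus a stable adapter ID. A sketch with hypothetical registry values:

const daemon = new DaemonClient("/tmp/mock-mcp/a1b2c3.sock", "tok-3f2a91", crypto2.randomUUID());
const status = await daemon.getStatus();
console.log(`${status.runs} run(s), ${status.pending} pending batch(es)`);
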

export { DaemonClient, MultiDaemonClient, runAdapter };