mock-mcp 0.3.0 → 0.5.0
This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/README.md +217 -128
- package/dist/adapter/index.cjs +712 -0
- package/dist/adapter/index.d.cts +55 -0
- package/dist/adapter/index.d.ts +55 -0
- package/dist/adapter/index.js +672 -0
- package/dist/client/connect.cjs +913 -0
- package/dist/client/connect.d.cts +211 -0
- package/dist/client/connect.d.ts +209 -6
- package/dist/client/connect.js +867 -10
- package/dist/client/index.cjs +914 -0
- package/dist/client/index.d.cts +4 -0
- package/dist/client/index.d.ts +4 -2
- package/dist/client/index.js +873 -2
- package/dist/daemon/index.cjs +667 -0
- package/dist/daemon/index.d.cts +62 -0
- package/dist/daemon/index.d.ts +62 -0
- package/dist/daemon/index.js +628 -0
- package/dist/discovery-Dc2LdF8q.d.cts +105 -0
- package/dist/discovery-Dc2LdF8q.d.ts +105 -0
- package/dist/index.cjs +2238 -0
- package/dist/index.d.cts +472 -0
- package/dist/index.d.ts +472 -10
- package/dist/index.js +2185 -53
- package/dist/protocol-CiwaQFOt.d.ts +239 -0
- package/dist/protocol-xZu-wb0n.d.cts +239 -0
- package/dist/shared/index.cjs +386 -0
- package/dist/shared/index.d.cts +4 -0
- package/dist/shared/index.d.ts +4 -0
- package/dist/shared/index.js +310 -0
- package/dist/types-BKREdsyr.d.cts +32 -0
- package/dist/types-BKREdsyr.d.ts +32 -0
- package/package.json +44 -4
- package/dist/client/batch-mock-collector.d.ts +0 -87
- package/dist/client/batch-mock-collector.js +0 -223
- package/dist/client/util.d.ts +0 -1
- package/dist/client/util.js +0 -3
- package/dist/connect.cjs +0 -299
- package/dist/connect.d.cts +0 -95
- package/dist/server/index.d.ts +0 -1
- package/dist/server/index.js +0 -1
- package/dist/server/test-mock-mcp-server.d.ts +0 -73
- package/dist/server/test-mock-mcp-server.js +0 -392
- package/dist/types.d.ts +0 -42
- package/dist/types.js +0 -2
package/dist/adapter/index.cjs
@@ -0,0 +1,712 @@
+'use strict';
+
+var index_js = require('@modelcontextprotocol/sdk/server/index.js');
+var stdio_js = require('@modelcontextprotocol/sdk/server/stdio.js');
+var types_js = require('@modelcontextprotocol/sdk/types.js');
+var crypto2 = require('crypto');
+var http = require('http');
+var fs = require('fs/promises');
+var fssync = require('fs');
+var os = require('os');
+var path = require('path');
+var child_process = require('child_process');
+var url = require('url');
+var module$1 = require('module');
+
+function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
+
+var crypto2__default = /*#__PURE__*/_interopDefault(crypto2);
+var http__default = /*#__PURE__*/_interopDefault(http);
+var fs__default = /*#__PURE__*/_interopDefault(fs);
+var fssync__default = /*#__PURE__*/_interopDefault(fssync);
+var os__default = /*#__PURE__*/_interopDefault(os);
+var path__default = /*#__PURE__*/_interopDefault(path);
+
+var __importMetaUrl = (function() {
+  if (typeof document !== 'undefined') {
+    return document.currentScript && document.currentScript.src || new URL('main.js', document.baseURI).href;
+  }
+  // Node.js CJS context
+  // When this bundle is re-bundled by another tool (e.g., esbuild, webpack),
+  // __filename may not be defined or may not be a valid file path.
+  // We need to handle these cases gracefully.
+  try {
+    if (typeof __filename !== 'undefined' && __filename) {
+      var url = require('url');
+      var path = require('path');
+      // Check if __filename looks like a valid file path
+      if (path.isAbsolute(__filename) || __filename.startsWith('./') || __filename.startsWith('../')) {
+        return url.pathToFileURL(__filename).href;
+      }
+    }
+  } catch (e) {
+    // Fallback if pathToFileURL fails
+  }
+  // Fallback: use process.cwd() as the base URL
+  // This is not perfect but allows the code to continue working
+  try {
+    var url = require('url');
+    return url.pathToFileURL(require('path').join(process.cwd(), 'index.cjs')).href;
+  } catch (e) {
+    return 'file:///unknown';
+  }
+})();
+var DaemonClient = class {
+  constructor(ipcPath, token, adapterId) {
+    this.ipcPath = ipcPath;
+    this.token = token;
+    this.adapterId = adapterId;
+  }
+  // ===========================================================================
+  // RPC Methods
+  // ===========================================================================
+  async getStatus() {
+    return this.rpc("getStatus", {});
+  }
+  async listRuns() {
+    return this.rpc("listRuns", {});
+  }
+  async claimNextBatch(args) {
+    return this.rpc("claimNextBatch", {
+      adapterId: this.adapterId,
+      runId: args.runId,
+      leaseMs: args.leaseMs
+    });
+  }
+  async provideBatch(args) {
+    return this.rpc("provideBatch", {
+      adapterId: this.adapterId,
+      batchId: args.batchId,
+      claimToken: args.claimToken,
+      mocks: args.mocks
+    });
+  }
+  async releaseBatch(args) {
+    return this.rpc("releaseBatch", {
+      adapterId: this.adapterId,
+      batchId: args.batchId,
+      claimToken: args.claimToken,
+      reason: args.reason
+    });
+  }
+  async getBatch(batchId) {
+    return this.rpc("getBatch", { batchId });
+  }
+  // ===========================================================================
+  // Internal
+  // ===========================================================================
+  rpc(method, params) {
+    const payload = {
+      jsonrpc: "2.0",
+      id: crypto2__default.default.randomUUID(),
+      method,
+      params
+    };
+    return new Promise((resolve, reject) => {
+      const req = http__default.default.request(
+        {
+          method: "POST",
+          socketPath: this.ipcPath,
+          path: "/control",
+          headers: {
+            "content-type": "application/json",
+            "x-mock-mcp-token": this.token
+          },
+          timeout: 3e4
+        },
+        (res) => {
+          let buf = "";
+          res.on("data", (chunk) => buf += chunk);
+          res.on("end", () => {
+            try {
+              const response = JSON.parse(buf);
+              if (response.error) {
+                reject(new Error(response.error.message));
+              } else {
+                resolve(response.result);
+              }
+            } catch (e) {
+              reject(e);
+            }
+          });
+        }
+      );
+      req.on("error", (err) => {
+        reject(new Error(`Daemon connection failed: ${err.message}`));
+      });
+      req.on("timeout", () => {
+        req.destroy();
+        reject(new Error("Daemon request timeout"));
+      });
+      req.end(JSON.stringify(payload));
+    });
+  }
+};
+function debugLog(_msg) {
+}
+var __curDirname = (() => {
+  try {
+    const metaUrl = __importMetaUrl;
+    if (metaUrl && typeof metaUrl === "string" && metaUrl.startsWith("file://")) {
+      return path__default.default.dirname(url.fileURLToPath(metaUrl));
+    }
+  } catch {
+  }
+  return process.cwd();
+})();
+function resolveProjectRoot(startDir = process.cwd()) {
+  let current = path__default.default.resolve(startDir);
+  const root = path__default.default.parse(current).root;
+  while (current !== root) {
+    const gitPath = path__default.default.join(current, ".git");
+    try {
+      const stat = fssync__default.default.statSync(gitPath);
+      if (stat.isDirectory() || stat.isFile()) {
+        return current;
+      }
+    } catch {
+    }
+    const pkgPath = path__default.default.join(current, "package.json");
+    try {
+      fssync__default.default.accessSync(pkgPath, fssync__default.default.constants.F_OK);
+      return current;
+    } catch {
+    }
+    current = path__default.default.dirname(current);
+  }
+  return path__default.default.resolve(startDir);
+}
+function computeProjectId(projectRoot) {
+  const real = fssync__default.default.realpathSync(projectRoot);
+  return crypto2__default.default.createHash("sha256").update(real).digest("hex").slice(0, 16);
+}
+function getCacheDir(override) {
+  if (override) {
+    return override;
+  }
+  const envCacheDir = process.env.MOCK_MCP_CACHE_DIR;
+  if (envCacheDir) {
+    return envCacheDir;
+  }
+  const xdg = process.env.XDG_CACHE_HOME;
+  if (xdg) {
+    return xdg;
+  }
+  if (process.platform === "win32" && process.env.LOCALAPPDATA) {
+    return process.env.LOCALAPPDATA;
+  }
+  const home = os__default.default.homedir();
+  if (home) {
+    return path__default.default.join(home, ".cache");
+  }
+  return os__default.default.tmpdir();
+}
+function getPaths(projectId, cacheDir) {
+  const base = path__default.default.join(getCacheDir(cacheDir), "mock-mcp");
+  const registryPath = path__default.default.join(base, `${projectId}.json`);
+  const lockPath = path__default.default.join(base, `${projectId}.lock`);
+  const ipcPath = process.platform === "win32" ? `\\\\.\\pipe\\mock-mcp-${projectId}` : path__default.default.join(base, `${projectId}.sock`);
+  return { base, registryPath, lockPath, ipcPath };
+}
+async function readRegistry(registryPath) {
+  try {
+    const txt = await fs__default.default.readFile(registryPath, "utf-8");
+    return JSON.parse(txt);
+  } catch (error) {
+    debugLog(`readRegistry error for ${registryPath}: ${error instanceof Error ? error.message : String(error)}`);
+    return null;
+  }
+}
+async function healthCheck(ipcPath, timeoutMs = 2e3) {
+  return new Promise((resolve) => {
+    const req = http__default.default.request(
+      {
+        method: "GET",
+        socketPath: ipcPath,
+        path: "/health",
+        timeout: timeoutMs
+      },
+      (res) => {
+        resolve(res.statusCode === 200);
+      }
+    );
+    req.on("error", () => resolve(false));
+    req.on("timeout", () => {
+      req.destroy();
+      resolve(false);
+    });
+    req.end();
+  });
+}
+async function tryAcquireLock(lockPath) {
+  try {
+    const fh = await fs__default.default.open(lockPath, "wx");
+    await fh.write(`${process.pid}
+`);
+    return fh;
+  } catch {
+    return null;
+  }
+}
+async function releaseLock(lockPath, fh) {
+  await fh.close();
+  await fs__default.default.rm(lockPath).catch(() => {
+  });
+}
+function randomToken() {
+  return crypto2__default.default.randomBytes(24).toString("base64url");
+}
+function getDaemonEntryPath() {
+  try {
+    const cwdRequire = module$1.createRequire(url.pathToFileURL(path__default.default.join(process.cwd(), "index.js")).href);
+    const resolved = cwdRequire.resolve("mock-mcp");
+    const distDir = path__default.default.dirname(resolved);
+    const daemonEntry = path__default.default.join(distDir, "index.js");
+    if (fssync__default.default.existsSync(daemonEntry)) {
+      return daemonEntry;
+    }
+  } catch {
+  }
+  try {
+    const packageRoot = resolveProjectRoot(__curDirname);
+    const distPath = path__default.default.join(packageRoot, "dist", "index.js");
+    if (fssync__default.default.existsSync(distPath)) {
+      return distPath;
+    }
+  } catch {
+  }
+  if (process.argv[1]) {
+    return process.argv[1];
+  }
+  return path__default.default.join(process.cwd(), "dist", "index.js");
+}
+async function ensureDaemonRunning(opts = {}) {
+  const projectRoot = opts.projectRoot ?? resolveProjectRoot();
+  const projectId = computeProjectId(projectRoot);
+  const { base, registryPath, lockPath, ipcPath } = getPaths(
+    projectId,
+    opts.cacheDir
+  );
+  const timeoutMs = opts.timeoutMs ?? 1e4;
+  await fs__default.default.mkdir(base, { recursive: true });
+  const existing = await readRegistry(registryPath);
+  debugLog(`Registry read result: ${existing ? "Found (PID " + existing.pid + ")" : "Null"}`);
+  if (existing) {
+    let healthy = false;
+    for (let i = 0; i < 3; i++) {
+      debugLog(`Checking health attempt ${i + 1}/3 on ${existing.ipcPath}`);
+      healthy = await healthCheck(existing.ipcPath);
+      if (healthy) break;
+      await new Promise((r) => setTimeout(r, 200));
+    }
+    if (healthy) {
+      return existing;
+    }
+  }
+  if (process.platform !== "win32") {
+    try {
+      await fs__default.default.rm(ipcPath);
+    } catch {
+    }
+  }
+  const lock = await tryAcquireLock(lockPath);
+  if (lock) {
+    try {
+      const recheckReg = await readRegistry(registryPath);
+      if (recheckReg && await healthCheck(recheckReg.ipcPath)) {
+        return recheckReg;
+      }
+      const token = randomToken();
+      const daemonEntry = getDaemonEntryPath();
+      const child = child_process.spawn(
+        process.execPath,
+        [daemonEntry, "daemon", "--project-root", projectRoot, "--token", token],
+        {
+          detached: true,
+          stdio: ["ignore", "pipe", "pipe"],
+          env: {
+            ...process.env,
+            MOCK_MCP_CACHE_DIR: opts.cacheDir ?? ""
+          }
+        }
+      );
+      let daemonStderr = "";
+      let daemonStdout = "";
+      child.stdout?.on("data", (data) => {
+        const str = data.toString();
+        debugLog(`Daemon stdout: ${str}`);
+      });
+      child.stderr?.on("data", (data) => {
+        daemonStderr += data.toString();
+        debugLog(`Daemon stderr: ${data.toString()}`);
+      });
+      child.on("error", (err) => {
+        console.error(`[mock-mcp] Daemon spawn error: ${err.message}`);
+      });
+      child.on("exit", (code, signal) => {
+        if (code !== null && code !== 0) {
+          console.error(`[mock-mcp] Daemon exited with code: ${code}`);
+          if (daemonStderr) {
+            console.error(`[mock-mcp] Daemon stderr: ${daemonStderr.slice(0, 500)}`);
+          }
+        } else if (signal) {
+          console.error(`[mock-mcp] Daemon killed by signal: ${signal}`);
+        }
+      });
+      child.unref();
+      const deadline2 = Date.now() + timeoutMs;
+      while (Date.now() < deadline2) {
+        const reg = await readRegistry(registryPath);
+        if (reg && await healthCheck(reg.ipcPath)) {
+          return reg;
+        }
+        await sleep(50);
+      }
+      console.error("[mock-mcp] Daemon failed to start within timeout");
+      if (daemonStderr) {
+        console.error(`[mock-mcp] Daemon stderr:
+${daemonStderr}`);
+      }
+      throw new Error(
+        `Daemon start timeout after ${timeoutMs}ms. Check logs for details.`
+      );
+    } finally {
+      await releaseLock(lockPath, lock);
+    }
+  }
+  const deadline = Date.now() + timeoutMs;
+  while (Date.now() < deadline) {
+    const reg = await readRegistry(registryPath);
+    if (reg && await healthCheck(reg.ipcPath)) {
+      return reg;
+    }
+    await sleep(50);
+  }
+  throw new Error(
+    `Waiting for daemon timed out after ${timeoutMs}ms. Another process may have failed to start it.`
+  );
+}
+function sleep(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+// src/adapter/adapter.ts
+var TOOLS = [
+  {
+    name: "get_status",
+    description: "Get the current status of the mock-mcp daemon, including active test runs and pending batches.",
+    inputSchema: {
+      type: "object",
+      properties: {},
+      required: []
+    }
+  },
+  {
+    name: "list_runs",
+    description: "List all active test runs connected to the daemon.",
+    inputSchema: {
+      type: "object",
+      properties: {},
+      required: []
+    }
+  },
+  {
+    name: "claim_next_batch",
+    description: `Claim the next pending mock batch for processing. This acquires a lease on the batch.
+
+You MUST call this before provide_batch_mock_data. The batch will be locked for 30 seconds (configurable via leaseMs).
+If you don't provide mock data within the lease time, the batch will be released for another adapter to claim.`,
+    inputSchema: {
+      type: "object",
+      properties: {
+        runId: {
+          type: "string",
+          description: "Optional: Filter to only claim batches from a specific test run."
+        },
+        leaseMs: {
+          type: "number",
+          description: "Optional: Lease duration in milliseconds. Default: 30000 (30 seconds)."
+        }
+      },
+      required: []
+    }
+  },
+  {
+    name: "get_batch",
+    description: "Get details of a specific batch by ID (read-only, does not claim).",
+    inputSchema: {
+      type: "object",
+      properties: {
+        batchId: {
+          type: "string",
+          description: "The batch ID to retrieve."
+        }
+      },
+      required: ["batchId"]
+    }
+  },
+  {
+    name: "provide_batch_mock_data",
+    description: `Provide mock response data for a claimed batch.
+
+You MUST first call claim_next_batch to get the batchId and claimToken.
+The mocks array must contain exactly one mock for each request in the batch.`,
+    inputSchema: {
+      type: "object",
+      properties: {
+        batchId: {
+          type: "string",
+          description: "The batch ID (from claim_next_batch)."
+        },
+        claimToken: {
+          type: "string",
+          description: "The claim token (from claim_next_batch)."
+        },
+        mocks: {
+          type: "array",
+          description: "Array of mock responses, one for each request in the batch.",
+          items: {
+            type: "object",
+            properties: {
+              requestId: {
+                type: "string",
+                description: "The requestId from the original request."
+              },
+              data: {
+                description: "The mock response data (any JSON value)."
+              },
+              status: {
+                type: "number",
+                description: "Optional HTTP status code. Default: 200."
+              },
+              headers: {
+                type: "object",
+                description: "Optional response headers."
+              },
+              delayMs: {
+                type: "number",
+                description: "Optional delay before returning the mock (ms)."
+              }
+            },
+            required: ["requestId", "data"]
+          }
+        }
+      },
+      required: ["batchId", "claimToken", "mocks"]
+    }
+  },
+  {
+    name: "release_batch",
+    description: "Release a claimed batch without providing mock data. Use this if you cannot generate appropriate mocks.",
+    inputSchema: {
+      type: "object",
+      properties: {
+        batchId: {
+          type: "string",
+          description: "The batch ID to release."
+        },
+        claimToken: {
+          type: "string",
+          description: "The claim token."
+        },
+        reason: {
+          type: "string",
+          description: "Optional reason for releasing."
+        }
+      },
+      required: ["batchId", "claimToken"]
+    }
+  }
+];
+async function runAdapter(opts = {}) {
+  const logger = opts.logger ?? console;
+  const version = opts.version ?? "0.4.0";
+  logger.error("\u{1F50D} Connecting to mock-mcp daemon...");
+  const registry = await ensureDaemonRunning();
+  const adapterId = crypto2__default.default.randomUUID();
+  const daemon = new DaemonClient(registry.ipcPath, registry.token, adapterId);
+  logger.error(`\u2705 Connected to daemon (project: ${registry.projectId})`);
+  const server = new index_js.Server(
+    {
+      name: "mock-mcp-adapter",
+      version
+    },
+    {
+      capabilities: { tools: {} }
+    }
+  );
+  server.setRequestHandler(types_js.ListToolsRequestSchema, async () => ({
+    tools: [...TOOLS]
+  }));
+  server.setRequestHandler(types_js.CallToolRequestSchema, async (request) => {
+    const { name, arguments: args } = request.params;
+    try {
+      switch (name) {
+        case "get_status": {
+          const result = await daemon.getStatus();
+          return buildToolResponse(formatStatus(result));
+        }
+        case "list_runs": {
+          const result = await daemon.listRuns();
+          return buildToolResponse(formatRuns(result));
+        }
+        case "claim_next_batch": {
+          const result = await daemon.claimNextBatch({
+            runId: args?.runId,
+            leaseMs: args?.leaseMs
+          });
+          return buildToolResponse(formatClaimResult(result));
+        }
+        case "get_batch": {
+          if (!args?.batchId) {
+            throw new Error("batchId is required");
+          }
+          const result = await daemon.getBatch(args.batchId);
+          return buildToolResponse(formatBatch(result));
+        }
+        case "provide_batch_mock_data": {
+          if (!args?.batchId || !args?.claimToken || !args?.mocks) {
+            throw new Error("batchId, claimToken, and mocks are required");
+          }
+          const result = await daemon.provideBatch({
+            batchId: args.batchId,
+            claimToken: args.claimToken,
+            mocks: args.mocks
+          });
+          return buildToolResponse(formatProvideResult(result));
+        }
+        case "release_batch": {
+          if (!args?.batchId || !args?.claimToken) {
+            throw new Error("batchId and claimToken are required");
+          }
+          const result = await daemon.releaseBatch({
+            batchId: args.batchId,
+            claimToken: args.claimToken,
+            reason: args?.reason
+          });
+          return buildToolResponse(JSON.stringify(result, null, 2));
+        }
+        default:
+          throw new Error(`Unknown tool: ${name}`);
+      }
+    } catch (error) {
+      const message = error instanceof Error ? error.message : String(error);
+      logger.error(`Tool error (${name}):`, message);
+      return buildToolResponse(`Error: ${message}`, true);
+    }
+  });
+  const transport = new stdio_js.StdioServerTransport();
+  await server.connect(transport);
+  logger.error("\u2705 MCP adapter ready (stdio transport)");
+}
+function buildToolResponse(text, isError = false) {
+  return {
+    content: [{ type: "text", text }],
+    isError
+  };
+}
+function formatStatus(status) {
+  return `# Mock MCP Daemon Status
+
+- **Version**: ${status.version}
+- **Project ID**: ${status.projectId}
+- **Project Root**: ${status.projectRoot}
+- **PID**: ${status.pid}
+- **Uptime**: ${Math.round(status.uptime / 1e3)}s
+
+## Batches
+- **Pending**: ${status.pending}
+- **Claimed**: ${status.claimed}
+- **Active Runs**: ${status.runs}
+`;
+}
+function formatRuns(result) {
+  if (result.runs.length === 0) {
+    return "No active test runs.";
+  }
+  const lines = ["# Active Test Runs\n"];
+  for (const run of result.runs) {
+    lines.push(`## Run: ${run.runId}`);
+    lines.push(`- **PID**: ${run.pid}`);
+    lines.push(`- **CWD**: ${run.cwd}`);
+    lines.push(`- **Started**: ${run.startedAt}`);
+    lines.push(`- **Pending Batches**: ${run.pendingBatches}`);
+    if (run.testMeta) {
+      if (run.testMeta.testFile) {
+        lines.push(`- **Test File**: ${run.testMeta.testFile}`);
+      }
+      if (run.testMeta.testName) {
+        lines.push(`- **Test Name**: ${run.testMeta.testName}`);
+      }
+    }
+    lines.push("");
+  }
+  return lines.join("\n");
+}
+function formatClaimResult(result) {
+  if (!result) {
+    return "No pending batches available to claim.";
+  }
+  const lines = [
+    "# Batch Claimed Successfully\n",
+    `**Batch ID**: \`${result.batchId}\``,
+    `**Claim Token**: \`${result.claimToken}\``,
+    `**Run ID**: ${result.runId}`,
+    `**Lease Until**: ${new Date(result.leaseUntil).toISOString()}`,
+    "",
+    "## Requests\n"
+  ];
+  for (const req of result.requests) {
+    lines.push(`### ${req.method} ${req.endpoint}`);
+    lines.push(`- **Request ID**: \`${req.requestId}\``);
+    if (req.body !== void 0) {
+      lines.push(`- **Body**: \`\`\`json
+${JSON.stringify(req.body, null, 2)}
+\`\`\``);
+    }
+    if (req.headers) {
+      lines.push(`- **Headers**: ${JSON.stringify(req.headers)}`);
+    }
+    if (req.metadata) {
+      lines.push(`- **Metadata**: ${JSON.stringify(req.metadata)}`);
+    }
+    lines.push("");
+  }
+  lines.push("---");
+  lines.push("**Next step**: Call `provide_batch_mock_data` with the batch ID, claim token, and mock data for each request.");
+  return lines.join("\n");
+}
+function formatBatch(result) {
+  const lines = [
+    `# Batch: ${result.batchId}
+`,
+    `**Status**: ${result.status}`,
+    `**Run ID**: ${result.runId}`,
+    `**Created**: ${new Date(result.createdAt).toISOString()}`
+  ];
+  if (result.claim) {
+    lines.push(`**Claimed by**: ${result.claim.adapterId}`);
+    lines.push(`**Lease until**: ${new Date(result.claim.leaseUntil).toISOString()}`);
+  }
+  lines.push("", "## Requests\n");
+  for (const req of result.requests) {
+    lines.push(`### ${req.method} ${req.endpoint}`);
+    lines.push(`- **Request ID**: \`${req.requestId}\``);
+    if (req.body !== void 0) {
+      lines.push(`- **Body**: \`\`\`json
+${JSON.stringify(req.body, null, 2)}
+\`\`\``);
+    }
+    lines.push("");
+  }
+  return lines.join("\n");
+}
+function formatProvideResult(result) {
+  if (result.ok) {
+    return `\u2705 ${result.message ?? "Mock data provided successfully."}`;
+  }
+  return `\u274C Failed to provide mock data: ${result.message}`;
+}
+
+exports.DaemonClient = DaemonClient;
+exports.runAdapter = runAdapter;
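
For orientation, the flow this new adapter bundle wraps is: send JSON-RPC calls over the project's IPC socket to the daemon's /control endpoint, claim a pending batch, then return one mock per request in that batch. Below is a minimal illustrative sketch of that round trip, not part of the published file; the ipcPath, token, and adapterId values are assumed to come from the per-project registry JSON that ensureDaemonRunning() reads, and the claim-result fields used here are those visible in the code above.

'use strict';

const http = require('http');

// One JSON-RPC call to the daemon's control endpoint, mirroring DaemonClient.rpc().
function controlRpc(ipcPath, token, method, params) {
  const payload = { jsonrpc: '2.0', id: Date.now().toString(), method, params };
  return new Promise((resolve, reject) => {
    const req = http.request(
      {
        method: 'POST',
        socketPath: ipcPath,          // Unix socket (named pipe on Windows), see getPaths()
        path: '/control',             // same endpoint DaemonClient.rpc() posts to
        headers: {
          'content-type': 'application/json',
          'x-mock-mcp-token': token   // token the daemon was started with
        }
      },
      (res) => {
        let buf = '';
        res.on('data', (chunk) => (buf += chunk));
        res.on('end', () => {
          try {
            const response = JSON.parse(buf);
            if (response.error) {
              reject(new Error(response.error.message));
            } else {
              resolve(response.result);
            }
          } catch (e) {
            reject(e);
          }
        });
      }
    );
    req.on('error', reject);
    req.end(JSON.stringify(payload));
  });
}

// Claim the next pending batch and answer every request in it with a canned payload.
async function answerNextBatch(ipcPath, token, adapterId) {
  const claim = await controlRpc(ipcPath, token, 'claimNextBatch', { adapterId });
  if (!claim) return; // nothing pending to claim
  const mocks = claim.requests.map((r) => ({ requestId: r.requestId, data: { ok: true } }));
  await controlRpc(ipcPath, token, 'provideBatch', {
    adapterId,
    batchId: claim.batchId,
    claimToken: claim.claimToken,
    mocks
  });
}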