mock-mcp 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter/index.cjs +465 -302
- package/dist/adapter/index.d.cts +93 -6
- package/dist/adapter/index.d.ts +93 -6
- package/dist/adapter/index.js +465 -302
- package/dist/client/connect.cjs +83 -5
- package/dist/client/connect.d.cts +10 -3
- package/dist/client/connect.d.ts +10 -3
- package/dist/client/connect.js +82 -4
- package/dist/client/index.cjs +83 -5
- package/dist/client/index.d.cts +1 -2
- package/dist/client/index.d.ts +1 -2
- package/dist/client/index.js +82 -4
- package/dist/daemon/index.cjs +55 -5
- package/dist/daemon/index.js +54 -4
- package/dist/index.cjs +559 -89
- package/dist/index.d.cts +137 -7
- package/dist/index.d.ts +137 -7
- package/dist/index.js +556 -90
- package/dist/shared/index.cjs +121 -1
- package/dist/shared/index.d.cts +240 -3
- package/dist/shared/index.d.ts +240 -3
- package/dist/shared/index.js +115 -2
- package/dist/{discovery-Dc2LdF8q.d.cts → types-bEGXLBF0.d.cts} +86 -1
- package/dist/{discovery-Dc2LdF8q.d.ts → types-bEGXLBF0.d.ts} +86 -1
- package/package.json +2 -1
- package/dist/protocol-CiwaQFOt.d.ts +0 -239
- package/dist/protocol-xZu-wb0n.d.cts +0 -239
- package/dist/types-BKREdsyr.d.cts +0 -32
- package/dist/types-BKREdsyr.d.ts +0 -32
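The headline change in 0.5.1 is multi-daemon support in the adapter: instead of binding to a single project daemon, the bundled code in the diff below discovers every active daemon through a shared `active-daemons.json` index and fans JSON-RPC calls out over each daemon's `/control` IPC socket. As a rough orientation before reading the diff, here is a usage sketch of the new `MultiDaemonClient`; the `mock-mcp/adapter` import path and the exact result shapes are assumptions inferred from this bundle and its type declarations, not documented API.

    // Hypothetical usage sketch only -- inferred from the bundled adapter code in
    // the diff below; the subpath import and result fields are assumptions.
    const { MultiDaemonClient } = require('mock-mcp/adapter');

    async function main() {
      const client = new MultiDaemonClient({ logger: console });

      // Every daemon listed in the shared active-daemons.json index, each paired
      // with its registry and the outcome of a health check on its IPC socket.
      const daemons = await client.discoverDaemons();
      console.log(`found ${daemons.length} daemon(s)`);

      // Claim the next pending batch from whichever healthy daemon has one.
      const claim = await client.claimNextBatch({ leaseMs: 30000 });
      if (claim) {
        // Hand the mock data back to the daemon that owns the batch.
        await client.provideBatch({
          batchId: claim.batchId,
          claimToken: claim.claimToken,
          mocks: [], // one mock per request in the claimed batch
        });
      }
    }

    main().catch(console.error);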
package/dist/adapter/index.cjs
CHANGED

@@ -3,22 +3,21 @@
  var index_js = require('@modelcontextprotocol/sdk/server/index.js');
  var stdio_js = require('@modelcontextprotocol/sdk/server/stdio.js');
  var types_js = require('@modelcontextprotocol/sdk/types.js');
- var crypto2 = require('crypto');
  var http = require('http');
+ var crypto2 = require('crypto');
  var fs = require('fs/promises');
-
+ require('fs');
  var os = require('os');
  var path = require('path');
-
+ require('child_process');
  var url = require('url');
-
+ require('module');

  function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }

- var crypto2__default = /*#__PURE__*/_interopDefault(crypto2);
  var http__default = /*#__PURE__*/_interopDefault(http);
+ var crypto2__default = /*#__PURE__*/_interopDefault(crypto2);
  var fs__default = /*#__PURE__*/_interopDefault(fs);
- var fssync__default = /*#__PURE__*/_interopDefault(fssync);
  var os__default = /*#__PURE__*/_interopDefault(os);
  var path__default = /*#__PURE__*/_interopDefault(path);

@@ -51,100 +50,9 @@ var __importMetaUrl = (function() {
  return 'file:///unknown';
  }
  })();
- var DaemonClient = class {
- constructor(ipcPath, token, adapterId) {
- this.ipcPath = ipcPath;
- this.token = token;
- this.adapterId = adapterId;
- }
- // ===========================================================================
- // RPC Methods
- // ===========================================================================
- async getStatus() {
- return this.rpc("getStatus", {});
- }
- async listRuns() {
- return this.rpc("listRuns", {});
- }
- async claimNextBatch(args) {
- return this.rpc("claimNextBatch", {
- adapterId: this.adapterId,
- runId: args.runId,
- leaseMs: args.leaseMs
- });
- }
- async provideBatch(args) {
- return this.rpc("provideBatch", {
- adapterId: this.adapterId,
- batchId: args.batchId,
- claimToken: args.claimToken,
- mocks: args.mocks
- });
- }
- async releaseBatch(args) {
- return this.rpc("releaseBatch", {
- adapterId: this.adapterId,
- batchId: args.batchId,
- claimToken: args.claimToken,
- reason: args.reason
- });
- }
- async getBatch(batchId) {
- return this.rpc("getBatch", { batchId });
- }
- // ===========================================================================
- // Internal
- // ===========================================================================
- rpc(method, params) {
- const payload = {
- jsonrpc: "2.0",
- id: crypto2__default.default.randomUUID(),
- method,
- params
- };
- return new Promise((resolve, reject) => {
- const req = http__default.default.request(
- {
- method: "POST",
- socketPath: this.ipcPath,
- path: "/control",
- headers: {
- "content-type": "application/json",
- "x-mock-mcp-token": this.token
- },
- timeout: 3e4
- },
- (res) => {
- let buf = "";
- res.on("data", (chunk) => buf += chunk);
- res.on("end", () => {
- try {
- const response = JSON.parse(buf);
- if (response.error) {
- reject(new Error(response.error.message));
- } else {
- resolve(response.result);
- }
- } catch (e) {
- reject(e);
- }
- });
- }
- );
- req.on("error", (err) => {
- reject(new Error(`Daemon connection failed: ${err.message}`));
- });
- req.on("timeout", () => {
- req.destroy();
- reject(new Error("Daemon request timeout"));
- });
- req.end(JSON.stringify(payload));
- });
- }
- };
  function debugLog(_msg) {
  }
-
+ (() => {
  try {
  const metaUrl = __importMetaUrl;
  if (metaUrl && typeof metaUrl === "string" && metaUrl.startsWith("file://")) {

@@ -154,32 +62,6 @@ var __curDirname = (() => {
  }
  return process.cwd();
  })();
- function resolveProjectRoot(startDir = process.cwd()) {
- let current = path__default.default.resolve(startDir);
- const root = path__default.default.parse(current).root;
- while (current !== root) {
- const gitPath = path__default.default.join(current, ".git");
- try {
- const stat = fssync__default.default.statSync(gitPath);
- if (stat.isDirectory() || stat.isFile()) {
- return current;
- }
- } catch {
- }
- const pkgPath = path__default.default.join(current, "package.json");
- try {
- fssync__default.default.accessSync(pkgPath, fssync__default.default.constants.F_OK);
- return current;
- } catch {
- }
- current = path__default.default.dirname(current);
- }
- return path__default.default.resolve(startDir);
- }
- function computeProjectId(projectRoot) {
- const real = fssync__default.default.realpathSync(projectRoot);
- return crypto2__default.default.createHash("sha256").update(real).digest("hex").slice(0, 16);
- }
  function getCacheDir(override) {
  if (override) {
  return override;

@@ -201,13 +83,6 @@ function getCacheDir(override) {
  }
  return os__default.default.tmpdir();
  }
- function getPaths(projectId, cacheDir) {
- const base = path__default.default.join(getCacheDir(cacheDir), "mock-mcp");
- const registryPath = path__default.default.join(base, `${projectId}.json`);
- const lockPath = path__default.default.join(base, `${projectId}.lock`);
- const ipcPath = process.platform === "win32" ? `\\\\.\\pipe\\mock-mcp-${projectId}` : path__default.default.join(base, `${projectId}.sock`);
- return { base, registryPath, lockPath, ipcPath };
- }
  async function readRegistry(registryPath) {
  try {
  const txt = await fs__default.default.readFile(registryPath, "utf-8");

@@ -238,157 +113,310 @@ async function healthCheck(ipcPath, timeoutMs = 2e3) {
  req.end();
  });
  }
-
+ function getGlobalIndexPath(cacheDir) {
+ const base = path__default.default.join(getCacheDir(cacheDir), "mock-mcp");
+ return path__default.default.join(base, "active-daemons.json");
+ }
+ async function readGlobalIndex(cacheDir) {
+ const indexPath = getGlobalIndexPath(cacheDir);
  try {
- const
-
- `);
- return fh;
+ const txt = await fs__default.default.readFile(indexPath, "utf-8");
+ return JSON.parse(txt);
  } catch {
- return
+ return { daemons: [], updatedAt: (/* @__PURE__ */ new Date()).toISOString() };
  }
  }
- async function
-
-
+ async function writeGlobalIndex(index, cacheDir) {
+ const indexPath = getGlobalIndexPath(cacheDir);
+ const base = path__default.default.dirname(indexPath);
+ await fs__default.default.mkdir(base, { recursive: true });
+ await fs__default.default.writeFile(indexPath, JSON.stringify(index, null, 2), {
+ encoding: "utf-8",
+ mode: 384
  });
  }
- function
-
+ async function cleanupGlobalIndex(cacheDir) {
+ const index = await readGlobalIndex(cacheDir);
+ const validDaemons = [];
+ for (const entry of index.daemons) {
+ try {
+ process.kill(entry.pid, 0);
+ const healthy = await healthCheck(entry.ipcPath, 1e3);
+ if (healthy) {
+ validDaemons.push(entry);
+ } else {
+ debugLog(`Removing unhealthy daemon ${entry.projectId} (pid ${entry.pid})`);
+ }
+ } catch {
+ debugLog(`Removing dead daemon ${entry.projectId} (pid ${entry.pid})`);
+ }
+ }
+ if (validDaemons.length !== index.daemons.length) {
+ index.daemons = validDaemons;
+ index.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
+ await writeGlobalIndex(index, cacheDir);
+ }
  }
- function
-
-
-
-
- const
- if (
-
+ async function discoverAllDaemons(cacheDir) {
+ await cleanupGlobalIndex(cacheDir);
+ const index = await readGlobalIndex(cacheDir);
+ const results = [];
+ for (const entry of index.daemons) {
+ const registry = await readRegistry(entry.registryPath);
+ if (registry) {
+ const healthy = await healthCheck(entry.ipcPath, 2e3);
+ results.push({ registry, healthy });
  }
- } catch {
  }
-
-
-
-
-
+ return results;
+ }
+
+ // src/adapter/multi-daemon-client.ts
+ var MultiDaemonClient = class {
+ logger;
+ cacheDir;
+ adapterId;
+ constructor(opts = {}) {
+ this.logger = opts.logger ?? console;
+ this.cacheDir = opts.cacheDir;
+ this.adapterId = crypto2__default.default.randomUUID();
+ }
+ // ===========================================================================
+ // Discovery
+ // ===========================================================================
+ /**
+ * Discover all active and healthy daemons.
+ */
+ async discoverDaemons() {
+ return discoverAllDaemons(this.cacheDir);
+ }
+ // ===========================================================================
+ // Aggregated RPC Methods
+ // ===========================================================================
+ /**
+ * Get aggregated status from all daemons.
+ */
+ async getAggregatedStatus() {
+ const daemons = await this.discoverDaemons();
+ const statuses = [];
+ let totalRuns = 0;
+ let totalPending = 0;
+ let totalClaimed = 0;
+ for (const { registry, healthy } of daemons) {
+ if (!healthy) {
+ statuses.push({
+ version: registry.version,
+ projectId: registry.projectId,
+ projectRoot: registry.projectRoot,
+ pid: registry.pid,
+ uptime: 0,
+ runs: 0,
+ pending: 0,
+ claimed: 0,
+ totalBatches: 0,
+ healthy: false
+ });
+ continue;
+ }
+ try {
+ const status = await this.rpc(registry, "getStatus", {});
+ statuses.push({ ...status, healthy: true });
+ totalRuns += status.runs;
+ totalPending += status.pending;
+ totalClaimed += status.claimed;
+ } catch (error) {
+ this.logger.warn(`Failed to get status from daemon ${registry.projectId}: ${error}`);
+ statuses.push({
+ version: registry.version,
+ projectId: registry.projectId,
+ projectRoot: registry.projectRoot,
+ pid: registry.pid,
+ uptime: 0,
+ runs: 0,
+ pending: 0,
+ claimed: 0,
+ totalBatches: 0,
+ healthy: false
+ });
+ }
  }
-
+ return { daemons: statuses, totalRuns, totalPending, totalClaimed };
+ }
+ /**
+ * List all runs across all daemons.
+ */
+ async listAllRuns() {
+ const daemons = await this.discoverDaemons();
+ const allRuns = [];
+ for (const { registry, healthy } of daemons) {
+ if (!healthy) continue;
+ try {
+ const result = await this.rpc(registry, "listRuns", {});
+ for (const run of result.runs) {
+ allRuns.push({
+ ...run,
+ projectId: registry.projectId,
+ projectRoot: registry.projectRoot
+ });
+ }
+ } catch (error) {
+ this.logger.warn(`Failed to list runs from daemon ${registry.projectId}: ${error}`);
+ }
+ }
+ return allRuns;
  }
-
-
+ /**
+ * Claim the next available batch from any daemon.
+ * Searches through all daemons in order until finding one with a pending batch.
+ */
+ async claimNextBatch(args) {
+ const daemons = await this.discoverDaemons();
+ for (const { registry, healthy } of daemons) {
+ if (!healthy) continue;
+ try {
+ const result = await this.rpc(registry, "claimNextBatch", {
+ adapterId: this.adapterId,
+ runId: args.runId,
+ leaseMs: args.leaseMs
+ });
+ if (result) {
+ return {
+ ...result,
+ projectId: registry.projectId,
+ projectRoot: registry.projectRoot
+ };
+ }
+ } catch (error) {
+ this.logger.warn(`Failed to claim batch from daemon ${registry.projectId}: ${error}`);
+ }
+ }
+ return null;
  }
-
-
-
-
-
-
-
-
- );
- const timeoutMs = opts.timeoutMs ?? 1e4;
- await fs__default.default.mkdir(base, { recursive: true });
- const existing = await readRegistry(registryPath);
- debugLog(`Registry read result: ${existing ? "Found (PID " + existing.pid + ")" : "Null"}`);
- if (existing) {
- let healthy = false;
- for (let i = 0; i < 3; i++) {
- debugLog(`Checking health attempt ${i + 1}/3 on ${existing.ipcPath}`);
- healthy = await healthCheck(existing.ipcPath);
- if (healthy) break;
- await new Promise((r) => setTimeout(r, 200));
+ /**
+ * Provide mock data for a batch.
+ * Automatically routes to the correct daemon based on batchId.
+ */
+ async provideBatch(args) {
+ const parts = args.batchId.split(":");
+ if (parts.length < 2) {
+ return { ok: false, message: `Invalid batchId format: ${args.batchId}` };
  }
-
-
+ const daemons = await this.discoverDaemons();
+ for (const { registry, healthy } of daemons) {
+ if (!healthy) continue;
+ try {
+ const result = await this.rpc(registry, "provideBatch", {
+ adapterId: this.adapterId,
+ batchId: args.batchId,
+ claimToken: args.claimToken,
+ mocks: args.mocks
+ });
+ return result;
+ } catch (error) {
+ const msg = error instanceof Error ? error.message : String(error);
+ if (msg.includes("not found") || msg.includes("Not found")) {
+ continue;
+ }
+ return { ok: false, message: msg };
+ }
  }
+ return { ok: false, message: `Batch not found: ${args.batchId}` };
  }
-
-
-
-
+ /**
+ * Release a batch.
+ */
+ async releaseBatch(args) {
+ const daemons = await this.discoverDaemons();
+ for (const { registry, healthy } of daemons) {
+ if (!healthy) continue;
+ try {
+ const result = await this.rpc(registry, "releaseBatch", {
+ adapterId: this.adapterId,
+ batchId: args.batchId,
+ claimToken: args.claimToken,
+ reason: args.reason
+ });
+ return result;
+ } catch (error) {
+ const msg = error instanceof Error ? error.message : String(error);
+ if (msg.includes("not found") || msg.includes("Not found")) {
+ continue;
+ }
+ return { ok: false, message: msg };
+ }
  }
+ return { ok: false, message: `Batch not found: ${args.batchId}` };
  }
-
-
-
-
-
-
+ /**
+ * Get a specific batch by ID.
+ */
+ async getBatch(batchId) {
+ const daemons = await this.discoverDaemons();
+ for (const { registry, healthy } of daemons) {
+ if (!healthy) continue;
+ try {
+ const result = await this.rpc(registry, "getBatch", { batchId });
+ return result;
+ } catch (error) {
+ const msg = error instanceof Error ? error.message : String(error);
+ if (msg.includes("not found") || msg.includes("Not found")) {
+ continue;
+ }
+ throw error;
  }
-
-
-
-
-
+ }
+ return null;
+ }
+ // ===========================================================================
+ // Internal RPC
+ // ===========================================================================
+ rpc(registry, method, params) {
+ const payload = {
+ jsonrpc: "2.0",
+ id: crypto2__default.default.randomUUID(),
+ method,
+ params
+ };
+ return new Promise((resolve, reject) => {
+ const req = http__default.default.request(
  {
-
-
-
-
-
-
+ method: "POST",
+ socketPath: registry.ipcPath,
+ path: "/control",
+ headers: {
+ "content-type": "application/json",
+ "x-mock-mcp-token": registry.token
+ },
+ timeout: 3e4
+ },
+ (res) => {
+ let buf = "";
+ res.on("data", (chunk) => buf += chunk);
+ res.on("end", () => {
+ try {
+ const response = JSON.parse(buf);
+ if (response.error) {
+ reject(new Error(response.error.message));
+ } else {
+ resolve(response.result);
+ }
+ } catch (e) {
+ reject(e);
+ }
+ });
  }
  );
-
-
- child.stdout?.on("data", (data) => {
- const str = data.toString();
- debugLog(`Daemon stdout: ${str}`);
- });
- child.stderr?.on("data", (data) => {
- daemonStderr += data.toString();
- debugLog(`Daemon stderr: ${data.toString()}`);
- });
- child.on("error", (err) => {
- console.error(`[mock-mcp] Daemon spawn error: ${err.message}`);
+ req.on("error", (err) => {
+ reject(new Error(`Daemon connection failed: ${err.message}`));
  });
-
-
-
- if (daemonStderr) {
- console.error(`[mock-mcp] Daemon stderr: ${daemonStderr.slice(0, 500)}`);
- }
- } else if (signal) {
- console.error(`[mock-mcp] Daemon killed by signal: ${signal}`);
- }
+ req.on("timeout", () => {
+ req.destroy();
+ reject(new Error("Daemon request timeout"));
  });
-
-
- while (Date.now() < deadline2) {
- const reg = await readRegistry(registryPath);
- if (reg && await healthCheck(reg.ipcPath)) {
- return reg;
- }
- await sleep(50);
- }
- console.error("[mock-mcp] Daemon failed to start within timeout");
- if (daemonStderr) {
- console.error(`[mock-mcp] Daemon stderr:
- ${daemonStderr}`);
- }
- throw new Error(
- `Daemon start timeout after ${timeoutMs}ms. Check logs for details.`
- );
- } finally {
- await releaseLock(lockPath, lock);
- }
- }
- const deadline = Date.now() + timeoutMs;
- while (Date.now() < deadline) {
- const reg = await readRegistry(registryPath);
- if (reg && await healthCheck(reg.ipcPath)) {
- return reg;
- }
- await sleep(50);
+ req.end(JSON.stringify(payload));
+ });
  }
-
- `Waiting for daemon timed out after ${timeoutMs}ms. Another process may have failed to start it.`
- );
- }
- function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
- }
+ };

  // src/adapter/adapter.ts
  var TOOLS = [

@@ -520,12 +548,19 @@ The mocks array must contain exactly one mock for each request in the batch.`,
  ];
  async function runAdapter(opts = {}) {
  const logger = opts.logger ?? console;
- const version = opts.version ?? "0.
- logger.error("\u{1F50D}
- const
- const
-
-
+ const version = opts.version ?? "0.5.0";
+ logger.error("\u{1F50D} Initializing mock-mcp adapter (multi-daemon mode)...");
+ const multiDaemon = new MultiDaemonClient({ logger });
+ const daemons = await multiDaemon.discoverDaemons();
+ if (daemons.length > 0) {
+ logger.error(`\u2705 Found ${daemons.length} active daemon(s):`);
+ for (const d of daemons) {
+ const status = d.healthy ? "healthy" : "unhealthy";
+ logger.error(` - ${d.registry.projectId}: ${d.registry.projectRoot} (${status})`);
+ }
+ } else {
+ logger.error("\u2139\uFE0F No active daemons found. Waiting for test processes to start...");
+ }
  const server = new index_js.Server(
  {
  name: "mock-mcp-adapter",

@@ -543,15 +578,15 @@ async function runAdapter(opts = {}) {
  try {
  switch (name) {
  case "get_status": {
- const result = await
- return buildToolResponse(
+ const result = await multiDaemon.getAggregatedStatus();
+ return buildToolResponse(formatAggregatedStatus(result));
  }
  case "list_runs": {
- const result = await
- return buildToolResponse(
+ const result = await multiDaemon.listAllRuns();
+ return buildToolResponse(formatExtendedRuns(result));
  }
  case "claim_next_batch": {
- const result = await
+ const result = await multiDaemon.claimNextBatch({
  runId: args?.runId,
  leaseMs: args?.leaseMs
  });

@@ -561,14 +596,17 @@ async function runAdapter(opts = {}) {
  if (!args?.batchId) {
  throw new Error("batchId is required");
  }
- const result = await
+ const result = await multiDaemon.getBatch(args.batchId);
+ if (!result) {
+ throw new Error(`Batch not found: ${args.batchId}`);
+ }
  return buildToolResponse(formatBatch(result));
  }
  case "provide_batch_mock_data": {
  if (!args?.batchId || !args?.claimToken || !args?.mocks) {
  throw new Error("batchId, claimToken, and mocks are required");
  }
- const result = await
+ const result = await multiDaemon.provideBatch({
  batchId: args.batchId,
  claimToken: args.claimToken,
  mocks: args.mocks

@@ -579,7 +617,7 @@ async function runAdapter(opts = {}) {
  if (!args?.batchId || !args?.claimToken) {
  throw new Error("batchId and claimToken are required");
  }
- const result = await
+ const result = await multiDaemon.releaseBatch({
  batchId: args.batchId,
  claimToken: args.claimToken,
  reason: args?.reason

@@ -591,7 +629,7 @@ async function runAdapter(opts = {}) {
  }
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- logger.error(`Tool error (${name})
+ logger.error(`Tool error (${name}): ${message}`);
  return buildToolResponse(`Error: ${message}`, true);
  }
  });

@@ -605,53 +643,86 @@ function buildToolResponse(text, isError = false) {
  isError
  };
  }
- function
-
-
- - **Version**: ${status.version}
- - **Project ID**: ${status.projectId}
- - **Project Root**: ${status.projectRoot}
- - **PID**: ${status.pid}
- - **Uptime**: ${Math.round(status.uptime / 1e3)}s
+ function formatAggregatedStatus(status) {
+ if (status.daemons.length === 0) {
+ return `# Mock MCP Status

-
- - **Pending**: ${status.pending}
- - **Claimed**: ${status.claimed}
- - **Active Runs**: ${status.runs}
+ No active daemons found. Start a test with \`MOCK_MCP=1\` to begin.
  `;
+ }
+ const lines = [
+ "# Mock MCP Status\n",
+ "## Summary",
+ `- **Active Daemons**: ${status.daemons.filter((d) => d.healthy).length}`,
+ `- **Total Active Runs**: ${status.totalRuns}`,
+ `- **Total Pending Batches**: ${status.totalPending}`,
+ `- **Total Claimed Batches**: ${status.totalClaimed}`,
+ "",
+ "## Daemons\n"
+ ];
+ for (const daemon of status.daemons) {
+ const healthIcon = daemon.healthy ? "\u2705" : "\u274C";
+ lines.push(`### ${healthIcon} ${daemon.projectRoot}`);
+ lines.push(`- **Project ID**: ${daemon.projectId}`);
+ lines.push(`- **Version**: ${daemon.version}`);
+ lines.push(`- **PID**: ${daemon.pid}`);
+ if (daemon.healthy) {
+ lines.push(`- **Uptime**: ${Math.round(daemon.uptime / 1e3)}s`);
+ lines.push(`- **Runs**: ${daemon.runs}`);
+ lines.push(`- **Pending**: ${daemon.pending}`);
+ lines.push(`- **Claimed**: ${daemon.claimed}`);
+ } else {
+ lines.push(`- **Status**: Not responding`);
+ }
+ lines.push("");
+ }
+ return lines.join("\n");
  }
- function
- if (
- return "No active test runs.";
+ function formatExtendedRuns(runs) {
+ if (runs.length === 0) {
+ return "No active test runs.\n\nStart a test with `MOCK_MCP=1` to begin.";
  }
  const lines = ["# Active Test Runs\n"];
-
-
-
-
-
-
-
-
-
-
-
-
+ const byProject = /* @__PURE__ */ new Map();
+ for (const run of runs) {
+ const key = run.projectRoot;
+ if (!byProject.has(key)) {
+ byProject.set(key, []);
+ }
+ byProject.get(key).push(run);
+ }
+ for (const [projectRoot, projectRuns] of byProject) {
+ lines.push(`## Project: ${projectRoot}
+ `);
+ for (const run of projectRuns) {
+ lines.push(`### Run: ${run.runId}`);
+ lines.push(`- **PID**: ${run.pid}`);
+ lines.push(`- **CWD**: ${run.cwd}`);
+ lines.push(`- **Started**: ${run.startedAt}`);
+ lines.push(`- **Pending Batches**: ${run.pendingBatches}`);
+ if (run.testMeta) {
+ if (run.testMeta.testFile) {
+ lines.push(`- **Test File**: ${run.testMeta.testFile}`);
+ }
+ if (run.testMeta.testName) {
+ lines.push(`- **Test Name**: ${run.testMeta.testName}`);
+ }
  }
+ lines.push("");
  }
- lines.push("");
  }
  return lines.join("\n");
  }
  function formatClaimResult(result) {
  if (!result) {
- return "No pending batches available to claim.";
+ return "No pending batches available to claim.\n\nMake sure a test is running with `MOCK_MCP=1` and has pending mock requests.";
  }
  const lines = [
  "# Batch Claimed Successfully\n",
  `**Batch ID**: \`${result.batchId}\``,
  `**Claim Token**: \`${result.claimToken}\``,
  `**Run ID**: ${result.runId}`,
+ `**Project**: ${result.projectRoot}`,
  `**Lease Until**: ${new Date(result.leaseUntil).toISOString()}`,
  "",
  "## Requests\n"

@@ -707,6 +778,98 @@ function formatProvideResult(result) {
  }
  return `\u274C Failed to provide mock data: ${result.message}`;
  }
+ var DaemonClient = class {
+ constructor(ipcPath, token, adapterId) {
+ this.ipcPath = ipcPath;
+ this.token = token;
+ this.adapterId = adapterId;
+ }
+ // ===========================================================================
+ // RPC Methods
+ // ===========================================================================
+ async getStatus() {
+ return this.rpc("getStatus", {});
+ }
+ async listRuns() {
+ return this.rpc("listRuns", {});
+ }
+ async claimNextBatch(args) {
+ return this.rpc("claimNextBatch", {
+ adapterId: this.adapterId,
+ runId: args.runId,
+ leaseMs: args.leaseMs
+ });
+ }
+ async provideBatch(args) {
+ return this.rpc("provideBatch", {
+ adapterId: this.adapterId,
+ batchId: args.batchId,
+ claimToken: args.claimToken,
+ mocks: args.mocks
+ });
+ }
+ async releaseBatch(args) {
+ return this.rpc("releaseBatch", {
+ adapterId: this.adapterId,
+ batchId: args.batchId,
+ claimToken: args.claimToken,
+ reason: args.reason
+ });
+ }
+ async getBatch(batchId) {
+ return this.rpc("getBatch", { batchId });
+ }
+ // ===========================================================================
+ // Internal
+ // ===========================================================================
+ rpc(method, params) {
+ const payload = {
+ jsonrpc: "2.0",
+ id: crypto2__default.default.randomUUID(),
+ method,
+ params
+ };
+ return new Promise((resolve, reject) => {
+ const req = http__default.default.request(
+ {
+ method: "POST",
+ socketPath: this.ipcPath,
+ path: "/control",
+ headers: {
+ "content-type": "application/json",
+ "x-mock-mcp-token": this.token
+ },
+ timeout: 3e4
+ },
+ (res) => {
+ let buf = "";
+ res.on("data", (chunk) => buf += chunk);
+ res.on("end", () => {
+ try {
+ const response = JSON.parse(buf);
+ if (response.error) {
+ reject(new Error(response.error.message));
+ } else {
+ resolve(response.result);
+ }
+ } catch (e) {
+ reject(e);
+ }
+ });
+ }
+ );
+ req.on("error", (err) => {
+ reject(new Error(`Daemon connection failed: ${err.message}`));
+ });
+ req.on("timeout", () => {
+ req.destroy();
+ reject(new Error("Daemon request timeout"));
+ });
+ req.end(JSON.stringify(payload));
+ });
+ }
+ };

  exports.DaemonClient = DaemonClient;
+ exports.MultiDaemonClient = MultiDaemonClient;
  exports.runAdapter = runAdapter;