@mhingston5/conduit 1.1.2 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -3
- package/dist/assets/deno-shim.ts +93 -0
- package/dist/assets/python-shim.py +21 -0
- package/dist/executors/pyodide.worker.d.ts +2 -0
- package/dist/executors/pyodide.worker.js +163 -0
- package/dist/executors/pyodide.worker.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +3250 -0
- package/dist/index.js.map +1 -0
- package/docs/ARCHITECTURE.md +1 -1
- package/package.json +6 -1
- package/src/auth.cmd.ts +95 -0
- package/src/core/config.service.ts +5 -2
- package/src/core/request.controller.ts +4 -0
- package/src/core/security.service.ts +3 -3
- package/src/gateway/auth.service.ts +19 -18
- package/src/index.ts +52 -8
- package/src/transport/socket.transport.ts +8 -3
- package/tests/auth.service.test.ts +8 -13
- package/tests/config.service.test.ts +31 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,3250 @@
|
|
|
1
|
+
|
|
10
|
+
// Suppress anything dotenv writes to stdout while loading .env: in stdio
// transport mode stdout must carry only JSON-RPC frames, so even a one-line
// banner would corrupt the protocol stream.
// FIX: restore stdout in a finally block — previously, if dotenv.config()
// threw, process.stdout.write stayed replaced with a no-op for the rest of
// the process lifetime, silently swallowing all stdout output.
var originalWrite = process.stdout.write;
process.stdout.write = () => true;
try {
  dotenv.config();
} finally {
  process.stdout.write = originalWrite;
}
|
|
14
|
+
// Per-execution sandbox resource limits.
var ResourceLimitsSchema = z.object({
  timeoutMs: z.number().default(3e4),
  memoryLimitMb: z.number().default(256),
  maxOutputBytes: z.number().default(1024 * 1024),
  // 1MB
  maxLogEntries: z.number().default(1e4)
});
// Credentials used to authenticate against an upstream server.
var UpstreamCredentialsSchema = z.object({
  type: z.enum(["oauth2", "apiKey", "bearer"]),
  // Align with AuthType
  clientId: z.string().optional(),
  clientSecret: z.string().optional(),
  tokenUrl: z.string().optional(),
  refreshToken: z.string().optional(),
  scopes: z.array(z.string()).optional(),
  apiKey: z.string().optional(),
  bearerToken: z.string().optional(),
  headerName: z.string().optional()
});
// HTTP upstream: "type" defaults to "http" so plain { id, url } entries work.
var HttpUpstreamSchema = z.object({
  id: z.string(),
  type: z.literal("http").optional().default("http"),
  url: z.string(),
  credentials: UpstreamCredentialsSchema.optional()
});
// Stdio upstream: a child process spawned with command/args/env.
var StdioUpstreamSchema = z.object({
  id: z.string(),
  type: z.literal("stdio"),
  command: z.string(),
  args: z.array(z.string()).optional(),
  env: z.record(z.string(), z.string()).optional()
});
var UpstreamInfoSchema = z.union([HttpUpstreamSchema, StdioUpstreamSchema]);
// Top-level application configuration. Every field has a default so an
// empty input object parses successfully.
var ConfigSchema = z.object({
  // Accepts a string (env var) or number; normalized to a number.
  port: z.union([z.string(), z.number()]).default("3000").transform((v) => Number(v)),
  nodeEnv: z.enum(["development", "production", "test"]).default("development"),
  logLevel: z.enum(["debug", "info", "warn", "error"]).default("info"),
  resourceLimits: ResourceLimitsSchema.default({
    timeoutMs: 3e4,
    memoryLimitMb: 256,
    maxOutputBytes: 1024 * 1024,
    maxLogEntries: 1e4
  }),
  secretRedactionPatterns: z.array(z.string()).default([
    "[A-Za-z0-9-_]{20,}"
    // Default pattern from spec
  ]),
  // SECURITY FIX: the fallback token was previously
  // Math.random().toString(36).substring(7) — a short, guessable string from
  // a non-cryptographic PRNG, unsuitable for a bearer token. Use a v4 UUID
  // from node:crypto (CSPRNG-backed) instead.
  ipcBearerToken: z.string().optional().default(() => randomUUID()),
  maxConcurrent: z.number().default(10),
  denoMaxPoolSize: z.number().default(10),
  pyodideMaxPoolSize: z.number().default(3),
  metricsUrl: z.string().default("http://127.0.0.1:9464/metrics"),
  opsPort: z.number().optional(),
  transport: z.enum(["socket", "stdio"]).default("socket"),
  upstreams: z.array(UpstreamInfoSchema).default([])
});
|
|
70
|
+
// Loads and validates application configuration.
// Precedence (lowest to highest): config file -> environment variables ->
// constructor overrides. The merged object is validated with ConfigSchema;
// invalid configuration is fatal (throws from the constructor).
var ConfigService = class {
  // Validated configuration (output of ConfigSchema.safeParse).
  config;
  constructor(overrides = {}) {
    const fileConfig = this.loadConfigFile();
    // Environment-derived settings. undefined entries are deleted below so
    // they do not shadow file-provided values during the spread merge.
    const envConfig = {
      port: process.env.PORT,
      nodeEnv: process.env.NODE_ENV,
      logLevel: process.env.LOG_LEVEL,
      metricsUrl: process.env.METRICS_URL,
      ipcBearerToken: process.env.IPC_BEARER_TOKEN,
      // --stdio CLI flag forces the stdio transport regardless of file config.
      transport: process.argv.includes("--stdio") ? "stdio" : void 0
      // upstreams: process.env.UPSTREAMS ? JSON.parse(process.env.UPSTREAMS) : undefined, // Removed per user request
    };
    Object.keys(envConfig).forEach((key) => envConfig[key] === void 0 && delete envConfig[key]);
    const mergedConfig = {
      ...fileConfig,
      ...envConfig,
      ...overrides
    };
    const result = ConfigSchema.safeParse(mergedConfig);
    if (!result.success) {
      const error = result.error.format();
      throw new Error(`Invalid configuration: ${JSON.stringify(error, null, 2)}`);
    }
    this.config = result.data;
    // Derive opsPort when not set explicitly: stdio transport gets an
    // ephemeral port (0); otherwise use main port + 1, or 0 if the main
    // port is itself ephemeral.
    if (this.config.opsPort === void 0) {
      if (this.config.transport === "stdio") {
        this.config.opsPort = 0;
      } else {
        this.config.opsPort = this.config.port === 0 ? 0 : this.config.port + 1;
      }
    }
  }
  // Returns the validated value for a single configuration key.
  get(key) {
    return this.config[key];
  }
  // Returns a shallow copy of the full configuration so callers cannot
  // mutate internal state through the returned object.
  get all() {
    return { ...this.config };
  }
  // Resolves and parses the config file. Search order: CONFIG_FILE env var,
  // then ./conduit.yaml, then ./conduit.json. Returns {} when no file is
  // found or parsing fails (logged as a warning, not fatal).
  loadConfigFile() {
    const configPath = process.env.CONFIG_FILE || (fs.existsSync(path.resolve(process.cwd(), "conduit.yaml")) ? "conduit.yaml" : fs.existsSync(path.resolve(process.cwd(), "conduit.json")) ? "conduit.json" : null);
    if (!configPath) return {};
    try {
      const fullPath = path.resolve(process.cwd(), configPath);
      let fileContent = fs.readFileSync(fullPath, "utf-8");
      // Expand ${VAR} and ${VAR:-default} placeholders from the environment
      // before parsing, so secrets can be injected into YAML/JSON configs.
      fileContent = fileContent.replace(/\$\{([a-zA-Z0-9_]+)(?::-([^}]+))?\}/g, (match, varName, defaultValue) => {
        const value = process.env[varName];
        if (value !== void 0) {
          return value;
        }
        return defaultValue !== void 0 ? defaultValue : "";
      });
      if (configPath.endsWith(".yaml") || configPath.endsWith(".yml")) {
        return yaml.load(fileContent);
      } else {
        return JSON.parse(fileContent);
      }
    } catch (error) {
      console.warn(`Failed to load config file ${configPath}:`, error);
      return {};
    }
  }
};
|
|
133
|
+
|
|
134
|
+
// src/core/logger.ts
|
|
135
|
+
import pino from "pino";
|
|
136
|
+
import { AsyncLocalStorage } from "async_hooks";
|
|
137
|
+
// AsyncLocalStorage carrying { correlationId } across async hops so every
// log line within a request is tagged via the pino mixin below.
var loggerStorage = new AsyncLocalStorage();
// Builds the application's pino logger with secret redaction, per-request
// correlation IDs, and stdio-safe output routing.
function createLogger(configService) {
  const logLevel = configService.get("logLevel");
  const redactionPatterns = configService.get("secretRedactionPatterns");
  const secretPatterns = redactionPatterns.map((p) => new RegExp(p, "g"));
  // Replaces every match of every configured pattern with "[REDACTED]".
  const redactString = (str) => {
    let result = str;
    for (const pattern of secretPatterns) {
      result = result.replace(pattern, "[REDACTED]");
    }
    return result;
  };
  return pino(
    {
      level: logLevel,
      hooks: {
        // Pattern-based redaction applied to every log call's arguments.
        // NOTE(review): the clone is shallow — only top-level string
        // properties are redacted; strings nested in sub-objects pass
        // through unredacted. Confirm whether deep redaction is required.
        logMethod(inputArgs, method) {
          const redactedArgs = inputArgs.map((arg) => {
            try {
              if (typeof arg === "string") {
                return redactString(arg);
              }
              if (typeof arg === "object" && arg !== null) {
                const clone = { ...arg };
                for (const key in clone) {
                  if (typeof clone[key] === "string") {
                    clone[key] = redactString(clone[key]);
                  }
                }
                return clone;
              }
            } catch (err) {
              // Never let redaction failures break logging.
              return "[REDACTION_ERROR]";
            }
            return arg;
          });
          return method.apply(this, redactedArgs);
        }
      },
      // Path-based redaction for well-known sensitive fields.
      redact: {
        paths: ["toolParams.*", "headers.Authorization", "headers.authorization", "params.token"],
        censor: "[REDACTED]"
      },
      // Injects the current request's correlationId (if any) into each entry.
      mixin() {
        const store = loggerStorage.getStore();
        return {
          correlationId: store?.correlationId
        };
      },
      // In stdio mode, never use pino-pretty to avoid stdout pollution
      transport: configService.get("transport") !== "stdio" && configService.get("nodeEnv") === "development" ? { target: "pino-pretty", options: { colorize: true } } : void 0
    },
    // In stdio mode, route logs to stderr (fd 2) so stdout stays protocol-only.
    configService.get("transport") === "stdio" ? pino.destination(2) : void 0
  );
}
|
|
192
|
+
|
|
193
|
+
// src/transport/socket.transport.ts
|
|
194
|
+
import net from "net";
|
|
195
|
+
import os from "os";
|
|
196
|
+
import path2 from "path";
|
|
197
|
+
|
|
198
|
+
// src/core/execution.context.ts
|
|
199
|
+
import { v4 as uuidv4 } from "uuid";
|
|
200
|
+
// Per-request execution state: a fresh correlation id, creation timestamp,
// tenant scoping and a child logger pre-tagged with both identifiers.
var ExecutionContext = class {
  correlationId;
  startTime;
  tenantId;
  logger;
  allowedTools;
  remoteAddress;
  strictValidation;
  constructor(options) {
    const { tenantId, allowedTools, remoteAddress, strictValidation, logger } = options;
    this.correlationId = uuidv4();
    this.startTime = Date.now();
    this.tenantId = tenantId;
    this.allowedTools = allowedTools;
    this.remoteAddress = remoteAddress;
    // Strict validation is opt-in; absent means false.
    this.strictValidation = strictValidation ?? false;
    // Child logger so every log line carries this request's identifiers.
    this.logger = logger.child({
      correlationId: this.correlationId,
      tenantId: this.tenantId
    });
  }
  // Milliseconds elapsed since this context was created.
  getDuration() {
    return Date.now() - this.startTime;
  }
};
|
|
224
|
+
|
|
225
|
+
// src/transport/socket.transport.ts
|
|
226
|
+
// JSON-RPC transport over a TCP port or local IPC socket / named pipe.
// Frames are newline-delimited JSON; one response line per request line.
var SocketTransport = class {
  server;
  logger;
  requestController;
  concurrencyService;
  constructor(logger, requestController, concurrencyService) {
    this.logger = logger;
    this.requestController = requestController;
    this.concurrencyService = concurrencyService;
    this.server = net.createServer((socket) => {
      this.handleConnection(socket);
    });
    this.server.on("error", (err) => {
      this.logger.error({ err }, "Server error");
    });
  }
  // Binds to an IPC socket path (options.path) or TCP port (options.port)
  // and resolves with the bound address string.
  async listen(options) {
    return new Promise((resolve, reject) => {
      if (options.path) {
        const socketPath = this.formatSocketPath(options.path);
        this.logger.info({ socketPath }, "Binding to IPC socket");
        // NOTE(review): empty branch — likely stale-socket cleanup
        // (unlinking an existing unix socket file) was removed or never
        // implemented. Binding will fail with EADDRINUSE if the file exists.
        if (os.platform() !== "win32" && path2.isAbsolute(socketPath)) {
        }
        this.server.listen(socketPath, () => {
          this.resolveAddress(resolve);
        });
      } else if (options.port !== void 0) {
        this.logger.info({ port: options.port, host: options.host }, "Binding to TCP port");
        this.server.listen(options.port, options.host || "127.0.0.1", () => {
          this.resolveAddress(resolve);
        });
      } else {
        reject(new Error("Invalid transport configuration: neither path nor port provided"));
        return;
      }
      // NOTE(review): adds a new "error" listener per listen() call; calling
      // listen() more than once would accumulate listeners on the server.
      this.server.on("error", reject);
    });
  }
  // Resolves the listen() promise with a printable bound-address string.
  resolveAddress(resolve) {
    const address = this.server.address();
    const addressStr = typeof address === "string" ? address : `${address?.address}:${address?.port}`;
    this.logger.info({ address: addressStr }, "Transport server listening");
    resolve(addressStr);
  }
  // Normalizes a socket path per platform: Windows gets the \\.\pipe\ prefix;
  // elsewhere relative paths are placed under the OS temp directory.
  formatSocketPath(inputPath) {
    if (os.platform() === "win32") {
      if (!inputPath.startsWith("\\\\.\\pipe\\")) {
        return `\\\\.\\pipe\\${inputPath}`;
      }
      return inputPath;
    } else {
      return path2.isAbsolute(inputPath) ? inputPath : path2.join(os.tmpdir(), inputPath);
    }
  }
  // Per-connection loop: accumulates data into a line buffer, parses each
  // newline-terminated JSON-RPC frame, and writes one response frame back.
  // The socket is paused while a batch of complete lines is processed so
  // requests on one connection are handled in arrival order.
  handleConnection(socket) {
    const remoteAddress = socket.remoteAddress || "pipe";
    this.logger.debug({ remoteAddress }, "New connection established");
    socket.setEncoding("utf8");
    let buffer = "";
    // Hard cap against unbounded memory growth from a client that never
    // sends a newline; exceeding it kills the connection.
    const MAX_BUFFER_SIZE = 10 * 1024 * 1024;
    socket.on("data", async (chunk) => {
      buffer += chunk;
      if (buffer.length > MAX_BUFFER_SIZE) {
        this.logger.error({ remoteAddress }, "Connection exceeded max buffer size, closing");
        socket.destroy();
        return;
      }
      socket.pause();
      try {
        let pos;
        while ((pos = buffer.indexOf("\n")) >= 0) {
          const line = buffer.substring(0, pos).trim();
          buffer = buffer.substring(pos + 1);
          if (!line) continue;
          let request;
          try {
            request = JSON.parse(line);
          } catch (err) {
            this.logger.error({ err, line }, "Failed to parse JSON-RPC request");
            // JSON-RPC parse error (-32700); id is unknown, so null.
            const errorResponse = {
              jsonrpc: "2.0",
              id: null,
              error: {
                code: -32700,
                message: "Parse error"
              }
            };
            socket.write(JSON.stringify(errorResponse) + "\n");
            continue;
          }
          const context = new ExecutionContext({
            logger: this.logger,
            remoteAddress
          });
          // Run inside AsyncLocalStorage so all logs carry the correlationId.
          await loggerStorage.run({ correlationId: context.correlationId }, async () => {
            try {
              const response = await this.concurrencyService.run(
                () => this.requestController.handleRequest(request, context)
              );
              socket.write(JSON.stringify(response) + "\n");
            } catch (err) {
              // Map queue saturation to "Server busy"; everything else is an
              // opaque internal error (details stay in the logs).
              if (err.name === "QueueFullError") {
                socket.write(JSON.stringify({
                  jsonrpc: "2.0",
                  id: request.id,
                  error: {
                    code: -32e3 /* ServerBusy */,
                    message: "Server busy"
                  }
                }) + "\n");
              } else {
                this.logger.error({ err, requestId: request.id }, "Request handling failed");
                socket.write(JSON.stringify({
                  jsonrpc: "2.0",
                  id: request.id,
                  error: {
                    code: -32603 /* InternalError */,
                    message: "Internal server error"
                  }
                }) + "\n");
              }
            }
          });
        }
      } catch (err) {
        this.logger.error({ err }, "Unexpected error in socket data handler");
        socket.destroy();
      } finally {
        // Always resume so a processing error cannot wedge the connection.
        socket.resume();
      }
    });
    socket.on("close", () => {
      this.logger.debug({ remoteAddress }, "Connection closed");
    });
    socket.on("error", (err) => {
      this.logger.error({ err, remoteAddress }, "Socket error");
    });
  }
  // Stops accepting connections; resolves immediately if not listening.
  async close() {
    return new Promise((resolve) => {
      if (this.server.listening) {
        this.server.close(() => {
          this.logger.info("Transport server closed");
          resolve();
        });
      } else {
        resolve();
      }
    });
  }
};
|
|
377
|
+
|
|
378
|
+
// src/transport/stdio.transport.ts
|
|
379
|
+
// JSON-RPC transport over the process's stdin/stdout (newline-delimited).
// Used when the gateway is launched in stdio mode; stdout must carry only
// protocol frames (logs are routed to stderr elsewhere).
var StdioTransport = class {
  logger;
  requestController;
  concurrencyService;
  // Accumulates partial stdin chunks until a full newline-terminated frame arrives.
  buffer = "";
  constructor(logger, requestController, concurrencyService) {
    this.logger = logger;
    this.requestController = requestController;
    this.concurrencyService = concurrencyService;
  }
  // Begins reading newline-delimited JSON-RPC requests from stdin.
  async start() {
    this.logger.info("Starting Stdio transport");
    process.stdin.setEncoding("utf8");
    process.stdin.on("data", this.handleData.bind(this));
    process.stdin.on("end", () => {
      this.logger.info("Stdin closed");
    });
  }
  // Splits buffered input into complete lines and hands each to processLine.
  // NOTE(review): processLine is intentionally not awaited, so requests can
  // run concurrently and responses may interleave out of arrival order —
  // confirm this ordering is acceptable for clients of this transport.
  handleData(chunk) {
    this.buffer += chunk;
    let pos;
    while ((pos = this.buffer.indexOf("\n")) >= 0) {
      const line = this.buffer.substring(0, pos).trim();
      this.buffer = this.buffer.substring(pos + 1);
      if (!line) continue;
      this.processLine(line);
    }
  }
  // Parses one JSON-RPC frame and runs it through the controller pipeline,
  // translating failures into JSON-RPC error responses.
  async processLine(line) {
    let request;
    try {
      request = JSON.parse(line);
    } catch (err) {
      this.logger.error({ err, line }, "Failed to parse JSON-RPC request");
      // JSON-RPC parse error (-32700); id is unknown, so null.
      const errorResponse = {
        jsonrpc: "2.0",
        id: null,
        error: {
          code: -32700,
          message: "Parse error"
        }
      };
      this.sendResponse(errorResponse);
      return;
    }
    const context = new ExecutionContext({
      logger: this.logger,
      remoteAddress: "stdio"
    });
    // Run inside AsyncLocalStorage so all logs carry the correlationId.
    await loggerStorage.run({ correlationId: context.correlationId }, async () => {
      try {
        const response = await this.concurrencyService.run(
          () => this.requestController.handleRequest(request, context)
        );
        // A null response means "notification" — no frame is emitted.
        if (response !== null) {
          this.sendResponse(response);
        }
      } catch (err) {
        // Map queue saturation to "Server busy"; everything else is an
        // opaque internal error (details stay in the logs).
        if (err.name === "QueueFullError") {
          this.sendResponse({
            jsonrpc: "2.0",
            id: request.id,
            error: {
              code: -32e3 /* ServerBusy */,
              message: "Server busy"
            }
          });
        } else {
          this.logger.error({ err, requestId: request.id }, "Request handling failed");
          this.sendResponse({
            jsonrpc: "2.0",
            id: request.id,
            error: {
              code: -32603 /* InternalError */,
              message: "Internal server error"
            }
          });
        }
      }
    });
  }
  // Writes one newline-terminated JSON-RPC frame to stdout.
  sendResponse(response) {
    process.stdout.write(JSON.stringify(response) + "\n");
  }
  // Detaches all stdin listeners; nothing else to tear down.
  async close() {
    process.stdin.removeAllListeners();
    return Promise.resolve();
  }
};
|
|
468
|
+
|
|
469
|
+
// src/core/ops.server.ts
|
|
470
|
+
import Fastify from "fastify";
|
|
471
|
+
import axios from "axios";
|
|
472
|
+
// HTTP operations server (Fastify) exposing /health and /metrics,
// separate from the JSON-RPC transport.
var OpsServer = class {
  fastify = Fastify();
  logger;
  config;
  gatewayService;
  requestController;
  constructor(logger, config, gatewayService, requestController) {
    this.logger = logger;
    this.config = config;
    this.gatewayService = gatewayService;
    this.requestController = requestController;
    this.setupRoutes();
  }
  setupRoutes() {
    // GET /health — aggregates gateway and request-controller health checks;
    // 200 when both report "ok", otherwise 503.
    this.fastify.get("/health", async (request, reply) => {
      const gatewayHealth = await this.gatewayService.healthCheck();
      const requestHealth = await this.requestController.healthCheck();
      const overallStatus = gatewayHealth.status === "ok" && requestHealth.status === "ok" ? "ok" : "error";
      return reply.status(overallStatus === "ok" ? 200 : 503).send({
        status: overallStatus,
        version: "1.0.0",
        gateway: gatewayHealth,
        request: requestHealth
      });
    });
    // GET /metrics — proxies the OpenTelemetry Prometheus exporter endpoint;
    // on failure, serves a minimal hand-built fallback payload instead of 500.
    this.fastify.get("/metrics", async (request, reply) => {
      try {
        const metricsUrl = this.config.metricsUrl || "http://127.0.0.1:9464/metrics";
        const response = await axios.get(metricsUrl);
        return reply.type("text/plain").send(response.data);
      } catch (err) {
        this.logger.error({ err }, "Failed to fetch OTEL metrics");
        const fallback = `# Metrics consolidated into OpenTelemetry. Check port 9464.
conduit_uptime_seconds ${process.uptime()}
conduit_memory_rss_bytes ${process.memoryUsage().rss}
`;
        return reply.type("text/plain").send(fallback);
      }
    });
  }
  // Starts the ops HTTP listener on config.opsPort (fallback 3001).
  // NOTE(review): binds 0.0.0.0, so the ops endpoints are reachable from all
  // interfaces — confirm this exposure is intended.
  async listen() {
    const port = this.config.opsPort !== void 0 ? this.config.opsPort : 3001;
    try {
      const address = await this.fastify.listen({ port, host: "0.0.0.0" });
      this.logger.info({ address }, "Ops server listening");
      return address;
    } catch (err) {
      this.logger.error({ err }, "Failed to start Ops server");
      throw err;
    }
  }
  // Shuts down the Fastify instance.
  async close() {
    await this.fastify.close();
  }
};
|
|
527
|
+
|
|
528
|
+
// src/core/concurrency.service.ts
|
|
529
|
+
import pLimit from "p-limit";
|
|
530
|
+
import { trace } from "@opentelemetry/api";
|
|
531
|
+
|
|
532
|
+
// src/core/metrics.service.ts
|
|
533
|
+
import { metrics as otelMetrics, ValueType } from "@opentelemetry/api";
|
|
534
|
+
// Singleton wrapper around the OpenTelemetry meter for the "conduit" scope.
// Owns all counters/histograms/gauges and tracks the live count of active
// executions plus a pluggable queue-depth provider.
var MetricsService = class _MetricsService {
  static instance;
  meter = otelMetrics.getMeter("conduit");
  executionCounter;
  cacheHitsCounter;
  cacheMissesCounter;
  executionLatency;
  toolExecutionDuration;
  requestQueueLength;
  activeExecutionsGauge;
  // Incremented/decremented around each execution; observed by the gauge.
  activeExecutionsCount = 0;
  // Replaced via registerQueueLengthProvider(); defaults to 0 until then.
  queueLengthCallback = () => 0;
  constructor() {
    this.executionCounter = this.meter.createCounter("conduit.executions.total", {
      description: "Total number of executions"
    });
    this.cacheHitsCounter = this.meter.createCounter("conduit.cache.hits.total", {
      description: "Total number of schema cache hits"
    });
    this.cacheMissesCounter = this.meter.createCounter("conduit.cache.misses.total", {
      description: "Total number of schema cache misses"
    });
    this.executionLatency = this.meter.createHistogram("conduit.executions.latency", {
      description: "Execution latency in milliseconds",
      unit: "ms",
      valueType: ValueType.DOUBLE
    });
    this.toolExecutionDuration = this.meter.createHistogram("conduit.tool.execution_duration_seconds", {
      description: "Duration of tool executions",
      unit: "s",
      valueType: ValueType.DOUBLE
    });
    this.requestQueueLength = this.meter.createObservableGauge("conduit.request_queue_length", {
      description: "Current request queue depth",
      valueType: ValueType.INT
    });
    this.activeExecutionsGauge = this.meter.createObservableGauge("conduit.executions.active", {
      description: "Current number of active executions"
    });
    // Observable gauges pull their values on each collection cycle.
    this.activeExecutionsGauge.addCallback((result) => {
      result.observe(this.activeExecutionsCount);
    });
    this.requestQueueLength.addCallback((result) => {
      result.observe(this.queueLengthCallback());
    });
  }
  // Lazily creates and returns the process-wide singleton.
  static getInstance() {
    if (!_MetricsService.instance) {
      _MetricsService.instance = new _MetricsService();
    }
    return _MetricsService.instance;
  }
  // Marks an execution as started: bumps the active count and total counter.
  recordExecutionStart() {
    this.activeExecutionsCount++;
    this.executionCounter.add(1);
  }
  // Marks an execution as finished and records its latency in milliseconds.
  // Math.max guards against going negative if start/end calls get unbalanced.
  recordExecutionEnd(durationMs, toolName) {
    this.activeExecutionsCount = Math.max(0, this.activeExecutionsCount - 1);
    this.executionLatency.record(durationMs, { tool: toolName || "unknown" });
  }
  // Records a tool execution duration; input is ms, histogram unit is seconds.
  recordToolExecution(durationMs, toolName, success) {
    this.toolExecutionDuration.record(durationMs / 1e3, {
      tool_name: toolName,
      success: String(success)
    });
  }
  recordCacheHit() {
    this.cacheHitsCounter.add(1);
  }
  recordCacheMiss() {
    this.cacheMissesCounter.add(1);
  }
  // Scraping is handled by the OTEL Prometheus exporter; this is a small
  // in-process snapshot for health/debug endpoints.
  getMetrics() {
    return {
      activeExecutions: this.activeExecutionsCount,
      uptime: process.uptime(),
      memory: process.memoryUsage()
    };
  }
  // Installs the live queue-depth source (e.g. the concurrency limiter).
  registerQueueLengthProvider(provider) {
    this.queueLengthCallback = provider;
  }
};
// Shared module-level singleton used throughout the bundle.
var metrics = MetricsService.getInstance();
|
|
620
|
+
|
|
621
|
+
// src/core/concurrency.service.ts
|
|
622
|
+
// Raised by ConcurrencyService.run() when the pending-request backlog is at
// capacity. Transports detect it via err.name === "QueueFullError" and reply
// with a JSON-RPC "Server busy" (-32000) error instead of crashing.
var QueueFullError = class extends Error {
  // Class field form: initialized right after super(), so instances carry
  // the discriminating name as an own property, same as before.
  name = "QueueFullError";
};
|
|
628
|
+
// Bounds the number of simultaneously executing requests via p-limit and
// rejects new work outright once the pending queue reaches maxQueueSize.
var ConcurrencyService = class {
  limit;
  logger;
  maxQueueSize;
  // NOTE(review): declared but never assigned or read in this class —
  // queue depth is reported through the MetricsService provider instead.
  queueDepthHistogram;
  constructor(logger, options) {
    this.logger = logger;
    this.limit = pLimit(options.maxConcurrent);
    this.maxQueueSize = options.maxQueueSize || 100;
    // Expose the live queue depth to the metrics observable gauge.
    metrics.registerQueueLengthProvider(() => this.limit.pendingCount);
  }
  // Schedules fn through the concurrency limiter and returns its result.
  // Throws QueueFullError (without enqueueing) when the backlog is full.
  async run(fn) {
    if (this.limit.pendingCount >= this.maxQueueSize) {
      this.logger.warn({ pending: this.limit.pendingCount, max: this.maxQueueSize }, "Request queue full, rejecting request");
      throw new QueueFullError("Server is too busy, please try again later");
    }
    const active = this.limit.activeCount;
    const pending = this.limit.pendingCount;
    this.logger.debug({ active, pending }, "Concurrency status before task");
    // Annotate the active OTEL span (if one exists) with queue state.
    const span = trace.getActiveSpan();
    if (span) {
      span.setAttributes({
        "concurrency.active": active,
        "concurrency.pending": pending
      });
    }
    try {
      return await this.limit(fn);
    } finally {
      this.logger.debug({
        active: this.limit.activeCount,
        pending: this.limit.pendingCount
      }, "Concurrency status after task");
    }
  }
  // Snapshot of limiter state for health/debug endpoints.
  get stats() {
    return {
      activeCount: this.limit.activeCount,
      pendingCount: this.limit.pendingCount
    };
  }
};
|
|
673
|
+
|
|
674
|
+
// src/core/request.controller.ts
|
|
675
|
+
// Routes incoming JSON-RPC requests through a middleware pipeline and then
// dispatches them to per-method handlers (finalHandler below).
var RequestController = class {
  logger;
  executionService;
  gatewayService;
  middlewares = [];
  constructor(logger, executionService, gatewayService, middlewares = []) {
    this.logger = logger;
    this.executionService = executionService;
    this.gatewayService = gatewayService;
    this.middlewares = middlewares;
  }
  // Appends a middleware; middlewares run in registration order.
  use(middleware) {
    this.middlewares.push(middleware);
  }
  // Entry point used by the transports.
  async handleRequest(request, context) {
    return this.executePipeline(request, context);
  }
  // Koa-style middleware dispatcher: each middleware receives a next()
  // continuation; after the last one, finalHandler performs the dispatch.
  // The index guard rejects a middleware calling next() more than once.
  async executePipeline(request, context) {
    let index = -1;
    const dispatch = async (i) => {
      if (i <= index) throw new Error("next() called multiple times");
      index = i;
      const middleware = this.middlewares[i];
      if (middleware) {
        return middleware.handle(request, context, () => dispatch(i + 1));
      }
      return this.finalHandler(request, context);
    };
    return dispatch(0);
  }
|
|
705
|
+
  // Handles "mcp_validate_tool": validates tool arguments via the gateway
  // without executing the tool. Returns -32602 on missing params, -32603 on
  // validation failure.
  async handleValidateTool(request, context) {
    const params = request.params;
    if (!params || !params.toolName || !params.args) {
      return {
        jsonrpc: "2.0",
        id: request.id,
        error: {
          code: -32602,
          message: "Missing toolName or args params"
        }
      };
    }
    try {
      const result = await this.gatewayService.validateTool(params.toolName, params.args, context);
      return {
        jsonrpc: "2.0",
        id: request.id,
        result
      };
    } catch (error) {
      return {
        jsonrpc: "2.0",
        id: request.id,
        error: {
          code: -32603,
          message: error.message || "Validation failed"
        }
      };
    }
  }
  // Terminal pipeline stage: dispatches each JSON-RPC method to its handler.
  // Supports both standard MCP names (tools/list, tools/call) and the
  // conduit-specific mcp_* aliases.
  async finalHandler(request, context) {
    const { method, params, id } = request;
    switch (method) {
      case "tools/list":
      // Standard MCP method name
      case "mcp_discover_tools":
        return this.handleDiscoverTools(params, context, id);
      case "mcp_list_tool_packages":
        return this.handleListToolPackages(params, context, id);
      case "mcp_list_tool_stubs":
        return this.handleListToolStubs(params, context, id);
      case "mcp_read_tool_schema":
        return this.handleReadToolSchema(params, context, id);
      case "mcp_validate_tool":
        return this.handleValidateTool(request, context);
      case "mcp_call_tool":
      case "tools/call":
        return this.handleCallTool(params, context, id);
      case "mcp_execute_typescript":
        return this.handleExecuteTypeScript(params, context, id);
      case "mcp_execute_python":
        return this.handleExecutePython(params, context, id);
      case "mcp_execute_isolate":
        return this.handleExecuteIsolate(params, context, id);
      case "initialize":
        return this.handleInitialize(params, context, id);
      case "notifications/initialized":
        return null;
      // Notifications don't get responses per MCP spec
      case "ping":
        // Liveness probe: empty result.
        return { jsonrpc: "2.0", id, result: {} };
      default:
        return this.errorResponse(id, -32601, `Method not found: ${method}`);
    }
  }
|
|
770
|
+
  // Lists all tools visible to this context, normalized to the MCP
  // { name, description, inputSchema } shape.
  async handleDiscoverTools(params, context, id) {
    const tools = await this.gatewayService.discoverTools(context);
    const standardizedTools = tools.map((t) => ({
      name: t.name,
      description: t.description,
      inputSchema: t.inputSchema
    }));
    return {
      jsonrpc: "2.0",
      id,
      result: {
        tools: standardizedTools
      }
    };
  }
  // Lists available tool packages (no per-context filtering here).
  async handleListToolPackages(params, context, id) {
    const packages = await this.gatewayService.listToolPackages();
    return {
      jsonrpc: "2.0",
      id,
      result: {
        packages
      }
    };
  }
  // Lists lightweight tool stubs for one package; -32602 when packageId is
  // missing, -32001 when the gateway lookup fails.
  async handleListToolStubs(params, context, id) {
    const { packageId } = params;
    if (!packageId) {
      return this.errorResponse(id, -32602, "Missing packageId parameter");
    }
    try {
      const stubs = await this.gatewayService.listToolStubs(packageId, context);
      return {
        jsonrpc: "2.0",
        id,
        result: {
          stubs
        }
      };
    } catch (error) {
      return this.errorResponse(id, -32001, error.message);
    }
  }
  // Returns the full schema for one tool; -32602 when toolId is missing,
  // -32001 when the tool is unknown, -32003 on gateway failure.
  async handleReadToolSchema(params, context, id) {
    const { toolId } = params;
    if (!toolId) {
      return this.errorResponse(id, -32602, "Missing toolId parameter");
    }
    try {
      const schema = await this.gatewayService.getToolSchema(toolId, context);
      if (!schema) {
        return this.errorResponse(id, -32001, `Tool not found: ${toolId}`);
      }
      return {
        jsonrpc: "2.0",
        id,
        result: {
          schema
        }
      };
    } catch (error) {
      return this.errorResponse(id, -32003, error.message);
    }
  }
|
|
834
|
+
async handleCallTool(params, context, id) {
|
|
835
|
+
const { name, arguments: toolArgs } = params;
|
|
836
|
+
switch (name) {
|
|
837
|
+
case "mcp_execute_typescript":
|
|
838
|
+
return this.handleExecuteTypeScript(toolArgs, context, id);
|
|
839
|
+
case "mcp_execute_python":
|
|
840
|
+
return this.handleExecutePython(toolArgs, context, id);
|
|
841
|
+
case "mcp_execute_isolate":
|
|
842
|
+
return this.handleExecuteIsolate(toolArgs, context, id);
|
|
843
|
+
}
|
|
844
|
+
const response = await this.gatewayService.callTool(name, toolArgs, context);
|
|
845
|
+
return { ...response, id };
|
|
846
|
+
}
|
|
847
|
+
async handleExecuteTypeScript(params, context, id) {
|
|
848
|
+
const { code, limits, allowedTools } = params;
|
|
849
|
+
if (Array.isArray(allowedTools)) {
|
|
850
|
+
context.allowedTools = allowedTools;
|
|
851
|
+
}
|
|
852
|
+
const result = await this.executionService.executeTypeScript(code, limits, context, allowedTools);
|
|
853
|
+
if (result.error) {
|
|
854
|
+
return this.errorResponse(id, result.error.code, result.error.message);
|
|
855
|
+
}
|
|
856
|
+
return {
|
|
857
|
+
jsonrpc: "2.0",
|
|
858
|
+
id,
|
|
859
|
+
result: {
|
|
860
|
+
stdout: result.stdout,
|
|
861
|
+
stderr: result.stderr,
|
|
862
|
+
exitCode: result.exitCode
|
|
863
|
+
}
|
|
864
|
+
};
|
|
865
|
+
}
|
|
866
|
+
async handleExecutePython(params, context, id) {
|
|
867
|
+
const { code, limits, allowedTools } = params;
|
|
868
|
+
if (Array.isArray(allowedTools)) {
|
|
869
|
+
context.allowedTools = allowedTools;
|
|
870
|
+
}
|
|
871
|
+
const result = await this.executionService.executePython(code, limits, context, allowedTools);
|
|
872
|
+
if (result.error) {
|
|
873
|
+
return this.errorResponse(id, result.error.code, result.error.message);
|
|
874
|
+
}
|
|
875
|
+
return {
|
|
876
|
+
jsonrpc: "2.0",
|
|
877
|
+
id,
|
|
878
|
+
result: {
|
|
879
|
+
stdout: result.stdout,
|
|
880
|
+
stderr: result.stderr,
|
|
881
|
+
exitCode: result.exitCode
|
|
882
|
+
}
|
|
883
|
+
};
|
|
884
|
+
}
|
|
885
|
+
async handleInitialize(params, context, id) {
|
|
886
|
+
const clientVersion = params?.protocolVersion || "2025-06-18";
|
|
887
|
+
return {
|
|
888
|
+
jsonrpc: "2.0",
|
|
889
|
+
id,
|
|
890
|
+
result: {
|
|
891
|
+
protocolVersion: clientVersion,
|
|
892
|
+
capabilities: {
|
|
893
|
+
tools: {
|
|
894
|
+
listChanged: true
|
|
895
|
+
},
|
|
896
|
+
resources: {
|
|
897
|
+
listChanged: true,
|
|
898
|
+
subscribe: true
|
|
899
|
+
}
|
|
900
|
+
},
|
|
901
|
+
serverInfo: {
|
|
902
|
+
name: "conduit",
|
|
903
|
+
version: process.env.npm_package_version || "1.1.0"
|
|
904
|
+
}
|
|
905
|
+
}
|
|
906
|
+
};
|
|
907
|
+
}
|
|
908
|
+
async handleExecuteIsolate(params, context, id) {
|
|
909
|
+
const { code, limits, allowedTools } = params;
|
|
910
|
+
if (Array.isArray(allowedTools)) {
|
|
911
|
+
context.allowedTools = allowedTools;
|
|
912
|
+
}
|
|
913
|
+
const result = await this.executionService.executeIsolate(code, limits, context, allowedTools);
|
|
914
|
+
if (result.error) {
|
|
915
|
+
return this.errorResponse(id, result.error.code, result.error.message);
|
|
916
|
+
}
|
|
917
|
+
return {
|
|
918
|
+
jsonrpc: "2.0",
|
|
919
|
+
id,
|
|
920
|
+
result: {
|
|
921
|
+
stdout: result.stdout,
|
|
922
|
+
stderr: result.stderr,
|
|
923
|
+
exitCode: result.exitCode
|
|
924
|
+
}
|
|
925
|
+
};
|
|
926
|
+
}
|
|
927
|
+
errorResponse(id, code, message) {
|
|
928
|
+
return {
|
|
929
|
+
jsonrpc: "2.0",
|
|
930
|
+
id,
|
|
931
|
+
error: {
|
|
932
|
+
code,
|
|
933
|
+
message
|
|
934
|
+
}
|
|
935
|
+
};
|
|
936
|
+
}
|
|
937
|
+
async shutdown() {
|
|
938
|
+
await this.executionService.shutdown();
|
|
939
|
+
}
|
|
940
|
+
async healthCheck() {
|
|
941
|
+
const pyodideHealth = await this.executionService.healthCheck();
|
|
942
|
+
return {
|
|
943
|
+
status: pyodideHealth.status === "ok" ? "ok" : "error",
|
|
944
|
+
pyodide: pyodideHealth
|
|
945
|
+
};
|
|
946
|
+
}
|
|
947
|
+
async warmup() {
|
|
948
|
+
await this.executionService.warmup();
|
|
949
|
+
}
|
|
950
|
+
};
|
|
951
|
+
|
|
952
|
+
// src/gateway/upstream.client.ts
|
|
953
|
+
import axios2 from "axios";
|
|
954
|
+
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
|
|
955
|
+
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";
|
|
956
|
+
import { z as z2 } from "zod";
|
|
957
|
+
/**
 * Client for one configured upstream MCP server.
 *
 * Two transports are supported:
 *  - "stdio": spawns the upstream via the MCP SDK's StdioClientTransport and
 *    talks to it through the SDK Client.
 *  - "http": posts raw JSON-RPC with axios, with SSRF protection (the URL is
 *    validated, DNS is pre-resolved, and the request is sent to the resolved
 *    IP with an explicit Host header so the check cannot be bypassed by a
 *    DNS rebind between validation and request).
 */
var UpstreamClient = class {
  logger;
  info;
  authService;
  urlValidator;
  mcpClient;
  transport;
  /**
   * @param logger parent logger; a child tagged with { upstreamId } is derived
   * @param info upstream descriptor ({ type, url | command/args/env, credentials, ... })
   * @param authService resolves auth headers for HTTP upstreams
   * @param urlValidator SSRF validator for outbound URLs
   */
  constructor(logger, info, authService, urlValidator) {
    this.logger = logger.child({ upstreamId: info.id });
    this.info = info;
    this.authService = authService;
    this.urlValidator = urlValidator;
    if (this.info.type === "stdio") {
      // Merge the process environment with per-upstream overrides, then strip
      // undefined values (StdioClientTransport expects string-valued env only).
      const env = { ...process.env, ...this.info.env };
      const cleanEnv = Object.entries(env).reduce((acc, [k, v]) => {
        if (v !== void 0) acc[k] = v;
        return acc;
      }, {});
      this.transport = new StdioClientTransport({
        command: this.info.command,
        args: this.info.args,
        env: cleanEnv
      });
      this.mcpClient = new Client({
        name: "conduit-gateway",
        version: "1.0.0"
      }, {
        capabilities: {}
      });
    }
  }
  /**
   * Best-effort lazy connect for the stdio transport. Connection failures are
   * logged (not thrown) — the subsequent request will surface the real error.
   */
  async ensureConnected() {
    if (!this.mcpClient || !this.transport) return;
    try {
      // NOTE(review): `transport.connection` is not a documented SDK field —
      // TODO confirm it exists on the SDK version in use; otherwise this may
      // attempt to reconnect on every call.
      if (!this.transport.connection) {
        await this.mcpClient.connect(this.transport);
      }
    } catch (e) {
      // Previously swallowed silently; surface at debug so connect failures
      // are diagnosable while keeping the best-effort semantics.
      this.logger.debug({ err: e }, "ensureConnected failed (will retry on next call)");
    }
  }
  /** Routes a JSON-RPC request to the transport matching this upstream's type. */
  async call(request, context) {
    const isStdio = (info) => info.type === "stdio";
    if (isStdio(this.info)) {
      return this.callStdio(request);
    } else {
      return this.callHttp(request, context);
    }
  }
  /**
   * Sends a request over the stdio transport, mapping the internal
   * list_tools/call_tool method names onto the SDK's typed helpers and
   * falling back to a generic request for anything else. Errors are
   * converted to JSON-RPC error envelopes, never thrown.
   */
  async callStdio(request) {
    if (!this.mcpClient) {
      return { jsonrpc: "2.0", id: request.id, error: { code: -32603, message: "Stdio client not initialized" } };
    }
    try {
      await this.ensureConnected();
      if (request.method === "list_tools") {
        const result = await this.mcpClient.listTools();
        return {
          jsonrpc: "2.0",
          id: request.id,
          result
        };
      } else if (request.method === "call_tool") {
        const params = request.params;
        const result = await this.mcpClient.callTool({
          name: params.name,
          arguments: params.arguments
        });
        return {
          jsonrpc: "2.0",
          id: request.id,
          result
        };
      } else {
        // Pass-through for any other MCP method; z2.any() skips result validation.
        const result = await this.mcpClient.request(
          { method: request.method, params: request.params },
          z2.any()
        );
        return {
          jsonrpc: "2.0",
          id: request.id,
          result
        };
      }
    } catch (error) {
      this.logger.error({ err: error }, "Stdio call failed");
      return {
        jsonrpc: "2.0",
        id: request.id,
        error: {
          code: error.code || -32603,
          message: error.message || "Internal error in stdio transport"
        }
      };
    }
  }
  /**
   * Posts a JSON-RPC request to an HTTP upstream.
   * Adds correlation/tenant headers and resolved auth headers, then enforces
   * the SSRF policy; on success the request goes to the validator's resolved
   * IP with the original hostname in the Host header. 10s timeout, no redirects.
   */
  async callHttp(request, context) {
    if (this.info.type === "stdio") throw new Error("Unreachable");
    const url = this.info.url;
    const headers = {
      "Content-Type": "application/json",
      "X-Correlation-Id": context.correlationId
    };
    if (context.tenantId) {
      headers["X-Tenant-Id"] = context.tenantId;
    }
    if (this.info.credentials) {
      const authHeaders = await this.authService.getAuthHeaders(this.info.credentials);
      Object.assign(headers, authHeaders);
    }
    const securityResult = await this.urlValidator.validateUrl(url);
    if (!securityResult.valid) {
      this.logger.error({ url }, "Blocked upstream URL (SSRF)");
      return {
        jsonrpc: "2.0",
        id: request.id,
        error: {
          code: -32003,
          message: securityResult.message || "Forbidden URL"
        }
      };
    }
    try {
      this.logger.debug({ method: request.method }, "Calling upstream MCP");
      // Pin the connection to the IP that passed validation (anti DNS-rebind),
      // keeping the logical hostname in the Host header for virtual hosting.
      const originalUrl = new URL(url);
      const requestUrl = securityResult.resolvedIp ? `${originalUrl.protocol}//${securityResult.resolvedIp}${originalUrl.port ? ":" + originalUrl.port : ""}${originalUrl.pathname}${originalUrl.search}${originalUrl.hash}` : url;
      headers["Host"] = originalUrl.hostname;
      const response = await axios2.post(requestUrl, request, {
        headers,
        timeout: 1e4,
        maxRedirects: 0
      });
      return response.data;
    } catch (err) {
      this.logger.error({ err: err.message }, "Upstream MCP call failed");
      return {
        jsonrpc: "2.0",
        id: request.id,
        error: {
          code: -32008,
          message: `Upstream error: ${err.message}`
        }
      };
    }
  }
  /**
   * Optionally fetches `<base>/conduit.manifest.json` from an HTTP upstream to
   * discover tools without an RPC round-trip. Returns the manifest object when
   * it exists and has a `tools` array, otherwise null (callers fall back to
   * the list_tools RPC). Never throws.
   */
  async getManifest(context) {
    if (this.info.type !== "http") return null;
    try {
      const baseUrl = this.info.url.replace(/\/$/, "");
      const manifestUrl = `${baseUrl}/conduit.manifest.json`;
      const headers = {
        "X-Correlation-Id": context.correlationId
      };
      if (this.info.credentials) {
        const authHeaders = await this.authService.getAuthHeaders(this.info.credentials);
        Object.assign(headers, authHeaders);
      }
      const securityResult = await this.urlValidator.validateUrl(manifestUrl);
      if (!securityResult.valid) {
        this.logger.warn({ url: manifestUrl }, "Blocked manifest URL (SSRF)");
        return null;
      }
      // Same IP-pinning scheme as callHttp.
      const originalUrl = new URL(manifestUrl);
      const requestUrl = securityResult.resolvedIp ? `${originalUrl.protocol}//${securityResult.resolvedIp}${originalUrl.port ? ":" + originalUrl.port : ""}${originalUrl.pathname}${originalUrl.search}${originalUrl.hash}` : manifestUrl;
      headers["Host"] = originalUrl.hostname;
      const response = await axios2.get(requestUrl, {
        headers,
        timeout: 5e3,
        maxRedirects: 0
      });
      if (response.status === 200 && response.data && Array.isArray(response.data.tools)) {
        return response.data;
      }
    } catch (error) {
      this.logger.debug({ err: error }, "Failed to fetch manifest (will fallback)");
    }
    return null;
  }
};
|
|
1135
|
+
|
|
1136
|
+
// src/gateway/auth.service.ts
|
|
1137
|
+
import axios3 from "axios";
|
|
1138
|
+
/**
 * Resolves outbound auth headers for upstream credentials.
 * Supports static API keys, static bearer tokens, and OAuth2 refresh-token
 * flow with an in-memory token cache and per-client refresh de-duplication.
 */
var AuthService = class {
  logger;
  // Cache tokens separately from credentials to avoid mutation
  tokenCache = /* @__PURE__ */ new Map();
  // Prevent concurrent refresh requests for the same client
  refreshLocks = /* @__PURE__ */ new Map();
  constructor(logger) {
    this.logger = logger;
  }
  /**
   * Maps a credentials object to the HTTP headers to attach.
   * @param creds { type: "apiKey" | "bearer" | "oauth2", ... }
   * @returns header name -> value map
   * @throws Error for unknown credential types, or when OAuth2 refresh fails
   */
  async getAuthHeaders(creds) {
    switch (creds.type) {
      case "apiKey":
        return { "X-API-Key": creds.apiKey || "" };
      case "bearer":
        return { "Authorization": `Bearer ${creds.bearerToken}` };
      case "oauth2":
        return { "Authorization": await this.getOAuth2Token(creds) };
      default:
        throw new Error(`Unsupported auth type: ${creds.type}`);
    }
  }
  /**
   * Returns a "Bearer <token>" string for OAuth2 credentials, serving from
   * cache when the token has >30s of life left. Concurrent callers for the
   * same clientId+tokenUrl share a single in-flight refresh via refreshLocks.
   */
  async getOAuth2Token(creds) {
    if (!creds.tokenUrl || !creds.clientId) {
      throw new Error("OAuth2 credentials missing required fields (tokenUrl, clientId)");
    }
    const cacheKey = `${creds.clientId}:${creds.tokenUrl}`;
    const cached = this.tokenCache.get(cacheKey);
    // 30s safety margin so we never hand out a token about to expire mid-request.
    if (cached && cached.expiresAt > Date.now() + 3e4) {
      return `Bearer ${cached.accessToken}`;
    }
    // Join an in-flight refresh instead of issuing a duplicate one.
    const existingRefresh = this.refreshLocks.get(cacheKey);
    if (existingRefresh) {
      return existingRefresh;
    }
    // The lock must be set before awaiting so later callers can find it;
    // it is always cleared in the finally, on success or failure.
    const refreshPromise = this.doRefresh(creds, cacheKey);
    this.refreshLocks.set(cacheKey, refreshPromise);
    try {
      return await refreshPromise;
    } finally {
      this.refreshLocks.delete(cacheKey);
    }
  }
  /**
   * Performs the refresh_token grant against the token endpoint, caches the
   * new access token with its expiry, and returns "Bearer <token>".
   * @throws Error wrapping the provider's error_description/error/message
   */
  async doRefresh(creds, cacheKey) {
    if (!creds.tokenUrl || !creds.refreshToken || !creds.clientId || !creds.clientSecret) {
      throw new Error("OAuth2 credentials missing required fields for refresh");
    }
    this.logger.info({ tokenUrl: creds.tokenUrl, clientId: creds.clientId }, "Refreshing OAuth2 token");
    try {
      // NOTE(review): this posts a JSON body; RFC 6749 §6 specifies
      // application/x-www-form-urlencoded for the token endpoint. Many
      // providers accept JSON, but verify against the targeted IdPs.
      const response = await axios3.post(creds.tokenUrl, {
        grant_type: "refresh_token",
        refresh_token: creds.refreshToken,
        client_id: creds.clientId,
        client_secret: creds.clientSecret
      });
      const { access_token, expires_in } = response.data;
      this.tokenCache.set(cacheKey, {
        accessToken: access_token,
        // expires_in is seconds; store an absolute ms deadline.
        expiresAt: Date.now() + expires_in * 1e3
      });
      return `Bearer ${access_token}`;
    } catch (err) {
      // Prefer the OAuth2 error fields over the generic transport message.
      const errorMsg = err.response?.data?.error_description || err.response?.data?.error || err.message;
      this.logger.error({ err: errorMsg }, "Failed to refresh OAuth2 token");
      throw new Error(`OAuth2 refresh failed: ${errorMsg}`);
    }
  }
};
|
|
1205
|
+
|
|
1206
|
+
// src/gateway/schema.cache.ts
|
|
1207
|
+
import { LRUCache } from "lru-cache";
|
|
1208
|
+
/**
 * LRU cache of upstream tool schema lists, keyed by upstream id.
 * Hits and misses are reported to the global metrics collector.
 */
var SchemaCache = class {
  cache;
  logger;
  /**
   * @param logger structured logger
   * @param max maximum number of upstream entries kept (default 100)
   * @param ttl entry lifetime in ms (default one hour)
   */
  constructor(logger, max = 100, ttl = 1e3 * 60 * 60) {
    this.logger = logger;
    this.cache = new LRUCache({ max, ttl });
  }
  /** Looks up the cached tools for an upstream, recording hit/miss metrics. */
  get(upstreamId) {
    const hit = this.cache.get(upstreamId);
    if (hit) {
      metrics.recordCacheHit();
    } else {
      metrics.recordCacheMiss();
    }
    return hit;
  }
  /** Stores the tool list for an upstream. */
  set(upstreamId, tools) {
    this.logger.debug({ upstreamId, count: tools.length }, "Caching tool schemas");
    this.cache.set(upstreamId, tools);
  }
  /** Drops a single upstream's entry (e.g. after an upstream error). */
  invalidate(upstreamId) {
    this.logger.debug({ upstreamId }, "Invalidating schema cache");
    this.cache.delete(upstreamId);
  }
  /** Empties the entire cache. */
  clear() {
    this.cache.clear();
  }
};
|
|
1239
|
+
|
|
1240
|
+
// src/core/policy.service.ts
|
|
1241
|
+
/**
 * Tool-name parsing and allowlist policy checks.
 * Qualified tool names use "__" between namespace and name
 * (e.g. "github__createIssue"); allowlist patterns use dot notation
 * and may end in a trailing "*" wildcard (e.g. "github.*").
 */
var PolicyService = class {
  /**
   * Split a qualified tool name into a structured identifier.
   * Only the first "__" separates namespace from name, so nested names
   * like "github__api__listRepos" keep "api__listRepos" as the name.
   * Names with no "__" get an empty namespace.
   */
  parseToolName(qualifiedName) {
    const sep = qualifiedName.indexOf("__");
    if (sep === -1) {
      return { namespace: "", name: qualifiedName };
    }
    const namespace = qualifiedName.slice(0, sep);
    const name = qualifiedName.slice(sep + 2);
    return { namespace, name };
  }
  /** Inverse of parseToolName: rebuilds the qualified "ns__name" string. */
  formatToolName(tool) {
    return tool.namespace ? `${tool.namespace}__${tool.name}` : tool.name;
  }
  /**
   * Tests a tool against an allowlist of dot-notation patterns.
   * "github.createIssue" matches exactly; "github.*" matches the whole
   * namespace (a bare "*" matches everything).
   * @param tool ToolIdentifier or qualified string
   * @param allowedTools array of patterns
   */
  isToolAllowed(tool, allowedTools) {
    const id = typeof tool === "string" ? this.parseToolName(tool) : tool;
    // Flatten into dot-comparable segments: [namespace, ...name parts].
    const segments = [id.namespace, ...id.name.split("__")].filter(Boolean);
    const matches = (pattern) => {
      const patternSegs = pattern.split(".");
      const wildcard = patternSegs[patternSegs.length - 1] === "*";
      const wanted = wildcard ? patternSegs.slice(0, -1) : patternSegs;
      // Wildcard: prefix match. Exact: lengths must agree too.
      if (wildcard ? wanted.length > segments.length : wanted.length !== segments.length) {
        return false;
      }
      return wanted.every((seg, i) => seg === segments[i]);
    };
    return allowedTools.some(matches);
  }
};
|
|
1295
|
+
|
|
1296
|
+
// src/gateway/gateway.service.ts
|
|
1297
|
+
import { Ajv } from "ajv";
|
|
1298
|
+
import addFormats from "ajv-formats";
|
|
1299
|
+
// The three built-in sandbox execution tools. They share one input shape —
// required `code` plus an optional `allowedTools` allowlist — so the schema
// is built by a local factory instead of being written out three times.
var BUILT_IN_TOOLS = (() => {
  const sandboxSchema = (codeDescription, allowedToolsDescription) => ({
    type: "object",
    properties: {
      code: { type: "string", description: codeDescription },
      allowedTools: {
        type: "array",
        items: { type: "string" },
        description: allowedToolsDescription
      }
    },
    required: ["code"]
  });
  return [
    {
      name: "mcp_execute_typescript",
      description: "Executes TypeScript code in a secure sandbox with access to `tools.*` SDK.",
      inputSchema: sandboxSchema(
        "The TypeScript code to execute.",
        'Optional list of tools the script is allowed to call (e.g. ["github.*"]).'
      )
    },
    {
      name: "mcp_execute_python",
      description: "Executes Python code in a secure sandbox with access to `tools.*` SDK.",
      inputSchema: sandboxSchema(
        "The Python code to execute.",
        'Optional list of tools the script is allowed to call (e.g. ["github.*"]).'
      )
    },
    {
      name: "mcp_execute_isolate",
      description: "Executes JavaScript code in a high-speed V8 isolate (no Deno/Node APIs).",
      inputSchema: sandboxSchema(
        "The JavaScript code to execute.",
        "Optional list of tools the script is allowed to call."
      )
    }
  ];
})();
|
|
1358
|
+
/**
 * Aggregates built-in sandbox tools and registered upstream MCP servers
 * behind one discovery/validation/invocation surface. Tool schemas are
 * cached per upstream (SchemaCache) and compiled Ajv validators are cached
 * per tool (validatorCache); allowlist policy is enforced via PolicyService.
 */
var GatewayService = class {
  logger;
  clients = /* @__PURE__ */ new Map();
  authService;
  schemaCache;
  urlValidator;
  policyService;
  ajv;
  // Cache compiled validators to avoid recompilation on every call
  validatorCache = /* @__PURE__ */ new Map();
  constructor(logger, urlValidator, policyService) {
    this.logger = logger;
    this.urlValidator = urlValidator;
    this.authService = new AuthService(logger);
    this.schemaCache = new SchemaCache(logger);
    this.policyService = policyService ?? new PolicyService();
    // strict: false tolerates upstream schemas that use non-standard keywords.
    this.ajv = new Ajv({ strict: false });
    addFormats.default(this.ajv);
  }
  /** Registers one upstream MCP server and creates its client. */
  registerUpstream(info) {
    const client = new UpstreamClient(this.logger, info, this.authService, this.urlValidator);
    this.clients.set(info.id, client);
    this.logger.info({ upstreamId: info.id }, "Registered upstream MCP");
  }
  /** Lists the registered upstream packages with placeholder descriptions. */
  async listToolPackages() {
    return Array.from(this.clients.entries()).map(([id, client]) => ({
      id,
      description: `Upstream ${id}`,
      // NOTE: Upstream description fetching deferred to V2
      version: "1.0.0"
    }));
  }
  /**
   * Lists lightweight stubs ({ id, name, description }) for one upstream.
   * Tries the static manifest first (manifest results are NOT cached), then
   * falls back to the list_tools RPC, whose result is cached. Results are
   * filtered by the caller's allowlist when one is set.
   * @throws Error when packageId is not a registered upstream
   */
  async listToolStubs(packageId, context) {
    const client = this.clients.get(packageId);
    if (!client) {
      throw new Error(`Upstream package not found: ${packageId}`);
    }
    let tools = this.schemaCache.get(packageId);
    if (!tools) {
      try {
        const manifest = await client.getManifest(context);
        if (manifest) {
          const stubs2 = manifest.tools.map((t) => ({
            id: `${packageId}__${t.name}`,
            name: t.name,
            description: t.description
          }));
          if (context.allowedTools) {
            return stubs2.filter((t) => this.policyService.isToolAllowed(t.id, context.allowedTools));
          }
          return stubs2;
        }
      } catch (e) {
        this.logger.debug({ packageId, err: e }, "Manifest fetch failed, falling back to RPC");
      }
      const response = await client.call({
        jsonrpc: "2.0",
        id: "discovery",
        method: "list_tools"
      }, context);
      if (response.result?.tools) {
        tools = response.result.tools;
        this.schemaCache.set(packageId, tools);
      } else {
        this.logger.warn({ upstreamId: packageId, error: response.error }, "Failed to discover tools from upstream");
        tools = [];
      }
    }
    const stubs = tools.map((t) => ({
      id: `${packageId}__${t.name}`,
      name: t.name,
      description: t.description
    }));
    if (context.allowedTools) {
      return stubs.filter((t) => this.policyService.isToolAllowed(t.id, context.allowedTools));
    }
    return stubs;
  }
  /**
   * Returns the full schema for one tool id ("ns__name"), with the qualified
   * id substituted as the schema's name. Built-in tools are matched by their
   * unqualified id. Returns null for unknown tools.
   * @throws Error when the allowlist forbids the tool or the upstream is unknown
   */
  async getToolSchema(toolId, context) {
    if (context.allowedTools && !this.policyService.isToolAllowed(toolId, context.allowedTools)) {
      throw new Error(`Access to tool ${toolId} is forbidden by allowlist`);
    }
    const parsed = this.policyService.parseToolName(toolId);
    const toolName = parsed.name;
    const builtIn = BUILT_IN_TOOLS.find((t) => t.name === toolId);
    if (builtIn) return builtIn;
    const upstreamId = parsed.namespace;
    // Populate the schema cache on demand.
    if (!this.schemaCache.get(upstreamId)) {
      await this.listToolStubs(upstreamId, context);
    }
    const tools = this.schemaCache.get(upstreamId) || [];
    const tool = tools.find((t) => t.name === toolName);
    if (!tool) return null;
    return {
      ...tool,
      name: toolId
    };
  }
  /**
   * Returns every tool visible to the caller: built-ins plus each upstream's
   * tools with "<upstreamId>__" prefixed names, filtered by the allowlist.
   */
  async discoverTools(context) {
    const allTools = [...BUILT_IN_TOOLS];
    for (const [id, client] of this.clients.entries()) {
      let tools = this.schemaCache.get(id);
      if (!tools) {
        const response = await client.call({
          jsonrpc: "2.0",
          id: "discovery",
          method: "list_tools"
          // Standard MCP method
        }, context);
        if (response.result?.tools) {
          tools = response.result.tools;
          this.schemaCache.set(id, tools);
        } else {
          this.logger.warn({ upstreamId: id, error: response.error }, "Failed to discover tools from upstream");
          tools = [];
        }
      }
      const prefixedTools = tools.map((t) => ({ ...t, name: `${id}__${t.name}` }));
      if (context.allowedTools) {
        allTools.push(...prefixedTools.filter((t) => this.policyService.isToolAllowed(t.name, context.allowedTools)));
      } else {
        allTools.push(...prefixedTools);
      }
    }
    return allTools;
  }
  /**
   * Invokes a qualified tool ("ns__name") on its upstream.
   * Enforces the allowlist, validates params against the cached schema
   * (strict mode additionally requires the schema to exist), records
   * latency metrics, and invalidates cached schemas/validators when the
   * upstream reports a transport error (-32008).
   */
  async callTool(name, params, context) {
    if (context.allowedTools && !this.policyService.isToolAllowed(name, context.allowedTools)) {
      this.logger.warn({ name, allowedTools: context.allowedTools }, "Tool call blocked by allowlist");
      return {
        jsonrpc: "2.0",
        id: 0,
        error: {
          code: -32003,
          message: `Authorization failed: tool ${name} is not in the allowlist`
        }
      };
    }
    const toolId = this.policyService.parseToolName(name);
    const upstreamId = toolId.namespace;
    const toolName = toolId.name;
    const client = this.clients.get(upstreamId);
    if (!client) {
      return {
        jsonrpc: "2.0",
        id: 0,
        error: {
          code: -32003,
          message: `Upstream not found: ${upstreamId}`
        }
      };
    }
    // Populate the schema cache on demand before validation.
    if (!this.schemaCache.get(upstreamId)) {
      await this.listToolStubs(upstreamId, context);
    }
    const tools = this.schemaCache.get(upstreamId) || [];
    const toolSchema = tools.find((t) => t.name === toolName);
    if (context.strictValidation) {
      if (!toolSchema) {
        return {
          jsonrpc: "2.0",
          id: 0,
          error: {
            code: -32601,
            // Method not found / Schema missing
            message: `Strict mode: Tool schema for ${name} not found`
          }
        };
      }
      if (!toolSchema.inputSchema) {
        return {
          jsonrpc: "2.0",
          id: 0,
          error: {
            code: -32602,
            // Invalid params
            message: `Strict mode: Tool ${name} has no input schema defined`
          }
        };
      }
    }
    if (toolSchema && toolSchema.inputSchema) {
      const cacheKey = `${upstreamId}__${toolName}`;
      let validate = this.validatorCache.get(cacheKey);
      if (!validate) {
        validate = this.ajv.compile(toolSchema.inputSchema);
        this.validatorCache.set(cacheKey, validate);
      }
      const valid = validate(params);
      if (!valid) {
        return {
          jsonrpc: "2.0",
          id: 0,
          error: {
            code: -32602,
            // Invalid params
            message: `Invalid parameters for tool ${name}: ${this.ajv.errorsText(validate.errors)}`
          }
        };
      }
    }
    const startTime = performance.now();
    let success = false;
    let response;
    try {
      response = await client.call({
        jsonrpc: "2.0",
        id: context.correlationId,
        method: "call_tool",
        params: {
          name: toolName,
          arguments: params
        }
      }, context);
      success = !response.error;
    } catch (error) {
      success = false;
      throw error;
    } finally {
      // Metrics are recorded whether the call succeeded, errored, or threw.
      const duration = performance.now() - startTime;
      metrics.recordToolExecution(duration, toolName, success);
    }
    if (response.error && response.error.code === -32008) {
      this.schemaCache.invalidate(upstreamId);
      // Fix: also drop this upstream's compiled validators so a schema that
      // changed across the upstream outage is not enforced from a stale
      // compilation after the schema cache repopulates.
      for (const key of this.validatorCache.keys()) {
        if (key.startsWith(`${upstreamId}__`)) {
          this.validatorCache.delete(key);
        }
      }
    }
    return response;
  }
  /** Probes every upstream via list_tools; "ok" only when all are active. */
  async healthCheck() {
    const upstreamStatus = {};
    const context = new ExecutionContext({ logger: this.logger });
    await Promise.all(
      Array.from(this.clients.entries()).map(async ([id, client]) => {
        try {
          const response = await client.call({
            jsonrpc: "2.0",
            id: "health",
            method: "list_tools"
          }, context);
          upstreamStatus[id] = response.error ? "degraded" : "active";
        } catch (err) {
          upstreamStatus[id] = "error";
        }
      })
    );
    const allOk = Object.values(upstreamStatus).every((s) => s === "active");
    return {
      status: allOk ? "ok" : "degraded",
      upstreams: upstreamStatus
    };
  }
  /**
   * Dry-run validation of params against a tool's schema without calling it.
   * Returns { valid } or { valid: false, errors: [...] }. In strict mode a
   * missing input schema is an error; otherwise it validates vacuously.
   */
  async validateTool(name, params, context) {
    const toolId = this.policyService.parseToolName(name);
    const upstreamId = toolId.namespace;
    const toolName = toolId.name;
    if (!this.schemaCache.get(upstreamId)) {
      await this.listToolStubs(upstreamId, context);
    }
    const tools = this.schemaCache.get(upstreamId) || [];
    const toolSchema = tools.find((t) => t.name === toolName);
    if (!toolSchema) {
      return { valid: false, errors: [`Tool ${name} not found`] };
    }
    if (context.strictValidation) {
      if (!toolSchema.inputSchema) {
        return { valid: false, errors: [`Strict mode: Tool ${name} has no input schema defined`] };
      }
    }
    if (!toolSchema.inputSchema) {
      return { valid: true };
    }
    // Fix: reuse the shared validator cache (the class comment promises
    // compiled validators are cached, but this path recompiled every call,
    // which is slow and also leaks memory inside Ajv's own schema registry).
    const cacheKey = `${upstreamId}__${toolName}`;
    let validate = this.validatorCache.get(cacheKey);
    if (!validate) {
      validate = this.ajv.compile(toolSchema.inputSchema);
      this.validatorCache.set(cacheKey, validate);
    }
    const valid = validate(params);
    if (!valid) {
      return {
        valid: false,
        errors: validate.errors?.map((e) => this.ajv.errorsText([e])) || ["Unknown validation error"]
      };
    }
    return { valid: true };
  }
};
|
|
1639
|
+
|
|
1640
|
+
// src/core/network.policy.service.ts
|
|
1641
|
+
import dns from "dns/promises";
|
|
1642
|
+
import net2 from "net";
|
|
1643
|
+
import { LRUCache as LRUCache2 } from "lru-cache";
|
|
1644
|
+
var NetworkPolicyService = class {
  logger;
  // Patterns covering loopback, RFC1918, link-local and private IPv6 space.
  privateRanges = [
    /^127\./,
    /^10\./,
    /^172\.(1[6-9]|2[0-9]|3[0-1])\./,
    /^192\.168\./,
    /^169\.254\./,
    // Link-local
    /^localhost$/i,
    /^0\.0\.0\.0$/,
    /^::1$/,
    // IPv6 localhost
    /^fc00:/i,
    // IPv6 private
    /^fe80:/i
    // IPv6 link-local
  ];
  RATE_LIMIT = 30;
  WINDOW_MS = 6e4;
  // Use LRUCache to prevent unbounded memory growth
  requestCounts;
  constructor(logger) {
    this.logger = logger;
    // TTL equals the rate window, so stale counters expire on their own.
    this.requestCounts = new LRUCache2({
      max: 1e4,
      ttl: this.WINDOW_MS
    });
  }
  // True when `host` (hostname or IP literal) matches any private/loopback pattern.
  matchesPrivateRange(host) {
    return this.privateRanges.some((range) => range.test(host));
  }
  // SSRF guard: rejects URLs whose hostname is, or resolves to, a private address.
  // Returns { valid, resolvedIp? } on success, { valid: false, message } on denial.
  async validateUrl(url) {
    try {
      const { hostname } = new URL(url);
      if (this.matchesPrivateRange(hostname)) {
        this.logger.warn({ hostname }, "SSRF attempt detected: private range access");
        return { valid: false, message: "Access denied: private network access forbidden" };
      }
      if (net2.isIP(hostname)) {
        // Literal IP already passed the pattern check; nothing left to resolve.
        return { valid: true, resolvedIp: hostname };
      }
      try {
        const addresses = await dns.lookup(hostname, { all: true });
        const resolvedIps = [];
        for (const { address } of addresses) {
          // Normalize IPv4-mapped IPv6 (::ffff:a.b.c.d) back to dotted-quad form.
          const ip = address.startsWith("::ffff:") ? address.substring(7) : address;
          if (this.matchesPrivateRange(ip)) {
            this.logger.warn({ hostname, ip }, "SSRF attempt detected: DNS resolves to private IP");
            return { valid: false, message: "Access denied: hostname resolves to private network" };
          }
          resolvedIps.push(ip);
        }
        return { valid: true, resolvedIp: resolvedIps[0] };
      } catch (err) {
        // Fail closed: an unresolvable hostname is treated as hostile.
        this.logger.warn({ hostname, err: err.message }, "DNS lookup failed during URL validation, blocking request");
        return { valid: false, message: "Access denied: hostname resolution failed" };
      }
    } catch (err) {
      return { valid: false, message: `Invalid URL: ${err.message}` };
    }
  }
  // Fixed-window rate limiter keyed by `key`; returns false once the quota is spent.
  checkRateLimit(key) {
    const now = Date.now();
    const entry = this.requestCounts.get(key);
    if (!entry || now > entry.resetTime) {
      // New key or expired window: start a fresh counter.
      this.requestCounts.set(key, { count: 1, resetTime: now + this.WINDOW_MS });
      return true;
    }
    if (entry.count >= this.RATE_LIMIT) {
      this.logger.warn({ key }, "Rate limit exceeded");
      return false;
    }
    entry.count += 1;
    return true;
  }
};
|
|
1726
|
+
|
|
1727
|
+
// src/core/session.manager.ts
|
|
1728
|
+
import { v4 as uuidv42 } from "uuid";
|
|
1729
|
+
import { LRUCache as LRUCache3 } from "lru-cache";
|
|
1730
|
+
var SessionManager = class {
  logger;
  // token -> { allowedTools, createdAt }; entries auto-expire via the LRU TTL.
  sessions;
  SESSION_TTL_MS = 36e5;
  // 1 hour
  constructor(logger) {
    this.logger = logger;
    this.sessions = new LRUCache3({
      max: 1e4,
      ttl: this.SESSION_TTL_MS
    });
  }
  // Mints an opaque session token scoped to the given tool allow-list.
  createSession(allowedTools) {
    const token = uuidv42();
    const record = {
      allowedTools,
      createdAt: Date.now()
    };
    this.sessions.set(token, record);
    return token;
  }
  // Returns the session record, or undefined if unknown/expired.
  getSession(token) {
    return this.sessions.get(token);
  }
  // Removes a session immediately (e.g. on logout or revocation).
  invalidateSession(token) {
    this.sessions.delete(token);
  }
  // Eagerly drops expired entries; the LRU otherwise evicts them lazily.
  cleanupSessions() {
    this.sessions.purgeStale();
  }
};
|
|
1760
|
+
|
|
1761
|
+
// src/core/security.service.ts
|
|
1762
|
+
import crypto from "crypto";
|
|
1763
|
+
var SecurityService = class {
  logger;
  ipcToken;
  networkPolicy;
  sessionManager;
  constructor(logger, ipcToken) {
    this.logger = logger;
    this.ipcToken = ipcToken;
    this.networkPolicy = new NetworkPolicyService(logger);
    this.sessionManager = new SessionManager(logger);
  }
  // Rejects empty code bodies and anything over the 1MB size cap.
  validateCode(code) {
    const withinLimit = !!code && code.length <= 1024 * 1024;
    if (!withinLimit) {
      return { valid: false, message: "Code size exceeds limit or is empty" };
    }
    return { valid: true };
  }
  // Delegates SSRF/URL screening to the network policy.
  async validateUrl(url) {
    return this.networkPolicy.validateUrl(url);
  }
  // Delegates fixed-window rate limiting to the network policy.
  checkRateLimit(key) {
    return this.networkPolicy.checkRateLimit(key);
  }
  // Accepts either the static IPC token (constant-time compare) or a live session token.
  validateIpcToken(token) {
    if (!this.ipcToken) {
      // No token configured: IPC authentication is disabled.
      return true;
    }
    const expected = Buffer.from(this.ipcToken);
    const actual = Buffer.from(token);
    // Length check first: timingSafeEqual throws on mismatched lengths.
    const matchesStaticToken = expected.length === actual.length && crypto.timingSafeEqual(expected, actual);
    if (matchesStaticToken) {
      return true;
    }
    return Boolean(this.sessionManager.getSession(token));
  }
  createSession(allowedTools) {
    return this.sessionManager.createSession(allowedTools);
  }
  getSession(token) {
    return this.sessionManager.getSession(token);
  }
  invalidateSession(token) {
    this.sessionManager.invalidateSession(token);
  }
  getIpcToken() {
    return this.ipcToken;
  }
};
|
|
1810
|
+
|
|
1811
|
+
// src/core/otel.service.ts
|
|
1812
|
+
import { NodeSDK } from "@opentelemetry/sdk-node";
|
|
1813
|
+
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
|
|
1814
|
+
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
|
|
1815
|
+
import { resourceFromAttributes } from "@opentelemetry/resources";
|
|
1816
|
+
import { SemanticResourceAttributes } from "@opentelemetry/semantic-conventions";
|
|
1817
|
+
import { PinoInstrumentation } from "@opentelemetry/instrumentation-pino";
|
|
1818
|
+
var OtelService = class {
  constructor(logger) {
    this.logger = logger;
  }
  sdk = null;
  // Boots the OpenTelemetry SDK with a Prometheus metric reader plus
  // Node auto-instrumentation and pino log instrumentation.
  async start() {
    const resource = resourceFromAttributes({
      [SemanticResourceAttributes.SERVICE_NAME]: "conduit"
    });
    const metricReader = new PrometheusExporter({
      port: 9464
      // Default prometheus exporter port
    });
    this.sdk = new NodeSDK({
      resource,
      metricReader,
      instrumentations: [getNodeAutoInstrumentations(), new PinoInstrumentation()]
    });
    try {
      await this.sdk.start();
      this.logger.info("OpenTelemetry SDK started");
    } catch (error) {
      // Telemetry is best-effort: log the failure and keep the process alive.
      this.logger.error({ error }, "Error starting OpenTelemetry SDK");
    }
  }
  // Flushes and stops the SDK if it was ever started.
  async shutdown() {
    if (this.sdk) {
      await this.sdk.shutdown();
      this.logger.info("OpenTelemetry SDK shut down");
    }
  }
};
|
|
1851
|
+
|
|
1852
|
+
// src/executors/deno.executor.ts
|
|
1853
|
+
import { spawn, exec } from "child_process";
|
|
1854
|
+
import { promisify } from "util";
|
|
1855
|
+
import fs3 from "fs";
|
|
1856
|
+
import path4 from "path";
|
|
1857
|
+
import { platform } from "os";
|
|
1858
|
+
import { fileURLToPath as fileURLToPath2 } from "url";
|
|
1859
|
+
|
|
1860
|
+
// src/core/asset.utils.ts
|
|
1861
|
+
import path3 from "path";
|
|
1862
|
+
import fs2 from "fs";
|
|
1863
|
+
import { fileURLToPath } from "url";
|
|
1864
|
+
var __dirname = path3.dirname(fileURLToPath(import.meta.url));
|
|
1865
|
+
// Locates a bundled asset file by probing every plausible layout the build can
// produce (source tree, flat dist, nested dist, cwd-relative). Returns the
// first existing absolute path; throws if the asset cannot be found anywhere.
function resolveAssetPath(filename) {
  const candidates = [
    // Source structure: src/core/asset.utils.ts -> src/assets/
    path3.resolve(__dirname, "../assets", filename),
    // Dist structure possibility 1: dist/ (flat) with assets/ subdir
    path3.resolve(__dirname, "./assets", filename),
    // Dist structure possibility 2: dist/core/ -> dist/assets/
    path3.resolve(__dirname, "../../assets", filename),
    // Dist structure possibility 3: dist/ -> assets/ (if called from root)
    path3.resolve(process.cwd(), "assets", filename),
    // Dist structure possibility 4: dist/assets/ (from root)
    path3.resolve(process.cwd(), "dist/assets", filename)
  ];
  for (const candidate of candidates) {
    if (fs2.existsSync(candidate)) {
      return candidate;
    }
  }
  // BUG FIX: the message previously rendered the literal text "$(unknown)"
  // instead of interpolating the missing asset's filename.
  throw new Error(`Asset not found: ${filename}. Checked paths: ${candidates.join(", ")}`);
}
|
|
1885
|
+
|
|
1886
|
+
// src/executors/deno.executor.ts
|
|
1887
|
+
var execAsync = promisify(exec);
|
|
1888
|
+
var __dirname2 = path4.dirname(fileURLToPath2(import.meta.url));
|
|
1889
|
+
var DenoExecutor = class {
  shimContent = "";
  // Track active processes for cleanup
  // Using 'any' for the Set because ChildProcess type import can be finicky across node versions/types
  // but at runtime it is a ChildProcess
  activeProcesses = /* @__PURE__ */ new Set();
  maxConcurrentProcesses;
  constructor(maxConcurrentProcesses = 10) {
    this.maxConcurrentProcesses = maxConcurrentProcesses;
  }
  // Lazily loads and memoizes the Deno shim source from the assets directory.
  getShim() {
    if (this.shimContent) return this.shimContent;
    try {
      const assetPath = resolveAssetPath("deno-shim.ts");
      this.shimContent = fs3.readFileSync(assetPath, "utf-8");
      return this.shimContent;
    } catch (err) {
      throw new Error(`Failed to load Deno shim: ${err.message}`);
    }
  }
  // Builds the full script (shim + user code) with the IPC placeholders and
  // optional SDK code substituted. Throws if any placeholder survives the
  // substitution, since running with a dangling placeholder would be unsafe.
  prepareCode(code, config) {
    let shim = this.getShim().replace("__CONDUIT_IPC_ADDRESS__", config?.ipcAddress || "").replace("__CONDUIT_IPC_TOKEN__", config?.ipcToken || "");
    if (shim.includes("__CONDUIT_IPC_ADDRESS__")) {
      throw new Error("Failed to inject IPC address into Deno shim");
    }
    if (shim.includes("__CONDUIT_IPC_TOKEN__")) {
      throw new Error("Failed to inject IPC token into Deno shim");
    }
    if (config?.sdkCode) {
      shim = shim.replace("// __CONDUIT_SDK_INJECTION__", config.sdkCode);
      if (shim.includes("// __CONDUIT_SDK_INJECTION__")) {
        throw new Error("Failed to inject SDK code into Deno shim");
      }
    }
    return shim + "\n" + code;
  }
  // Assembles the `deno run` CLI arguments: heap cap via V8 flags and, when an
  // IPC address is configured, network access restricted to that host only.
  buildArgs(limits, config, logger) {
    const args = [
      "run",
      `--v8-flags=--max-heap-size=${limits.memoryLimitMb}`
    ];
    if (config?.ipcAddress && !config.ipcAddress.includes("/") && !config.ipcAddress.includes("\\")) {
      try {
        const url = new URL(`http://${config.ipcAddress}`);
        let normalizedHost = url.hostname.replace(/[\[\]]/g, "");
        // Wildcard/loopback binds are reachable from the child via 127.0.0.1.
        if (normalizedHost === "0.0.0.0" || normalizedHost === "::" || normalizedHost === "::1" || normalizedHost === "") {
          normalizedHost = "127.0.0.1";
        }
        args.push(`--allow-net=${normalizedHost}`);
      } catch (err) {
        logger.warn({ address: config.ipcAddress, err }, "Failed to parse IPC address for Deno permissions");
      }
    }
    // "-" makes deno read the program from stdin.
    args.push("-");
    return args;
  }
  // Samples the resident set size of `pid` in megabytes; returns 0 when the
  // value cannot be determined (e.g. transient tasklist failure on Windows).
  async sampleRssMb(pid, isWindows) {
    if (isWindows) {
      try {
        const { stdout: tasklistOut } = await execAsync(`tasklist /FI "PID eq ${pid}" /FO CSV /NH`);
        const match = tasklistOut.match(/"([^"]+ K)"$/m);
        if (match) {
          const memKb = parseInt(match[1].replace(/[ K,]/g, ""), 10);
          if (!isNaN(memKb)) {
            return memKb / 1024;
          }
        }
      } catch (e) {
        // tasklist can fail transiently; treat as "unknown".
      }
      return 0;
    }
    const { stdout: rssStdout } = await execAsync(`ps -o rss= -p ${pid}`);
    const rssKb = parseInt(rssStdout.trim(), 10);
    return isNaN(rssKb) ? 0 : rssKb / 1024;
  }
  // Runs `code` in a sandboxed deno subprocess enforcing timeout, memory (RSS
  // poll), output-byte and log-line limits. Resolves (never rejects) with
  // { stdout, stderr, exitCode, error? } once the process ends or is killed.
  async execute(code, limits, context, config) {
    const { logger } = context;
    if (this.activeProcesses.size >= this.maxConcurrentProcesses) {
      return {
        stdout: "",
        stderr: "",
        exitCode: null,
        error: {
          code: -32e3 /* ServerBusy */,
          message: "Too many concurrent Deno processes"
        }
      };
    }
    let stdout = "";
    let stderr = "";
    let totalOutputBytes = 0;
    let totalLogEntries = 0;
    let isTerminated = false;
    const fullCode = this.prepareCode(code, config);
    const args = this.buildArgs(limits, config, logger);
    const child = spawn("deno", args, {
      stdio: ["pipe", "pipe", "pipe"],
      // Minimal environment: deliberately not inheriting the full parent env.
      env: {
        PATH: process.env.PATH,
        HOME: process.env.HOME,
        TMPDIR: process.env.TMPDIR
      }
    });
    this.activeProcesses.add(child);
    const cleanupProcess = () => {
      this.activeProcesses.delete(child);
    };
    return new Promise((resolve) => {
      let monitorInterval;
      // Kills the child and resolves exactly once with the given error payload.
      const terminate = (error, warnFields, warnMsg) => {
        if (isTerminated) return;
        isTerminated = true;
        clearTimeout(timeout);
        if (monitorInterval !== void 0) clearInterval(monitorInterval);
        child.kill("SIGKILL");
        if (warnFields) {
          logger.warn(warnFields, warnMsg);
        } else {
          logger.warn(warnMsg);
        }
        cleanupProcess();
        resolve({ stdout, stderr, exitCode: null, error });
      };
      const timeout = setTimeout(() => {
        terminate(
          { code: -32008 /* RequestTimeout */, message: "Execution timed out" },
          null,
          "Execution timed out, SIGKILL sent"
        );
      }, limits.timeoutMs);
      const isWindows = platform() === "win32";
      // Poll the child's RSS every 2s; --max-heap-size alone does not bound
      // native/Wasm memory, so the OS-level RSS is checked as well.
      monitorInterval = setInterval(async () => {
        if (isTerminated || !child.pid) {
          clearInterval(monitorInterval);
          return;
        }
        try {
          const rssMb = await this.sampleRssMb(child.pid, isWindows);
          if (rssMb > limits.memoryLimitMb) {
            terminate(
              {
                code: -32009 /* MemoryLimitExceeded */,
                message: `Memory limit exceeded: ${rssMb.toFixed(2)}MB > ${limits.memoryLimitMb}MB`
              },
              { rssMb, limitMb: limits.memoryLimitMb },
              "Deno RSS limit exceeded, SIGKILL sent"
            );
          }
        } catch (err) {
          // Process likely exited between polls; stop monitoring.
          clearInterval(monitorInterval);
        }
      }, 2e3);
      // Shared stdout/stderr handler (previously two near-identical copies):
      // accumulates output, enforces byte/line limits, truncating the final
      // chunk to fit the byte budget before terminating.
      const makeStreamHandler = (streamName) => (chunk) => {
        if (isTerminated) return;
        totalOutputBytes += chunk.length;
        totalLogEntries += (chunk.toString().match(/\n/g) || []).length;
        if (totalOutputBytes > limits.maxOutputBytes || totalLogEntries > limits.maxLogEntries) {
          const allowed = chunk.toString().slice(0, limits.maxOutputBytes - (totalOutputBytes - chunk.length));
          if (streamName === "stdout") {
            stdout += allowed;
          } else {
            stderr += allowed;
          }
          const overBytes = totalOutputBytes > limits.maxOutputBytes;
          terminate(
            {
              code: overBytes ? -32013 /* OutputLimitExceeded */ : -32014 /* LogLimitExceeded */,
              message: overBytes ? "Output limit exceeded" : "Log entry limit exceeded"
            },
            { bytes: totalOutputBytes, lines: totalLogEntries },
            "Limits exceeded, SIGKILL sent"
          );
          return;
        }
        if (streamName === "stdout") {
          stdout += chunk.toString();
        } else {
          stderr += chunk.toString();
        }
      };
      child.stdout.on("data", makeStreamHandler("stdout"));
      child.stderr.on("data", makeStreamHandler("stderr"));
      child.on("close", (exitCode) => {
        clearTimeout(timeout);
        if (monitorInterval !== void 0) clearInterval(monitorInterval);
        cleanupProcess();
        if (isTerminated) return;
        resolve({ stdout, stderr, exitCode });
      });
      child.on("error", (err) => {
        clearTimeout(timeout);
        // BUG FIX: the monitor interval was previously leaked on this path.
        if (monitorInterval !== void 0) clearInterval(monitorInterval);
        logger.error({ err }, "Child process error");
        cleanupProcess();
        const message = err.code === "ENOENT" ? "Deno executable not found in PATH. Please ensure Deno is installed." : err.message;
        resolve({
          stdout,
          stderr,
          exitCode: null,
          error: {
            code: -32603 /* InternalError */,
            message
          }
        });
      });
      child.stdin.write(fullCode);
      child.stdin.end();
    });
  }
  // Force-kills any still-running child processes.
  async shutdown() {
    for (const child of this.activeProcesses) {
      try {
        child.kill("SIGKILL");
      } catch (err) {
        // Process already exited; nothing to do.
      }
    }
    this.activeProcesses.clear();
  }
  // Reports whether a usable deno binary is reachable on PATH.
  async healthCheck() {
    try {
      const { stdout } = await execAsync("deno --version");
      return { status: "ok", detail: stdout.split("\n")[0] };
    } catch (err) {
      return { status: "error", detail: err.message };
    }
  }
  // Deno processes are spawned per-execution; there is nothing to pre-warm.
  async warmup() {
  }
};
|
|
2142
|
+
|
|
2143
|
+
// src/executors/pyodide.executor.ts
|
|
2144
|
+
import { Worker } from "worker_threads";
|
|
2145
|
+
import fs4 from "fs";
|
|
2146
|
+
import path5 from "path";
|
|
2147
|
+
import { fileURLToPath as fileURLToPath3 } from "url";
|
|
2148
|
+
var __dirname3 = path5.dirname(fileURLToPath3(import.meta.url));
|
|
2149
|
+
var PyodideExecutor = class {
  shimContent = "";
  // Pool entries: { worker, busy, runs, lastUsed }.
  pool = [];
  maxPoolSize;
  // Workers are recycled after a single run to guarantee a clean interpreter state.
  maxRunsPerWorker = 1;
  constructor(maxPoolSize = 3) {
    this.maxPoolSize = maxPoolSize;
  }
  // Lazily loads and memoizes the Python shim source from the assets directory.
  getShim() {
    if (this.shimContent) return this.shimContent;
    try {
      const assetPath = resolveAssetPath("python-shim.py");
      this.shimContent = fs4.readFileSync(assetPath, "utf-8");
      return this.shimContent;
    } catch (err) {
      throw new Error(`Failed to load Python shim: ${err.message}`);
    }
  }
  // Callers parked here until a worker frees up.
  waitQueue = [];
  // Resolves once the worker posts its "ready" message; rejects on worker error
  // or after a 10s init timeout (invoking `onTimeout` first, if provided).
  // BUG FIX: the init timer was never cleared on success, so in getWorker()
  // every freshly created worker was unconditionally terminated and evicted
  // from the pool 10 seconds later even after a successful initialization; the
  // temporary message/error listeners were also leaked. Both are now cleaned up.
  waitForWorkerReady(worker, onTimeout) {
    return new Promise((resolve, reject) => {
      const cleanup = () => {
        clearTimeout(timer);
        worker.off("message", onMessage);
        worker.off("error", onError);
      };
      const onMessage = (msg) => {
        if (msg.type === "ready") {
          cleanup();
          resolve();
        }
      };
      const onError = (err) => {
        cleanup();
        reject(err);
      };
      const timer = setTimeout(() => {
        cleanup();
        if (onTimeout) onTimeout();
        reject(new Error("Worker init timeout"));
      }, 1e4);
      worker.on("message", onMessage);
      worker.on("error", onError);
    });
  }
  // Returns an idle pooled worker, creates one if there is capacity, or parks
  // the caller in waitQueue until a worker is released.
  async getWorker(logger, limits) {
    let pooled = this.pool.find((w) => !w.busy);
    if (pooled) {
      pooled.busy = true;
      return pooled;
    }
    if (this.pool.length < this.maxPoolSize) {
      logger.info("Creating new Pyodide worker for pool");
      const worker = this.createWorker(limits);
      pooled = { worker, busy: true, runs: 0, lastUsed: Date.now() };
      this.pool.push(pooled);
      await this.waitForWorkerReady(worker, () => {
        // Init timed out: discard the half-initialized worker.
        worker.terminate();
        this.pool = this.pool.filter((p) => p !== pooled);
      });
      return pooled;
    }
    return new Promise((resolve) => {
      this.waitQueue.push(resolve);
    });
  }
  // Spawns the pyodide worker thread, preferring the compiled .js entry and
  // falling back to the .ts source when running from the source tree.
  createWorker(limits) {
    let workerPath = path5.resolve(__dirname3, "./pyodide.worker.js");
    if (!fs4.existsSync(workerPath)) {
      workerPath = path5.resolve(__dirname3, "./pyodide.worker.ts");
    }
    return new Worker(workerPath, {
      execArgv: process.execArgv.includes("--loader") ? process.execArgv : [],
      resourceLimits: limits ? {
        maxOldSpaceSizeMb: limits.memoryLimitMb
        // Stack size and young generation are usually fine with defaults
      } : void 0
    });
  }
  // Fills the pool up to maxPoolSize with initialized workers.
  async warmup(limits) {
    const needed = this.maxPoolSize - this.pool.length;
    if (needed <= 0) return;
    console.error(`Pre-warming ${needed} Pyodide workers...`);
    const promises = [];
    for (let i = 0; i < needed; i++) {
      promises.push(this.createAndPoolWorker(limits));
    }
    await Promise.all(promises);
    console.error(`Pyodide pool pre-warmed with ${this.pool.length} workers.`);
  }
  // Hands a freed worker to the next queued caller, if any, marking it busy.
  dispatchToWaiter(pooled) {
    if (this.waitQueue.length > 0) {
      const nextResolve = this.waitQueue.shift();
      if (nextResolve) {
        pooled.busy = true;
        nextResolve(pooled);
      }
    }
  }
  // Creates one worker, waits for it to initialize, then releases it to the
  // pool (or directly to a queued waiter). Discards the worker on init failure.
  async createAndPoolWorker(limits) {
    if (this.pool.length >= this.maxPoolSize) return;
    const worker = this.createWorker(limits);
    const pooled = { worker, busy: true, runs: 0, lastUsed: Date.now() };
    this.pool.push(pooled);
    try {
      await this.waitForWorkerReady(worker);
      pooled.busy = false;
      this.dispatchToWaiter(pooled);
    } catch (err) {
      this.pool = this.pool.filter((p) => p !== pooled);
      worker.terminate();
    }
  }
  // Executes Python `code` on a pooled pyodide worker, enforcing the timeout
  // and mapping worker-reported limit breaches to structured error codes.
  // Resolves (never rejects) with { stdout, stderr, exitCode, error? }.
  async execute(code, limits, context, config) {
    const { logger } = context;
    const pooledWorker = await this.getWorker(logger, limits);
    const worker = pooledWorker.worker;
    return new Promise((resolve) => {
      const timeout = setTimeout(() => {
        logger.warn("Python execution timed out, terminating worker");
        worker.terminate();
        this.pool = this.pool.filter((w) => w !== pooledWorker);
        resolve({
          stdout: "",
          stderr: "Execution timed out",
          exitCode: null,
          error: {
            code: -32008 /* RequestTimeout */,
            message: "Execution timed out"
          }
        });
      }, limits.timeoutMs);
      const onMessage = (msg) => {
        // Lifecycle messages are not execution results.
        if (msg.type === "ready" || msg.type === "pong") return;
        clearTimeout(timeout);
        worker.off("message", onMessage);
        worker.off("error", onError);
        pooledWorker.busy = false;
        this.dispatchToWaiter(pooledWorker);
        pooledWorker.runs++;
        pooledWorker.lastUsed = Date.now();
        if (pooledWorker.runs >= this.maxRunsPerWorker) {
          logger.info("Recycling Pyodide worker after max runs");
          worker.terminate();
          this.pool = this.pool.filter((w) => w !== pooledWorker);
        }
        if (msg.success) {
          resolve({
            stdout: msg.stdout,
            stderr: msg.stderr,
            exitCode: 0
          });
          return;
        }
        logger.warn({ error: msg.error }, "Python execution failed or limit breached, terminating worker");
        worker.terminate();
        this.pool = this.pool.filter((w) => w !== pooledWorker);
        logger.debug({ error: msg.error }, "Python execution error from worker");
        // Classify which resource limit (if any) the failure corresponds to,
        // using both the explicit limitBreached field and error-text markers.
        const normalizedError = (msg.error || "").toLowerCase();
        const limitBreached = msg.limitBreached || "";
        const isLogLimit = limitBreached === "log" || normalizedError.includes("[limit_log]");
        const isOutputLimit = limitBreached === "output" || normalizedError.includes("[limit_output]");
        const isAmbiguousLimit = !isOutputLimit && !isLogLimit && (normalizedError.includes("i/o error") || normalizedError.includes("errno 29") || normalizedError.includes("limit exceeded"));
        resolve({
          stdout: msg.stdout,
          stderr: msg.stderr,
          exitCode: 1,
          error: {
            code: isLogLimit ? -32014 /* LogLimitExceeded */ : isOutputLimit || isAmbiguousLimit ? -32013 /* OutputLimitExceeded */ : -32603 /* InternalError */,
            message: isLogLimit ? "Log entry limit exceeded" : isOutputLimit || isAmbiguousLimit ? "Output limit exceeded" : msg.error
          }
        });
      };
      const onError = (err) => {
        clearTimeout(timeout);
        worker.off("message", onMessage);
        worker.off("error", onError);
        logger.error({ err }, "Pyodide worker error");
        worker.terminate();
        this.pool = this.pool.filter((w) => w !== pooledWorker);
        resolve({
          stdout: "",
          stderr: err.message,
          exitCode: null,
          error: {
            code: -32603 /* InternalError */,
            message: err.message
          }
        });
      };
      worker.on("message", onMessage);
      worker.on("error", onError);
      let shim = this.getShim();
      if (config?.sdkCode) {
        shim = shim.replace("# __CONDUIT_SDK_INJECTION__", config.sdkCode);
      }
      worker.postMessage({
        type: "execute",
        data: { code, limits, ipcInfo: config, shim }
      });
    });
  }
  // Terminates every pooled worker and empties the pool.
  async shutdown() {
    for (const pooled of this.pool) {
      await pooled.worker.terminate();
    }
    this.pool = [];
  }
  // Acquires a worker and ping/pongs it to verify the pool is responsive.
  async healthCheck() {
    try {
      const pooled = await this.getWorker(console, {
        timeoutMs: 5e3,
        memoryLimitMb: 128,
        maxOutputBytes: 1024,
        maxLogEntries: 10
      });
      return new Promise((resolve) => {
        let timeout;
        const onMessage = (msg) => {
          if (msg.type === "pong") {
            cleanup();
            pooled.busy = false;
            resolve({ status: "ok", workers: this.pool.length });
          }
        };
        const cleanup = () => {
          clearTimeout(timeout);
          pooled.worker.off("message", onMessage);
        };
        timeout = setTimeout(() => {
          cleanup();
          pooled.busy = false;
          resolve({ status: "error", workers: this.pool.length, detail: "Health check timeout" });
        }, 2e3);
        pooled.worker.on("message", onMessage);
        pooled.worker.postMessage({ type: "ping" });
      });
    } catch (err) {
      return { status: "error", workers: this.pool.length, detail: err.message };
    }
  }
};
|
|
2389
|
+
|
|
2390
|
+
// src/executors/isolate.executor.ts
|
|
2391
|
+
import ivm from "isolated-vm";
|
|
2392
|
+
// Executes user JavaScript inside an isolated-vm V8 isolate: hard memory
// limit, wall-clock timeout, byte-capped stdout/stderr, and asynchronous
// tool-call bridging back to the host gatewayService.
var IsolateExecutor = class {
  logger;
  gatewayService;
  constructor(logger, gatewayService) {
    this.logger = logger;
    this.gatewayService = gatewayService;
  }
  // Run `code` under `limits`. Returns { stdout, stderr, exitCode, error? };
  // sandbox-level failures (timeout / OOM) are mapped to structured error
  // codes rather than thrown. `config.sdkCode` optionally overrides the
  // default minimal `tools` SDK injected into the isolate.
  async execute(code, limits, context, config) {
    const logs = [];
    const errors = [];
    let isolate = null;
    try {
      isolate = new ivm.Isolate({ memoryLimit: limits.memoryLimitMb });
      const ctx = await isolate.createContext();
      const jail = ctx.global;
      let currentLogBytes = 0;
      let currentErrorBytes = 0;
      // stdout sink — aborts the guest once the byte budget (+1 per message
      // for the joining newline) would be exceeded.
      await jail.set("__log", new ivm.Callback((msg) => {
        if (currentLogBytes + msg.length + 1 > limits.maxOutputBytes) {
          throw new Error("[LIMIT_LOG]");
        }
        if (currentLogBytes < limits.maxOutputBytes) {
          logs.push(msg);
          currentLogBytes += msg.length + 1;
        }
      }));
      // stderr sink — same accounting as __log.
      await jail.set("__error", new ivm.Callback((msg) => {
        if (currentErrorBytes + msg.length + 1 > limits.maxOutputBytes) {
          throw new Error("[LIMIT_OUTPUT]");
        }
        if (currentErrorBytes < limits.maxOutputBytes) {
          errors.push(msg);
          currentErrorBytes += msg.length + 1;
        }
      }));
      let requestIdCounter = 0;
      // NOTE(review): unused — in-flight calls are tracked by the `requests`
      // map inside the isolate's bootstrap, not on the host side.
      const pendingToolCalls = /* @__PURE__ */ new Map();
      // Host-side tool dispatcher: synchronously hands back a request id,
      // then later resolves/rejects it inside the isolate via
      // resolveRequest(...). Both evalClosure calls are best-effort (the
      // trailing catch swallows failures, e.g. when the isolate is disposed).
      await jail.set("__dispatchToolCall", new ivm.Callback((nameStr, argsStr) => {
        const requestId = ++requestIdCounter;
        const name = nameStr;
        let args = {};
        try {
          args = JSON.parse(argsStr);
        } catch (e) {
        }
        this.gatewayService.callTool(name, args, context).then((res) => {
          return ctx.evalClosure(`resolveRequest($0, $1, null)`, [requestId, JSON.stringify(res)], { arguments: { copy: true } });
        }).catch((err) => {
          return ctx.evalClosure(`resolveRequest($0, null, $1)`, [requestId, err.message || "Unknown error"], { arguments: { copy: true } });
        }).catch((e) => {
        });
        return requestId;
      }));
      // Guest-side runtime: request bookkeeping, the __callTool promise
      // wrapper, and a console implementation backed by the host sinks.
      const bootstrap = `
        const requests = new Map();

        // Host calls this to resolve requests
        globalThis.resolveRequest = (id, resultJson, error) => {
          const req = requests.get(id);
          if (req) {
            requests.delete(id);
            if (error) req.reject(new Error(error));
            else req.resolve(resultJson);
          }
        };

        // Internal tool call wrapper
        globalThis.__callTool = (name, argsJson) => {
          return new Promise((resolve, reject) => {
            const id = __dispatchToolCall(name, argsJson);
            requests.set(id, { resolve, reject });
          });
        };

        const format = (arg) => {
          if (typeof arg === 'string') return arg;
          if (arg instanceof Error) return arg.stack || arg.message;
          if (typeof arg === 'object' && arg !== null && arg.message && arg.stack) return arg.stack; // Duck typing
          return JSON.stringify(arg);
        };
        const console = {
          log: (...args) => __log(args.map(format).join(' ')),
          error: (...args) => __error(args.map(format).join(' ')),
        };
      `;
      const bootstrapScript = await isolate.compileScript(bootstrap);
      await bootstrapScript.run(ctx, { timeout: 1e3 });
      // Either a generated SDK (from SDKGenerator) or a minimal $raw fallback.
      const sdkScript = config?.sdkCode || `
        const tools = {
          $raw: async (name, args) => {
            const resStr = await __callTool(name, JSON.stringify(args || {}));
            return JSON.parse(resStr);
          }
        };
      `;
      const compiledSdk = await isolate.compileScript(sdkScript);
      await compiledSdk.run(ctx, { timeout: 1e3 });
      let executionPromiseResolve;
      const executionPromise = new Promise((resolve) => {
        executionPromiseResolve = resolve;
      });
      // The guest signals completion via __done() in the wrapper's finally
      // block, so async work after the top-level statements is still awaited.
      await jail.set("__done", new ivm.Callback(() => {
        if (executionPromiseResolve) executionPromiseResolve();
      }));
      let scriptFailed = false;
      await jail.set("__setFailed", new ivm.Callback(() => {
        scriptFailed = true;
      }));
      const wrappedCode = `void (async () => {
        try {
          ${code}
        } catch (err) {
          console.error(err);
          __setFailed();
        } finally {
          __done();
        }
      })()`;
      const script = await isolate.compileScript(wrappedCode);
      // First timeout bounds synchronous execution inside V8...
      await script.run(ctx, { timeout: limits.timeoutMs });
      // ...the racing timeout below bounds the async tail until __done fires.
      // NOTE(review): this timer is never cleared on success and `timedOut`
      // is written but never read — harmless but worth tidying upstream.
      let timedOut = false;
      const timeoutPromise = new Promise((_, reject) => {
        setTimeout(() => {
          timedOut = true;
          reject(new Error("Script execution timed out"));
        }, limits.timeoutMs);
      });
      try {
        await Promise.race([executionPromise, timeoutPromise]);
      } catch (err) {
        if (err.message === "Script execution timed out") {
          return {
            stdout: logs.join("\n"),
            stderr: errors.join("\n"),
            exitCode: null,
            error: {
              code: -32008 /* RequestTimeout */,
              message: "Execution timed out"
            }
          };
        }
        throw err;
      }
      return {
        stdout: logs.join("\n"),
        stderr: errors.join("\n"),
        exitCode: scriptFailed ? 1 : 0
      };
    } catch (err) {
      // Classify sandbox failures by message text (isolated-vm does not
      // expose typed errors for these cases).
      const message = err.message || "Unknown error";
      if (message.includes("Script execution timed out")) {
        return {
          stdout: logs.join("\n"),
          stderr: errors.join("\n"),
          exitCode: null,
          error: {
            code: -32008 /* RequestTimeout */,
            message: "Execution timed out"
          }
        };
      }
      if (message.includes("memory limit") || message.includes("disposed")) {
        return {
          stdout: logs.join("\n"),
          stderr: errors.join("\n"),
          exitCode: null,
          error: {
            code: -32009 /* MemoryLimitExceeded */,
            message: "Memory limit exceeded"
          }
        };
      }
      this.logger.error({ err }, "Isolate execution failed");
      return {
        stdout: logs.join("\n"),
        stderr: message,
        exitCode: 1,
        error: {
          code: -32603 /* InternalError */,
          message
        }
      };
    } finally {
      // Always reclaim the isolate's native memory, even on failure paths.
      if (isolate) {
        isolate.dispose();
      }
    }
  }
  async shutdown() {
  }
  // Cheap probe: confirm a tiny isolate can be created and disposed.
  async healthCheck() {
    try {
      const isolate = new ivm.Isolate({ memoryLimit: 8 });
      isolate.dispose();
      return { status: "ok" };
    } catch (err) {
      return { status: "error", detail: err.message };
    }
  }
  async warmup() {
  }
};
|
|
2594
|
+
|
|
2595
|
+
// src/core/registries/executor.registry.ts
|
|
2596
|
+
// Simple name -> executor lookup table with coordinated shutdown.
var ExecutorRegistry = class {
  // Registered executors, keyed by runtime name ("isolate", "deno", "python", ...).
  executors = /* @__PURE__ */ new Map();
  // Register (or replace) the executor for `name`.
  register(name, executor) {
    this.executors.set(name, executor);
  }
  // Look up an executor; undefined when none is registered.
  get(name) {
    return this.executors.get(name);
  }
  // True when an executor is registered under `name`.
  has(name) {
    return this.executors.has(name);
  }
  // Shut down every executor that supports it (sequentially), then empty the registry.
  async shutdownAll() {
    const registered = [...this.executors.values()];
    for (const entry of registered) {
      if (entry.shutdown) {
        await entry.shutdown();
      }
    }
    this.executors.clear();
  }
};
|
|
2616
|
+
|
|
2617
|
+
// src/sdk/tool-binding.ts
|
|
2618
|
+
// Split a "namespace__name" qualified tool id on its FIRST "__" separator.
// Ids without a separator come back with an empty namespace.
function parseToolName(qualifiedName) {
  const sep = qualifiedName.indexOf("__");
  if (sep < 0) {
    return { namespace: "", name: qualifiedName };
  }
  return {
    namespace: qualifiedName.slice(0, sep),
    name: qualifiedName.slice(sep + 2)
  };
}
|
|
2628
|
+
// Build a ToolBinding record from a qualified tool name; ids without a
// "__" separator fall back to the "default" namespace and keep the full
// name as the method name.
function toToolBinding(name, inputSchema, description) {
  const { namespace, name: shortName } = parseToolName(name);
  return {
    name,
    namespace: namespace || "default",
    methodName: shortName || name,
    inputSchema,
    description
  };
}
|
|
2638
|
+
// Bucket bindings by namespace, preserving first-seen namespace order and
// per-namespace insertion order.
function groupByNamespace(bindings) {
  const groups = new Map();
  for (const binding of bindings) {
    if (!groups.has(binding.namespace)) {
      groups.set(binding.namespace, []);
    }
    groups.get(binding.namespace).push(binding);
  }
  return groups;
}
|
|
2647
|
+
|
|
2648
|
+
// src/sdk/sdk-generator.ts
|
|
2649
|
+
// src/sdk/sdk-generator.ts
//
// Generates the in-sandbox `tools` SDK source for each executor runtime
// (Deno/TypeScript, Pyodide/Python, isolated-vm/JavaScript). Tool names use
// the "namespace__method" convention; dots in allowlist entries are
// normalized to "__" before comparison.
//
// Fixes vs previous revision:
//  - escapeString now escapes double quotes: escaped values are embedded in
//    double-quoted generated strings, so an unescaped `"` in a tool name or
//    description produced syntactically broken (injectable) SDK code.
//  - generatePython emits `pass` when there are no bindings; previously the
//    generated `def __init__(self):` had no body -> IndentationError.
//  - Removed dead `safeNamespace`/`methodName` locals in the TS generators.
var SDKGenerator = class {
  /**
   * Convert camelCase to snake_case for Python
   */
  toSnakeCase(str) {
    return str.replace(/([A-Z])/g, "_$1").toLowerCase().replace(/^_/, "");
  }
  /**
   * Escape a string for embedding inside a quoted literal in generated code.
   * Order matters: backslashes first, then quotes, then newlines.
   */
  escapeString(str) {
    return str.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/'/g, "\\'").replace(/\n/g, "\\n");
  }
  /**
   * Generate TypeScript SDK code to be injected into Deno sandbox.
   * Creates: tools.namespace.method(args) => __internalCallTool("namespace__method", args)
   * @param bindings Tool bindings to generate SDK for
   * @param allowedTools Optional allowlist for $raw() enforcement
   * @param enableRawFallback Enable $raw() escape hatch (default: true)
   */
  generateTypeScript(bindings, allowedTools, enableRawFallback = true) {
    const grouped = groupByNamespace(bindings);
    const lines = [];
    lines.push("// Generated SDK - Do not edit");
    if (allowedTools && allowedTools.length > 0) {
      const normalizedList = allowedTools.map((t) => t.replace(/\./g, "__"));
      lines.push(`const __allowedTools = ${JSON.stringify(normalizedList)};`);
    } else {
      lines.push("const __allowedTools = null;");
    }
    lines.push("const tools = {");
    for (const [namespace, tools] of grouped.entries()) {
      if (this.isValidIdentifier(namespace)) {
        lines.push(`  ${namespace}: {`);
      } else {
        lines.push(`  "${this.escapeString(namespace)}": {`);
      }
      for (const tool of tools) {
        if (tool.description) {
          lines.push(`    /** ${this.escapeString(tool.description)} */`);
        }
        if (this.isValidIdentifier(tool.methodName)) {
          lines.push(`    async ${tool.methodName}(args) {`);
        } else {
          lines.push(`    "${this.escapeString(tool.methodName)}": async function(args) {`);
        }
        lines.push(`      return await __internalCallTool("${this.escapeString(tool.name)}", args);`);
        lines.push(`    },`);
      }
      lines.push(`  },`);
    }
    if (enableRawFallback) {
      lines.push(`  /** Call a tool by its full name (escape hatch for dynamic/unknown tools) */`);
      lines.push(`  async $raw(name, args) {`);
      lines.push(`    const normalized = name.replace(/\\./g, '__');`);
      lines.push(`    if (__allowedTools) {`);
      lines.push(`      const allowed = __allowedTools.some(p => {`);
      lines.push(`        if (p.endsWith('__*')) return normalized.startsWith(p.slice(0, -1));`);
      lines.push(`        return normalized === p;`);
      lines.push(`      });`);
      lines.push(`      if (!allowed) throw new Error(\`Tool \${name} is not in the allowlist\`);`);
      lines.push(`    }`);
      lines.push(`    return await __internalCallTool(normalized, args);`);
      lines.push(`  },`);
    }
    lines.push("};");
    lines.push("(globalThis as any).tools = tools;");
    return lines.join("\n");
  }
  /**
   * Generate Python SDK code to be injected into Pyodide sandbox.
   * Creates: tools.namespace.method(args) => _internal_call_tool("namespace__method", args)
   * @param bindings Tool bindings to generate SDK for
   * @param allowedTools Optional allowlist for raw() enforcement
   * @param enableRawFallback Enable raw() escape hatch (default: true)
   */
  generatePython(bindings, allowedTools, enableRawFallback = true) {
    const grouped = groupByNamespace(bindings);
    const lines = [];
    lines.push("# Generated SDK - Do not edit");
    if (allowedTools && allowedTools.length > 0) {
      const normalizedList = allowedTools.map((t) => t.replace(/\./g, "__"));
      lines.push(`_allowed_tools = ${JSON.stringify(normalizedList)}`);
    } else {
      lines.push("_allowed_tools = None");
    }
    lines.push("");
    lines.push("class _ToolNamespace:");
    lines.push("    def __init__(self, methods):");
    lines.push("        for name, fn in methods.items():");
    lines.push("            setattr(self, name, fn)");
    lines.push("");
    lines.push("class _Tools:");
    lines.push("    def __init__(self):");
    for (const [namespace, tools] of grouped.entries()) {
      const safeNamespace = this.toSnakeCase(namespace);
      const methodsDict = [];
      for (const tool of tools) {
        const methodName = this.toSnakeCase(tool.methodName);
        const fullName = tool.name;
        // Default-arg lambda pins `n` so each closure calls its own tool.
        methodsDict.push(`            "${methodName}": lambda args, n="${this.escapeString(fullName)}": _internal_call_tool(n, args)`);
      }
      lines.push(`        self.${safeNamespace} = _ToolNamespace({`);
      lines.push(methodsDict.join(",\n"));
      lines.push(`        })`);
    }
    if (grouped.size === 0) {
      // A body-less `def __init__(self):` is a Python syntax error.
      lines.push("        pass");
    }
    if (enableRawFallback) {
      lines.push("");
      lines.push("    async def raw(self, name, args):");
      lines.push('        """Call a tool by its full name (escape hatch for dynamic/unknown tools)"""');
      lines.push('        normalized = name.replace(".", "__")');
      lines.push("        if _allowed_tools is not None:");
      lines.push("            allowed = any(");
      lines.push('                normalized.startswith(p[:-1]) if p.endswith("__*") else normalized == p');
      lines.push("                for p in _allowed_tools");
      lines.push("            )");
      lines.push("            if not allowed:");
      lines.push('                raise PermissionError(f"Tool {name} is not in the allowlist")');
      lines.push("        return await _internal_call_tool(normalized, args)");
    }
    lines.push("");
    lines.push("tools = _Tools()");
    return lines.join("\n");
  }
  /**
   * Generate JavaScript SDK code for isolated-vm (V8 Isolate).
   * Creates: tools.namespace.method(args) => __callTool("namespace__method", JSON.stringify(args))
   * @param bindings Tool bindings to generate SDK for
   * @param allowedTools Optional allowlist for $raw() enforcement
   * @param enableRawFallback Enable $raw() escape hatch (default: true)
   */
  generateIsolateSDK(bindings, allowedTools, enableRawFallback = true) {
    const grouped = groupByNamespace(bindings);
    const lines = [];
    lines.push("// Generated SDK for isolated-vm");
    if (allowedTools && allowedTools.length > 0) {
      const normalizedList = allowedTools.map((t) => t.replace(/\./g, "__"));
      lines.push(`const __allowedTools = ${JSON.stringify(normalizedList)};`);
    } else {
      lines.push("const __allowedTools = null;");
    }
    lines.push("const tools = {");
    for (const [namespace, tools] of grouped.entries()) {
      if (this.isValidIdentifier(namespace)) {
        lines.push(`  ${namespace}: {`);
      } else {
        lines.push(`  "${this.escapeString(namespace)}": {`);
      }
      for (const tool of tools) {
        if (this.isValidIdentifier(tool.methodName)) {
          lines.push(`    async ${tool.methodName}(args) {`);
        } else {
          lines.push(`    "${this.escapeString(tool.methodName)}": async function(args) {`);
        }
        lines.push(`      const resStr = await __callTool("${this.escapeString(tool.name)}", JSON.stringify(args || {}));`);
        lines.push(`      return JSON.parse(resStr);`);
        lines.push(`    },`);
      }
      lines.push(`  },`);
    }
    if (enableRawFallback) {
      lines.push(`  async $raw(name, args) {`);
      lines.push(`    const normalized = name.replace(/\\./g, '__');`);
      lines.push(`    if (__allowedTools) {`);
      lines.push(`      const allowed = __allowedTools.some(p => {`);
      lines.push(`        if (p.endsWith('__*')) return normalized.startsWith(p.slice(0, -1));`);
      lines.push(`        return normalized === p;`);
      lines.push(`      });`);
      lines.push(`      if (!allowed) throw new Error(\`Tool \${name} is not in the allowlist\`);`);
      lines.push(`    }`);
      lines.push(`    const resStr = await __callTool(normalized, JSON.stringify(args || {}));`);
      lines.push(`    return JSON.parse(resStr);`);
      lines.push(`  },`);
    }
    lines.push("};");
    return lines.join("\n");
  }
  /**
   * Check if a string is a valid JavaScript/Python identifier
   */
  isValidIdentifier(str) {
    return /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(str);
  }
};
// Verifier-only shim: groupByNamespace is defined earlier in this file.
function groupByNamespace(bindings) {
  const groups = new Map();
  for (const b of bindings) {
    if (!groups.has(b.namespace)) groups.set(b.namespace, []);
    groups.get(b.namespace).push(b);
  }
  return groups;
}
|
|
2837
|
+
|
|
2838
|
+
// src/core/execution.service.ts
|
|
2839
|
+
// Orchestrates sandboxed code execution: validates code with securityService,
// generates the in-sandbox `tools` SDK, picks an executor (isolate / deno /
// python) from the registry, and scopes a one-shot IPC session token around
// each run.
var ExecutionService = class {
  logger;
  executorRegistry;
  sdkGenerator = new SDKGenerator();
  defaultLimits;
  gatewayService;
  securityService;
  // Address of the local IPC endpoint sandboxes call back into; assigned via
  // the `ipcAddress` setter once the transport is listening.
  _ipcAddress = "";
  constructor(logger, defaultLimits, gatewayService, securityService, executorRegistry) {
    this.logger = logger;
    this.defaultLimits = defaultLimits;
    this.gatewayService = gatewayService;
    this.securityService = securityService;
    this.executorRegistry = executorRegistry;
  }
  set ipcAddress(addr) {
    this._ipcAddress = addr;
  }
  // Execute TypeScript/JavaScript. "Plain" code (no imports/exports, no Deno
  // globals) is routed to the lighter isolated-vm path; everything else needs
  // the Deno executor plus a live IPC endpoint.
  async executeTypeScript(code, limits, context, allowedTools) {
    const effectiveLimits = { ...this.defaultLimits, ...limits };
    const securityResult = this.securityService.validateCode(code);
    if (!securityResult.valid) {
      return this.createErrorResult(-32003 /* Forbidden */, securityResult.message || "Access denied");
    }
    // Strip /* */ and // comments before sniffing, so commented-out imports
    // don't force the Deno path (the [^:] guard avoids eating "http://").
    const cleanCode = code.replace(/\/\*[\s\S]*?\*\/|([^:]|^)\/\/.*$/gm, "$1");
    // NOTE(review): /\bDeno\./ is subsumed by /\bDeno\b/ — the first Deno
    // test is redundant.
    const hasImports = /^\s*import\s/m.test(cleanCode) || /^\s*export\s/m.test(cleanCode) || /\bDeno\./.test(cleanCode) || /\bDeno\b/.test(cleanCode);
    if (!hasImports && this.executorRegistry.has("isolate")) {
      return await this.executeIsolate(code, effectiveLimits, context, allowedTools);
    }
    if (!this._ipcAddress) {
      return this.createErrorResult(-32603 /* InternalError */, "IPC address not initialized");
    }
    if (!this.executorRegistry.has("deno")) {
      return this.createErrorResult(-32603 /* InternalError */, "Deno execution not available");
    }
    const executor = this.executorRegistry.get("deno");
    const bindings = await this.getToolBindings(context);
    const sdkCode = this.sdkGenerator.generateTypeScript(bindings, allowedTools);
    // Session token restricts what the sandbox may call back over IPC; it is
    // always invalidated, even when execution throws.
    const sessionToken = this.securityService.createSession(allowedTools);
    try {
      return await executor.execute(code, effectiveLimits, context, {
        ipcAddress: this._ipcAddress,
        ipcToken: sessionToken,
        sdkCode
      });
    } finally {
      this.securityService.invalidateSession(sessionToken);
    }
  }
  // Execute Python via the "python" executor; same session-token lifecycle
  // as executeTypeScript.
  async executePython(code, limits, context, allowedTools) {
    const effectiveLimits = { ...this.defaultLimits, ...limits };
    if (!this.executorRegistry.has("python")) {
      return this.createErrorResult(-32603 /* InternalError */, "Python execution not available");
    }
    if (!this._ipcAddress) {
      return this.createErrorResult(-32603 /* InternalError */, "IPC address not initialized");
    }
    const executor = this.executorRegistry.get("python");
    const securityResult = this.securityService.validateCode(code);
    if (!securityResult.valid) {
      return this.createErrorResult(-32003 /* Forbidden */, securityResult.message || "Access denied");
    }
    const bindings = await this.getToolBindings(context);
    const sdkCode = this.sdkGenerator.generatePython(bindings, allowedTools);
    const sessionToken = this.securityService.createSession(allowedTools);
    try {
      return await executor.execute(code, effectiveLimits, context, {
        ipcAddress: this._ipcAddress,
        ipcToken: sessionToken,
        sdkCode
      });
    } finally {
      this.securityService.invalidateSession(sessionToken);
    }
  }
  // Collect tool bindings from every upstream package; a failing package is
  // logged and skipped rather than failing the whole run.
  async getToolBindings(context) {
    const packages = await this.gatewayService.listToolPackages();
    const allBindings = [];
    for (const pkg of packages) {
      try {
        const stubs = await this.gatewayService.listToolStubs(pkg.id, context);
        allBindings.push(...stubs.map((s) => toToolBinding(s.id, void 0, s.description)));
      } catch (err) {
        this.logger.warn({ packageId: pkg.id, err: err.message }, "Failed to list stubs for package");
      }
    }
    return allBindings;
  }
  // In-process isolated-vm execution path: no IPC; tool calls are bridged
  // directly through the host callback inside IsolateExecutor.
  async executeIsolate(code, limits, context, allowedTools) {
    if (!this.executorRegistry.has("isolate")) {
      return this.createErrorResult(-32603 /* InternalError */, "IsolateExecutor not available");
    }
    const executor = this.executorRegistry.get("isolate");
    const effectiveLimits = { ...this.defaultLimits, ...limits };
    const securityResult = this.securityService.validateCode(code);
    if (!securityResult.valid) {
      return this.createErrorResult(-32003 /* Forbidden */, securityResult.message || "Access denied");
    }
    const bindings = await this.getToolBindings(context);
    const sdkCode = this.sdkGenerator.generateIsolateSDK(bindings, allowedTools);
    try {
      return await executor.execute(code, effectiveLimits, context, { sdkCode });
    } catch (err) {
      return this.createErrorResult(-32603 /* InternalError */, err.message);
    }
  }
  // Shape shared by all failure returns: no output, structured error object.
  createErrorResult(code, message) {
    return {
      stdout: "",
      stderr: "",
      exitCode: null,
      error: { code, message }
    };
  }
  async shutdown() {
    await this.executorRegistry.shutdownAll();
  }
  // Pre-warm the Python executor (if present) so the first request is fast.
  async warmup() {
    const pythonExecutor = this.executorRegistry.get("python");
    if (pythonExecutor && "warmup" in pythonExecutor) {
      await pythonExecutor.warmup(this.defaultLimits);
    }
  }
  // Delegate health to the Python executor when it supports it; otherwise ok.
  async healthCheck() {
    const pythonExecutor = this.executorRegistry.get("python");
    if (pythonExecutor && "healthCheck" in pythonExecutor) {
      return pythonExecutor.healthCheck();
    }
    return { status: "ok" };
  }
};
|
|
2970
|
+
|
|
2971
|
+
// src/core/middleware/error.middleware.ts
|
|
2972
|
+
// Outermost middleware: converts any downstream throw into a JSON-RPC
// InternalError response instead of letting it reach the transport.
var ErrorHandlingMiddleware = class {
  async handle(request, context, next) {
    try {
      return await next();
    } catch (err) {
      context.logger.error({ err }, "Error handling request");
      const message = err.message || "Internal Server Error";
      return {
        jsonrpc: "2.0",
        id: request.id,
        error: { code: -32603 /* InternalError */, message }
      };
    }
  }
};
|
|
2989
|
+
|
|
2990
|
+
// src/core/middleware/logging.middleware.ts
|
|
2991
|
+
// Attaches a per-request child logger (tagged with method + id) and records
// execution start/duration metrics regardless of success or failure.
var LoggingMiddleware = class {
  async handle(request, context, next) {
    const { method, id } = request;
    context.logger = context.logger.child({ method, id });
    metrics.recordExecutionStart();
    const startedAt = Date.now();
    try {
      return await next();
    } finally {
      // Runs on both the success and the throwing path, mirroring the
      // duplicated recordExecutionEnd calls this replaces.
      metrics.recordExecutionEnd(Date.now() - startedAt, method);
    }
  }
};
|
|
3008
|
+
|
|
3009
|
+
// src/core/middleware/auth.middleware.ts
|
|
3010
|
+
// Bearer-token gate. Two tiers of access:
//  - master token (or no configured token): unrestricted;
//  - session token (minted per sandbox run): limited to discovery/tool-call
//    methods, and its allowlist is propagated onto the request context.
var AuthMiddleware = class {
  constructor(securityService) {
    this.securityService = securityService;
  }
  // JSON-RPC Forbidden response helper.
  #forbidden(id, message) {
    return {
      jsonrpc: "2.0",
      id,
      error: { code: -32003 /* Forbidden */, message }
    };
  }
  async handle(request, context, next) {
    const token = request.auth?.bearerToken || "";
    const master = this.securityService.getIpcToken();
    const hasMasterAccess = !master || token === master;
    const hasSessionAccess = !hasMasterAccess && this.securityService.validateIpcToken(token);
    if (!hasMasterAccess && !hasSessionAccess) {
      return this.#forbidden(request.id, "Invalid bearer token");
    }
    if (hasSessionAccess) {
      const permitted = ["initialize", "notifications/initialized", "mcp_discover_tools", "mcp_call_tool", "ping", "tools/list", "tools/call"];
      if (!permitted.includes(request.method)) {
        return this.#forbidden(request.id, "Session tokens are restricted to tool discovery and calling only");
      }
      const session = this.securityService.getSession(token);
      if (session?.allowedTools && !context.allowedTools) {
        context.allowedTools = session.allowedTools;
      }
    }
    return next();
  }
};
|
|
3049
|
+
|
|
3050
|
+
// src/core/middleware/ratelimit.middleware.ts
|
|
3051
|
+
// Per-caller rate limiting. The limit key is the bearer token when present,
// otherwise the remote address, otherwise "unknown".
var RateLimitMiddleware = class {
  constructor(securityService) {
    this.securityService = securityService;
  }
  async handle(request, context, next) {
    const token = request.auth?.bearerToken;
    const key = token || context.remoteAddress || "unknown";
    if (this.securityService.checkRateLimit(key)) {
      return next();
    }
    return {
      jsonrpc: "2.0",
      id: request.id,
      error: {
        code: -32005,
        // Rate limit exceeded code
        message: "Rate limit exceeded"
      }
    };
  }
};
|
|
3072
|
+
|
|
3073
|
+
// src/core/middleware/middleware.builder.ts
|
|
3074
|
+
// Assemble the default middleware chain. Order matters: error handling wraps
// everything, then logging, then auth, then rate limiting.
function buildDefaultMiddleware(securityService) {
  const chain = [];
  chain.push(new ErrorHandlingMiddleware());
  chain.push(new LoggingMiddleware());
  chain.push(new AuthMiddleware(securityService));
  chain.push(new RateLimitMiddleware(securityService));
  return chain;
}
|
|
3082
|
+
|
|
3083
|
+
// src/auth.cmd.ts
|
|
3084
|
+
import Fastify2 from "fastify";
|
|
3085
|
+
import axios4 from "axios";
|
|
3086
|
+
import open from "open";
|
|
3087
|
+
import { v4 as uuidv43 } from "uuid";
|
|
3088
|
+
// One-shot OAuth2 authorization-code helper for the `conduit auth` command:
// starts a temporary localhost callback server, opens the provider's consent
// page in the browser, exchanges the returned code for tokens, and prints a
// ready-to-paste `credentials:` YAML snippet. Resolves after a successful
// exchange; rejects on provider error, state mismatch, or exchange failure.
async function handleAuth(options) {
  const port = options.port || 3333;
  const redirectUri = `http://localhost:${port}/callback`;
  // Random state guards the callback against CSRF / stray requests.
  const state = uuidv43();
  const fastify = Fastify2();
  return new Promise((resolve, reject) => {
    fastify.get("/callback", async (request, reply) => {
      const { code, state: returnedState, error, error_description } = request.query;
      if (error) {
        reply.send(`Authentication failed: ${error} - ${error_description}`);
        reject(new Error(`OAuth error: ${error}`));
        return;
      }
      if (returnedState !== state) {
        reply.send("Invalid state parameter");
        reject(new Error("State mismatch"));
        return;
      }
      try {
        // Exchange the authorization code for tokens at the provider.
        const response = await axios4.post(options.tokenUrl, {
          grant_type: "authorization_code",
          code,
          redirect_uri: redirectUri,
          client_id: options.clientId,
          client_secret: options.clientSecret
        });
        const { refresh_token, access_token } = response.data;
        console.log("\n--- Authentication Successful ---\n");
        console.log("Use these values in your conduit.yaml:\n");
        console.log("credentials:");
        console.log("  type: oauth2");
        console.log(`  clientId: ${options.clientId}`);
        console.log(`  clientSecret: ${options.clientSecret}`);
        console.log(`  tokenUrl: "${options.tokenUrl}"`);
        console.log(`  refreshToken: "${refresh_token || "N/A (No refresh token returned)"}"`);
        if (!refresh_token) {
          console.log('\nWarning: No refresh token was returned. Ensure your app has "offline_access" scope or similar.');
        }
        console.log("\nRaw response data:", JSON.stringify(response.data, null, 2));
        reply.send("Authentication successful! You can close this window and return to the terminal.");
        resolve();
      } catch (err) {
        const msg = err.response?.data?.error_description || err.response?.data?.error || err.message;
        reply.send(`Failed to exchange code for token: ${msg}`);
        reject(new Error(`Token exchange failed: ${msg}`));
      } finally {
        // Give the HTTP reply time to flush before tearing the server down.
        setTimeout(() => fastify.close(), 1e3);
      }
    });
    fastify.listen({ port, host: "127.0.0.1" }, async (err) => {
      if (err) {
        reject(err);
        return;
      }
      // Build the provider authorization URL and hand it to the browser.
      const authUrl = new URL(options.authUrl);
      authUrl.searchParams.append("client_id", options.clientId);
      authUrl.searchParams.append("redirect_uri", redirectUri);
      authUrl.searchParams.append("response_type", "code");
      authUrl.searchParams.append("state", state);
      if (options.scopes) {
        authUrl.searchParams.append("scope", options.scopes);
      }
      console.log(`Opening browser to: ${authUrl.toString()}`);
      console.log("Waiting for callback...");
      await open(authUrl.toString());
    });
  });
}
|
|
3156
|
+
|
|
3157
|
+
// src/index.ts
// CLI entry point: registers the `serve` (default) and `auth` subcommands.
var program = new Command();

program
  .name("conduit")
  .description("A secure Code Mode execution substrate for MCP agents")
  .version("1.0.0"); // NOTE(review): package metadata reports 1.1.x — confirm this string is kept in sync

// `serve` is the default command, so running `conduit` with no subcommand starts the server.
program
  .command("serve", { isDefault: true })
  .description("Start the Conduit server")
  .option("--stdio", "Use stdio transport")
  .action(async (_opts) => {
    try {
      await startServer();
    } catch (err) {
      console.error("Failed to start Conduit:", err);
      process.exit(1);
    }
  });

// One-shot OAuth helper: walks the authorization-code flow against an upstream
// provider and prints ready-to-paste credentials for conduit.yaml.
program
  .command("auth")
  .description("Help set up OAuth for an upstream MCP server")
  .requiredOption("--client-id <id>", "OAuth Client ID")
  .requiredOption("--client-secret <secret>", "OAuth Client Secret")
  .requiredOption("--auth-url <url>", "OAuth Authorization URL")
  .requiredOption("--token-url <url>", "OAuth Token URL")
  .option("--scopes <scopes>", "OAuth Scopes (comma separated)")
  .option("--port <port>", "Port for the local callback server", "3333")
  .action(async (opts) => {
    try {
      await handleAuth({
        clientId: opts.clientId,
        clientSecret: opts.clientSecret,
        authUrl: opts.authUrl,
        tokenUrl: opts.tokenUrl,
        scopes: opts.scopes,
        // commander hands option values back as strings; the callback server needs a number.
        port: Number.parseInt(opts.port, 10)
      });
      console.log("\nSuccess! Configuration generated.");
    } catch (err) {
      console.error("Authentication helper failed:", err.message);
      process.exit(1);
    }
  });
|
|
3184
|
+
// Composition root for the Conduit server: builds every service in dependency
// order, starts the chosen transport, and installs graceful-shutdown handlers.
// Construction order matters throughout — do not reorder casually.
async function startServer() {
  const configService = new ConfigService();
  const logger = createLogger(configService);
  // Telemetry is started before anything else so later spans/metrics are captured.
  const otelService = new OtelService(logger);
  await otelService.start();
  // All remaining wiring runs inside an async-local logging context tagged "system",
  // so log lines emitted during startup carry a correlationId.
  await loggerStorage.run({ correlationId: "system" }, async () => {
    const isStdio = configService.get("transport") === "stdio";
    // In stdio mode no IPC bearer token is passed to SecurityService
    // (presumably because there is no network-reachable IPC surface — confirm).
    const ipcToken = isStdio ? void 0 : configService.get("ipcBearerToken");
    const securityService = new SecurityService(logger, ipcToken);
    const gatewayService = new GatewayService(logger, securityService);
    // Register every configured upstream MCP server with the gateway; absent
    // config yields an empty list rather than a crash.
    const upstreams = configService.get("upstreams") || [];
    for (const upstream of upstreams) {
      gatewayService.registerUpstream(upstream);
    }
    // Three execution backends: Deno and Pyodide are pool-backed (sizes from
    // config); the isolate executor additionally needs gateway access.
    const executorRegistry = new ExecutorRegistry();
    executorRegistry.register("deno", new DenoExecutor(configService.get("denoMaxPoolSize")));
    executorRegistry.register("python", new PyodideExecutor(configService.get("pyodideMaxPoolSize")));
    const isolateExecutor = new IsolateExecutor(logger, gatewayService);
    executorRegistry.register("isolate", isolateExecutor);
    const executionService = new ExecutionService(
      logger,
      configService.get("resourceLimits"),
      gatewayService,
      securityService,
      executorRegistry
    );
    const requestController = new RequestController(
      logger,
      executionService,
      gatewayService,
      buildDefaultMiddleware(securityService)
    );
    // Ops/admin HTTP server (health, introspection) listens independently of the
    // main transport.
    const opsServer = new OpsServer(logger, configService.all, gatewayService, requestController);
    await opsServer.listen();
    const concurrencyService = new ConcurrencyService(logger, {
      maxConcurrent: configService.get("maxConcurrent")
    });
    // Main transport selection: stdio (started, address is the literal "stdio")
    // or a socket listener (address comes back from listen()).
    let transport;
    let address;
    if (configService.get("transport") === "stdio") {
      transport = new StdioTransport(logger, requestController, concurrencyService);
      await transport.start();
      address = "stdio";
    } else {
      transport = new SocketTransport(logger, requestController, concurrencyService);
      const port = configService.get("port");
      address = await transport.listen({ port });
    }
    // The execution service learns the IPC address only after the transport is
    // up — executors dial back into this address at runtime (TODO confirm).
    executionService.ipcAddress = address;
    await requestController.warmup();
    logger.info("Conduit server started");
    // Graceful shutdown: close transport, ops server, controller, and telemetry
    // in parallel, then exit 0. Registered for both SIGINT and SIGTERM.
    const shutdown = async () => {
      logger.info("Shutting down...");
      await Promise.all([
        transport.close(),
        opsServer.close(),
        requestController.shutdown(),
        otelService.shutdown()
      ]);
      process.exit(0);
    };
    process.on("SIGINT", shutdown);
    process.on("SIGTERM", shutdown);
  });
}
|
|
3249
|
+
// Hand control to commander: parse argv and dispatch to the matching subcommand
// (`serve` runs when none is given, since it is registered with isDefault: true).
program.parse(process.argv);
//# sourceMappingURL=index.js.map