@darkiceinteractive/mcp-conductor 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +558 -0
- package/dist/bin/cli.d.ts +8 -0
- package/dist/bin/cli.d.ts.map +1 -0
- package/dist/bin/cli.js +940 -0
- package/dist/bin/cli.js.map +1 -0
- package/dist/bridge/http-server.d.ts +161 -0
- package/dist/bridge/http-server.d.ts.map +1 -0
- package/dist/bridge/http-server.js +367 -0
- package/dist/bridge/http-server.js.map +1 -0
- package/dist/bridge/index.d.ts +5 -0
- package/dist/bridge/index.d.ts.map +1 -0
- package/dist/bridge/index.js +5 -0
- package/dist/bridge/index.js.map +1 -0
- package/dist/config/defaults.d.ts +29 -0
- package/dist/config/defaults.d.ts.map +1 -0
- package/dist/config/defaults.js +60 -0
- package/dist/config/defaults.js.map +1 -0
- package/dist/config/index.d.ts +7 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +7 -0
- package/dist/config/index.js.map +1 -0
- package/dist/config/loader.d.ts +49 -0
- package/dist/config/loader.d.ts.map +1 -0
- package/dist/config/loader.js +272 -0
- package/dist/config/loader.js.map +1 -0
- package/dist/config/schema.d.ts +93 -0
- package/dist/config/schema.d.ts.map +1 -0
- package/dist/config/schema.js +5 -0
- package/dist/config/schema.js.map +1 -0
- package/dist/hub/index.d.ts +5 -0
- package/dist/hub/index.d.ts.map +1 -0
- package/dist/hub/index.js +5 -0
- package/dist/hub/index.js.map +1 -0
- package/dist/hub/mcp-hub.d.ts +176 -0
- package/dist/hub/mcp-hub.d.ts.map +1 -0
- package/dist/hub/mcp-hub.js +550 -0
- package/dist/hub/mcp-hub.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +45 -0
- package/dist/index.js.map +1 -0
- package/dist/metrics/index.d.ts +5 -0
- package/dist/metrics/index.d.ts.map +1 -0
- package/dist/metrics/index.js +5 -0
- package/dist/metrics/index.js.map +1 -0
- package/dist/metrics/metrics-collector.d.ts +211 -0
- package/dist/metrics/metrics-collector.d.ts.map +1 -0
- package/dist/metrics/metrics-collector.js +437 -0
- package/dist/metrics/metrics-collector.js.map +1 -0
- package/dist/modes/index.d.ts +5 -0
- package/dist/modes/index.d.ts.map +1 -0
- package/dist/modes/index.js +5 -0
- package/dist/modes/index.js.map +1 -0
- package/dist/modes/mode-handler.d.ts +132 -0
- package/dist/modes/mode-handler.d.ts.map +1 -0
- package/dist/modes/mode-handler.js +252 -0
- package/dist/modes/mode-handler.js.map +1 -0
- package/dist/runtime/executor.d.ts +57 -0
- package/dist/runtime/executor.d.ts.map +1 -0
- package/dist/runtime/executor.js +700 -0
- package/dist/runtime/executor.js.map +1 -0
- package/dist/runtime/index.d.ts +5 -0
- package/dist/runtime/index.d.ts.map +1 -0
- package/dist/runtime/index.js +5 -0
- package/dist/runtime/index.js.map +1 -0
- package/dist/server/index.d.ts +5 -0
- package/dist/server/index.d.ts.map +1 -0
- package/dist/server/index.js +5 -0
- package/dist/server/index.js.map +1 -0
- package/dist/server/mcp-server.d.ts +62 -0
- package/dist/server/mcp-server.d.ts.map +1 -0
- package/dist/server/mcp-server.js +1272 -0
- package/dist/server/mcp-server.js.map +1 -0
- package/dist/skills/index.d.ts +5 -0
- package/dist/skills/index.d.ts.map +1 -0
- package/dist/skills/index.js +5 -0
- package/dist/skills/index.js.map +1 -0
- package/dist/skills/skills-engine.d.ts +157 -0
- package/dist/skills/skills-engine.d.ts.map +1 -0
- package/dist/skills/skills-engine.js +405 -0
- package/dist/skills/skills-engine.js.map +1 -0
- package/dist/streaming/execution-stream.d.ts +158 -0
- package/dist/streaming/execution-stream.d.ts.map +1 -0
- package/dist/streaming/execution-stream.js +320 -0
- package/dist/streaming/execution-stream.js.map +1 -0
- package/dist/streaming/index.d.ts +5 -0
- package/dist/streaming/index.d.ts.map +1 -0
- package/dist/streaming/index.js +5 -0
- package/dist/streaming/index.js.map +1 -0
- package/dist/utils/errors.d.ts +36 -0
- package/dist/utils/errors.d.ts.map +1 -0
- package/dist/utils/errors.js +68 -0
- package/dist/utils/errors.js.map +1 -0
- package/dist/utils/helpers.d.ts +44 -0
- package/dist/utils/helpers.d.ts.map +1 -0
- package/dist/utils/helpers.js +95 -0
- package/dist/utils/helpers.js.map +1 -0
- package/dist/utils/index.d.ts +9 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +9 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/logger.d.ts +13 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +48 -0
- package/dist/utils/logger.js.map +1 -0
- package/dist/utils/permissions.d.ts +97 -0
- package/dist/utils/permissions.d.ts.map +1 -0
- package/dist/utils/permissions.js +165 -0
- package/dist/utils/permissions.js.map +1 -0
- package/dist/utils/rate-limiter.d.ts +87 -0
- package/dist/utils/rate-limiter.d.ts.map +1 -0
- package/dist/utils/rate-limiter.js +187 -0
- package/dist/utils/rate-limiter.js.map +1 -0
- package/dist/watcher/config-watcher.d.ts +67 -0
- package/dist/watcher/config-watcher.d.ts.map +1 -0
- package/dist/watcher/config-watcher.js +150 -0
- package/dist/watcher/config-watcher.js.map +1 -0
- package/dist/watcher/index.d.ts +5 -0
- package/dist/watcher/index.d.ts.map +1 -0
- package/dist/watcher/index.js +5 -0
- package/dist/watcher/index.js.map +1 -0
- package/package.json +86 -0
- package/templates/CLAUDE.md +137 -0
- package/templates/skill-mcp-conductor.md +64 -0
|
@@ -0,0 +1,700 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deno Sandbox Executor
|
|
3
|
+
*
|
|
4
|
+
* Executes user code in an isolated Deno subprocess with access to the MCP API.
|
|
5
|
+
*/
|
|
6
|
+
import { spawn } from 'node:child_process';
|
|
7
|
+
import { writeFileSync, unlinkSync, mkdirSync, existsSync } from 'node:fs';
|
|
8
|
+
import { join } from 'node:path';
|
|
9
|
+
import { tmpdir } from 'node:os';
|
|
10
|
+
import { logger, generateExecutionId } from '../utils/index.js';
|
|
11
|
+
/**
 * Generate the sandbox code template that wraps user code.
 *
 * Returns TypeScript source text that DenoExecutor writes to a temp `.ts`
 * file and runs under Deno. Unescaped interpolations (e.g. `${executionId}`)
 * are evaluated HERE in the parent process; escaped ones (`\`` and `\${`)
 * survive into the generated program and are evaluated inside the sandbox.
 *
 * The generated program:
 *  - exposes a global `mcp` API (Proxy-wrapped so `mcp.someServer` resolves
 *    to a server client) to the spliced-in user code;
 *  - proxies tool calls to the HTTP bridge at `bridgeUrl` via fetch;
 *  - captures console.log output into an internal log buffer;
 *  - optionally streams logs/progress/tool events to the bridge in real time;
 *  - prints a single JSON result between __RESULT_START__/__RESULT_END__
 *    markers on stdout for the parent process (runDeno) to parse.
 *
 * @param {string} userCode - User code spliced verbatim into an async
 *   wrapper; no escaping is applied, so it is constrained only by the Deno
 *   permission flags the executor passes (see runDeno).
 * @param {string} bridgeUrl - Base URL of the local HTTP bridge.
 * @param {string} executionId - Unique ID embedded for event correlation.
 * @param {boolean} streamEnabled - When true, the sandbox streams events and
 *   omits buffered logs from the final result payload.
 * @param {number} timeoutMs - Advertised timeout, exposed to user code via
 *   mcp.context.timeout_ms (enforcement happens in the parent process).
 * @returns {string} Complete TypeScript source for one sandbox execution.
 *
 * NOTE(review): the generated batch() destructures a `maxParallel` option
 * but never uses it (first attempt is always fully parallel) — confirm
 * whether chunked parallelism was intended.
 */
function generateSandboxCode(userCode, bridgeUrl, executionId, streamEnabled, timeoutMs) {
    // Everything inside this template literal is program text for the
    // sandbox; its bytes are executed as-is by Deno. Comments below are part
    // of the generated file, not of this module.
    return `
// MCP Executor Sandbox Runtime
// Execution ID: ${executionId}

const BRIDGE_URL = "${bridgeUrl}";
const EXECUTION_ID = "${executionId}";
const STREAM_ENABLED = ${streamEnabled};
const TIMEOUT_MS = ${timeoutMs};

// Metrics tracking
const __metrics = {
  toolCalls: 0,
  dataProcessedBytes: 0,
};

// Logs collection
const __logs: string[] = [];

// Rate limit tracking per server
const __rateLimits: Record<string, { detected: boolean; delayMs: number; lastError: number }> = {};

// Helper to detect if response is a rate limit error
function __isRateLimitError(result: unknown): boolean {
  if (typeof result === 'string') {
    return result.toLowerCase().includes('rate limit');
  }
  if (result && typeof result === 'object') {
    const str = JSON.stringify(result).toLowerCase();
    return str.includes('rate limit') || str.includes('rate_limit') || str.includes('429');
  }
  return false;
}

// Sleep helper
const __sleep = (ms: number) => new Promise(r => setTimeout(r, ms));

// Streaming helper - sends events to the bridge when streaming is enabled
async function __streamEvent(endpoint: string, data: Record<string, unknown>): Promise<void> {
  if (!STREAM_ENABLED) return;
  try {
    await fetch(\`\${BRIDGE_URL}\${endpoint}\`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ executionId: EXECUTION_ID, ...data }),
    });
  } catch {
    // Silently ignore streaming errors to not affect execution
  }
}

// Override console.log to capture output and optionally stream
const originalConsoleLog = console.log;
console.log = (...args: unknown[]) => {
  const message = args.map(a => typeof a === 'object' ? JSON.stringify(a) : String(a)).join(' ');
  __logs.push(message);
  originalConsoleLog(...args);

  // Stream logs in real-time when enabled
  if (STREAM_ENABLED) {
    __streamEvent('/log', { message, level: 'info' });
  }
};

// MCP Server Client with streaming support
class MCPServerClient {
  constructor(public readonly name: string) {}

  async call(tool: string, params: Record<string, unknown> = {}): Promise<unknown> {
    __metrics.toolCalls++;
    const startTime = Date.now();

    // Report tool call start when streaming
    if (STREAM_ENABLED) {
      await __streamEvent('/tool-event', {
        server: this.name,
        tool,
        status: 'started',
      });
    }

    try {
      const response = await fetch(\`\${BRIDGE_URL}/call\`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          server: this.name,
          tool,
          params,
        }),
      });

      const data = await response.json();
      const durationMs = Date.now() - startTime;

      if (data.metrics?.dataSize) {
        __metrics.dataProcessedBytes += data.metrics.dataSize;
      }

      if (data.error) {
        // Report tool call error when streaming
        if (STREAM_ENABLED) {
          await __streamEvent('/tool-event', {
            server: this.name,
            tool,
            status: 'error',
            durationMs,
            error: data.error.message,
          });
        }
        throw new Error(\`Tool error (\${this.name}.\${tool}): \${data.error.message}\`);
      }

      // Report tool call completion when streaming
      if (STREAM_ENABLED) {
        await __streamEvent('/tool-event', {
          server: this.name,
          tool,
          status: 'completed',
          durationMs,
        });
      }

      return data.result;
    } catch (error) {
      const durationMs = Date.now() - startTime;

      // Report tool call error when streaming
      if (STREAM_ENABLED) {
        await __streamEvent('/tool-event', {
          server: this.name,
          tool,
          status: 'error',
          durationMs,
          error: String(error),
        });
      }

      throw error;
    }
  }

  async tools(): Promise<Array<{ name: string; description: string }>> {
    const response = await fetch(\`\${BRIDGE_URL}/servers/\${this.name}/tools\`);
    const data = await response.json();
    return data.tools || [];
  }

  hasTool(name: string): boolean {
    // Synchronous check not available in sandbox, return true and let call() fail
    return true;
  }
}

// MCP Global API (internal - will be wrapped with Proxy)
const __mcpBase = {
  // List available servers
  servers(): string[] {
    // Will be populated from sync call if needed
    return [];
  },

  // Get a server client by name
  server(name: string): MCPServerClient {
    return new MCPServerClient(name);
  },

  // Search tools across all servers
  async searchTools(query: string): Promise<Array<{ server: string; tool: string; description: string }>> {
    const response = await fetch(\`\${BRIDGE_URL}/search?q=\${encodeURIComponent(query)}\`);
    const data = await response.json();
    return data.results || [];
  },

  // Synchronous search (from cached list)
  searchToolsSync(query: string): Array<{ server: string; tool: string; description: string }> {
    // Not available in sandbox without pre-cached data
    return [];
  },

  // Report progress - streams to bridge when enabled, always logs
  async progress(percent: number, message?: string): Promise<void> {
    const logMessage = \`[PROGRESS] \${percent}%\${message ? ': ' + message : ''}\`;
    __logs.push(logMessage);
    originalConsoleLog(logMessage);

    if (STREAM_ENABLED) {
      await __streamEvent('/progress', { percent, message });
    }
  },

  // Log helper with optional level
  log(...args: unknown[]): void {
    console.log(...args);
  },

  // Log with specific level (when streaming, this sends the level)
  async logLevel(level: 'info' | 'warn' | 'error' | 'debug', ...args: unknown[]): Promise<void> {
    const message = args.map(a => typeof a === 'object' ? JSON.stringify(a) : String(a)).join(' ');
    __logs.push(\`[\${level.toUpperCase()}] \${message}\`);
    originalConsoleLog(\`[\${level.toUpperCase()}]\`, ...args);

    if (STREAM_ENABLED) {
      await __streamEvent('/log', { message, level });
    }
  },

  // Execution context
  context: {
    timeout_ms: TIMEOUT_MS,
    execution_id: EXECUTION_ID,
    stream_enabled: STREAM_ENABLED,
    loaded_servers: [] as string[],
  },

  // Skills API placeholder for MVP
  skills: {
    list(): Array<{ name: string; category: string; description: string }> {
      return [];
    },
    load(name: string): Record<string, (...args: unknown[]) => Promise<unknown>> {
      throw new Error(\`Skills not available in MVP: \${name}\`);
    },
    search(query: string): Array<{ name: string; description: string; relevance: number }> {
      return [];
    },
  },

  /**
   * Smart batch execution with automatic rate limit detection and handling.
   * Attempts parallel execution first, falls back to sequential with delays if rate limited.
   *
   * @param calls Array of { server, tool, params } objects
   * @param options Optional { maxParallel, retryDelayMs }
   * @returns Array of results in same order as calls
   */
  async batch<T = unknown>(
    calls: Array<{ server: string; tool: string; params?: Record<string, unknown> }>,
    options: { maxParallel?: number; retryDelayMs?: number; forceParallel?: boolean } = {}
  ): Promise<T[]> {
    const { maxParallel = calls.length, retryDelayMs = 1100, forceParallel = false } = options;
    const results: T[] = new Array(calls.length);

    // Clear rate limit cache if forcing parallel (e.g., after API upgrade)
    if (forceParallel) {
      for (const call of calls) {
        delete __rateLimits[call.server];
      }
      mcp.log(\`🔄 Force parallel mode enabled - rate limit cache cleared\`);
    }

    // Check if any server has known rate limits
    const hasKnownRateLimit = !forceParallel && calls.some(c => __rateLimits[c.server]?.detected);

    if (hasKnownRateLimit) {
      // Sequential with delays for rate-limited servers
      for (let i = 0; i < calls.length; i++) {
        const { server, tool, params = {} } = calls[i];
        const rateLimit = __rateLimits[server];
        if (rateLimit?.detected && i > 0) {
          await __sleep(rateLimit.delayMs);
        }
        const client = new MCPServerClient(server);
        results[i] = await client.call(tool, params) as T;
      }
      return results;
    }

    // Try parallel execution first
    const parallelResults = await Promise.all(
      calls.map(async ({ server, tool, params = {} }) => {
        const client = new MCPServerClient(server);
        return client.call(tool, params);
      })
    );

    // Check for rate limit errors
    const rateLimitedIndices: number[] = [];
    for (let i = 0; i < parallelResults.length; i++) {
      if (__isRateLimitError(parallelResults[i])) {
        rateLimitedIndices.push(i);
        // Mark this server as rate limited
        __rateLimits[calls[i].server] = {
          detected: true,
          delayMs: retryDelayMs,
          lastError: Date.now(),
        };
        mcp.log(\`⚠️ RATE LIMITED: \${calls[i].server} - Free tier limit hit. Retrying with delays...\`);
        mcp.log(\`💡 TIP: Upgrade your API plan for parallel execution: https://brave.com/search/api/\`);
      } else {
        results[i] = parallelResults[i] as T;
      }
    }

    // Retry rate-limited calls sequentially
    if (rateLimitedIndices.length > 0) {
      for (const idx of rateLimitedIndices) {
        await __sleep(retryDelayMs);
        const { server, tool, params = {} } = calls[idx];
        const client = new MCPServerClient(server);
        results[idx] = await client.call(tool, params) as T;
      }
    }

    return results;
  },

  /**
   * Convenience method for batched web searches via brave-search.
   * Automatically handles rate limiting and parses results.
   *
   * @param queries Array of search query strings
   * @param options { topN: number of results per query (default 3) }
   * @returns Object mapping queries to parsed results
   */
  async batchSearch(
    queries: string[],
    options: { topN?: number; forceParallel?: boolean } = {}
  ): Promise<Record<string, Array<{ title: string; url: string; description?: string }>>> {
    const { topN = 3, forceParallel = false } = options;

    // Parser for brave-search text response
    const parseResults = (text: unknown): Array<{ title: string; url: string; description?: string }> => {
      if (typeof text !== 'string' || text.startsWith('Error:')) return [];
      return text.split(/\\n\\nTitle:/).map((block: string, i: number) => {
        const b = i === 0 ? block : 'Title:' + block;
        const title = b.match(/Title:\\s*([^\\n]+)/)?.[1]?.trim();
        const url = b.match(/URL:\\s*([^\\n]+)/)?.[1]?.trim();
        const desc = b.match(/Description:\\s*([^\\n]+)/)?.[1]?.trim();
        return title && url ? { title, url, description: desc } : null;
      }).filter((r): r is { title: string; url: string; description?: string } => r !== null);
    };

    const calls = queries.map(query => ({
      server: 'brave-search',
      tool: 'brave_web_search',
      params: { query },
    }));

    const rawResults = await this.batch<string>(calls, { forceParallel });
    const result: Record<string, Array<{ title: string; url: string; description?: string }>> = {};

    for (let i = 0; i < queries.length; i++) {
      result[queries[i]] = parseResults(rawResults[i]).slice(0, topN);
    }

    return result;
  },
};

// Create Proxy to allow mcp.serverName syntax (e.g., mcp.github instead of mcp.server('github'))
const mcp = new Proxy(__mcpBase, {
  get(target, prop) {
    if (prop in target) {
      return (target as Record<string | symbol, unknown>)[prop];
    }
    // Treat as server name for attribute-style access
    if (typeof prop === 'string') {
      return new MCPServerClient(prop);
    }
    return undefined;
  },
});

// Main execution wrapper
async function __execute() {
${userCode}
}

// Run and output result
(async () => {
  try {
    // Report start when streaming
    if (STREAM_ENABLED) {
      await __streamEvent('/log', { message: 'Execution started', level: 'info' });
      await __streamEvent('/progress', { percent: 0, message: 'Starting execution' });
    }

    const result = await __execute();

    // Report completion when streaming
    if (STREAM_ENABLED) {
      await __streamEvent('/progress', { percent: 100, message: 'Execution complete' });
    }

    // Output structured result for parent process to parse
    // When streaming, logs have already been sent in real-time, so omit them to save tokens
    console.log('__RESULT_START__');
    console.log(JSON.stringify({
      success: true,
      result,
      logs: STREAM_ENABLED ? [] : __logs,
      metrics: __metrics,
    }));
    console.log('__RESULT_END__');
  } catch (error) {
    const err = error as Error;

    // Report error when streaming
    if (STREAM_ENABLED) {
      await __streamEvent('/log', { message: \`Error: \${err.message}\`, level: 'error' });
    }

    console.log('__RESULT_START__');
    console.log(JSON.stringify({
      success: false,
      error: {
        type: 'runtime',
        message: err.message,
        stack: err.stack,
      },
      logs: STREAM_ENABLED ? [] : __logs,
      metrics: __metrics,
    }));
    console.log('__RESULT_END__');
  }
})();
`;
}
|
|
433
|
+
/**
 * Deno Sandbox Executor
 *
 * Runs generated sandbox programs (see generateSandboxCode) in a Deno
 * subprocess whose network access is restricted to the configured bridge
 * hosts. The sandbox reports its outcome as JSON printed between
 * __RESULT_START__ / __RESULT_END__ markers on stdout, which this class
 * parses back into a result object.
 */
export class DenoExecutor {
    // Executor configuration; this class reads `allowedNetHosts` (string[])
    // and `maxMemoryMb` (number) from it.
    config;
    // Directory under the OS temp dir where generated sandbox .ts files are written.
    tempDir;
    /**
     * @param config Executor configuration providing allowedNetHosts and maxMemoryMb.
     */
    constructor(config) {
        this.config = config;
        this.tempDir = join(tmpdir(), 'mcp-executor');
        // Ensure temp directory exists
        if (!existsSync(this.tempDir)) {
            mkdirSync(this.tempDir, { recursive: true });
        }
    }
    /**
     * Check if Deno is available
     *
     * Spawns `deno --version` and resolves true only when the process exits
     * with code 0. Never rejects: spawn failures (e.g. binary not on PATH)
     * resolve false instead.
     *
     * NOTE(review): this spawns a process on every execute() call; caching
     * the result would avoid that — confirm whether re-checking is intended.
     */
    async checkDeno() {
        return new Promise((resolve) => {
            const proc = spawn('deno', ['--version'], { stdio: 'pipe' });
            proc.on('close', (code) => resolve(code === 0));
            proc.on('error', () => resolve(false));
        });
    }
    /**
     * Execute code in the Deno sandbox
     *
     * Flow: verify Deno is installed, generate the sandbox source, write it
     * to a temp file, run it via runDeno, then overlay timing/size metrics
     * onto the subprocess result. The temp file is always removed in the
     * finally block, even when runDeno fails.
     *
     * @param code    User code to run inside the sandbox wrapper.
     * @param options { bridgeUrl, timeoutMs, stream? } — stream defaults to false.
     * @returns A result object: { executionId, success, result?, error?, logs, metrics }.
     *
     * NOTE(review): resultSizeBytes is JSON.stringify(...).length, i.e.
     * UTF-16 code units, not bytes — confirm whether byte-accuracy matters.
     */
    async execute(code, options) {
        const executionId = generateExecutionId();
        const startTime = Date.now();
        const streamEnabled = options.stream ?? false;
        logger.debug('Starting execution', { executionId, streamEnabled });
        // Validate Deno is available
        const hasDenoAvailable = await this.checkDeno();
        if (!hasDenoAvailable) {
            return {
                executionId,
                success: false,
                error: {
                    type: 'runtime',
                    message: 'Deno runtime not found. Please install Deno: https://deno.land/#installation',
                },
                logs: [],
                metrics: {
                    executionTimeMs: Date.now() - startTime,
                    toolCalls: 0,
                    dataProcessedBytes: 0,
                    resultSizeBytes: 0,
                },
            };
        }
        // Generate sandbox code with streaming support
        const sandboxCode = generateSandboxCode(code, options.bridgeUrl, executionId, streamEnabled, options.timeoutMs);
        // Write to temp file
        const tempFile = join(this.tempDir, `exec_${executionId}.ts`);
        writeFileSync(tempFile, sandboxCode);
        try {
            const result = await this.runDeno(tempFile, options.timeoutMs, executionId, options.bridgeUrl);
            // runDeno leaves executionTimeMs/resultSizeBytes as 0; fill them
            // in here where the wall-clock start and final result are known.
            return {
                ...result,
                executionId,
                metrics: {
                    ...result.metrics,
                    executionTimeMs: Date.now() - startTime,
                    resultSizeBytes: result.result ? JSON.stringify(result.result).length : 0,
                },
            };
        }
        finally {
            // Clean up temp file
            try {
                unlinkSync(tempFile);
            }
            catch {
                // Ignore cleanup errors
            }
        }
    }
    /**
     * Get an execution ID without executing (for pre-creating streams)
     */
    generateExecutionId() {
        return generateExecutionId();
    }
    /**
     * Run Deno subprocess
     *
     * Spawns `deno run` on the generated file with:
     *  - --allow-net limited to the configured hosts (expanded with the
     *    bridge port, and 127.0.0.1 aliased for localhost, since Deno treats
     *    the two hostnames as distinct);
     *  - --no-prompt so missing permissions fail instead of blocking;
     *  - a V8 heap cap derived from config.maxMemoryMb.
     *
     * Enforces timeoutMs via SIGKILL (the `killed` flag distinguishes a
     * timeout from a normal exit in the close handler). Always resolves —
     * every failure mode is converted into a structured error result.
     *
     * @param filePath    Path to the generated sandbox .ts file.
     * @param timeoutMs   Hard kill deadline for the subprocess.
     * @param executionId ID echoed into every resolved result.
     * @param bridgeUrl   Used only to extract the bridge port for --allow-net.
     */
    runDeno(filePath, timeoutMs, executionId, bridgeUrl) {
        return new Promise((resolve) => {
            // Extract port from bridge URL
            let bridgePort = 9847; // Default
            try {
                const url = new URL(bridgeUrl);
                bridgePort = parseInt(url.port, 10) || 9847;
            }
            catch {
                // Use default if URL parsing fails
            }
            // Build allowed hosts list - add port if not specified
            // Include both localhost and 127.0.0.1 as Deno treats them differently
            const baseHosts = this.config.allowedNetHosts;
            const expandedHosts = [];
            for (const h of baseHosts) {
                if (h.includes(':')) {
                    expandedHosts.push(h);
                }
                else {
                    // Add both hostname and 127.0.0.1 with the bridge port
                    expandedHosts.push(`${h}:${bridgePort}`);
                    if (h === 'localhost') {
                        expandedHosts.push(`127.0.0.1:${bridgePort}`);
                    }
                }
            }
            const allowedHosts = expandedHosts.join(',');
            const args = [
                'run',
                `--allow-net=${allowedHosts}`,
                '--no-prompt',
                `--v8-flags=--max-old-space-size=${this.config.maxMemoryMb}`,
                filePath,
            ];
            logger.debug('Spawning Deno', { executionId, args: args.join(' ') });
            let stdout = '';
            let stderr = '';
            let killed = false;
            const proc = spawn('deno', args, {
                stdio: ['ignore', 'pipe', 'pipe'],
                env: {
                    ...process.env,
                    NO_COLOR: '1', // Disable colour output
                },
            });
            // Set timeout
            const timer = setTimeout(() => {
                killed = true;
                proc.kill('SIGKILL');
            }, timeoutMs);
            proc.stdout?.on('data', (data) => {
                stdout += data.toString();
            });
            proc.stderr?.on('data', (data) => {
                stderr += data.toString();
            });
            proc.on('close', (code) => {
                clearTimeout(timer);
                // Timeout path: the kill above triggers 'close', so report it
                // as a timeout error rather than inspecting exit code/output.
                if (killed) {
                    resolve({
                        executionId,
                        success: false,
                        error: {
                            type: 'timeout',
                            message: `Execution timed out after ${timeoutMs}ms`,
                        },
                        logs: [],
                        metrics: {
                            executionTimeMs: timeoutMs,
                            toolCalls: 0,
                            dataProcessedBytes: 0,
                            resultSizeBytes: 0,
                        },
                    });
                    return;
                }
                // Parse structured result from stdout
                const resultMatch = stdout.match(/__RESULT_START__\n([\s\S]*?)\n__RESULT_END__/);
                if (resultMatch && resultMatch[1]) {
                    try {
                        const parsed = JSON.parse(resultMatch[1]);
                        resolve({
                            executionId,
                            success: parsed.success,
                            result: parsed.result,
                            error: parsed.error,
                            logs: parsed.logs || [],
                            metrics: {
                                executionTimeMs: 0, // Will be set by caller
                                toolCalls: parsed.metrics?.toolCalls || 0,
                                dataProcessedBytes: parsed.metrics?.dataProcessedBytes || 0,
                                resultSizeBytes: 0, // Will be set by caller
                            },
                        });
                        return;
                    }
                    catch (parseError) {
                        // Fall through to the error branches below on bad JSON.
                        logger.error('Failed to parse execution result', { executionId, parseError });
                    }
                }
                // Handle execution errors
                if (code !== 0 || stderr) {
                    // Check for syntax errors
                    const syntaxMatch = stderr.match(/error: (.*?) at .*?:(\d+):\d+/);
                    if (syntaxMatch) {
                        resolve({
                            executionId,
                            success: false,
                            error: {
                                type: 'syntax',
                                message: syntaxMatch[1] || 'Syntax error',
                                // Line number refers to the generated sandbox
                                // file, not the user's original snippet.
                                line: parseInt(syntaxMatch[2] || '0', 10),
                            },
                            logs: [],
                            metrics: {
                                executionTimeMs: 0,
                                toolCalls: 0,
                                dataProcessedBytes: 0,
                                resultSizeBytes: 0,
                            },
                        });
                        return;
                    }
                    resolve({
                        executionId,
                        success: false,
                        error: {
                            type: 'runtime',
                            message: stderr || `Process exited with code ${code}`,
                            stack: stderr,
                        },
                        logs: [],
                        metrics: {
                            executionTimeMs: 0,
                            toolCalls: 0,
                            dataProcessedBytes: 0,
                            resultSizeBytes: 0,
                        },
                    });
                    return;
                }
                // No result found
                resolve({
                    executionId,
                    success: false,
                    error: {
                        type: 'runtime',
                        message: 'No result returned from execution',
                    },
                    logs: [],
                    metrics: {
                        executionTimeMs: 0,
                        toolCalls: 0,
                        dataProcessedBytes: 0,
                        resultSizeBytes: 0,
                    },
                });
            });
            // Spawn-level failure (e.g. ENOENT); 'close' will not deliver a
            // usable result in this case.
            proc.on('error', (error) => {
                clearTimeout(timer);
                resolve({
                    executionId,
                    success: false,
                    error: {
                        type: 'runtime',
                        message: `Failed to spawn Deno: ${error.message}`,
                    },
                    logs: [],
                    metrics: {
                        executionTimeMs: 0,
                        toolCalls: 0,
                        dataProcessedBytes: 0,
                        resultSizeBytes: 0,
                    },
                });
            });
        });
    }
}
|
|
700
|
+
//# sourceMappingURL=executor.js.map
|