@inkeep/agents-run-api 0.39.1 → 0.39.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/SandboxExecutorFactory.cjs +895 -0
- package/dist/SandboxExecutorFactory.js +893 -0
- package/dist/SandboxExecutorFactory.js.map +1 -0
- package/dist/chunk-VBDAOXYI.cjs +927 -0
- package/dist/chunk-VBDAOXYI.js +832 -0
- package/dist/chunk-VBDAOXYI.js.map +1 -0
- package/dist/chunk.cjs +34 -0
- package/dist/conversations.cjs +7 -0
- package/dist/conversations.js +7 -0
- package/dist/conversations2.cjs +209 -0
- package/dist/conversations2.js +180 -0
- package/dist/conversations2.js.map +1 -0
- package/dist/dbClient.cjs +9676 -0
- package/dist/dbClient.js +9670 -0
- package/dist/dbClient.js.map +1 -0
- package/dist/dbClient2.cjs +5 -0
- package/dist/dbClient2.js +5 -0
- package/dist/env.cjs +59 -0
- package/dist/env.js +54 -0
- package/dist/env.js.map +1 -0
- package/dist/execution-limits.cjs +260 -0
- package/dist/execution-limits.js +63 -0
- package/dist/execution-limits.js.map +1 -0
- package/dist/index.cjs +10545 -20565
- package/dist/index.d.cts +26 -22
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.ts +27 -22
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +10545 -12888
- package/dist/index.js.map +1 -0
- package/dist/instrumentation.cjs +12 -121
- package/dist/instrumentation.d.cts +9 -7
- package/dist/instrumentation.d.cts.map +1 -0
- package/dist/instrumentation.d.ts +9 -7
- package/dist/instrumentation.d.ts.map +1 -0
- package/dist/instrumentation.js +5 -1
- package/dist/instrumentation2.cjs +116 -0
- package/dist/instrumentation2.js +69 -0
- package/dist/instrumentation2.js.map +1 -0
- package/dist/json-postprocessor.cjs +20 -0
- package/dist/json-postprocessor.js +20 -0
- package/dist/json-postprocessor.js.map +1 -0
- package/dist/logger.cjs +5 -0
- package/dist/logger.js +3 -0
- package/dist/logger2.cjs +1 -0
- package/dist/logger2.js +3 -0
- package/dist/nodefs.cjs +29 -0
- package/dist/nodefs.js +27 -0
- package/dist/nodefs.js.map +1 -0
- package/dist/opfs-ahp.cjs +367 -0
- package/dist/opfs-ahp.js +368 -0
- package/dist/opfs-ahp.js.map +1 -0
- package/package.json +3 -3
- package/dist/SandboxExecutorFactory-2N27SE3B.js +0 -943
- package/dist/chunk-A2S7GSHL.js +0 -1
- package/dist/chunk-EVOISBFH.js +0 -5070
- package/dist/chunk-JCVMVG3J.js +0 -592
- package/dist/chunk-KBZIYCPJ.js +0 -40
- package/dist/chunk-KCJWSIDZ.js +0 -246
- package/dist/chunk-THWNUGWP.js +0 -204
- package/dist/chunk-UC2EPLSW.js +0 -75
- package/dist/conversations-XPSTWUMK.js +0 -1
- package/dist/dbClient-MAHUR4TO.js +0 -1
- package/dist/json-postprocessor-IGYTSWFB.js +0 -12
- package/dist/logger-3EE6BUSU.js +0 -1
- package/dist/nodefs-RPE52Q4Z.js +0 -21
- package/dist/opfs-ahp-QL4REJJW.js +0 -318
|
@@ -1,943 +0,0 @@
|
|
|
1
|
-
import { getLogger } from './chunk-A2S7GSHL.js';
|
|
2
|
-
import { FUNCTION_TOOL_SANDBOX_POOL_TTL_MS, FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT, FUNCTION_TOOL_SANDBOX_CLEANUP_INTERVAL_MS, FUNCTION_TOOL_EXECUTION_TIMEOUT_MS_DEFAULT, FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES, FUNCTION_TOOL_SANDBOX_QUEUE_WAIT_TIMEOUT_MS } from './chunk-THWNUGWP.js';
|
|
3
|
-
import { spawn } from 'child_process';
|
|
4
|
-
import crypto, { createHash } from 'crypto';
|
|
5
|
-
import { mkdirSync, existsSync, rmSync, writeFileSync } from 'fs';
|
|
6
|
-
import { tmpdir } from 'os';
|
|
7
|
-
import { join } from 'path';
|
|
8
|
-
import { Sandbox } from '@vercel/sandbox';
|
|
9
|
-
|
|
10
|
-
// src/tools/sandbox-utils.ts
/**
 * Build a self-contained Node.js script that runs a function tool inside a
 * sandbox. The script embeds the serialized `args`, the user's `executeCode`
 * (assigned verbatim to `execute`), awaits the call, and prints exactly one
 * JSON result line: `{ success: true, result }` on stdout for success, or
 * `{ success: false, error }` on stderr (followed by `process.exit(1)`) on
 * failure. The caller recovers the outcome via parseExecutionResult.
 */
function createExecutionWrapper(executeCode, args) {
  // Serialize once up front; the template below splices it in verbatim.
  const serializedArgs = JSON.stringify(args, null, 2);
  return `
// Function tool execution wrapper
const args = ${serializedArgs};

// User's function code
const execute = ${executeCode}

// Execute the function and output the result
(async () => {
  try {
    const result = await execute(args);
    // Output result as JSON on the last line
    console.log(JSON.stringify({ success: true, result }));
  } catch (error) {
    console.error(JSON.stringify({
      success: false,
      error: error instanceof Error ? error.message : String(error)
    }));
    process.exit(1);
  }
})();
`;
}
|
|
35
|
-
/**
 * Parse the result emitted by a sandboxed function run.
 *
 * The execution wrapper prints its JSON outcome as the last line of stdout,
 * so we take the last non-empty line and JSON.parse it. If parsing fails
 * (e.g. the user's code printed non-JSON output, or stdout is empty), the
 * raw stdout string is returned instead and a warning is logged when a
 * logger was supplied.
 *
 * @param stdout     Raw stdout captured from the sandboxed process.
 * @param functionId Identifier used only for log context.
 * @param logger4    Optional logger with a `warn(ctx, msg)` method.
 * @returns The parsed JSON value, or the raw stdout on parse failure.
 */
function parseExecutionResult(stdout, functionId, logger4) {
  try {
    const nonEmptyLines = stdout.split("\n").filter((line) => line.trim());
    // The wrapper guarantees the outcome is the final line it prints.
    return JSON.parse(nonEmptyLines[nonEmptyLines.length - 1]);
  } catch (parseError) {
    logger4?.warn(
      { functionId, stdout, parseError },
      "Failed to parse execution result"
    );
    return stdout;
  }
}
|
|
54
|
-
|
|
55
|
-
// src/tools/NativeSandboxExecutor.ts
|
|
56
|
-
// Module-level logger for the local child-process ("native") sandbox executor.
var logger = getLogger("native-sandbox-executor");
|
|
57
|
-
/**
 * Counting semaphore that bounds concurrent sandbox executions.
 *
 * `acquire(fn)` waits for a permit, runs `fn`, and releases the permit in a
 * `finally` so it is returned even when `fn` throws. Waiters that cannot get
 * a permit within `maxWaitTime` ms are removed from the queue and rejected.
 *
 * Bug fix vs. previous version: the timeout handler used to search the wait
 * queue with `item.resolve === resolve`, but the queued `resolve` is a
 * wrapper function — the comparison never matched, so queue-wait timeouts
 * never fired and stale waiters were never removed. We now keep a reference
 * to the queue entry object itself and remove it by identity.
 */
var ExecutionSemaphore = class {
  permits;
  waitQueue = [];
  maxWaitTime;
  /**
   * @param permits       Maximum concurrent holders (clamped to >= 1).
   * @param maxWaitTimeMs Max time a waiter may queue before rejection.
   */
  constructor(permits, maxWaitTimeMs = FUNCTION_TOOL_SANDBOX_QUEUE_WAIT_TIMEOUT_MS) {
    this.permits = Math.max(1, permits);
    this.maxWaitTime = maxWaitTimeMs;
  }
  /**
   * Acquire a permit, run `fn`, then release the permit and wake the next
   * waiter. Rejects with a timeout Error if no permit frees up in time.
   */
  async acquire(fn) {
    await new Promise((resolve, reject) => {
      if (this.permits > 0) {
        this.permits--;
        resolve();
        return;
      }
      // The entry object is the identity the timeout uses to find and
      // remove this waiter from the queue.
      const entry = {
        resolve: () => {
          clearTimeout(timeoutId);
          this.permits--;
          resolve();
        },
        reject
      };
      const timeoutId = setTimeout(() => {
        const index = this.waitQueue.indexOf(entry);
        if (index !== -1) {
          this.waitQueue.splice(index, 1);
          reject(
            new Error(
              `Function execution queue timeout after ${this.maxWaitTime}ms. Too many concurrent executions.`
            )
          );
        }
      }, this.maxWaitTime);
      this.waitQueue.push(entry);
    });
    try {
      return await fn();
    } finally {
      // Release the permit and hand it to the next queued waiter, if any.
      this.permits++;
      const next = this.waitQueue.shift();
      if (next) {
        next.resolve();
      }
    }
  }
  /** Number of permits currently free. */
  getAvailablePermits() {
    return this.permits;
  }
  /** Number of callers currently queued for a permit. */
  getQueueLength() {
    return this.waitQueue.length;
  }
};
|
|
109
|
-
/**
 * Runs function tools locally by spawning `node` in per-dependency-set
 * sandbox directories under the OS temp dir. Sandboxes (a directory with a
 * package.json and installed node_modules) are pooled and reused keyed by a
 * hash of the dependency map, and evicted on TTL or max-use-count.
 * Concurrency is bounded per vCPU setting via ExecutionSemaphore.
 * Singleton — obtain via getInstance().
 */
var NativeSandboxExecutor = class _NativeSandboxExecutor {
  // Root directory holding all sandbox directories.
  tempDir;
  // poolKey (dependency hash) -> { sandboxDir, lastUsed, useCount, dependencies }
  sandboxPool = {};
  static instance = null;
  // vCPU count -> ExecutionSemaphore limiting concurrent executions.
  executionSemaphores = /* @__PURE__ */ new Map();
  constructor() {
    this.tempDir = join(tmpdir(), "inkeep-sandboxes");
    this.ensureTempDir();
    this.startPoolCleanup();
  }
  /** Lazily create and return the process-wide singleton. */
  static getInstance() {
    if (!_NativeSandboxExecutor.instance) {
      _NativeSandboxExecutor.instance = new _NativeSandboxExecutor();
    }
    return _NativeSandboxExecutor.instance;
  }
  /**
   * Get (or create) the semaphore for a vCPU count. Permit count equals the
   * effective vCPU count, so more vCPUs allow more parallel executions.
   */
  getSemaphore(vcpus) {
    const effectiveVcpus = Math.max(1, vcpus || 1);
    if (!this.executionSemaphores.has(effectiveVcpus)) {
      logger.debug({ vcpus: effectiveVcpus }, "Creating new execution semaphore");
      this.executionSemaphores.set(effectiveVcpus, new ExecutionSemaphore(effectiveVcpus));
    }
    const semaphore = this.executionSemaphores.get(effectiveVcpus);
    if (!semaphore) {
      throw new Error(`Failed to create semaphore for ${effectiveVcpus} vCPUs`);
    }
    return semaphore;
  }
  /** Snapshot of per-vCPU semaphore state (free permits + queue length). */
  getExecutionStats() {
    const stats = {};
    for (const [vcpus, semaphore] of this.executionSemaphores.entries()) {
      stats[`vcpu_${vcpus}`] = {
        availablePermits: semaphore.getAvailablePermits(),
        queueLength: semaphore.getQueueLength()
      };
    }
    return stats;
  }
  // Best-effort creation of the sandbox root; errors are intentionally
  // swallowed (e.g. directory already exists, or creation is retried later).
  ensureTempDir() {
    try {
      mkdirSync(this.tempDir, { recursive: true });
    } catch {
    }
  }
  /**
   * Deterministic 16-hex-char key for a dependency map: sorted
   * "name@version" pairs, SHA-256, truncated. Order-insensitive.
   */
  generateDependencyHash(dependencies) {
    const sortedDeps = Object.keys(dependencies).sort().map((key) => `${key}@${dependencies[key]}`).join(",");
    return createHash("sha256").update(sortedDeps).digest("hex").substring(0, 16);
  }
  /**
   * Return a pooled sandbox directory for this dependency hash if it still
   * exists on disk, is within TTL, and under the max use count; otherwise
   * clean it up (when stale) and return null. Touches lastUsed/useCount on
   * a hit.
   */
  getCachedSandbox(dependencyHash) {
    const poolKey = dependencyHash;
    const sandbox = this.sandboxPool[poolKey];
    if (sandbox && existsSync(sandbox.sandboxDir)) {
      const now = Date.now();
      if (now - sandbox.lastUsed < FUNCTION_TOOL_SANDBOX_POOL_TTL_MS && sandbox.useCount < FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) {
        sandbox.lastUsed = now;
        sandbox.useCount++;
        logger.debug(
          {
            poolKey,
            useCount: sandbox.useCount,
            sandboxDir: sandbox.sandboxDir,
            lastUsed: new Date(sandbox.lastUsed)
          },
          "Reusing cached sandbox"
        );
        return sandbox.sandboxDir;
      }
      // Expired or over-used: remove from disk and from the pool.
      this.cleanupSandbox(sandbox.sandboxDir);
      delete this.sandboxPool[poolKey];
    }
    return null;
  }
  /**
   * Register a freshly-built sandbox directory in the pool. Any previous
   * entry under the same key is cleaned up first. useCount starts at 1
   * because the caller is about to use (or has just used) it.
   */
  addToPool(dependencyHash, sandboxDir, dependencies) {
    const poolKey = dependencyHash;
    if (this.sandboxPool[poolKey]) {
      this.cleanupSandbox(this.sandboxPool[poolKey].sandboxDir);
    }
    this.sandboxPool[poolKey] = {
      sandboxDir,
      lastUsed: Date.now(),
      useCount: 1,
      dependencies
    };
    logger.debug({ poolKey, sandboxDir }, "Added sandbox to pool");
  }
  // Recursively delete a sandbox directory; failures are logged, not thrown.
  cleanupSandbox(sandboxDir) {
    try {
      rmSync(sandboxDir, { recursive: true, force: true });
      logger.debug({ sandboxDir }, "Cleaned up sandbox");
    } catch (error) {
      logger.warn({ sandboxDir, error }, "Failed to clean up sandbox");
    }
  }
  /**
   * Periodically evict sandboxes that exceeded the TTL or max use count.
   * NOTE(review): the interval is never cleared or unref'd, so it keeps the
   * event loop alive for the process lifetime — presumably acceptable for a
   * long-running server singleton; confirm if used in short-lived processes.
   */
  startPoolCleanup() {
    setInterval(() => {
      const now = Date.now();
      const keysToDelete = [];
      for (const [key, sandbox] of Object.entries(this.sandboxPool)) {
        if (now - sandbox.lastUsed > FUNCTION_TOOL_SANDBOX_POOL_TTL_MS || sandbox.useCount >= FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) {
          this.cleanupSandbox(sandbox.sandboxDir);
          keysToDelete.push(key);
        }
      }
      keysToDelete.forEach((key) => {
        delete this.sandboxPool[key];
      });
      if (keysToDelete.length > 0) {
        logger.debug({ cleanedCount: keysToDelete.length }, "Cleaned up expired sandboxes");
      }
    }, FUNCTION_TOOL_SANDBOX_CLEANUP_INTERVAL_MS);
  }
  /**
   * Heuristically classify user code as "esm" or "cjs" by regex-matching
   * import/export vs. require/module.exports syntax. Rules:
   * - runtime "typescript": esm unless CJS syntax is present;
   * - both styles present: warn and prefer esm;
   * - neither present: default to cjs.
   * The pattern arrays are rebuilt per call, so the /g flags carry no
   * lastIndex state across invocations.
   */
  detectModuleType(executeCode, configuredRuntime) {
    const esmPatterns = [
      /import\s+.*\s+from\s+['"]/g,
      // import ... from '...'
      /import\s*\(/g,
      // import(...)
      /export\s+(default|const|let|var|function|class)/g,
      // export statements
      /export\s*\{/g
      // export { ... }
    ];
    const cjsPatterns = [
      /require\s*\(/g,
      // require(...)
      /module\.exports/g,
      // module.exports
      /exports\./g
      // exports.something
    ];
    const hasEsmSyntax = esmPatterns.some((pattern) => pattern.test(executeCode));
    const hasCjsSyntax = cjsPatterns.some((pattern) => pattern.test(executeCode));
    if (configuredRuntime === "typescript") {
      return hasCjsSyntax ? "cjs" : "esm";
    }
    if (hasEsmSyntax && hasCjsSyntax) {
      logger.warn(
        { executeCode: `${executeCode.substring(0, 100)}...` },
        "Both ESM and CommonJS syntax detected, defaulting to ESM"
      );
      return "esm";
    }
    if (hasEsmSyntax) {
      return "esm";
    }
    if (hasCjsSyntax) {
      return "cjs";
    }
    return "cjs";
  }
  /**
   * Public entry point: run a function tool, gated by the per-vCPU
   * semaphore so at most `vcpus` executions run concurrently.
   */
  async executeFunctionTool(toolId, args, config) {
    const vcpus = config.sandboxConfig?.vcpus || 1;
    const semaphore = this.getSemaphore(vcpus);
    logger.debug(
      {
        toolId,
        vcpus,
        availablePermits: semaphore.getAvailablePermits(),
        queueLength: semaphore.getQueueLength(),
        sandboxConfig: config.sandboxConfig,
        poolSize: Object.keys(this.sandboxPool).length
      },
      "Acquiring execution slot for function tool"
    );
    return semaphore.acquire(async () => {
      return this.executeInSandbox_Internal(toolId, args, config);
    });
  }
  /**
   * Core execution path (already holding a semaphore permit):
   * 1. Reuse or build a sandbox dir (package.json + npm install) for the
   *    tool's dependency set.
   * 2. Write the wrapped user code to index.js / index.mjs.
   * 3. Spawn node and parse the JSON result line.
   * On failure of a *newly built* sandbox, the directory and pool entry are
   * discarded so a broken install is not reused; the error is rethrown.
   */
  async executeInSandbox_Internal(toolId, args, config) {
    const dependencies = config.dependencies || {};
    const dependencyHash = this.generateDependencyHash(dependencies);
    logger.debug(
      {
        toolId,
        dependencies,
        dependencyHash,
        sandboxConfig: config.sandboxConfig,
        poolSize: Object.keys(this.sandboxPool).length
      },
      "Executing function tool"
    );
    let sandboxDir = this.getCachedSandbox(dependencyHash);
    let isNewSandbox = false;
    if (!sandboxDir) {
      // Date.now() suffix keeps concurrent builds of the same hash distinct.
      sandboxDir = join(this.tempDir, `sandbox-${dependencyHash}-${Date.now()}`);
      mkdirSync(sandboxDir, { recursive: true });
      isNewSandbox = true;
      logger.debug(
        {
          toolId,
          dependencyHash,
          sandboxDir,
          dependencies
        },
        "Creating new sandbox"
      );
      const moduleType = this.detectModuleType(config.executeCode, config.sandboxConfig?.runtime);
      const packageJson = {
        name: `function-tool-${toolId}`,
        version: "1.0.0",
        // "type": "module" only for ESM so .js files resolve correctly.
        ...moduleType === "esm" && { type: "module" },
        dependencies,
        scripts: {
          start: moduleType === "esm" ? "node index.mjs" : "node index.js"
        }
      };
      writeFileSync(join(sandboxDir, "package.json"), JSON.stringify(packageJson, null, 2), "utf8");
      if (Object.keys(dependencies).length > 0) {
        await this.installDependencies(sandboxDir);
      }
      this.addToPool(dependencyHash, sandboxDir, dependencies);
    }
    try {
      const moduleType = this.detectModuleType(config.executeCode, config.sandboxConfig?.runtime);
      const executionCode = createExecutionWrapper(config.executeCode, args);
      const fileExtension = moduleType === "esm" ? "mjs" : "js";
      writeFileSync(join(sandboxDir, `index.${fileExtension}`), executionCode, "utf8");
      const result = await this.executeInSandbox(
        sandboxDir,
        config.sandboxConfig?.timeout || FUNCTION_TOOL_EXECUTION_TIMEOUT_MS_DEFAULT,
        moduleType,
        config.sandboxConfig
      );
      return result;
    } catch (error) {
      if (isNewSandbox) {
        // Don't pool a sandbox whose very first execution failed.
        this.cleanupSandbox(sandboxDir);
        const poolKey = dependencyHash;
        delete this.sandboxPool[poolKey];
      }
      throw error;
    }
  }
  /**
   * Run `npm install` inside the sandbox dir. npm's cache/logs/tmp/HOME are
   * redirected into the sandbox so nothing leaks outside it; notifier,
   * progress, audit and fund output are disabled for quiet, fast installs.
   * Resolves on exit code 0, rejects otherwise (with captured stderr).
   */
  async installDependencies(sandboxDir) {
    return new Promise((resolve, reject) => {
      const npmEnv = {
        ...process.env,
        npm_config_cache: join(sandboxDir, ".npm-cache"),
        npm_config_logs_dir: join(sandboxDir, ".npm-logs"),
        npm_config_tmp: join(sandboxDir, ".npm-tmp"),
        HOME: sandboxDir,
        npm_config_update_notifier: "false",
        npm_config_progress: "false",
        npm_config_loglevel: "error"
      };
      const npm = spawn("npm", ["install", "--no-audit", "--no-fund"], {
        cwd: sandboxDir,
        stdio: "pipe",
        env: npmEnv
      });
      let stderr = "";
      // stdout is drained but discarded; only stderr matters for diagnostics.
      npm.stdout?.on("data", () => {
      });
      npm.stderr?.on("data", (data) => {
        stderr += data.toString();
      });
      npm.on("close", (code) => {
        if (code === 0) {
          logger.debug({ sandboxDir }, "Dependencies installed successfully");
          resolve();
        } else {
          logger.error({ sandboxDir, code, stderr }, "Failed to install dependencies");
          reject(new Error(`npm install failed with code ${code}: ${stderr}`));
        }
      });
      npm.on("error", (err) => {
        logger.error({ sandboxDir, error: err }, "Failed to spawn npm install");
        reject(err);
      });
    });
  }
  /**
   * Spawn `node index.js|mjs` in the sandbox dir and return the parsed
   * result. Enforces:
   * - a combined stdout+stderr size cap (process killed, promise rejected);
   * - a wall-clock timeout (SIGTERM, then SIGKILL after a grace period of
   *   timeout/10 clamped to 2-5 s).
   * On clean exit (code 0), the last stdout line is parsed; a
   * {success, result|error} envelope is unwrapped, anything else resolves
   * as-is. Non-zero exit or signal death rejects with stderr attached.
   */
  async executeInSandbox(sandboxDir, timeout, moduleType, _sandboxConfig) {
    return new Promise((resolve, reject) => {
      const fileExtension = moduleType === "esm" ? "mjs" : "js";
      const spawnOptions = {
        cwd: sandboxDir,
        stdio: "pipe",
        // Run as the current user explicitly (no-op on platforms without
        // getuid/getgid, e.g. Windows).
        uid: process.getuid ? process.getuid() : void 0,
        gid: process.getgid ? process.getgid() : void 0
      };
      const node = spawn("node", [`index.${fileExtension}`], spawnOptions);
      let stdout = "";
      let stderr = "";
      let outputSize = 0;
      node.stdout?.on("data", (data) => {
        const dataStr = data.toString();
        outputSize += dataStr.length;
        if (outputSize > FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES) {
          node.kill("SIGTERM");
          reject(
            new Error(
              `Output size exceeded limit of ${FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES} bytes`
            )
          );
          return;
        }
        stdout += dataStr;
      });
      node.stderr?.on("data", (data) => {
        const dataStr = data.toString();
        outputSize += dataStr.length;
        if (outputSize > FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES) {
          node.kill("SIGTERM");
          reject(
            new Error(
              `Output size exceeded limit of ${FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES} bytes`
            )
          );
          return;
        }
        stderr += dataStr;
      });
      const timeoutId = setTimeout(() => {
        logger.warn({ sandboxDir, timeout }, "Function execution timed out, killing process");
        node.kill("SIGTERM");
        // Escalate to SIGKILL if the process ignores SIGTERM.
        const forceKillTimeout = Math.min(Math.max(timeout / 10, 2e3), 5e3);
        setTimeout(() => {
          try {
            node.kill("SIGKILL");
          } catch {
          }
        }, forceKillTimeout);
        reject(new Error(`Function execution timed out after ${timeout}ms`));
      }, timeout);
      node.on("close", (code, signal) => {
        clearTimeout(timeoutId);
        if (code === 0) {
          try {
            const result = parseExecutionResult(stdout, "function", logger);
            // Unwrap the wrapper's {success, result|error} envelope when
            // present; pass through any other parsed value unchanged.
            if (typeof result === "object" && result !== null && "success" in result) {
              const parsed = result;
              if (parsed.success) {
                resolve(parsed.result);
              } else {
                reject(new Error(parsed.error || "Function execution failed"));
              }
            } else {
              resolve(result);
            }
          } catch (parseError) {
            logger.error({ stdout, stderr, parseError }, "Failed to parse function result");
            reject(new Error(`Invalid function result: ${stdout}`));
          }
        } else {
          const errorMsg = signal ? `Function execution killed by signal ${signal}: ${stderr}` : `Function execution failed with code ${code}: ${stderr}`;
          logger.error({ code, signal, stderr }, "Function execution failed");
          reject(new Error(errorMsg));
        }
      });
      node.on("error", (error) => {
        clearTimeout(timeoutId);
        logger.error({ sandboxDir, error }, "Failed to spawn node process");
        reject(error);
      });
    });
  }
};
|
|
466
|
-
// Module-level logger for the Vercel-hosted sandbox executor (uses @vercel/sandbox).
var logger2 = getLogger("VercelSandboxExecutor");
|
|
467
|
-
var VercelSandboxExecutor = class _VercelSandboxExecutor {
|
|
468
|
-
static instance;
|
|
469
|
-
config;
|
|
470
|
-
sandboxPool = /* @__PURE__ */ new Map();
|
|
471
|
-
cleanupInterval = null;
|
|
472
|
-
constructor(config) {
|
|
473
|
-
this.config = config;
|
|
474
|
-
logger2.info(
|
|
475
|
-
{
|
|
476
|
-
teamId: config.teamId,
|
|
477
|
-
projectId: config.projectId,
|
|
478
|
-
runtime: config.runtime,
|
|
479
|
-
timeout: config.timeout,
|
|
480
|
-
vcpus: config.vcpus
|
|
481
|
-
},
|
|
482
|
-
"VercelSandboxExecutor initialized with pooling"
|
|
483
|
-
);
|
|
484
|
-
this.startPoolCleanup();
|
|
485
|
-
}
|
|
486
|
-
/**
|
|
487
|
-
* Get singleton instance of VercelSandboxExecutor
|
|
488
|
-
*/
|
|
489
|
-
static getInstance(config) {
|
|
490
|
-
if (!_VercelSandboxExecutor.instance) {
|
|
491
|
-
_VercelSandboxExecutor.instance = new _VercelSandboxExecutor(config);
|
|
492
|
-
}
|
|
493
|
-
return _VercelSandboxExecutor.instance;
|
|
494
|
-
}
|
|
495
|
-
/**
|
|
496
|
-
* Generate a hash for dependencies to use as cache key
|
|
497
|
-
*/
|
|
498
|
-
generateDependencyHash(dependencies) {
|
|
499
|
-
const sorted = Object.keys(dependencies).sort().map((key) => `${key}@${dependencies[key]}`).join(",");
|
|
500
|
-
return crypto.createHash("md5").update(sorted).digest("hex").substring(0, 8);
|
|
501
|
-
}
|
|
502
|
-
/**
|
|
503
|
-
* Get a cached sandbox if available and still valid
|
|
504
|
-
*/
|
|
505
|
-
getCachedSandbox(dependencyHash) {
|
|
506
|
-
const cached = this.sandboxPool.get(dependencyHash);
|
|
507
|
-
if (!cached) return null;
|
|
508
|
-
const now = Date.now();
|
|
509
|
-
const age = now - cached.createdAt;
|
|
510
|
-
if (age > FUNCTION_TOOL_SANDBOX_POOL_TTL_MS || cached.useCount >= FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) {
|
|
511
|
-
logger2.debug(
|
|
512
|
-
{
|
|
513
|
-
dependencyHash,
|
|
514
|
-
age,
|
|
515
|
-
useCount: cached.useCount,
|
|
516
|
-
ttl: FUNCTION_TOOL_SANDBOX_POOL_TTL_MS,
|
|
517
|
-
maxUseCount: FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT
|
|
518
|
-
},
|
|
519
|
-
"Sandbox expired, will create new one"
|
|
520
|
-
);
|
|
521
|
-
this.removeSandbox(dependencyHash);
|
|
522
|
-
return null;
|
|
523
|
-
}
|
|
524
|
-
logger2.debug(
|
|
525
|
-
{
|
|
526
|
-
dependencyHash,
|
|
527
|
-
useCount: cached.useCount,
|
|
528
|
-
age
|
|
529
|
-
},
|
|
530
|
-
"Reusing cached sandbox"
|
|
531
|
-
);
|
|
532
|
-
return cached.sandbox;
|
|
533
|
-
}
|
|
534
|
-
/**
|
|
535
|
-
* Add sandbox to pool
|
|
536
|
-
*/
|
|
537
|
-
addToPool(dependencyHash, sandbox, dependencies) {
|
|
538
|
-
this.sandboxPool.set(dependencyHash, {
|
|
539
|
-
sandbox,
|
|
540
|
-
createdAt: Date.now(),
|
|
541
|
-
useCount: 0,
|
|
542
|
-
dependencies
|
|
543
|
-
});
|
|
544
|
-
logger2.debug(
|
|
545
|
-
{
|
|
546
|
-
dependencyHash,
|
|
547
|
-
poolSize: this.sandboxPool.size
|
|
548
|
-
},
|
|
549
|
-
"Sandbox added to pool"
|
|
550
|
-
);
|
|
551
|
-
}
|
|
552
|
-
/**
|
|
553
|
-
* Increment use count for a sandbox
|
|
554
|
-
*/
|
|
555
|
-
incrementUseCount(dependencyHash) {
|
|
556
|
-
const cached = this.sandboxPool.get(dependencyHash);
|
|
557
|
-
if (cached) {
|
|
558
|
-
cached.useCount++;
|
|
559
|
-
}
|
|
560
|
-
}
|
|
561
|
-
/**
|
|
562
|
-
* Remove and clean up a sandbox
|
|
563
|
-
*/
|
|
564
|
-
async removeSandbox(dependencyHash) {
|
|
565
|
-
const cached = this.sandboxPool.get(dependencyHash);
|
|
566
|
-
if (cached) {
|
|
567
|
-
try {
|
|
568
|
-
await cached.sandbox.stop();
|
|
569
|
-
logger2.debug({ dependencyHash }, "Sandbox stopped");
|
|
570
|
-
} catch (error) {
|
|
571
|
-
logger2.warn({ error, dependencyHash }, "Error stopping sandbox");
|
|
572
|
-
}
|
|
573
|
-
this.sandboxPool.delete(dependencyHash);
|
|
574
|
-
}
|
|
575
|
-
}
|
|
576
|
-
/**
|
|
577
|
-
* Start periodic cleanup of expired sandboxes
|
|
578
|
-
*/
|
|
579
|
-
startPoolCleanup() {
|
|
580
|
-
this.cleanupInterval = setInterval(() => {
|
|
581
|
-
const now = Date.now();
|
|
582
|
-
const toRemove = [];
|
|
583
|
-
for (const [hash, cached] of this.sandboxPool.entries()) {
|
|
584
|
-
const age = now - cached.createdAt;
|
|
585
|
-
if (age > FUNCTION_TOOL_SANDBOX_POOL_TTL_MS || cached.useCount >= FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) {
|
|
586
|
-
toRemove.push(hash);
|
|
587
|
-
}
|
|
588
|
-
}
|
|
589
|
-
if (toRemove.length > 0) {
|
|
590
|
-
logger2.info(
|
|
591
|
-
{
|
|
592
|
-
count: toRemove.length,
|
|
593
|
-
poolSize: this.sandboxPool.size
|
|
594
|
-
},
|
|
595
|
-
"Cleaning up expired sandboxes"
|
|
596
|
-
);
|
|
597
|
-
for (const hash of toRemove) {
|
|
598
|
-
this.removeSandbox(hash);
|
|
599
|
-
}
|
|
600
|
-
}
|
|
601
|
-
}, FUNCTION_TOOL_SANDBOX_CLEANUP_INTERVAL_MS);
|
|
602
|
-
}
|
|
603
|
-
/**
|
|
604
|
-
* Cleanup all sandboxes and stop cleanup interval
|
|
605
|
-
*/
|
|
606
|
-
async cleanup() {
|
|
607
|
-
if (this.cleanupInterval) {
|
|
608
|
-
clearInterval(this.cleanupInterval);
|
|
609
|
-
this.cleanupInterval = null;
|
|
610
|
-
}
|
|
611
|
-
logger2.info(
|
|
612
|
-
{
|
|
613
|
-
poolSize: this.sandboxPool.size
|
|
614
|
-
},
|
|
615
|
-
"Cleaning up all sandboxes"
|
|
616
|
-
);
|
|
617
|
-
const promises = Array.from(this.sandboxPool.keys()).map((hash) => this.removeSandbox(hash));
|
|
618
|
-
await Promise.all(promises);
|
|
619
|
-
}
|
|
620
|
-
/**
|
|
621
|
-
* Extract environment variable names from code
|
|
622
|
-
* Matches patterns like process.env.VAR_NAME or process.env['VAR_NAME']
|
|
623
|
-
*/
|
|
624
|
-
extractEnvVars(code) {
|
|
625
|
-
const envVars = /* @__PURE__ */ new Set();
|
|
626
|
-
const dotNotationRegex = /process\.env\.([A-Z_][A-Z0-9_]*)/g;
|
|
627
|
-
let match = dotNotationRegex.exec(code);
|
|
628
|
-
while (match !== null) {
|
|
629
|
-
envVars.add(match[1]);
|
|
630
|
-
match = dotNotationRegex.exec(code);
|
|
631
|
-
}
|
|
632
|
-
const bracketNotationRegex = /process\.env\[['"]([A-Z_][A-Z0-9_]*)['"]\]/g;
|
|
633
|
-
match = bracketNotationRegex.exec(code);
|
|
634
|
-
while (match !== null) {
|
|
635
|
-
envVars.add(match[1]);
|
|
636
|
-
match = bracketNotationRegex.exec(code);
|
|
637
|
-
}
|
|
638
|
-
return envVars;
|
|
639
|
-
}
|
|
640
|
-
/**
|
|
641
|
-
* Create .env file content from environment variables
|
|
642
|
-
* Note: Currently creates empty placeholders. Values will be populated in the future.
|
|
643
|
-
*/
|
|
644
|
-
createEnvFileContent(envVarNames) {
|
|
645
|
-
const envLines = [];
|
|
646
|
-
for (const varName of envVarNames) {
|
|
647
|
-
envLines.push(`${varName}=""`);
|
|
648
|
-
logger2.debug({ varName }, "Adding environment variable placeholder to sandbox");
|
|
649
|
-
}
|
|
650
|
-
return envLines.join("\n");
|
|
651
|
-
}
|
|
652
|
-
/**
|
|
653
|
-
* Execute a function tool in Vercel Sandbox with pooling
|
|
654
|
-
*/
|
|
655
|
-
async executeFunctionTool(functionId, args, toolConfig) {
|
|
656
|
-
const startTime = Date.now();
|
|
657
|
-
const logs = [];
|
|
658
|
-
const dependencies = toolConfig.dependencies || {};
|
|
659
|
-
const dependencyHash = this.generateDependencyHash(dependencies);
|
|
660
|
-
try {
|
|
661
|
-
logger2.info(
|
|
662
|
-
{
|
|
663
|
-
functionId,
|
|
664
|
-
functionName: toolConfig.name,
|
|
665
|
-
dependencyHash,
|
|
666
|
-
poolSize: this.sandboxPool.size
|
|
667
|
-
},
|
|
668
|
-
"Executing function in Vercel Sandbox"
|
|
669
|
-
);
|
|
670
|
-
let sandbox = this.getCachedSandbox(dependencyHash);
|
|
671
|
-
let isNewSandbox = false;
|
|
672
|
-
if (!sandbox) {
|
|
673
|
-
isNewSandbox = true;
|
|
674
|
-
sandbox = await Sandbox.create({
|
|
675
|
-
token: this.config.token,
|
|
676
|
-
teamId: this.config.teamId,
|
|
677
|
-
projectId: this.config.projectId,
|
|
678
|
-
timeout: this.config.timeout,
|
|
679
|
-
resources: {
|
|
680
|
-
vcpus: this.config.vcpus || 1
|
|
681
|
-
},
|
|
682
|
-
runtime: this.config.runtime
|
|
683
|
-
});
|
|
684
|
-
logger2.info(
|
|
685
|
-
{
|
|
686
|
-
functionId,
|
|
687
|
-
sandboxId: sandbox.sandboxId,
|
|
688
|
-
dependencyHash
|
|
689
|
-
},
|
|
690
|
-
`New sandbox created for function ${functionId}`
|
|
691
|
-
);
|
|
692
|
-
this.addToPool(dependencyHash, sandbox, dependencies);
|
|
693
|
-
} else {
|
|
694
|
-
logger2.info(
|
|
695
|
-
{
|
|
696
|
-
functionId,
|
|
697
|
-
sandboxId: sandbox.sandboxId,
|
|
698
|
-
dependencyHash
|
|
699
|
-
},
|
|
700
|
-
`Reusing cached sandbox for function ${functionId}`
|
|
701
|
-
);
|
|
702
|
-
}
|
|
703
|
-
this.incrementUseCount(dependencyHash);
|
|
704
|
-
try {
|
|
705
|
-
if (isNewSandbox && toolConfig.dependencies && Object.keys(toolConfig.dependencies).length > 0) {
|
|
706
|
-
logger2.debug(
|
|
707
|
-
{
|
|
708
|
-
functionId,
|
|
709
|
-
functionName: toolConfig.name,
|
|
710
|
-
dependencies: toolConfig.dependencies
|
|
711
|
-
},
|
|
712
|
-
"Installing dependencies in new sandbox"
|
|
713
|
-
);
|
|
714
|
-
const packageJson = {
|
|
715
|
-
dependencies: toolConfig.dependencies
|
|
716
|
-
};
|
|
717
|
-
const packageJsonContent = JSON.stringify(packageJson, null, 2);
|
|
718
|
-
await sandbox.writeFiles([
|
|
719
|
-
{
|
|
720
|
-
path: "package.json",
|
|
721
|
-
content: Buffer.from(packageJsonContent, "utf-8")
|
|
722
|
-
}
|
|
723
|
-
]);
|
|
724
|
-
const installCmd = await sandbox.runCommand({
|
|
725
|
-
cmd: "npm",
|
|
726
|
-
args: ["install", "--omit=dev"]
|
|
727
|
-
});
|
|
728
|
-
const installStdout = await installCmd.stdout();
|
|
729
|
-
const installStderr = await installCmd.stderr();
|
|
730
|
-
if (installStdout) {
|
|
731
|
-
logs.push(installStdout);
|
|
732
|
-
}
|
|
733
|
-
if (installStderr) {
|
|
734
|
-
logs.push(installStderr);
|
|
735
|
-
}
|
|
736
|
-
if (installCmd.exitCode !== 0) {
|
|
737
|
-
throw new Error(`Failed to install dependencies: ${installStderr}`);
|
|
738
|
-
}
|
|
739
|
-
logger2.info(
|
|
740
|
-
{
|
|
741
|
-
functionId,
|
|
742
|
-
dependencyHash
|
|
743
|
-
},
|
|
744
|
-
"Dependencies installed successfully"
|
|
745
|
-
);
|
|
746
|
-
}
|
|
747
|
-
const executionCode = createExecutionWrapper(toolConfig.executeCode, args);
|
|
748
|
-
const envVars = this.extractEnvVars(toolConfig.executeCode);
|
|
749
|
-
const filesToWrite = [];
|
|
750
|
-
const filename = this.config.runtime === "typescript" ? "execute.ts" : "execute.js";
|
|
751
|
-
filesToWrite.push({
|
|
752
|
-
path: filename,
|
|
753
|
-
content: Buffer.from(executionCode, "utf-8")
|
|
754
|
-
});
|
|
755
|
-
if (envVars.size > 0) {
|
|
756
|
-
const envFileContent = this.createEnvFileContent(envVars);
|
|
757
|
-
if (envFileContent) {
|
|
758
|
-
filesToWrite.push({
|
|
759
|
-
path: ".env",
|
|
760
|
-
content: Buffer.from(envFileContent, "utf-8")
|
|
761
|
-
});
|
|
762
|
-
logger2.info(
|
|
763
|
-
{
|
|
764
|
-
functionId,
|
|
765
|
-
envVarCount: envVars.size,
|
|
766
|
-
envVars: Array.from(envVars)
|
|
767
|
-
},
|
|
768
|
-
"Creating environment variable placeholders in sandbox"
|
|
769
|
-
);
|
|
770
|
-
}
|
|
771
|
-
}
|
|
772
|
-
await sandbox.writeFiles(filesToWrite);
|
|
773
|
-
logger2.info(
|
|
774
|
-
{
|
|
775
|
-
functionId,
|
|
776
|
-
runtime: this.config.runtime === "typescript" ? "tsx" : "node",
|
|
777
|
-
hasEnvVars: envVars.size > 0
|
|
778
|
-
},
|
|
779
|
-
`Execution code written to file for runtime ${this.config.runtime}`
|
|
780
|
-
);
|
|
781
|
-
const executeCmd = await (async () => {
|
|
782
|
-
if (envVars.size > 0) {
|
|
783
|
-
return sandbox.runCommand({
|
|
784
|
-
cmd: "npx",
|
|
785
|
-
args: this.config.runtime === "typescript" ? ["--yes", "dotenv-cli", "--", "npx", "tsx", filename] : ["--yes", "dotenv-cli", "--", "node", filename]
|
|
786
|
-
});
|
|
787
|
-
}
|
|
788
|
-
const runtime = this.config.runtime === "typescript" ? "tsx" : "node";
|
|
789
|
-
return sandbox.runCommand({
|
|
790
|
-
cmd: runtime,
|
|
791
|
-
args: [filename]
|
|
792
|
-
});
|
|
793
|
-
})();
|
|
794
|
-
const executeStdout = await executeCmd.stdout();
|
|
795
|
-
const executeStderr = await executeCmd.stderr();
|
|
796
|
-
if (executeStdout) {
|
|
797
|
-
logs.push(executeStdout);
|
|
798
|
-
}
|
|
799
|
-
if (executeStderr) {
|
|
800
|
-
logs.push(executeStderr);
|
|
801
|
-
}
|
|
802
|
-
const executionTime = Date.now() - startTime;
|
|
803
|
-
if (executeCmd.exitCode !== 0) {
|
|
804
|
-
logger2.error(
|
|
805
|
-
{
|
|
806
|
-
functionId,
|
|
807
|
-
exitCode: executeCmd.exitCode,
|
|
808
|
-
stderr: executeStderr
|
|
809
|
-
},
|
|
810
|
-
"Function execution failed"
|
|
811
|
-
);
|
|
812
|
-
return {
|
|
813
|
-
success: false,
|
|
814
|
-
error: executeStderr || "Function execution failed with non-zero exit code",
|
|
815
|
-
logs,
|
|
816
|
-
executionTime
|
|
817
|
-
};
|
|
818
|
-
}
|
|
819
|
-
const result = parseExecutionResult(executeStdout, functionId, logger2);
|
|
820
|
-
logger2.info(
|
|
821
|
-
{
|
|
822
|
-
functionId,
|
|
823
|
-
executionTime
|
|
824
|
-
},
|
|
825
|
-
"Function executed successfully in Vercel Sandbox"
|
|
826
|
-
);
|
|
827
|
-
return {
|
|
828
|
-
success: true,
|
|
829
|
-
result,
|
|
830
|
-
logs,
|
|
831
|
-
executionTime
|
|
832
|
-
};
|
|
833
|
-
} catch (innerError) {
|
|
834
|
-
await this.removeSandbox(dependencyHash);
|
|
835
|
-
throw innerError;
|
|
836
|
-
}
|
|
837
|
-
} catch (error) {
|
|
838
|
-
const executionTime = Date.now() - startTime;
|
|
839
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
840
|
-
logger2.error(
|
|
841
|
-
{
|
|
842
|
-
functionId,
|
|
843
|
-
error: errorMessage,
|
|
844
|
-
executionTime
|
|
845
|
-
},
|
|
846
|
-
"Vercel Sandbox execution error"
|
|
847
|
-
);
|
|
848
|
-
return {
|
|
849
|
-
success: false,
|
|
850
|
-
error: errorMessage,
|
|
851
|
-
logs,
|
|
852
|
-
executionTime
|
|
853
|
-
};
|
|
854
|
-
}
|
|
855
|
-
}
|
|
856
|
-
};
|
|
857
|
-
|
|
858
|
-
// src/tools/SandboxExecutorFactory.ts
var logger3 = getLogger("SandboxExecutorFactory");

/**
 * Singleton factory that routes function-tool execution to the configured
 * sandbox provider ("native" or "vercel") and caches one Vercel executor
 * per `${teamId}:${projectId}` pair so repeated calls reuse it.
 */
var SandboxExecutorFactory = class _SandboxExecutorFactory {
  static instance;
  // Lazily created on the first "native" execution.
  nativeExecutor = null;
  // Vercel executors keyed by `${teamId}:${projectId}`.
  vercelExecutors = /* @__PURE__ */ new Map();
  constructor() {
    logger3.info({}, "SandboxExecutorFactory initialized");
  }
  /**
   * Get singleton instance of SandboxExecutorFactory.
   * @returns {SandboxExecutorFactory} the shared factory instance
   */
  static getInstance() {
    if (!_SandboxExecutorFactory.instance) {
      _SandboxExecutorFactory.instance = new _SandboxExecutorFactory();
    }
    return _SandboxExecutorFactory.instance;
  }
  /**
   * Execute a function tool using the appropriate sandbox provider.
   * @param {string} functionId - identifier of the function tool to run
   * @param {*} args - arguments forwarded to the tool
   * @param {{sandboxConfig?: {provider: string}}} config - execution config
   * @throws {Error} when `sandboxConfig` is missing or names an unknown provider
   */
  async executeFunctionTool(functionId, args, config) {
    const sandboxConfig = config.sandboxConfig;
    if (!sandboxConfig) {
      throw new Error("Sandbox configuration is required for function tool execution");
    }
    if (sandboxConfig.provider === "native") {
      return this.executeInNativeSandbox(functionId, args, config);
    }
    if (sandboxConfig.provider === "vercel") {
      return this.executeInVercelSandbox(functionId, args, config);
    }
    throw new Error(`Unknown sandbox provider: ${sandboxConfig.provider}`);
  }
  /**
   * Execute in native sandbox; the executor singleton is created lazily
   * on first use and cached for subsequent calls.
   */
  async executeInNativeSandbox(functionId, args, config) {
    if (!this.nativeExecutor) {
      this.nativeExecutor = NativeSandboxExecutor.getInstance();
      logger3.info({}, "Native sandbox executor created");
    }
    return this.nativeExecutor.executeFunctionTool(functionId, args, config);
  }
  /**
   * Execute in Vercel sandbox, reusing a cached executor per team/project.
   * Unwraps the executor's result envelope: throws on `success: false`,
   * otherwise returns the inner `result`.
   * @throws {Error} when the sandbox reports a failed execution
   */
  async executeInVercelSandbox(functionId, args, config) {
    const vercelConfig = config.sandboxConfig;
    const configKey = `${vercelConfig.teamId}:${vercelConfig.projectId}`;
    // Single get-or-create; the old has()/set()/get() dance had an
    // unreachable "failed to get" branch.
    let executor = this.vercelExecutors.get(configKey);
    if (!executor) {
      executor = VercelSandboxExecutor.getInstance(vercelConfig);
      this.vercelExecutors.set(configKey, executor);
      logger3.info(
        {
          teamId: vercelConfig.teamId,
          projectId: vercelConfig.projectId
        },
        "Vercel sandbox executor created"
      );
    }
    const result = await executor.executeFunctionTool(functionId, args, config);
    if (!result.success) {
      throw new Error(result.error || "Vercel sandbox execution failed");
    }
    return result.result;
  }
  /**
   * Clean up all sandbox executors.
   *
   * Robustness fix: previously a single throwing `executor.cleanup()`
   * aborted the loop, leaving the remaining Vercel executors alive and
   * still registered. Failures are now isolated per executor (logged,
   * not rethrown) and the registry is always emptied.
   */
  async cleanup() {
    logger3.info({}, "Cleaning up sandbox executors");
    this.nativeExecutor = null;
    const entries = Array.from(this.vercelExecutors.entries());
    this.vercelExecutors.clear();
    const outcomes = await Promise.allSettled(
      entries.map(([, executor]) => executor.cleanup())
    );
    outcomes.forEach((outcome, i) => {
      if (outcome.status === "rejected") {
        logger3.error(
          { configKey: entries[i][0], error: String(outcome.reason) },
          "Vercel sandbox executor cleanup failed"
        );
      }
    });
    logger3.info({}, "Sandbox executor cleanup completed");
  }
};
|
|
942
|
-
|
|
943
|
-
export { SandboxExecutorFactory };
|