@inkeep/agents-run-api 0.39.1 → 0.39.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/dist/SandboxExecutorFactory.cjs +895 -0
  2. package/dist/SandboxExecutorFactory.js +893 -0
  3. package/dist/SandboxExecutorFactory.js.map +1 -0
  4. package/dist/chunk-VBDAOXYI.cjs +927 -0
  5. package/dist/chunk-VBDAOXYI.js +832 -0
  6. package/dist/chunk-VBDAOXYI.js.map +1 -0
  7. package/dist/chunk.cjs +34 -0
  8. package/dist/conversations.cjs +7 -0
  9. package/dist/conversations.js +7 -0
  10. package/dist/conversations2.cjs +209 -0
  11. package/dist/conversations2.js +180 -0
  12. package/dist/conversations2.js.map +1 -0
  13. package/dist/dbClient.cjs +9676 -0
  14. package/dist/dbClient.js +9670 -0
  15. package/dist/dbClient.js.map +1 -0
  16. package/dist/dbClient2.cjs +5 -0
  17. package/dist/dbClient2.js +5 -0
  18. package/dist/env.cjs +59 -0
  19. package/dist/env.js +54 -0
  20. package/dist/env.js.map +1 -0
  21. package/dist/execution-limits.cjs +260 -0
  22. package/dist/execution-limits.js +63 -0
  23. package/dist/execution-limits.js.map +1 -0
  24. package/dist/index.cjs +10545 -20565
  25. package/dist/index.d.cts +26 -22
  26. package/dist/index.d.cts.map +1 -0
  27. package/dist/index.d.ts +27 -22
  28. package/dist/index.d.ts.map +1 -0
  29. package/dist/index.js +10545 -12888
  30. package/dist/index.js.map +1 -0
  31. package/dist/instrumentation.cjs +12 -121
  32. package/dist/instrumentation.d.cts +9 -7
  33. package/dist/instrumentation.d.cts.map +1 -0
  34. package/dist/instrumentation.d.ts +9 -7
  35. package/dist/instrumentation.d.ts.map +1 -0
  36. package/dist/instrumentation.js +5 -1
  37. package/dist/instrumentation2.cjs +116 -0
  38. package/dist/instrumentation2.js +69 -0
  39. package/dist/instrumentation2.js.map +1 -0
  40. package/dist/json-postprocessor.cjs +20 -0
  41. package/dist/json-postprocessor.js +20 -0
  42. package/dist/json-postprocessor.js.map +1 -0
  43. package/dist/logger.cjs +5 -0
  44. package/dist/logger.js +3 -0
  45. package/dist/logger2.cjs +1 -0
  46. package/dist/logger2.js +3 -0
  47. package/dist/nodefs.cjs +29 -0
  48. package/dist/nodefs.js +27 -0
  49. package/dist/nodefs.js.map +1 -0
  50. package/dist/opfs-ahp.cjs +367 -0
  51. package/dist/opfs-ahp.js +368 -0
  52. package/dist/opfs-ahp.js.map +1 -0
  53. package/package.json +3 -3
  54. package/dist/SandboxExecutorFactory-2N27SE3B.js +0 -943
  55. package/dist/chunk-A2S7GSHL.js +0 -1
  56. package/dist/chunk-EVOISBFH.js +0 -5070
  57. package/dist/chunk-JCVMVG3J.js +0 -592
  58. package/dist/chunk-KBZIYCPJ.js +0 -40
  59. package/dist/chunk-KCJWSIDZ.js +0 -246
  60. package/dist/chunk-THWNUGWP.js +0 -204
  61. package/dist/chunk-UC2EPLSW.js +0 -75
  62. package/dist/conversations-XPSTWUMK.js +0 -1
  63. package/dist/dbClient-MAHUR4TO.js +0 -1
  64. package/dist/json-postprocessor-IGYTSWFB.js +0 -12
  65. package/dist/logger-3EE6BUSU.js +0 -1
  66. package/dist/nodefs-RPE52Q4Z.js +0 -21
  67. package/dist/opfs-ahp-QL4REJJW.js +0 -318
package/dist/SandboxExecutorFactory.js
@@ -0,0 +1,893 @@
1
+ import { t as getLogger } from "./logger2.js";
2
+ import { _ as FUNCTION_TOOL_SANDBOX_POOL_TTL_MS, g as FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT, h as FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES, m as FUNCTION_TOOL_SANDBOX_CLEANUP_INTERVAL_MS, p as FUNCTION_TOOL_EXECUTION_TIMEOUT_MS_DEFAULT, v as FUNCTION_TOOL_SANDBOX_QUEUE_WAIT_TIMEOUT_MS } from "./execution-limits.js";
3
+ import { spawn } from "node:child_process";
4
+ import crypto, { createHash } from "node:crypto";
5
+ import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
6
+ import { tmpdir } from "node:os";
7
+ import { join } from "node:path";
8
+ import { Sandbox } from "@vercel/sandbox";
9
+
10
+ //#region src/tools/sandbox-utils.ts
11
+ /**
12
+ * Shared utilities for sandbox executors
13
+ */
14
+ /**
15
+ * Create an execution wrapper that handles input/output for function tools
16
+ * This is used by both Native and Vercel sandbox executors
17
+ */
18
+ function createExecutionWrapper(executeCode, args) {
19
+ return `
20
+ // Function tool execution wrapper
21
+ const args = ${JSON.stringify(args, null, 2)};
22
+
23
+ // User's function code
24
+ const execute = ${executeCode}
25
+
26
+ // Execute the function and output the result
27
+ (async () => {
28
+ try {
29
+ const result = await execute(args);
30
+ // Output result as JSON on the last line
31
+ console.log(JSON.stringify({ success: true, result }));
32
+ } catch (error) {
33
+ console.error(JSON.stringify({
34
+ success: false,
35
+ error: error instanceof Error ? error.message : String(error)
36
+ }));
37
+ process.exit(1);
38
+ }
39
+ })();
40
+ `;
41
+ }
42
+ /**
43
+ * Parse execution result from stdout
44
+ * Returns the parsed result or the raw stdout if parsing fails
45
+ */
46
+ function parseExecutionResult(stdout, functionId, logger$3) {
47
+ try {
48
+ const outputLines = stdout.split("\n").filter((line) => line.trim());
49
+ const resultLine = outputLines[outputLines.length - 1];
50
+ return JSON.parse(resultLine);
51
+ } catch (parseError) {
52
+ if (logger$3) logger$3.warn({
53
+ functionId,
54
+ stdout,
55
+ parseError
56
+ }, "Failed to parse execution result");
57
+ return stdout;
58
+ }
59
+ }
60
+
61
+ //#endregion
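
For reference, the contract between the two helpers above is simply: the wrapper prints a single JSON object ({ success, result } or { success, error }) as the last stdout line, and parseExecutionResult takes the last non-empty line of stdout and JSON.parses it, falling back to the raw stdout if that fails. A minimal sketch of that round trip (the helpers are internal to this bundle; the sample function source and arguments are made up for illustration):

    // Hypothetical user function source, as it would arrive in config.executeCode.
    const executeCode = `async ({ a, b }) => a + b`;

    // Wrap it; running the returned script in a child process ends stdout with:
    //   {"success":true,"result":5}
    const script = createExecutionWrapper(executeCode, { a: 2, b: 3 });

    // The executor then parses that last line back out (the logger argument is optional).
    const parsed = parseExecutionResult('{"success":true,"result":5}\n', "demo-tool");
    // => { success: true, result: 5 }
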
62
+ //#region src/tools/NativeSandboxExecutor.ts
63
+ /**
64
+ * NativeSandboxExecutor - Function Tool Execution Engine
65
+ * ========================================================
66
+ *
67
+ * Executes user-defined function tools in isolated sandboxes using native Node.js processes.
68
+ * The main challenge here is that we can't just eval() user code - that's a security nightmare.
69
+ * Instead, we spin up separate Node.js processes with their own dependency trees.
70
+ *
71
+ * The tricky part is making this fast. Installing deps every time would be brutal
72
+ * (2-5s per execution), so we cache sandboxes based on their dependency fingerprint.
73
+ *
74
+ * How it works:
75
+ *
76
+ * 1. User calls a function tool
77
+ * 2. We hash the dependencies (e.g., "axios@1.6.0,lodash@4.17.21")
78
+ * 3. Check if we already have a sandbox with those deps installed
79
+ * 4. If yes: reuse it. If no: create new one, install deps, cache it
80
+ * 5. Write the user's function code to a temp file
81
+ * 6. Execute it in the sandboxed process with resource limits
82
+ * 7. Return the result
83
+ *
84
+ * Sandbox lifecycle:
85
+ * - Created when first needed for a dependency set
86
+ * - Reused up to 50 times or 5 minutes, whichever comes first
87
+ * - Automatically cleaned up when expired
88
+ * - Failed sandboxes are immediately destroyed
89
+ *
90
+ * Security stuff:
91
+ * - Each execution runs in its own process (not just a function call)
92
+ * - Output limited to 1MB to prevent memory bombs
93
+ * - Timeouts with graceful SIGTERM, then SIGKILL if needed
94
+ * - Runs as non-root when possible
95
+ * - Uses OS temp directory so it gets cleaned up automatically
96
+ *
97
+ * Performance:
98
+ * - Cold start: ~100-500ms (vs 2-5s without caching)
99
+ * - Hot path: ~50-100ms (just execution, no install)
100
+ * - Memory bounded by pool size limits
101
+ *
102
+ * Deployment notes:
103
+ * - Uses /tmp on Linux/macOS, %TEMP% on Windows
104
+ * - Works in Docker, Kubernetes, serverless (Vercel, Lambda)
105
+ * - No files left in project directory (no git pollution)
106
+ *
107
+ * The singleton pattern here is important - we need one shared pool
108
+ * across all tool executions, otherwise caching doesn't work.
109
+ */
110
+ const logger$2 = getLogger("native-sandbox-executor");
111
+ /**
112
+ * Semaphore for limiting concurrent executions based on vCPU allocation
113
+ */
114
+ var ExecutionSemaphore = class {
115
+ permits;
116
+ waitQueue = [];
117
+ maxWaitTime;
118
+ constructor(permits, maxWaitTimeMs = FUNCTION_TOOL_SANDBOX_QUEUE_WAIT_TIMEOUT_MS) {
119
+ this.permits = Math.max(1, permits);
120
+ this.maxWaitTime = maxWaitTimeMs;
121
+ }
122
+ async acquire(fn) {
123
+ await new Promise((resolve, reject) => {
124
+ if (this.permits > 0) {
125
+ this.permits--;
126
+ resolve();
127
+ return;
128
+ }
129
+ const timeoutId = setTimeout(() => {
130
+ const index = this.waitQueue.findIndex((item) => item.resolve === resolve);
131
+ if (index !== -1) {
132
+ this.waitQueue.splice(index, 1);
133
+ reject(/* @__PURE__ */ new Error(`Function execution queue timeout after ${this.maxWaitTime}ms. Too many concurrent executions.`));
134
+ }
135
+ }, this.maxWaitTime);
136
+ this.waitQueue.push({
137
+ resolve: () => {
138
+ clearTimeout(timeoutId);
139
+ this.permits--;
140
+ resolve();
141
+ },
142
+ reject
143
+ });
144
+ });
145
+ try {
146
+ return await fn();
147
+ } finally {
148
+ this.permits++;
149
+ const next = this.waitQueue.shift();
150
+ if (next) next.resolve();
151
+ }
152
+ }
153
+ getAvailablePermits() {
154
+ return this.permits;
155
+ }
156
+ getQueueLength() {
157
+ return this.waitQueue.length;
158
+ }
159
+ };
160
+ var NativeSandboxExecutor = class NativeSandboxExecutor {
161
+ tempDir;
162
+ sandboxPool = {};
163
+ static instance = null;
164
+ executionSemaphores = /* @__PURE__ */ new Map();
165
+ constructor() {
166
+ this.tempDir = join(tmpdir(), "inkeep-sandboxes");
167
+ this.ensureTempDir();
168
+ this.startPoolCleanup();
169
+ }
170
+ static getInstance() {
171
+ if (!NativeSandboxExecutor.instance) NativeSandboxExecutor.instance = new NativeSandboxExecutor();
172
+ return NativeSandboxExecutor.instance;
173
+ }
174
+ getSemaphore(vcpus) {
175
+ const effectiveVcpus = Math.max(1, vcpus || 1);
176
+ if (!this.executionSemaphores.has(effectiveVcpus)) {
177
+ logger$2.debug({ vcpus: effectiveVcpus }, "Creating new execution semaphore");
178
+ this.executionSemaphores.set(effectiveVcpus, new ExecutionSemaphore(effectiveVcpus));
179
+ }
180
+ const semaphore = this.executionSemaphores.get(effectiveVcpus);
181
+ if (!semaphore) throw new Error(`Failed to create semaphore for ${effectiveVcpus} vCPUs`);
182
+ return semaphore;
183
+ }
184
+ getExecutionStats() {
185
+ const stats = {};
186
+ for (const [vcpus, semaphore] of this.executionSemaphores.entries()) stats[`vcpu_${vcpus}`] = {
187
+ availablePermits: semaphore.getAvailablePermits(),
188
+ queueLength: semaphore.getQueueLength()
189
+ };
190
+ return stats;
191
+ }
192
+ ensureTempDir() {
193
+ try {
194
+ mkdirSync(this.tempDir, { recursive: true });
195
+ } catch {}
196
+ }
197
+ generateDependencyHash(dependencies) {
198
+ const sortedDeps = Object.keys(dependencies).sort().map((key) => `${key}@${dependencies[key]}`).join(",");
199
+ return createHash("sha256").update(sortedDeps).digest("hex").substring(0, 16);
200
+ }
201
+ getCachedSandbox(dependencyHash) {
202
+ const poolKey = dependencyHash;
203
+ const sandbox = this.sandboxPool[poolKey];
204
+ if (sandbox && existsSync(sandbox.sandboxDir)) {
205
+ const now = Date.now();
206
+ if (now - sandbox.lastUsed < FUNCTION_TOOL_SANDBOX_POOL_TTL_MS && sandbox.useCount < FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) {
207
+ sandbox.lastUsed = now;
208
+ sandbox.useCount++;
209
+ logger$2.debug({
210
+ poolKey,
211
+ useCount: sandbox.useCount,
212
+ sandboxDir: sandbox.sandboxDir,
213
+ lastUsed: new Date(sandbox.lastUsed)
214
+ }, "Reusing cached sandbox");
215
+ return sandbox.sandboxDir;
216
+ }
217
+ this.cleanupSandbox(sandbox.sandboxDir);
218
+ delete this.sandboxPool[poolKey];
219
+ }
220
+ return null;
221
+ }
222
+ addToPool(dependencyHash, sandboxDir, dependencies) {
223
+ const poolKey = dependencyHash;
224
+ if (this.sandboxPool[poolKey]) this.cleanupSandbox(this.sandboxPool[poolKey].sandboxDir);
225
+ this.sandboxPool[poolKey] = {
226
+ sandboxDir,
227
+ lastUsed: Date.now(),
228
+ useCount: 1,
229
+ dependencies
230
+ };
231
+ logger$2.debug({
232
+ poolKey,
233
+ sandboxDir
234
+ }, "Added sandbox to pool");
235
+ }
236
+ cleanupSandbox(sandboxDir) {
237
+ try {
238
+ rmSync(sandboxDir, {
239
+ recursive: true,
240
+ force: true
241
+ });
242
+ logger$2.debug({ sandboxDir }, "Cleaned up sandbox");
243
+ } catch (error) {
244
+ logger$2.warn({
245
+ sandboxDir,
246
+ error
247
+ }, "Failed to clean up sandbox");
248
+ }
249
+ }
250
+ startPoolCleanup() {
251
+ setInterval(() => {
252
+ const now = Date.now();
253
+ const keysToDelete = [];
254
+ for (const [key, sandbox] of Object.entries(this.sandboxPool)) if (now - sandbox.lastUsed > FUNCTION_TOOL_SANDBOX_POOL_TTL_MS || sandbox.useCount >= FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) {
255
+ this.cleanupSandbox(sandbox.sandboxDir);
256
+ keysToDelete.push(key);
257
+ }
258
+ keysToDelete.forEach((key) => {
259
+ delete this.sandboxPool[key];
260
+ });
261
+ if (keysToDelete.length > 0) logger$2.debug({ cleanedCount: keysToDelete.length }, "Cleaned up expired sandboxes");
262
+ }, FUNCTION_TOOL_SANDBOX_CLEANUP_INTERVAL_MS);
263
+ }
264
+ detectModuleType(executeCode, configuredRuntime) {
265
+ const esmPatterns = [
266
+ /import\s+.*\s+from\s+['"]/g,
267
+ /import\s*\(/g,
268
+ /export\s+(default|const|let|var|function|class)/g,
269
+ /export\s*\{/g
270
+ ];
271
+ const cjsPatterns = [
272
+ /require\s*\(/g,
273
+ /module\.exports/g,
274
+ /exports\./g
275
+ ];
276
+ const hasEsmSyntax = esmPatterns.some((pattern) => pattern.test(executeCode));
277
+ const hasCjsSyntax = cjsPatterns.some((pattern) => pattern.test(executeCode));
278
+ if (configuredRuntime === "typescript") return hasCjsSyntax ? "cjs" : "esm";
279
+ if (hasEsmSyntax && hasCjsSyntax) {
280
+ logger$2.warn({ executeCode: `${executeCode.substring(0, 100)}...` }, "Both ESM and CommonJS syntax detected, defaulting to ESM");
281
+ return "esm";
282
+ }
283
+ if (hasEsmSyntax) return "esm";
284
+ if (hasCjsSyntax) return "cjs";
285
+ return "cjs";
286
+ }
287
+ async executeFunctionTool(toolId, args, config) {
288
+ const vcpus = config.sandboxConfig?.vcpus || 1;
289
+ const semaphore = this.getSemaphore(vcpus);
290
+ logger$2.debug({
291
+ toolId,
292
+ vcpus,
293
+ availablePermits: semaphore.getAvailablePermits(),
294
+ queueLength: semaphore.getQueueLength(),
295
+ sandboxConfig: config.sandboxConfig,
296
+ poolSize: Object.keys(this.sandboxPool).length
297
+ }, "Acquiring execution slot for function tool");
298
+ return semaphore.acquire(async () => {
299
+ return this.executeInSandbox_Internal(toolId, args, config);
300
+ });
301
+ }
302
+ async executeInSandbox_Internal(toolId, args, config) {
303
+ const dependencies = config.dependencies || {};
304
+ const dependencyHash = this.generateDependencyHash(dependencies);
305
+ logger$2.debug({
306
+ toolId,
307
+ dependencies,
308
+ dependencyHash,
309
+ sandboxConfig: config.sandboxConfig,
310
+ poolSize: Object.keys(this.sandboxPool).length
311
+ }, "Executing function tool");
312
+ let sandboxDir = this.getCachedSandbox(dependencyHash);
313
+ let isNewSandbox = false;
314
+ if (!sandboxDir) {
315
+ sandboxDir = join(this.tempDir, `sandbox-${dependencyHash}-${Date.now()}`);
316
+ mkdirSync(sandboxDir, { recursive: true });
317
+ isNewSandbox = true;
318
+ logger$2.debug({
319
+ toolId,
320
+ dependencyHash,
321
+ sandboxDir,
322
+ dependencies
323
+ }, "Creating new sandbox");
324
+ const moduleType = this.detectModuleType(config.executeCode, config.sandboxConfig?.runtime);
325
+ const packageJson = {
326
+ name: `function-tool-${toolId}`,
327
+ version: "1.0.0",
328
+ ...moduleType === "esm" && { type: "module" },
329
+ dependencies,
330
+ scripts: { start: moduleType === "esm" ? "node index.mjs" : "node index.js" }
331
+ };
332
+ writeFileSync(join(sandboxDir, "package.json"), JSON.stringify(packageJson, null, 2), "utf8");
333
+ if (Object.keys(dependencies).length > 0) await this.installDependencies(sandboxDir);
334
+ this.addToPool(dependencyHash, sandboxDir, dependencies);
335
+ }
336
+ try {
337
+ const moduleType = this.detectModuleType(config.executeCode, config.sandboxConfig?.runtime);
338
+ const executionCode = createExecutionWrapper(config.executeCode, args);
339
+ writeFileSync(join(sandboxDir, `index.${moduleType === "esm" ? "mjs" : "js"}`), executionCode, "utf8");
340
+ return await this.executeInSandbox(sandboxDir, config.sandboxConfig?.timeout || FUNCTION_TOOL_EXECUTION_TIMEOUT_MS_DEFAULT, moduleType, config.sandboxConfig);
341
+ } catch (error) {
342
+ if (isNewSandbox) {
343
+ this.cleanupSandbox(sandboxDir);
344
+ const poolKey = dependencyHash;
345
+ delete this.sandboxPool[poolKey];
346
+ }
347
+ throw error;
348
+ }
349
+ }
350
+ async installDependencies(sandboxDir) {
351
+ return new Promise((resolve, reject) => {
352
+ const npm = spawn("npm", [
353
+ "install",
354
+ "--no-audit",
355
+ "--no-fund"
356
+ ], {
357
+ cwd: sandboxDir,
358
+ stdio: "pipe",
359
+ env: {
360
+ ...process.env,
361
+ npm_config_cache: join(sandboxDir, ".npm-cache"),
362
+ npm_config_logs_dir: join(sandboxDir, ".npm-logs"),
363
+ npm_config_tmp: join(sandboxDir, ".npm-tmp"),
364
+ HOME: sandboxDir,
365
+ npm_config_update_notifier: "false",
366
+ npm_config_progress: "false",
367
+ npm_config_loglevel: "error"
368
+ }
369
+ });
370
+ let stderr = "";
371
+ npm.stdout?.on("data", () => {});
372
+ npm.stderr?.on("data", (data) => {
373
+ stderr += data.toString();
374
+ });
375
+ npm.on("close", (code) => {
376
+ if (code === 0) {
377
+ logger$2.debug({ sandboxDir }, "Dependencies installed successfully");
378
+ resolve();
379
+ } else {
380
+ logger$2.error({
381
+ sandboxDir,
382
+ code,
383
+ stderr
384
+ }, "Failed to install dependencies");
385
+ reject(/* @__PURE__ */ new Error(`npm install failed with code ${code}: ${stderr}`));
386
+ }
387
+ });
388
+ npm.on("error", (err) => {
389
+ logger$2.error({
390
+ sandboxDir,
391
+ error: err
392
+ }, "Failed to spawn npm install");
393
+ reject(err);
394
+ });
395
+ });
396
+ }
397
+ async executeInSandbox(sandboxDir, timeout, moduleType, _sandboxConfig) {
398
+ return new Promise((resolve, reject) => {
399
+ const fileExtension = moduleType === "esm" ? "mjs" : "js";
400
+ const spawnOptions = {
401
+ cwd: sandboxDir,
402
+ stdio: "pipe",
403
+ uid: process.getuid ? process.getuid() : void 0,
404
+ gid: process.getgid ? process.getgid() : void 0
405
+ };
406
+ const node = spawn("node", [`index.${fileExtension}`], spawnOptions);
407
+ let stdout = "";
408
+ let stderr = "";
409
+ let outputSize = 0;
410
+ node.stdout?.on("data", (data) => {
411
+ const dataStr = data.toString();
412
+ outputSize += dataStr.length;
413
+ if (outputSize > FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES) {
414
+ node.kill("SIGTERM");
415
+ reject(/* @__PURE__ */ new Error(`Output size exceeded limit of ${FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES} bytes`));
416
+ return;
417
+ }
418
+ stdout += dataStr;
419
+ });
420
+ node.stderr?.on("data", (data) => {
421
+ const dataStr = data.toString();
422
+ outputSize += dataStr.length;
423
+ if (outputSize > FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES) {
424
+ node.kill("SIGTERM");
425
+ reject(/* @__PURE__ */ new Error(`Output size exceeded limit of ${FUNCTION_TOOL_SANDBOX_MAX_OUTPUT_SIZE_BYTES} bytes`));
426
+ return;
427
+ }
428
+ stderr += dataStr;
429
+ });
430
+ const timeoutId = setTimeout(() => {
431
+ logger$2.warn({
432
+ sandboxDir,
433
+ timeout
434
+ }, "Function execution timed out, killing process");
435
+ node.kill("SIGTERM");
436
+ const forceKillTimeout = Math.min(Math.max(timeout / 10, 2e3), 5e3);
437
+ setTimeout(() => {
438
+ try {
439
+ node.kill("SIGKILL");
440
+ } catch {}
441
+ }, forceKillTimeout);
442
+ reject(/* @__PURE__ */ new Error(`Function execution timed out after ${timeout}ms`));
443
+ }, timeout);
444
+ node.on("close", (code, signal) => {
445
+ clearTimeout(timeoutId);
446
+ if (code === 0) try {
447
+ const result = parseExecutionResult(stdout, "function", logger$2);
448
+ if (typeof result === "object" && result !== null && "success" in result) {
449
+ const parsed = result;
450
+ if (parsed.success) resolve(parsed.result);
451
+ else reject(new Error(parsed.error || "Function execution failed"));
452
+ } else resolve(result);
453
+ } catch (parseError) {
454
+ logger$2.error({
455
+ stdout,
456
+ stderr,
457
+ parseError
458
+ }, "Failed to parse function result");
459
+ reject(/* @__PURE__ */ new Error(`Invalid function result: ${stdout}`));
460
+ }
461
+ else {
462
+ const errorMsg = signal ? `Function execution killed by signal ${signal}: ${stderr}` : `Function execution failed with code ${code}: ${stderr}`;
463
+ logger$2.error({
464
+ code,
465
+ signal,
466
+ stderr
467
+ }, "Function execution failed");
468
+ reject(new Error(errorMsg));
469
+ }
470
+ });
471
+ node.on("error", (error) => {
472
+ clearTimeout(timeoutId);
473
+ logger$2.error({
474
+ sandboxDir,
475
+ error
476
+ }, "Failed to spawn node process");
477
+ reject(error);
478
+ });
479
+ });
480
+ }
481
+ };
482
+
483
+ //#endregion
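
The pooling above keys sandboxes by a dependency fingerprint, so identical dependency sets always land on the same cached sandbox regardless of key order. A standalone sketch of the same hashing scheme used by generateDependencyHash (sorted name@version pairs, SHA-256, first 16 hex characters), shown only to make the cache key concrete:

    import { createHash } from "node:crypto";

    // Mirrors NativeSandboxExecutor.generateDependencyHash().
    function dependencyFingerprint(dependencies) {
      const sorted = Object.keys(dependencies)
        .sort()
        .map((name) => `${name}@${dependencies[name]}`)
        .join(",");
      return createHash("sha256").update(sorted).digest("hex").substring(0, 16);
    }

    dependencyFingerprint({ lodash: "4.17.21", axios: "1.6.0" });
    // same value as dependencyFingerprint({ axios: "1.6.0", lodash: "4.17.21" })
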
484
+ //#region src/tools/VercelSandboxExecutor.ts
485
+ const logger$1 = getLogger("VercelSandboxExecutor");
486
+ /**
487
+ * Vercel Sandbox Executor with pooling/reuse
488
+ * Executes function tools in isolated Vercel Sandbox MicroVMs
489
+ * Caches and reuses sandboxes based on dependencies to improve performance
490
+ */
491
+ var VercelSandboxExecutor = class VercelSandboxExecutor {
492
+ static instance;
493
+ config;
494
+ sandboxPool = /* @__PURE__ */ new Map();
495
+ cleanupInterval = null;
496
+ constructor(config) {
497
+ this.config = config;
498
+ logger$1.info({
499
+ teamId: config.teamId,
500
+ projectId: config.projectId,
501
+ runtime: config.runtime,
502
+ timeout: config.timeout,
503
+ vcpus: config.vcpus
504
+ }, "VercelSandboxExecutor initialized with pooling");
505
+ this.startPoolCleanup();
506
+ }
507
+ /**
508
+ * Get singleton instance of VercelSandboxExecutor
509
+ */
510
+ static getInstance(config) {
511
+ if (!VercelSandboxExecutor.instance) VercelSandboxExecutor.instance = new VercelSandboxExecutor(config);
512
+ return VercelSandboxExecutor.instance;
513
+ }
514
+ /**
515
+ * Generate a hash for dependencies to use as cache key
516
+ */
517
+ generateDependencyHash(dependencies) {
518
+ const sorted = Object.keys(dependencies).sort().map((key) => `${key}@${dependencies[key]}`).join(",");
519
+ return crypto.createHash("md5").update(sorted).digest("hex").substring(0, 8);
520
+ }
521
+ /**
522
+ * Get a cached sandbox if available and still valid
523
+ */
524
+ getCachedSandbox(dependencyHash) {
525
+ const cached = this.sandboxPool.get(dependencyHash);
526
+ if (!cached) return null;
527
+ const age = Date.now() - cached.createdAt;
528
+ if (age > FUNCTION_TOOL_SANDBOX_POOL_TTL_MS || cached.useCount >= FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) {
529
+ logger$1.debug({
530
+ dependencyHash,
531
+ age,
532
+ useCount: cached.useCount,
533
+ ttl: FUNCTION_TOOL_SANDBOX_POOL_TTL_MS,
534
+ maxUseCount: FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT
535
+ }, "Sandbox expired, will create new one");
536
+ this.removeSandbox(dependencyHash);
537
+ return null;
538
+ }
539
+ logger$1.debug({
540
+ dependencyHash,
541
+ useCount: cached.useCount,
542
+ age
543
+ }, "Reusing cached sandbox");
544
+ return cached.sandbox;
545
+ }
546
+ /**
547
+ * Add sandbox to pool
548
+ */
549
+ addToPool(dependencyHash, sandbox, dependencies) {
550
+ this.sandboxPool.set(dependencyHash, {
551
+ sandbox,
552
+ createdAt: Date.now(),
553
+ useCount: 0,
554
+ dependencies
555
+ });
556
+ logger$1.debug({
557
+ dependencyHash,
558
+ poolSize: this.sandboxPool.size
559
+ }, "Sandbox added to pool");
560
+ }
561
+ /**
562
+ * Increment use count for a sandbox
563
+ */
564
+ incrementUseCount(dependencyHash) {
565
+ const cached = this.sandboxPool.get(dependencyHash);
566
+ if (cached) cached.useCount++;
567
+ }
568
+ /**
569
+ * Remove and clean up a sandbox
570
+ */
571
+ async removeSandbox(dependencyHash) {
572
+ const cached = this.sandboxPool.get(dependencyHash);
573
+ if (cached) {
574
+ try {
575
+ await cached.sandbox.stop();
576
+ logger$1.debug({ dependencyHash }, "Sandbox stopped");
577
+ } catch (error) {
578
+ logger$1.warn({
579
+ error,
580
+ dependencyHash
581
+ }, "Error stopping sandbox");
582
+ }
583
+ this.sandboxPool.delete(dependencyHash);
584
+ }
585
+ }
586
+ /**
587
+ * Start periodic cleanup of expired sandboxes
588
+ */
589
+ startPoolCleanup() {
590
+ this.cleanupInterval = setInterval(() => {
591
+ const now = Date.now();
592
+ const toRemove = [];
593
+ for (const [hash, cached] of this.sandboxPool.entries()) if (now - cached.createdAt > FUNCTION_TOOL_SANDBOX_POOL_TTL_MS || cached.useCount >= FUNCTION_TOOL_SANDBOX_MAX_USE_COUNT) toRemove.push(hash);
594
+ if (toRemove.length > 0) {
595
+ logger$1.info({
596
+ count: toRemove.length,
597
+ poolSize: this.sandboxPool.size
598
+ }, "Cleaning up expired sandboxes");
599
+ for (const hash of toRemove) this.removeSandbox(hash);
600
+ }
601
+ }, FUNCTION_TOOL_SANDBOX_CLEANUP_INTERVAL_MS);
602
+ }
603
+ /**
604
+ * Cleanup all sandboxes and stop cleanup interval
605
+ */
606
+ async cleanup() {
607
+ if (this.cleanupInterval) {
608
+ clearInterval(this.cleanupInterval);
609
+ this.cleanupInterval = null;
610
+ }
611
+ logger$1.info({ poolSize: this.sandboxPool.size }, "Cleaning up all sandboxes");
612
+ const promises = Array.from(this.sandboxPool.keys()).map((hash) => this.removeSandbox(hash));
613
+ await Promise.all(promises);
614
+ }
615
+ /**
616
+ * Extract environment variable names from code
617
+ * Matches patterns like process.env.VAR_NAME or process.env['VAR_NAME']
618
+ */
619
+ extractEnvVars(code) {
620
+ const envVars = /* @__PURE__ */ new Set();
621
+ const dotNotationRegex = /process\.env\.([A-Z_][A-Z0-9_]*)/g;
622
+ let match = dotNotationRegex.exec(code);
623
+ while (match !== null) {
624
+ envVars.add(match[1]);
625
+ match = dotNotationRegex.exec(code);
626
+ }
627
+ const bracketNotationRegex = /process\.env\[['"]([A-Z_][A-Z0-9_]*)['"]\]/g;
628
+ match = bracketNotationRegex.exec(code);
629
+ while (match !== null) {
630
+ envVars.add(match[1]);
631
+ match = bracketNotationRegex.exec(code);
632
+ }
633
+ return envVars;
634
+ }
635
+ /**
636
+ * Create .env file content from environment variables
637
+ * Note: Currently creates empty placeholders. Values will be populated in the future.
638
+ */
639
+ createEnvFileContent(envVarNames) {
640
+ const envLines = [];
641
+ for (const varName of envVarNames) {
642
+ envLines.push(`${varName}=""`);
643
+ logger$1.debug({ varName }, "Adding environment variable placeholder to sandbox");
644
+ }
645
+ return envLines.join("\n");
646
+ }
647
+ /**
648
+ * Execute a function tool in Vercel Sandbox with pooling
649
+ */
650
+ async executeFunctionTool(functionId, args, toolConfig) {
651
+ const startTime = Date.now();
652
+ const logs = [];
653
+ const dependencies = toolConfig.dependencies || {};
654
+ const dependencyHash = this.generateDependencyHash(dependencies);
655
+ try {
656
+ logger$1.info({
657
+ functionId,
658
+ functionName: toolConfig.name,
659
+ dependencyHash,
660
+ poolSize: this.sandboxPool.size
661
+ }, "Executing function in Vercel Sandbox");
662
+ let sandbox = this.getCachedSandbox(dependencyHash);
663
+ let isNewSandbox = false;
664
+ if (!sandbox) {
665
+ isNewSandbox = true;
666
+ sandbox = await Sandbox.create({
667
+ token: this.config.token,
668
+ teamId: this.config.teamId,
669
+ projectId: this.config.projectId,
670
+ timeout: this.config.timeout,
671
+ resources: { vcpus: this.config.vcpus || 1 },
672
+ runtime: this.config.runtime
673
+ });
674
+ logger$1.info({
675
+ functionId,
676
+ sandboxId: sandbox.sandboxId,
677
+ dependencyHash
678
+ }, `New sandbox created for function ${functionId}`);
679
+ this.addToPool(dependencyHash, sandbox, dependencies);
680
+ } else logger$1.info({
681
+ functionId,
682
+ sandboxId: sandbox.sandboxId,
683
+ dependencyHash
684
+ }, `Reusing cached sandbox for function ${functionId}`);
685
+ this.incrementUseCount(dependencyHash);
686
+ try {
687
+ if (isNewSandbox && toolConfig.dependencies && Object.keys(toolConfig.dependencies).length > 0) {
688
+ logger$1.debug({
689
+ functionId,
690
+ functionName: toolConfig.name,
691
+ dependencies: toolConfig.dependencies
692
+ }, "Installing dependencies in new sandbox");
693
+ const packageJson = { dependencies: toolConfig.dependencies };
694
+ const packageJsonContent = JSON.stringify(packageJson, null, 2);
695
+ await sandbox.writeFiles([{
696
+ path: "package.json",
697
+ content: Buffer.from(packageJsonContent, "utf-8")
698
+ }]);
699
+ const installCmd = await sandbox.runCommand({
700
+ cmd: "npm",
701
+ args: ["install", "--omit=dev"]
702
+ });
703
+ const installStdout = await installCmd.stdout();
704
+ const installStderr = await installCmd.stderr();
705
+ if (installStdout) logs.push(installStdout);
706
+ if (installStderr) logs.push(installStderr);
707
+ if (installCmd.exitCode !== 0) throw new Error(`Failed to install dependencies: ${installStderr}`);
708
+ logger$1.info({
709
+ functionId,
710
+ dependencyHash
711
+ }, "Dependencies installed successfully");
712
+ }
713
+ const executionCode = createExecutionWrapper(toolConfig.executeCode, args);
714
+ const envVars = this.extractEnvVars(toolConfig.executeCode);
715
+ const filesToWrite = [];
716
+ const filename = this.config.runtime === "typescript" ? "execute.ts" : "execute.js";
717
+ filesToWrite.push({
718
+ path: filename,
719
+ content: Buffer.from(executionCode, "utf-8")
720
+ });
721
+ if (envVars.size > 0) {
722
+ const envFileContent = this.createEnvFileContent(envVars);
723
+ if (envFileContent) {
724
+ filesToWrite.push({
725
+ path: ".env",
726
+ content: Buffer.from(envFileContent, "utf-8")
727
+ });
728
+ logger$1.info({
729
+ functionId,
730
+ envVarCount: envVars.size,
731
+ envVars: Array.from(envVars)
732
+ }, "Creating environment variable placeholders in sandbox");
733
+ }
734
+ }
735
+ await sandbox.writeFiles(filesToWrite);
736
+ logger$1.info({
737
+ functionId,
738
+ runtime: this.config.runtime === "typescript" ? "tsx" : "node",
739
+ hasEnvVars: envVars.size > 0
740
+ }, `Execution code written to file for runtime ${this.config.runtime}`);
741
+ const executeCmd = await (async () => {
742
+ if (envVars.size > 0) return sandbox.runCommand({
743
+ cmd: "npx",
744
+ args: this.config.runtime === "typescript" ? [
745
+ "--yes",
746
+ "dotenv-cli",
747
+ "--",
748
+ "npx",
749
+ "tsx",
750
+ filename
751
+ ] : [
752
+ "--yes",
753
+ "dotenv-cli",
754
+ "--",
755
+ "node",
756
+ filename
757
+ ]
758
+ });
759
+ const runtime = this.config.runtime === "typescript" ? "tsx" : "node";
760
+ return sandbox.runCommand({
761
+ cmd: runtime,
762
+ args: [filename]
763
+ });
764
+ })();
765
+ const executeStdout = await executeCmd.stdout();
766
+ const executeStderr = await executeCmd.stderr();
767
+ if (executeStdout) logs.push(executeStdout);
768
+ if (executeStderr) logs.push(executeStderr);
769
+ const executionTime = Date.now() - startTime;
770
+ if (executeCmd.exitCode !== 0) {
771
+ logger$1.error({
772
+ functionId,
773
+ exitCode: executeCmd.exitCode,
774
+ stderr: executeStderr
775
+ }, "Function execution failed");
776
+ return {
777
+ success: false,
778
+ error: executeStderr || "Function execution failed with non-zero exit code",
779
+ logs,
780
+ executionTime
781
+ };
782
+ }
783
+ const result = parseExecutionResult(executeStdout, functionId, logger$1);
784
+ logger$1.info({
785
+ functionId,
786
+ executionTime
787
+ }, "Function executed successfully in Vercel Sandbox");
788
+ return {
789
+ success: true,
790
+ result,
791
+ logs,
792
+ executionTime
793
+ };
794
+ } catch (innerError) {
795
+ await this.removeSandbox(dependencyHash);
796
+ throw innerError;
797
+ }
798
+ } catch (error) {
799
+ const executionTime = Date.now() - startTime;
800
+ const errorMessage = error instanceof Error ? error.message : String(error);
801
+ logger$1.error({
802
+ functionId,
803
+ error: errorMessage,
804
+ executionTime
805
+ }, "Vercel Sandbox execution error");
806
+ return {
807
+ success: false,
808
+ error: errorMessage,
809
+ logs,
810
+ executionTime
811
+ };
812
+ }
813
+ }
814
+ };
815
+
816
+ //#endregion
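
One detail worth calling out from the Vercel executor above: extractEnvVars only detects literal process.env.FOO and process.env['FOO'] references, and createEnvFileContent writes empty placeholders for them (values are not populated yet). A small sketch of that scan using the same two regexes (the function name and sample input here are illustrative):

    // Mirrors VercelSandboxExecutor.extractEnvVars(): dot and bracket notation only;
    // dynamically computed keys are not picked up.
    function scanEnvVars(code) {
      const names = new Set();
      for (const re of [
        /process\.env\.([A-Z_][A-Z0-9_]*)/g,
        /process\.env\[['"]([A-Z_][A-Z0-9_]*)['"]\]/g,
      ]) {
        let match = re.exec(code);
        while (match !== null) {
          names.add(match[1]);
          match = re.exec(code);
        }
      }
      return names;
    }

    scanEnvVars(`fetch(process.env.BASE_URL, { headers: { key: process.env['API_KEY'] } })`);
    // => Set { "BASE_URL", "API_KEY" }; the sandbox then gets a .env file containing
    //    BASE_URL="" and API_KEY="" placeholders.
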
817
+ //#region src/tools/SandboxExecutorFactory.ts
818
+ const logger = getLogger("SandboxExecutorFactory");
819
+ /**
820
+ * Factory for creating and managing sandbox executors
821
+ * Routes execution to the appropriate sandbox provider (native or Vercel)
822
+ */
823
+ var SandboxExecutorFactory = class SandboxExecutorFactory {
824
+ static instance;
825
+ nativeExecutor = null;
826
+ vercelExecutors = /* @__PURE__ */ new Map();
827
+ constructor() {
828
+ logger.info({}, "SandboxExecutorFactory initialized");
829
+ }
830
+ /**
831
+ * Get singleton instance of SandboxExecutorFactory
832
+ */
833
+ static getInstance() {
834
+ if (!SandboxExecutorFactory.instance) SandboxExecutorFactory.instance = new SandboxExecutorFactory();
835
+ return SandboxExecutorFactory.instance;
836
+ }
837
+ /**
838
+ * Execute a function tool using the appropriate sandbox provider
839
+ */
840
+ async executeFunctionTool(functionId, args, config) {
841
+ const sandboxConfig = config.sandboxConfig;
842
+ if (!sandboxConfig) throw new Error("Sandbox configuration is required for function tool execution");
843
+ if (sandboxConfig.provider === "native") return this.executeInNativeSandbox(functionId, args, config);
844
+ if (sandboxConfig.provider === "vercel") return this.executeInVercelSandbox(functionId, args, config);
845
+ throw new Error(`Unknown sandbox provider: ${sandboxConfig.provider}`);
846
+ }
847
+ /**
848
+ * Execute in native sandbox
849
+ */
850
+ async executeInNativeSandbox(functionId, args, config) {
851
+ if (!this.nativeExecutor) {
852
+ this.nativeExecutor = NativeSandboxExecutor.getInstance();
853
+ logger.info({}, "Native sandbox executor created");
854
+ }
855
+ return this.nativeExecutor.executeFunctionTool(functionId, args, config);
856
+ }
857
+ /**
858
+ * Execute in Vercel sandbox
859
+ */
860
+ async executeInVercelSandbox(functionId, args, config) {
861
+ const vercelConfig = config.sandboxConfig;
862
+ const configKey = `${vercelConfig.teamId}:${vercelConfig.projectId}`;
863
+ if (!this.vercelExecutors.has(configKey)) {
864
+ const executor$1 = VercelSandboxExecutor.getInstance(vercelConfig);
865
+ this.vercelExecutors.set(configKey, executor$1);
866
+ logger.info({
867
+ teamId: vercelConfig.teamId,
868
+ projectId: vercelConfig.projectId
869
+ }, "Vercel sandbox executor created");
870
+ }
871
+ const executor = this.vercelExecutors.get(configKey);
872
+ if (!executor) throw new Error(`Failed to get Vercel executor for config: ${configKey}`);
873
+ const result = await executor.executeFunctionTool(functionId, args, config);
874
+ if (!result.success) throw new Error(result.error || "Vercel sandbox execution failed");
875
+ return result.result;
876
+ }
877
+ /**
878
+ * Clean up all sandbox executors
879
+ */
880
+ async cleanup() {
881
+ logger.info({}, "Cleaning up sandbox executors");
882
+ this.nativeExecutor = null;
883
+ for (const [key, executor] of this.vercelExecutors.entries()) {
884
+ await executor.cleanup();
885
+ this.vercelExecutors.delete(key);
886
+ }
887
+ logger.info({}, "Sandbox executor cleanup completed");
888
+ }
889
+ };
890
+
891
+ //#endregion
892
+ export { SandboxExecutorFactory };
893
+ //# sourceMappingURL=SandboxExecutorFactory.js.map
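
Putting the pieces together, callers go through SandboxExecutorFactory, which routes on sandboxConfig.provider ("native" or "vercel"). A minimal usage sketch against the API shown in this diff; the import specifier is a guess at how this dist entry might be loaded (the package's actual export map may differ), and the tool id, code, and config values are placeholders:

    // Assumed local path to the new dist entry; adjust to the package's real exports.
    import { SandboxExecutorFactory } from "./dist/SandboxExecutorFactory.js";

    const factory = SandboxExecutorFactory.getInstance();

    // Native provider: runs in a pooled local Node.js process sandbox.
    // With no dependencies, no npm install happens and the hot path is taken.
    const sum = await factory.executeFunctionTool("demo-sum", { a: 2, b: 3 }, {
      executeCode: "async ({ a, b }) => a + b",
      dependencies: {},
      sandboxConfig: { provider: "native", vcpus: 1, timeout: 30000 },
    });
    // => 5

    // With sandboxConfig.provider: "vercel" (plus token, teamId, projectId, runtime,
    // timeout, vcpus), the same call is routed to a pooled Vercel Sandbox MicroVM
    // and the unwrapped result is returned the same way.
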