bashkit 0.3.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/index.js +27 -7
- package/dist/sandbox/ripgrep.d.ts +11 -0
- package/dist/workflow.d.ts +52 -0
- package/dist/workflow.js +458 -0
- package/package.json +1 -1
- package/dist/tools/web-constants.d.ts +0 -5
package/README.md
CHANGED

@@ -1,5 +1,7 @@
 # bashkit

+[](https://www.npmjs.com/package/bashkit)
+
 Agentic coding tools for Vercel AI SDK. Give AI agents the ability to execute code, read/write files, and perform coding tasks in a sandboxed environment.

 ## Overview
package/dist/index.js
CHANGED

@@ -65,8 +65,25 @@ function createLazySingleton(factory) {
   };
 }

+// src/sandbox/ripgrep.ts
+async function getBundledRgPath() {
+  try {
+    const { rgPath } = await import("@vscode/ripgrep");
+    return rgPath;
+  } catch {
+    return;
+  }
+}
+function getBundledRgPathSync() {
+  try {
+    const { rgPath } = __require("@vscode/ripgrep");
+    return rgPath;
+  } catch {
+    return;
+  }
+}
+
 // src/sandbox/ensure-tools.ts
-import { rgPath as bundledRgPath } from "@vscode/ripgrep";
 var RIPGREP_VERSION = "14.1.0";
 var ARCH_MAP = {
   x86_64: "x86_64-unknown-linux-musl",
@@ -74,10 +91,13 @@ var ARCH_MAP = {
   arm64: "aarch64-unknown-linux-gnu"
 };
 async function ensureSandboxTools(sandbox) {
-  const bundledCheck = await sandbox.exec(`test -x "${bundledRgPath}" && echo found`);
-  if (bundledCheck.stdout.includes("found")) {
-    sandbox.rgPath = bundledRgPath;
-    return;
+  const bundledRgPath = await getBundledRgPath();
+  if (bundledRgPath) {
+    const bundledCheck = await sandbox.exec(`test -x "${bundledRgPath}" && echo found`);
+    if (bundledCheck.stdout.includes("found")) {
+      sandbox.rgPath = bundledRgPath;
+      return;
+    }
   }
   const tmpCheck = await sandbox.exec("test -x /tmp/rg && echo found");
   if (tmpCheck.stdout.includes("found")) {
@@ -229,9 +249,9 @@ async function createE2BSandbox(config = {}) {
 }
 // src/sandbox/local.ts
 import { existsSync, mkdirSync } from "node:fs";
-import { rgPath as bundledRgPath2 } from "@vscode/ripgrep";
 function createLocalSandbox(config = {}) {
   const workingDirectory = config.cwd || "/tmp";
+  const rgPath = getBundledRgPathSync();
   if (!existsSync(workingDirectory)) {
     mkdirSync(workingDirectory, { recursive: true });
   }
@@ -271,7 +291,7 @@ function createLocalSandbox(config = {}) {
   };
   return {
     exec,
-    rgPath
+    rgPath,
     async readFile(path) {
       const fullPath = path.startsWith("/") ? path : `${workingDirectory}/${path}`;
       const file = Bun.file(fullPath);
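The net effect of these hunks is that `@vscode/ripgrep` becomes an optional dependency: the hard top-level imports are replaced by guarded lookups, so a missing package yields `undefined` and the existing `/tmp/rg`, system `rg`, and download fallbacks take over. A minimal sketch of the same guarded-import pattern outside the bundle (the function name here is illustrative, not part of bashkit's API):

```typescript
// Resolve the bundled ripgrep binary only if @vscode/ripgrep is installed;
// a dynamic import fails at call time instead of at module load time.
async function resolveOptionalRgPath(): Promise<string | undefined> {
  try {
    const { rgPath } = await import("@vscode/ripgrep");
    return rgPath;
  } catch {
    return undefined; // not installed: caller falls back to /tmp/rg or system rg
  }
}
```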
package/dist/sandbox/ripgrep.d.ts
ADDED

@@ -0,0 +1,11 @@
+/**
+ * Dynamically imports @vscode/ripgrep to get the bundled binary path.
+ * Returns undefined if the package is not installed.
+ */
+export declare function getBundledRgPath(): Promise<string | undefined>;
+/**
+ * Synchronously gets the bundled ripgrep path using require.
+ * For use in synchronous contexts (e.g., LocalSandbox).
+ * Returns undefined if the package is not installed.
+ */
+export declare function getBundledRgPathSync(): string | undefined;
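A short usage sketch for these two helpers (the deep import specifier is illustrative; the declarations ship under dist/sandbox/ and may not be part of the package's public exports):

```typescript
import { getBundledRgPath, getBundledRgPathSync } from "bashkit/dist/sandbox/ripgrep";

// Async callers (e.g. sandbox setup) await the dynamic-import variant...
const rgPath = await getBundledRgPath();
console.log(rgPath ?? "@vscode/ripgrep not installed; falling back");

// ...while synchronous construction paths (e.g. the local sandbox) use the require-based one.
const rgPathSync = getBundledRgPathSync();
```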
package/dist/workflow.d.ts
ADDED

@@ -0,0 +1,52 @@
+/**
+ * Bashkit Workflow Integration
+ *
+ * Durable agent tools for Vercel's Workflow DevKit.
+ * Each tool execution is wrapped with "use step" for automatic
+ * durability, retries, and checkpointing.
+ *
+ * @example
+ * ```typescript
+ * import { createDurableAgentTools } from 'bashkit/workflow';
+ * import { DurableAgent } from '@workflow/ai/agent';
+ *
+ * export async function generateReport(sandboxId: string) {
+ *   "use workflow";
+ *
+ *   const { tools } = createDurableAgentTools(sandboxId);
+ *
+ *   const agent = new DurableAgent({
+ *     model: "anthropic/claude-sonnet-4-20250514",
+ *     tools,
+ *   });
+ *
+ *   await agent.run({ prompt: "Generate the report" });
+ * }
+ * ```
+ */
+import { type ToolSet } from "ai";
+import type { AgentConfig } from "./types";
+export interface DurableAgentConfig extends Omit<AgentConfig, "cache"> {
+    /**
+     * E2B API key (optional, uses ANTHROPIC_API_KEY env var by default)
+     */
+    apiKey?: string;
+}
+export interface DurableAgentToolsResult {
+    tools: ToolSet;
+}
+/**
+ * Creates durable agent tools for Workflow DevKit.
+ *
+ * Each tool execution:
+ * 1. Is wrapped with "use step" for durability
+ * 2. Reconnects to the E2B sandbox via sandboxId
+ * 3. Can retry independently on failure
+ * 4. Works with parallel tool calls (each gets own correlationId)
+ *
+ * @param sandboxId - E2B sandbox ID to reconnect to
+ * @param config - Optional tool configuration
+ */
+export declare function createDurableAgentTools(sandboxId: string, config?: DurableAgentConfig): DurableAgentToolsResult;
+export type { Sandbox } from "./sandbox/interface";
+export type { ToolConfig, AgentConfig } from "./types";
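The embedded example covers the happy path; the optional `config` parameter also carries the per-tool options that the bundled executors in workflow.js below actually read (`allowedPaths`/`maxFileSize` for Read, `blockedCommands`/`timeout` for Bash). A sketch, assuming `AgentConfig` exposes those options under a `tools` field as the bundle's `config?.tools` lookup suggests:

```typescript
import { createDurableAgentTools } from "bashkit/workflow";

const { tools } = createDurableAgentTools("sandbox-id-from-a-prior-step", {
  apiKey: process.env.E2B_API_KEY,            // optional per DurableAgentConfig
  tools: {
    Read: { allowedPaths: ["/home/user"], maxFileSize: 1000 },
    Bash: { blockedCommands: ["rm -rf"], timeout: 60_000 },
  },
});
```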
package/dist/workflow.js
ADDED

@@ -0,0 +1,458 @@
+import { createRequire } from "node:module";
+var __create = Object.create;
+var __getProtoOf = Object.getPrototypeOf;
+var __defProp = Object.defineProperty;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __toESM = (mod, isNodeMode, target) => {
+  target = mod != null ? __create(__getProtoOf(mod)) : {};
+  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
+  for (let key of __getOwnPropNames(mod))
+    if (!__hasOwnProp.call(to, key))
+      __defProp(to, key, {
+        get: () => mod[key],
+        enumerable: true
+      });
+  return to;
+};
+var __require = /* @__PURE__ */ createRequire(import.meta.url);
+
+// src/workflow.ts
+import { tool, zodSchema } from "ai";
+import { z } from "zod";
+
+// src/sandbox/lazy-singleton.ts
+function createLazySingleton(factory) {
+  let promise = null;
+  return {
+    get: () => {
+      if (!promise) {
+        promise = factory();
+      }
+      return promise;
+    },
+    reset: () => {
+      promise = null;
+    }
+  };
+}
+
+// src/sandbox/ensure-tools.ts
+import { rgPath as bundledRgPath } from "@vscode/ripgrep";
+var RIPGREP_VERSION = "14.1.0";
+var ARCH_MAP = {
+  x86_64: "x86_64-unknown-linux-musl",
+  aarch64: "aarch64-unknown-linux-gnu",
+  arm64: "aarch64-unknown-linux-gnu"
+};
+async function ensureSandboxTools(sandbox) {
+  const bundledCheck = await sandbox.exec(`test -x "${bundledRgPath}" && echo found`);
+  if (bundledCheck.stdout.includes("found")) {
+    sandbox.rgPath = bundledRgPath;
+    return;
+  }
+  const tmpCheck = await sandbox.exec("test -x /tmp/rg && echo found");
+  if (tmpCheck.stdout.includes("found")) {
+    sandbox.rgPath = "/tmp/rg";
+    return;
+  }
+  const systemCheck = await sandbox.exec("which rg 2>/dev/null");
+  if (systemCheck.exitCode === 0 && systemCheck.stdout.trim()) {
+    sandbox.rgPath = systemCheck.stdout.trim();
+    return;
+  }
+  const archResult = await sandbox.exec("uname -m");
+  const arch = archResult.stdout.trim();
+  const ripgrepArch = ARCH_MAP[arch];
+  if (!ripgrepArch) {
+    throw new Error(`Unsupported architecture: ${arch}. Supported: ${Object.keys(ARCH_MAP).join(", ")}`);
+  }
+  const ripgrepUrl = `https://github.com/BurntSushi/ripgrep/releases/download/${RIPGREP_VERSION}/ripgrep-${RIPGREP_VERSION}-${ripgrepArch}.tar.gz`;
+  const tarPath = `ripgrep-${RIPGREP_VERSION}-${ripgrepArch}/rg`;
+  const installResult = await sandbox.exec(`
+    curl -sL "${ripgrepUrl}" |
+    tar xzf - -C /tmp --strip-components=1 ${tarPath} &&
+    chmod +x /tmp/rg
+  `);
+  if (installResult.exitCode !== 0) {
+    throw new Error(`Failed to install ripgrep: ${installResult.stderr}`);
+  }
+  sandbox.rgPath = "/tmp/rg";
+}
+
+// src/sandbox/e2b.ts
+async function createE2BSandbox(config = {}) {
+  let sandboxId = config.sandboxId;
+  const workingDirectory = config.cwd || "/home/user";
+  const timeout = config.timeout ?? 300000;
+  const sandbox = createLazySingleton(async () => {
+    let E2BSandboxSDK;
+    try {
+      const module = await import("@e2b/code-interpreter");
+      E2BSandboxSDK = module.Sandbox;
+    } catch {
+      throw new Error("E2BSandbox requires @e2b/code-interpreter. Install with: npm install @e2b/code-interpreter");
+    }
+    let sbx;
+    if (config.sandboxId) {
+      sbx = await E2BSandboxSDK.connect(config.sandboxId);
+    } else {
+      sbx = await E2BSandboxSDK.create({
+        apiKey: config.apiKey,
+        timeoutMs: timeout,
+        metadata: config.metadata
+      });
+      sandboxId = sbx.sandboxId;
+    }
+    return sbx;
+  });
+  const exec = async (command, options) => {
+    const sbx = await sandbox.get();
+    const startTime = performance.now();
+    try {
+      const result = await sbx.commands.run(command, {
+        cwd: options?.cwd || workingDirectory,
+        timeoutMs: options?.timeout
+      });
+      const durationMs = Math.round(performance.now() - startTime);
+      return {
+        stdout: result.stdout,
+        stderr: result.stderr,
+        exitCode: result.exitCode,
+        durationMs,
+        interrupted: false
+      };
+    } catch (error) {
+      const durationMs = Math.round(performance.now() - startTime);
+      if (error instanceof Error && error.message.toLowerCase().includes("timeout")) {
+        return {
+          stdout: "",
+          stderr: "Command timed out",
+          exitCode: 124,
+          durationMs,
+          interrupted: true
+        };
+      }
+      if (error instanceof Error) {
+        const exitMatch = error.message.match(/exit status (\d+)/i);
+        const exitCode = exitMatch ? parseInt(exitMatch[1], 10) : 1;
+        return {
+          stdout: "",
+          stderr: error.message,
+          exitCode,
+          durationMs,
+          interrupted: false
+        };
+      }
+      throw error;
+    }
+  };
+  let rgPath;
+  const sandboxObj = {
+    exec,
+    get id() {
+      return sandboxId;
+    },
+    get rgPath() {
+      return rgPath;
+    },
+    set rgPath(path) {
+      rgPath = path;
+    },
+    async readFile(path) {
+      const result = await exec(`cat "${path}"`);
+      if (result.exitCode !== 0) {
+        throw new Error(`Failed to read file: ${result.stderr}`);
+      }
+      return result.stdout;
+    },
+    async writeFile(path, content) {
+      const sbx = await sandbox.get();
+      await sbx.files.write(path, content);
+    },
+    async readDir(path) {
+      const result = await exec(`ls -1 "${path}"`);
+      if (result.exitCode !== 0) {
+        throw new Error(`Failed to read directory: ${result.stderr}`);
+      }
+      return result.stdout.split(`
+`).filter(Boolean);
+    },
+    async fileExists(path) {
+      const result = await exec(`test -e "${path}"`);
+      return result.exitCode === 0;
+    },
+    async isDirectory(path) {
+      const result = await exec(`test -d "${path}"`);
+      return result.exitCode === 0;
+    },
+    async destroy() {
+      try {
+        const sbx = await sandbox.get();
+        await sbx.kill();
+      } catch {}
+      sandbox.reset();
+    }
+  };
+  if (config.ensureTools !== false) {
+    await ensureSandboxTools(sandboxObj);
+  }
+  return sandboxObj;
+}
+
+// src/workflow.ts
+async function reconnectSandbox(sandboxId, apiKey) {
+  return createE2BSandbox({
+    sandboxId,
+    apiKey,
+    ensureTools: false
+  });
+}
+var readInputSchema = z.object({
+  file_path: z.string().describe("Absolute path to file or directory"),
+  offset: z.number().optional().describe("Line number to start reading from (1-indexed)"),
+  limit: z.number().optional().describe("Maximum number of lines to read")
+});
+var READ_DESCRIPTION = `Reads a file from the sandbox filesystem.
+
+Usage:
+- The file_path parameter must be an absolute path
+- By default, reads up to 500 lines from the beginning
+- Use offset and limit for large files
+- Returns line numbers starting at 1`;
+var writeInputSchema = z.object({
+  file_path: z.string().describe("Absolute path to the file to write"),
+  content: z.string().describe("Content to write to the file")
+});
+var WRITE_DESCRIPTION = `Writes content to a file in the sandbox filesystem.
+Creates parent directories if they don't exist.`;
+var editInputSchema = z.object({
+  file_path: z.string().describe("Absolute path to the file to edit"),
+  old_string: z.string().describe("The exact string to find and replace"),
+  new_string: z.string().describe("The string to replace it with")
+});
+var EDIT_DESCRIPTION = `Makes a targeted edit to a file by replacing old_string with new_string.
+The old_string must match exactly (including whitespace and indentation).`;
+var bashInputSchema = z.object({
+  command: z.string().describe("The bash command to execute"),
+  description: z.string().optional().describe("Brief description of what this command does"),
+  timeout: z.number().optional().describe("Timeout in milliseconds (default: 120000)")
+});
+var BASH_DESCRIPTION = `Executes a bash command in the sandbox.
+Use for git, npm, system commands, etc.`;
+var globInputSchema = z.object({
+  pattern: z.string().describe('Glob pattern (e.g., "**/*.ts", "src/**/*.json")'),
+  path: z.string().optional().describe("Directory to search in")
+});
+var GLOB_DESCRIPTION = `Finds files matching a glob pattern.
+Returns list of matching file paths.`;
+var grepInputSchema = z.object({
+  pattern: z.string().describe("Regex pattern to search for"),
+  path: z.string().optional().describe("File or directory to search in"),
+  glob: z.string().optional().describe('Glob pattern to filter files (e.g., "*.ts")'),
+  output_mode: z.enum(["content", "files_with_matches", "count"]).optional().describe("Output mode (default: files_with_matches)")
+});
+var GREP_DESCRIPTION = `Searches for a regex pattern in files.
+Returns matching lines with file paths and line numbers.`;
+async function executeRead(sandbox, params, config) {
+  const { file_path, offset, limit } = params;
+  if (config?.allowedPaths) {
+    const isAllowed = config.allowedPaths.some((allowed) => file_path.startsWith(allowed));
+    if (!isAllowed) {
+      return { error: `Path not allowed: ${file_path}` };
+    }
+  }
+  try {
+    const exists = await sandbox.fileExists(file_path);
+    if (!exists) {
+      return { error: `Path not found: ${file_path}` };
+    }
+    const isDir = await sandbox.isDirectory(file_path);
+    if (isDir) {
+      const entries = await sandbox.readDir(file_path);
+      return { type: "directory", entries, count: entries.length };
+    }
+    const content = await sandbox.readFile(file_path);
+    const allLines = content.split(`
+`);
+    const totalLines = allLines.length;
+    const maxLinesWithoutLimit = config?.maxFileSize || 500;
+    if (!limit && totalLines > maxLinesWithoutLimit) {
+      return {
+        error: `File is large (${totalLines} lines). Use 'offset' and 'limit' to read in chunks.`
+      };
+    }
+    const startLine = offset ? offset - 1 : 0;
+    const endLine = limit ? startLine + limit : allLines.length;
+    const selectedLines = allLines.slice(startLine, endLine);
+    const lines = selectedLines.map((line, i) => ({
+      line_number: startLine + i + 1,
+      content: line
+    }));
+    return {
+      type: "text",
+      content: selectedLines.join(`
+`),
+      lines,
+      total_lines: totalLines
+    };
+  } catch (error) {
+    return { error: error instanceof Error ? error.message : "Unknown error" };
+  }
+}
+async function executeWrite(sandbox, params) {
+  const { file_path, content } = params;
+  try {
+    const dir = file_path.split("/").slice(0, -1).join("/");
+    if (dir) {
+      await sandbox.exec(`mkdir -p "${dir}"`);
+    }
+    await sandbox.writeFile(file_path, content);
+    return { success: true, path: file_path, bytes_written: content.length };
+  } catch (error) {
+    return { error: error instanceof Error ? error.message : "Unknown error" };
+  }
+}
+async function executeEdit(sandbox, params) {
+  const { file_path, old_string, new_string } = params;
+  try {
+    const content = await sandbox.readFile(file_path);
+    if (!content.includes(old_string)) {
+      return { error: `old_string not found in file: ${file_path}` };
+    }
+    const newContent = content.replace(old_string, new_string);
+    await sandbox.writeFile(file_path, newContent);
+    return { success: true, path: file_path };
+  } catch (error) {
+    return { error: error instanceof Error ? error.message : "Unknown error" };
+  }
+}
+async function executeBash(sandbox, params, config) {
+  const { command, timeout } = params;
+  if (config?.blockedCommands) {
+    const isBlocked = config.blockedCommands.some((blocked) => command.includes(blocked));
+    if (isBlocked) {
+      return { error: `Command blocked by security policy` };
+    }
+  }
+  try {
+    const result = await sandbox.exec(command, {
+      timeout: timeout || config?.timeout || 120000
+    });
+    return {
+      stdout: result.stdout,
+      stderr: result.stderr,
+      exit_code: result.exitCode,
+      duration_ms: result.durationMs
+    };
+  } catch (error) {
+    return { error: error instanceof Error ? error.message : "Unknown error" };
+  }
+}
+async function executeGlob(sandbox, params) {
+  const { pattern, path } = params;
+  const searchPath = path || ".";
+  try {
+    const result = await sandbox.exec(`find ${searchPath} -type f -name "${pattern.replace(/\*\*/g, "*")}" 2>/dev/null | head -200`);
+    const files = result.stdout.split(`
+`).filter(Boolean);
+    return { files, count: files.length };
+  } catch (error) {
+    return { error: error instanceof Error ? error.message : "Unknown error" };
+  }
+}
+async function executeGrep(sandbox, params) {
+  const { pattern, path, glob, output_mode } = params;
+  const searchPath = path || ".";
+  const mode = output_mode || "files_with_matches";
+  try {
+    let cmd;
+    const rgPath = sandbox.rgPath || "rg";
+    if (mode === "files_with_matches") {
+      cmd = `${rgPath} -l "${pattern}" ${searchPath}`;
+    } else if (mode === "count") {
+      cmd = `${rgPath} -c "${pattern}" ${searchPath}`;
+    } else {
+      cmd = `${rgPath} -n "${pattern}" ${searchPath}`;
+    }
+    if (glob) {
+      cmd += ` --glob "${glob}"`;
+    }
+    cmd += " 2>/dev/null | head -100";
+    const result = await sandbox.exec(cmd);
+    const lines = result.stdout.split(`
+`).filter(Boolean);
+    if (mode === "files_with_matches") {
+      return { files: lines, count: lines.length };
+    } else if (mode === "count") {
+      return { matches: lines };
+    } else {
+      return { content: result.stdout, match_count: lines.length };
+    }
+  } catch (error) {
+    return { error: error instanceof Error ? error.message : "Unknown error" };
+  }
+}
+function createDurableAgentTools(sandboxId, config) {
+  const toolsConfig = config?.tools || {};
+  const tools = {
+    Read: tool({
+      description: READ_DESCRIPTION,
+      inputSchema: zodSchema(readInputSchema),
+      execute: async (params) => {
+        "use step";
+        const sandbox = await reconnectSandbox(sandboxId, config?.apiKey);
+        return executeRead(sandbox, params, toolsConfig.Read);
+      }
+    }),
+    Write: tool({
+      description: WRITE_DESCRIPTION,
+      inputSchema: zodSchema(writeInputSchema),
+      execute: async (params) => {
+        "use step";
+        const sandbox = await reconnectSandbox(sandboxId, config?.apiKey);
+        return executeWrite(sandbox, params);
+      }
+    }),
+    Edit: tool({
+      description: EDIT_DESCRIPTION,
+      inputSchema: zodSchema(editInputSchema),
+      execute: async (params) => {
+        "use step";
+        const sandbox = await reconnectSandbox(sandboxId, config?.apiKey);
+        return executeEdit(sandbox, params);
+      }
+    }),
+    Bash: tool({
+      description: BASH_DESCRIPTION,
+      inputSchema: zodSchema(bashInputSchema),
+      execute: async (params) => {
+        "use step";
+        const sandbox = await reconnectSandbox(sandboxId, config?.apiKey);
+        return executeBash(sandbox, params, toolsConfig.Bash);
+      }
+    }),
+    Glob: tool({
+      description: GLOB_DESCRIPTION,
+      inputSchema: zodSchema(globInputSchema),
+      execute: async (params) => {
+        "use step";
+        const sandbox = await reconnectSandbox(sandboxId, config?.apiKey);
+        return executeGlob(sandbox, params);
+      }
+    }),
+    Grep: tool({
+      description: GREP_DESCRIPTION,
+      inputSchema: zodSchema(grepInputSchema),
+      execute: async (params) => {
+        "use step";
+        const sandbox = await reconnectSandbox(sandboxId, config?.apiKey);
+        return executeGrep(sandbox, params);
+      }
+    })
+  };
+  return { tools };
+}
+export {
+  createDurableAgentTools
+};
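Note that `reconnectSandbox` passes `ensureTools: false`, so each "use step" tool call assumes the sandbox already exists (and, for Grep, that an `rg` binary is reachable, since `sandbox.rgPath` falls back to plain `rg`). The sandbox is typically created up front and only its id is threaded into the workflow; a sketch of that setup using the same `@e2b/code-interpreter` calls the bundle itself relies on:

```typescript
import { Sandbox } from "@e2b/code-interpreter";
import { createDurableAgentTools } from "bashkit/workflow";
import { DurableAgent } from "@workflow/ai/agent";

export async function generateReport(prompt: string) {
  "use workflow";
  // Create the sandbox once; every durable tool step reconnects to it by id
  // via Sandbox.connect() inside reconnectSandbox().
  const sbx = await Sandbox.create();
  const { tools } = createDurableAgentTools(sbx.sandboxId);
  const agent = new DurableAgent({ model: "anthropic/claude-sonnet-4-20250514", tools });
  await agent.run({ prompt });
  await sbx.kill(); // clean up once the run completes
}
```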
package/package.json
CHANGED