bashkit 0.2.2 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +55 -12
- package/README.md +25 -1
- package/dist/index.js +272 -177
- package/dist/tools/index.d.ts +1 -1
- package/dist/tools/web-fetch.d.ts +8 -0
- package/dist/types.d.ts +17 -4
- package/package.json +4 -3
package/AGENTS.md
CHANGED
@@ -21,9 +21,9 @@ bun add bashkit ai @ai-sdk/anthropic
 Runs commands directly on the local machine. Use for development/testing only.
 
 ```typescript
-import { createAgentTools,
+import { createAgentTools, createLocalSandbox } from "bashkit";
 
-const sandbox =
+const sandbox = createLocalSandbox({ cwd: "/tmp/workspace" });
 const { tools } = createAgentTools(sandbox);
 ```
 
@@ -32,9 +32,9 @@ const { tools } = createAgentTools(sandbox);
 Runs in isolated Firecracker microVMs on Vercel's infrastructure.
 
 ```typescript
-import { createAgentTools,
+import { createAgentTools, createVercelSandbox } from "bashkit";
 
-const sandbox =
+const sandbox = createVercelSandbox({
   runtime: "node22",
   resources: { vcpus: 2 },
 });
@@ -44,6 +44,49 @@ const { tools } = createAgentTools(sandbox);
 await sandbox.destroy();
 ```
 
+### E2BSandbox (Production)
+
+Runs in E2B's cloud sandboxes. Requires `@e2b/code-interpreter` peer dependency.
+
+```typescript
+import { createAgentTools, createE2BSandbox } from "bashkit";
+
+const sandbox = createE2BSandbox({
+  apiKey: process.env.E2B_API_KEY,
+});
+const { tools } = createAgentTools(sandbox);
+
+await sandbox.destroy();
+```
+
+### Sandbox Reconnection (Cloud Sandboxes)
+
+Cloud sandboxes (E2B, Vercel) support reconnection via the `id` property and `sandboxId` config:
+
+```typescript
+// Create a new sandbox
+const sandbox = createE2BSandbox({ apiKey: process.env.E2B_API_KEY });
+
+// After first operation, the sandbox ID is available
+await sandbox.exec("echo hello");
+const sandboxId = sandbox.id; // "sbx_abc123..."
+
+// Store sandboxId in your database (e.g., chat metadata)
+await db.chat.update({ where: { id: chatId }, data: { sandboxId } });
+
+// Later: reconnect to the same sandbox
+const savedId = chat.sandboxId;
+const reconnected = createE2BSandbox({
+  apiKey: process.env.E2B_API_KEY,
+  sandboxId: savedId, // Reconnects instead of creating new
+});
+```
+
+This is useful for:
+- Reusing sandboxes across multiple requests in the same conversation
+- Persisting sandbox state between server restarts
+- Reducing sandbox creation overhead
+
 ## Available Tools
 
 ### Default Tools (always included)
@@ -89,11 +132,11 @@ import { generateText, wrapLanguageModel, stepCountIs } from "ai";
 import { anthropic } from "@ai-sdk/anthropic";
 import {
   createAgentTools,
-
+  createLocalSandbox,
   anthropicPromptCacheMiddleware,
 } from "bashkit";
 
-const sandbox =
+const sandbox = createLocalSandbox({ cwd: "/tmp/workspace" });
 const { tools } = createAgentTools(sandbox);
 
 // Wrap model with prompt caching (recommended)
@@ -311,12 +354,12 @@ const skills = await discoverSkills();
 ### Using Skills with Agents
 
 ```typescript
-import { discoverSkills, skillsToXml, createAgentTools,
+import { discoverSkills, skillsToXml, createAgentTools, createLocalSandbox } from "bashkit";
 import { generateText, stepCountIs } from "ai";
 import { anthropic } from "@ai-sdk/anthropic";
 
 const skills = await discoverSkills();
-const sandbox =
+const sandbox = createLocalSandbox({ cwd: "/tmp/workspace" });
 const { tools } = createAgentTools(sandbox);
 
 const result = await generateText({
@@ -421,13 +464,13 @@ import {
   createAgentTools,
   createTaskTool,
   createTodoWriteTool,
-
+  createLocalSandbox,
   anthropicPromptCacheMiddleware,
   type TodoState,
 } from "bashkit";
 
 // 1. Create sandbox
-const sandbox =
+const sandbox = createLocalSandbox({ cwd: "/tmp/workspace" });
 
 // 2. Create sandbox tools
 const { tools: sandboxTools } = createAgentTools(sandbox);
@@ -489,9 +532,9 @@ const { tools } = createAgentTools(sandbox, {
 Cache tool execution results to avoid redundant operations:
 
 ```typescript
-import { createAgentTools,
+import { createAgentTools, createLocalSandbox } from "bashkit";
 
-const sandbox =
+const sandbox = createLocalSandbox({ cwd: "/tmp/workspace" });
 
 // Enable caching with defaults (LRU, 5min TTL)
 const { tools } = createAgentTools(sandbox, { cache: true });
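The reconnection section added above pairs naturally with a small reuse-or-create helper for the "multiple requests in the same conversation" case. A minimal sketch using only the `createE2BSandbox` options shown in this diff (the chat record shape and the `saveSandboxId` callback are hypothetical):

```typescript
import { createE2BSandbox } from "bashkit";

// Reconnect to the chat's existing sandbox if one was stored, otherwise create a new
// sandbox and persist its id once the first operation has assigned it.
async function sandboxForChat(
  chat: { sandboxId?: string },
  saveSandboxId: (id: string) => Promise<void>
) {
  const sandbox = createE2BSandbox({
    apiKey: process.env.E2B_API_KEY,
    ...(chat.sandboxId ? { sandboxId: chat.sandboxId } : {}),
  });
  if (!chat.sandboxId) {
    await sandbox.exec("echo ready"); // first operation creates the sandbox and sets sandbox.id
    if (sandbox.id) await saveSandboxId(sandbox.id);
  }
  return sandbox;
}
```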
package/README.md
CHANGED
@@ -148,6 +148,15 @@ const sandbox = createVercelSandbox({
   runtime: 'node22',
   resources: { vcpus: 2 },
 });
+
+// After first operation, get the sandbox ID for persistence
+await sandbox.exec('echo hello');
+console.log(sandbox.id); // Sandbox ID for reconnection
+
+// Later: reconnect to the same sandbox
+const reconnected = createVercelSandbox({
+  sandboxId: 'existing-sandbox-id',
+});
 ```
 
 ### E2BSandbox
@@ -158,7 +167,17 @@ Runs in E2B's cloud sandboxes. Requires `@e2b/code-interpreter` peer dependency.
 import { createE2BSandbox } from 'bashkit';
 
 const sandbox = createE2BSandbox({
-
+  apiKey: process.env.E2B_API_KEY,
+});
+
+// After first operation, get the sandbox ID for persistence
+await sandbox.exec('echo hello');
+console.log(sandbox.id); // "sbx_abc123..."
+
+// Later: reconnect to the same sandbox
+const reconnected = createE2BSandbox({
+  apiKey: process.env.E2B_API_KEY,
+  sandboxId: 'sbx_abc123...', // Reconnect to existing sandbox
 });
 ```
 
@@ -764,9 +783,14 @@ interface Sandbox {
   readDir(path: string): Promise<string[]>;
   fileExists(path: string): Promise<boolean>;
   destroy(): Promise<void>;
+
+  // Optional: Sandbox ID for reconnection (cloud providers only)
+  readonly id?: string;
 }
 ```
 
+The `id` property is available on cloud sandboxes (E2B, Vercel) after the first operation. Use it to persist the sandbox ID and reconnect later.
+
 ### Custom Sandbox Example
 
 ```typescript
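Since `id` is optional on the `Sandbox` interface and is only set on cloud sandboxes after the first operation, callers that persist it should narrow it first. A minimal sketch of such a guard (the helper is not part of the package):

```typescript
// Narrow the optional id before storing it; local sandboxes, and cloud sandboxes
// that have not run an operation yet, will not have one.
function requireSandboxId(sandbox: { readonly id?: string }): string {
  if (!sandbox.id) {
    throw new Error("Sandbox has no id yet; run an operation first (cloud sandboxes only)");
  }
  return sandbox.id;
}
```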
package/dist/index.js
CHANGED
@@ -50,7 +50,6 @@ var anthropicPromptCacheMiddleware = {
   transformParams: async ({ params }) => applyCacheMarkers(params)
 };
 // src/sandbox/e2b.ts
-import { Sandbox as E2BSandboxSDK } from "@e2b/code-interpreter";
 function createE2BSandbox(config = {}) {
   let sandbox = null;
   let sandboxId = config.sandboxId;
@@ -59,6 +58,13 @@ function createE2BSandbox(config = {}) {
   const ensureSandbox = async () => {
     if (sandbox)
       return sandbox;
+    let E2BSandboxSDK;
+    try {
+      const module = await import("@e2b/code-interpreter");
+      E2BSandboxSDK = module.Sandbox;
+    } catch {
+      throw new Error("E2BSandbox requires @e2b/code-interpreter. Install with: npm install @e2b/code-interpreter");
+    }
     if (config.sandboxId) {
       sandbox = await E2BSandboxSDK.connect(config.sandboxId);
     } else {
@@ -233,7 +239,6 @@ function createLocalSandbox(config = {}) {
   };
 }
 // src/sandbox/vercel.ts
-import { Sandbox as VercelSandboxSDK } from "@vercel/sandbox";
 function createVercelSandbox(config = {}) {
   let sandbox = null;
   let sandboxId = config.sandboxId;
@@ -246,6 +251,13 @@ function createVercelSandbox(config = {}) {
   const ensureSandbox = async () => {
     if (sandbox)
       return sandbox;
+    let VercelSandboxSDK;
+    try {
+      const module = await import("@vercel/sandbox");
+      VercelSandboxSDK = module.Sandbox;
+    } catch {
+      throw new Error("VercelSandbox requires @vercel/sandbox. Install with: npm install @vercel/sandbox");
+    }
     const createOptions = {
       runtime: resolvedConfig.runtime,
       resources: resolvedConfig.resources,
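The two hunks above replace static SDK imports with dynamic imports inside `ensureSandbox`, so a missing peer dependency now surfaces on first use rather than when `bashkit` itself is imported. A minimal consumer-side sketch of that behavior (the command run here is arbitrary):

```typescript
import { createE2BSandbox } from "bashkit";

async function main() {
  // Importing bashkit no longer requires @e2b/code-interpreter to be installed;
  // the peer dependency is only loaded when the sandbox is first used.
  const sandbox = createE2BSandbox({ apiKey: process.env.E2B_API_KEY });
  try {
    await sandbox.exec("echo hello");
  } catch (err) {
    // Without the peer dependency installed, this logs the error added in this version:
    // "E2BSandbox requires @e2b/code-interpreter. Install with: npm install @e2b/code-interpreter"
    console.error(err);
  }
}

main();
```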
@@ -979,6 +991,7 @@ function createGlobTool(sandbox, config) {
 // src/tools/grep.ts
 import { tool as tool7, zodSchema as zodSchema7 } from "ai";
 import { z as z7 } from "zod";
+import { rgPath } from "@vscode/ripgrep";
 var grepInputSchema = z7.object({
   pattern: z7.string().describe("The regular expression pattern to search for in file contents"),
   path: z7.string().optional().describe("File or directory to search in (defaults to cwd)"),
@@ -992,13 +1005,13 @@ var grepInputSchema = z7.object({
   "-C": z7.number().optional().describe("Number of lines to show before and after each match. Requires output_mode: 'content'."),
   head_limit: z7.number().optional().describe("Limit output to first N lines/entries. Works across all output modes. Defaults to 0 (unlimited)."),
   offset: z7.number().optional().describe("Skip first N lines/entries before applying head_limit. Works across all output modes. Defaults to 0."),
-  multiline: z7.boolean().optional().describe("Enable multiline mode where patterns can span lines
+  multiline: z7.boolean().optional().describe("Enable multiline mode where patterns can span lines. Default: false.")
 });
-var GREP_DESCRIPTION = `A powerful content search tool with regex support.
+var GREP_DESCRIPTION = `A powerful content search tool built on ripgrep with regex support.
 
 **Usage:**
 - ALWAYS use Grep for search tasks. NEVER invoke \`grep\` or \`rg\` as a Bash command.
-- Supports regex syntax (e.g., "log.*Error", "function\\s+\\w+")
+- Supports full regex syntax (e.g., "log.*Error", "function\\s+\\w+")
 - Filter files with glob parameter (e.g., "*.js", "**/*.tsx") or type parameter (e.g., "js", "py", "rust")
 
 **Output modes:**
@@ -1013,11 +1026,8 @@ var GREP_DESCRIPTION = `A powerful content search tool with regex support. Use t
 
 **Pagination:**
 - Use offset to skip results (useful for pagination)
-- Use head_limit to limit total results returned
-
-**Note:** Set useRipgrep: true in config for better performance and multiline support (requires ripgrep installed).`;
+- Use head_limit to limit total results returned`;
 function createGrepTool(sandbox, config) {
-  const useRipgrep = config?.useRipgrep ?? false;
   return tool7({
     description: GREP_DESCRIPTION,
     inputSchema: zodSchema7(grepInputSchema),
@@ -1032,7 +1042,6 @@ function createGrepTool(sandbox, config) {
         type,
         output_mode = "files_with_matches",
         "-i": caseInsensitive,
-        "-n": showLineNumbers = true,
         "-B": beforeContext,
         "-A": afterContext,
         "-C": context,
@@ -1047,98 +1056,26 @@ function createGrepTool(sandbox, config) {
          return { error: `Path not allowed: ${searchPath}` };
        }
      }
-      if (multiline && !useRipgrep) {
-        return {
-          error: "Multiline mode requires ripgrep. Set useRipgrep: true in config."
-        };
-      }
      try {
-
-
-
-
-
-
-
-
-
-
-
-
-          output_mode,
-          caseInsensitive,
-          showLineNumbers,
-          beforeContext,
-          afterContext,
-          context,
-          glob,
-          type,
-          multiline,
-          paginationSuffix
-        });
-        } else {
-          cmd = buildGrepCommand({
-            pattern,
-            searchPath,
-            output_mode,
-            caseInsensitive,
-            showLineNumbers,
-            beforeContext,
-            afterContext,
-            context,
-            glob,
-            type,
-            paginationSuffix
-          });
-        }
+        const cmd = buildRipgrepCommand({
+          pattern,
+          searchPath,
+          output_mode,
+          caseInsensitive,
+          beforeContext,
+          afterContext,
+          context,
+          glob,
+          type,
+          multiline
+        });
        const result = await sandbox.exec(cmd, { timeout: config?.timeout });
        if (output_mode === "files_with_matches") {
-
-`).filter(Boolean);
-          return {
-            files,
-            count: files.length
-          };
+          return parseFilesOutput(result.stdout);
        } else if (output_mode === "count") {
-
-`).filter(Boolean);
-          const counts = lines.map((line) => {
-            const lastColon = line.lastIndexOf(":");
-            return {
-              file: line.slice(0, lastColon),
-              count: parseInt(line.slice(lastColon + 1), 10)
-            };
-          });
-          const total = counts.reduce((sum, c) => sum + c.count, 0);
-          return {
-            counts,
-            total
-          };
+          return parseCountOutput(result.stdout);
        } else {
-
-          return {
-            matches: [],
-            total_matches: 0
-          };
-        }
-          const lines = result.stdout.split(`
-`).filter(Boolean);
-          const matches = [];
-          for (const line of lines) {
-            const colonMatch = line.match(/^(.+?):(\d+)[:|-](.*)$/);
-            if (colonMatch) {
-              const [, file, lineNum, content] = colonMatch;
-              matches.push({
-                file,
-                line_number: parseInt(lineNum, 10),
-                line: content
-              });
-            }
-          }
-          return {
-            matches,
-            total_matches: matches.length
-          };
+          return parseContentOutput(result.stdout, head_limit, offset);
        }
      } catch (error) {
        return {
@@ -1149,14 +1086,12 @@ function createGrepTool(sandbox, config) {
   });
 }
 function buildRipgrepCommand(opts) {
-  const flags = [];
+  const flags = ["--json"];
   if (opts.caseInsensitive)
     flags.push("-i");
   if (opts.multiline)
     flags.push("-U", "--multiline-dotall");
   if (opts.output_mode === "content") {
-    if (opts.showLineNumbers)
-      flags.push("-n");
     if (opts.context) {
       flags.push(`-C ${opts.context}`);
     } else {
@@ -1171,42 +1106,137 @@ function buildRipgrepCommand(opts) {
   if (opts.type)
     flags.push(`-t ${opts.type}`);
   const flagStr = flags.join(" ");
-
-
-
-
-
-
+  return `${rgPath} ${flagStr} "${opts.pattern}" ${opts.searchPath} 2>/dev/null`;
+}
+function parseFilesOutput(stdout) {
+  const files = new Set;
+  for (const line of stdout.split(`
+`).filter(Boolean)) {
+    try {
+      const msg = JSON.parse(line);
+      if (msg.type === "begin") {
+        const data = msg.data;
+        files.add(data.path.text);
+      }
+    } catch {}
   }
+  return {
+    files: Array.from(files),
+    count: files.size
+  };
 }
-function
-const
-
-
-
-
-
-
-
-
-
-
-
+function parseCountOutput(stdout) {
+  const counts = new Map;
+  for (const line of stdout.split(`
+`).filter(Boolean)) {
+    try {
+      const msg = JSON.parse(line);
+      if (msg.type === "end") {
+        const data = msg.data;
+        counts.set(data.path.text, data.stats.matches);
+      }
+    } catch {}
+  }
+  const countsArray = Array.from(counts.entries()).map(([file, count]) => ({
+    file,
+    count
+  }));
+  const total = countsArray.reduce((sum, c) => sum + c.count, 0);
+  return {
+    counts: countsArray,
+    total
+  };
+}
+function parseContentOutput(stdout, head_limit, offset = 0) {
+  const fileData = new Map;
+  for (const line of stdout.split(`
+`).filter(Boolean)) {
+    try {
+      const msg = JSON.parse(line);
+      if (msg.type === "begin") {
+        const data = msg.data;
+        fileData.set(data.path.text, { matches: [], contexts: [] });
+      } else if (msg.type === "context") {
+        const data = msg.data;
+        const fd = fileData.get(data.path.text);
+        if (fd) {
+          fd.contexts.push({
+            line_number: data.line_number,
+            text: data.lines.text.replace(/\n$/, "")
+          });
+        }
+      } else if (msg.type === "match") {
+        const data = msg.data;
+        const fd = fileData.get(data.path.text);
+        if (fd) {
+          fd.matches.push({
+            line_number: data.line_number,
+            text: data.lines.text.replace(/\n$/, "")
+          });
+        }
+      }
+    } catch {}
+  }
+  const allMatches = [];
+  for (const [file, { matches, contexts }] of fileData) {
+    matches.sort((a, b) => a.line_number - b.line_number);
+    contexts.sort((a, b) => a.line_number - b.line_number);
+    const matchContexts = new Map;
+    for (const match of matches) {
+      matchContexts.set(match.line_number, { before: [], after: [] });
+    }
+    for (const ctx of contexts) {
+      let bestMatch = null;
+      let bestDistance = Infinity;
+      let isBefore = false;
+      for (const match of matches) {
+        const distance = Math.abs(ctx.line_number - match.line_number);
+        if (distance < bestDistance) {
+          bestDistance = distance;
+          bestMatch = match;
+          isBefore = ctx.line_number < match.line_number;
+        }
+      }
+      if (bestMatch) {
+        const mc = matchContexts.get(bestMatch.line_number);
+        if (mc) {
+          if (isBefore) {
+            mc.before.push(ctx.text);
+          } else {
+            mc.after.push(ctx.text);
+          }
+        }
+      }
+    }
+    for (const match of matches) {
+      const mc = matchContexts.get(match.line_number);
+      allMatches.push({
+        file,
+        line_number: match.line_number,
+        line: match.text,
+        before_context: mc?.before ?? [],
+        after_context: mc?.after ?? []
+      });
     }
   }
-
-
-
-
-
-
-
-
-
-
-  return `grep ${flagStr} "${opts.pattern}" ${opts.searchPath} 2>/dev/null${opts.paginationSuffix}`;
+  const grepMatches = allMatches.map((m) => ({
+    file: m.file,
+    line_number: m.line_number,
+    line: m.line,
+    before_context: m.before_context.length > 0 ? m.before_context : undefined,
+    after_context: m.after_context.length > 0 ? m.after_context : undefined
+  }));
+  let result = grepMatches;
+  if (offset > 0) {
+    result = result.slice(offset);
   }
+  if (head_limit && head_limit > 0) {
+    result = result.slice(0, head_limit);
+  }
+  return {
+    matches: result,
+    total_matches: result.length
+  };
 }
 
 // src/tools/read.ts
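The grep tool now always shells out to the ripgrep binary bundled with `@vscode/ripgrep` and parses its `--json` event stream; `parseFilesOutput`, `parseCountOutput`, and `parseContentOutput` above consume the `begin`, `match`, `context`, and `end` messages of that stream. A standalone sketch of what the stream looks like, using only the `rgPath` export that appears in this diff (the pattern and search path are placeholders):

```typescript
import { execFile } from "node:child_process";
import { rgPath } from "@vscode/ripgrep";

// Print one line per match from ripgrep's JSON event stream.
execFile(rgPath, ["--json", "TODO", "src"], (_error, stdout) => {
  // Note: ripgrep exits non-zero when nothing matches, so _error alone is not fatal here.
  for (const line of stdout.split("\n").filter(Boolean)) {
    const msg = JSON.parse(line);
    if (msg.type === "match") {
      console.log(`${msg.data.path.text}:${msg.data.line_number}: ${msg.data.lines.text.trim()}`);
    }
  }
});
```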
@@ -1390,13 +1420,56 @@ function createSkillTool(config) {
 
 // src/tools/web-fetch.ts
 import { generateText, tool as tool10, zodSchema as zodSchema10 } from "ai";
-import Parallel from "parallel-web";
 import { z as z10 } from "zod";
 
 // src/utils/http-constants.ts
 var RETRYABLE_STATUS_CODES = [408, 429, 500, 502, 503];
 
 // src/tools/web-fetch.ts
+var parallelModule = null;
+async function getParallelModule() {
+  if (!parallelModule) {
+    try {
+      parallelModule = await import("parallel-web");
+    } catch {
+      throw new Error("WebFetch requires parallel-web. Install with: npm install parallel-web");
+    }
+  }
+  return parallelModule;
+}
+async function fetchWithParallel(url, apiKey) {
+  const { default: Parallel } = await getParallelModule();
+  const client = new Parallel({ apiKey });
+  const extract = await client.beta.extract({
+    urls: [url],
+    excerpts: true,
+    full_content: true
+  });
+  if (!extract.results || extract.results.length === 0) {
+    throw new Error("No content extracted from URL");
+  }
+  const result = extract.results[0];
+  const content = result.full_content || result.excerpts?.join(`
+
+`) || "";
+  if (!content) {
+    throw new Error("No content available from URL");
+  }
+  return {
+    content,
+    finalUrl: result.url
+  };
+}
+async function fetchContent(url, apiKey, provider) {
+  switch (provider) {
+    case "parallel":
+      return fetchWithParallel(url, apiKey);
+    default: {
+      const _exhaustive = provider;
+      throw new Error(`Unknown provider: ${_exhaustive}`);
+    }
+  }
+}
 var webFetchInputSchema = z10.object({
   url: z10.string().describe("The URL to fetch content from"),
   prompt: z10.string().describe("The prompt to run on the fetched content")
@@ -1418,7 +1491,14 @@ Usage notes:
 - When a URL redirects to a different host, the tool will inform you and provide the redirect URL. You should then make a new WebFetch request with the redirect URL to fetch the content.
 `;
 function createWebFetchTool(config) {
-  const {
+  const {
+    provider = "parallel",
+    apiKey,
+    model,
+    strict,
+    needsApproval,
+    providerOptions
+  } = config;
   return tool10({
     description: WEB_FETCH_DESCRIPTION,
     inputSchema: zodSchema10(webFetchInputSchema),
@@ -1428,30 +1508,7 @@ function createWebFetchTool(config) {
     execute: async (input) => {
       const { url, prompt } = input;
       try {
-        const
-        const extract = await client.beta.extract({
-          urls: [url],
-          excerpts: true,
-          full_content: true
-        });
-        if (!extract.results || extract.results.length === 0) {
-          return {
-            error: "No content extracted from URL",
-            status_code: 404,
-            retryable: false
-          };
-        }
-        const extractedResult = extract.results[0];
-        const content = extractedResult.full_content || extractedResult.excerpts?.join(`
-
-`) || "";
-        if (!content) {
-          return {
-            error: "No content available from URL",
-            status_code: 404,
-            retryable: false
-          };
-        }
+        const { content, finalUrl } = await fetchContent(url, apiKey, provider);
         const result = await generateText({
           model,
           prompt: `${prompt}
@@ -1463,7 +1520,7 @@ ${content}`
         return {
           response: result.text,
           url,
-          final_url:
+          final_url: finalUrl || url
         };
       } catch (error) {
         if (error && typeof error === "object" && "status" in error) {
@@ -1485,8 +1542,53 @@ ${content}`
 
 // src/tools/web-search.ts
 import { tool as tool11, zodSchema as zodSchema11 } from "ai";
-import Parallel2 from "parallel-web";
 import { z as z11 } from "zod";
+var parallelModule2 = null;
+async function getParallelModule2() {
+  if (!parallelModule2) {
+    try {
+      parallelModule2 = await import("parallel-web");
+    } catch {
+      throw new Error("WebSearch requires parallel-web. Install with: npm install parallel-web");
+    }
+  }
+  return parallelModule2;
+}
+async function searchWithParallel(apiKey, options) {
+  const { default: Parallel } = await getParallelModule2();
+  const client = new Parallel({ apiKey });
+  const sourcePolicy = options.allowedDomains || options.blockedDomains ? {
+    ...options.allowedDomains && {
+      include_domains: options.allowedDomains
+    },
+    ...options.blockedDomains && {
+      exclude_domains: options.blockedDomains
+    }
+  } : undefined;
+  const search = await client.beta.search({
+    mode: "agentic",
+    objective: options.query,
+    max_results: 10,
+    ...sourcePolicy && { source_policy: sourcePolicy }
+  });
+  return (search.results || []).map((result) => ({
+    title: result.title ?? "",
+    url: result.url ?? "",
+    snippet: result.excerpts?.join(`
+`) ?? "",
+    metadata: result.publish_date ? { publish_date: result.publish_date } : undefined
+  }));
+}
+async function searchContent(apiKey, provider, options) {
+  switch (provider) {
+    case "parallel":
+      return searchWithParallel(apiKey, options);
+    default: {
+      const _exhaustive = provider;
+      throw new Error(`Unknown provider: ${_exhaustive}`);
+    }
+  }
+}
 var webSearchInputSchema = z11.object({
   query: z11.string().describe("The search query to use"),
   allowed_domains: z11.array(z11.string()).optional().describe("Only include results from these domains"),
@@ -1513,7 +1615,13 @@ When searching for recent information, documentation, or current events, use the
 - allowed_domains: Only include results from these domains
 - blocked_domains: Never include results from these domains`;
 function createWebSearchTool(config) {
-  const {
+  const {
+    provider = "parallel",
+    apiKey,
+    strict,
+    needsApproval,
+    providerOptions
+  } = config;
   return tool11({
     description: WEB_SEARCH_DESCRIPTION,
     inputSchema: zodSchema11(webSearchInputSchema),
@@ -1523,24 +1631,11 @@ function createWebSearchTool(config) {
     execute: async (input) => {
       const { query, allowed_domains, blocked_domains } = input;
       try {
-        const
-
-
-
-        } : undefined;
-        const search = await client.beta.search({
-          mode: "agentic",
-          objective: query,
-          max_results: 10,
-          ...sourcePolicy && { source_policy: sourcePolicy }
+        const results = await searchContent(apiKey, provider, {
+          query,
+          allowedDomains: allowed_domains,
+          blockedDomains: blocked_domains
        });
-        const results = (search.results || []).map((result) => ({
-          title: result.title ?? "",
-          url: result.url ?? "",
-          snippet: result.excerpts?.join(`
-`) ?? "",
-          metadata: result.publish_date ? { publish_date: result.publish_date } : undefined
-        }));
        return {
          results,
          total_results: results.length,
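Both web tools now accept an optional `provider` field (defaulting to "parallel") alongside their existing options. A minimal configuration sketch, assuming these factories are re-exported from the package root like the other tools, and using placeholder model and environment-variable names:

```typescript
import { createWebFetchTool, createWebSearchTool } from "bashkit";
import { anthropic } from "@ai-sdk/anthropic";

const webFetch = createWebFetchTool({
  provider: "parallel", // optional; "parallel" is the default and currently the only provider
  apiKey: process.env.PARALLEL_API_KEY!, // placeholder env var name
  model: anthropic("claude-sonnet-4-5"), // placeholder model id
});

const webSearch = createWebSearchTool({
  apiKey: process.env.PARALLEL_API_KEY!,
});
```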
package/dist/tools/index.d.ts
CHANGED
@@ -69,7 +69,7 @@ export type { SubagentEventData, SubagentStepEvent, SubagentTypeConfig, TaskErro
 export { createTaskTool } from "./task";
 export type { TodoItem, TodoState, TodoWriteError, TodoWriteOutput, } from "./todo-write";
 export { createTodoWriteTool } from "./todo-write";
-export type { WebFetchError, WebFetchOutput } from "./web-fetch";
+export type { ExtractResult, WebFetchError, WebFetchOutput } from "./web-fetch";
 export { createWebFetchTool } from "./web-fetch";
 export type { WebSearchError, WebSearchOutput, WebSearchResult, } from "./web-search";
 export { createWebSearchTool } from "./web-search";
package/dist/tools/web-fetch.d.ts
CHANGED
@@ -10,6 +10,14 @@ export interface WebFetchError {
     status_code?: number;
     retryable?: boolean;
 }
+/**
+ * Result from a web fetch provider's extract operation.
+ * New providers should return this shape.
+ */
+export interface ExtractResult {
+    content: string;
+    finalUrl?: string;
+}
 export declare function createWebFetchTool(config: WebFetchConfig): import("ai").Tool<{
     url: string;
     prompt: string;
package/dist/types.d.ts
CHANGED
@@ -21,14 +21,27 @@ export type ToolConfig = {
     allowedPaths?: string[];
     blockedCommands?: string[];
 } & SDKToolOptions;
-export type GrepToolConfig = ToolConfig
-
-
-
+export type GrepToolConfig = ToolConfig;
+/**
+ * Supported web search providers.
+ * Currently only 'parallel' is implemented.
+ * Add new providers here as union types (e.g., 'parallel' | 'serper' | 'tavily')
+ */
+export type WebSearchProvider = "parallel";
+/**
+ * Supported web fetch providers.
+ * Currently only 'parallel' is implemented.
+ * Add new providers here as union types (e.g., 'parallel' | 'firecrawl' | 'jina')
+ */
+export type WebFetchProvider = "parallel";
 export type WebSearchConfig = {
+    /** Provider to use for web search. Default: 'parallel' */
+    provider?: WebSearchProvider;
     apiKey: string;
 } & SDKToolOptions;
 export type WebFetchConfig = {
+    /** Provider to use for web fetching. Default: 'parallel' */
+    provider?: WebFetchProvider;
     apiKey: string;
     model: LanguageModel;
 } & SDKToolOptions;
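The provider unions above are meant to grow, and the `_exhaustive` default branch in the compiled `fetchContent`/`searchContent` corresponds to a `never`-assignment exhaustiveness check. A small illustration of how that check catches a newly added provider (the local type alias is for demonstration only):

```typescript
// If this union grows (e.g., "parallel" | "firecrawl"), the assignment to `never`
// below stops compiling until a case for the new provider is added.
type DemoWebFetchProvider = "parallel";

function pickFetcher(provider: DemoWebFetchProvider): string {
  switch (provider) {
    case "parallel":
      return "fetchWithParallel";
    default: {
      const _exhaustive: never = provider;
      throw new Error(`Unknown provider: ${_exhaustive}`);
    }
  }
}
```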
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bashkit",
-  "version": "0.2.2",
+  "version": "0.2.4",
   "description": "Agentic coding tools for the Vercel AI SDK",
   "type": "module",
   "main": "./dist/index.js",
@@ -22,7 +22,7 @@
   "scripts": {
     "dev": "bun run src/index.ts",
     "build": "bun run build:js && bun run build:cli && bun run build:types",
-    "build:js": "bun build src/index.ts --outdir dist --target node --format esm --external ai --external zod --external @ai-sdk/* --external @vercel/sandbox --external @e2b/code-interpreter --external parallel-web",
+    "build:js": "bun build src/index.ts --outdir dist --target node --format esm --external ai --external zod --external @ai-sdk/* --external @vercel/sandbox --external @e2b/code-interpreter --external parallel-web --external @vscode/ripgrep",
     "build:cli": "bun build src/cli/init.ts --outdir dist/cli --target node --format esm --external @clack/prompts && chmod +x dist/cli/init.js",
     "build:types": "tsc -p tsconfig.build.json",
     "typecheck": "tsc --noEmit",
@@ -51,7 +51,8 @@
     "url": "https://github.com/jbreite/bashkit"
   },
   "dependencies": {
-    "@clack/prompts": "^0.7.0"
+    "@clack/prompts": "^0.7.0",
+    "@vscode/ripgrep": "^1.17.0"
   },
   "devDependencies": {
     "@ai-sdk/anthropic": "^3.0.1",