deepagentsdk 0.12.0 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapters/elements/index.cjs +478 -288
- package/dist/adapters/elements/index.cjs.map +1 -1
- package/dist/adapters/elements/index.d.cts +107 -172
- package/dist/adapters/elements/index.d.mts +107 -172
- package/dist/adapters/elements/index.mjs +471 -284
- package/dist/adapters/elements/index.mjs.map +1 -1
- package/dist/{types-4g9UvXal.d.mts → agent-D0bKkNI-.d.mts} +352 -3
- package/dist/{types-IulnvhFg.d.cts → agent-DwAj5emJ.d.cts} +352 -3
- package/dist/{chunk-CbDLau6x.cjs → chunk-C5azi7Hr.cjs} +33 -0
- package/dist/cli/index.cjs +12 -12
- package/dist/cli/index.cjs.map +1 -1
- package/dist/cli/index.mjs +2 -2
- package/dist/cli/index.mjs.map +1 -1
- package/dist/{agent-Cuks-Idh.cjs → file-saver-BYPKakT4.cjs} +799 -205
- package/dist/file-saver-BYPKakT4.cjs.map +1 -0
- package/dist/{agent-CrH-He58.mjs → file-saver-Hj5so3dV.mjs} +793 -199
- package/dist/file-saver-Hj5so3dV.mjs.map +1 -0
- package/dist/index.cjs +83 -73
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +5 -353
- package/dist/index.d.mts +5 -353
- package/dist/index.mjs +13 -3
- package/dist/index.mjs.map +1 -1
- package/dist/{load-B6CA5js_.mjs → load-BBYEnMwz.mjs} +1 -1
- package/dist/{load-B6CA5js_.mjs.map → load-BBYEnMwz.mjs.map} +1 -1
- package/dist/{load-94gjHorc.mjs → load-BDxe6Cet.mjs} +1 -1
- package/dist/{load-79a2H4m0.cjs → load-BrRAKlO6.cjs} +2 -2
- package/dist/{load-79a2H4m0.cjs.map → load-BrRAKlO6.cjs.map} +1 -1
- package/dist/load-DqllBbDc.cjs +4 -0
- package/package.json +1 -1
- package/dist/agent-CrH-He58.mjs.map +0 -1
- package/dist/agent-Cuks-Idh.cjs.map +0 -1
- package/dist/file-saver-BJCqMIb5.mjs +0 -655
- package/dist/file-saver-BJCqMIb5.mjs.map +0 -1
- package/dist/file-saver-C6O2LAvg.cjs +0 -679
- package/dist/file-saver-C6O2LAvg.cjs.map +0 -1
- package/dist/load-C2qVmZMp.cjs +0 -3
@@ -1,679 +0,0 @@
-const require_chunk = require('./chunk-CbDLau6x.cjs');
-const require_agent = require('./agent-Cuks-Idh.cjs');
-let child_process = require("child_process");
-let _ai_sdk_anthropic = require("@ai-sdk/anthropic");
-let _ai_sdk_openai = require("@ai-sdk/openai");
-let node_fs = require("node:fs");
-let node_path = require("node:path");
-
-//#region src/backends/sandbox.ts
-/**
-* Encode string to base64 for safe shell transmission.
-*/
-function toBase64(str) {
-  return Buffer.from(str, "utf-8").toString("base64");
-}
-/**
-* Build a Node.js script command with embedded base64 arguments.
-* This avoids shell argument parsing issues by embedding values directly in the script.
-*/
-function buildNodeScript(script, args) {
-  let result = script;
-  for (const [key, value] of Object.entries(args)) result = result.replace(new RegExp(`__${key}__`, "g"), value);
-  return `node -e '${result}'`;
-}
-/**
-* Abstract base class for sandbox backends.
-*
-* Implements all file operations using shell commands via execute().
-* Subclasses only need to implement execute() and id.
-*
-* @example Creating a custom sandbox backend
-* ```typescript
-* class MyCloudSandbox extends BaseSandbox {
-*   readonly id = 'my-cloud-123';
-*
-*   async execute(command: string): Promise<ExecuteResponse> {
-*     // Call your cloud provider's API
-*     const result = await myCloudApi.runCommand(command);
-*     return {
-*       output: result.stdout + result.stderr,
-*       exitCode: result.exitCode,
-*       truncated: false,
-*     };
-*   }
-* }
-* ```
-*/
-var BaseSandbox = class {
-  /**
-  * List files and directories in a path.
-  */
-  async lsInfo(path) {
-    const pathB64 = toBase64(path);
-    const result = await this.execute(buildNodeScript(`
-const fs = require("fs");
-const path = require("path");
-
-const dirPath = Buffer.from("__PATH__", "base64").toString("utf-8");
-
-try {
-  const entries = fs.readdirSync(dirPath, { withFileTypes: true });
-  for (const entry of entries) {
-    const fullPath = path.join(dirPath, entry.name);
-    try {
-      const stat = fs.statSync(fullPath);
-      console.log(JSON.stringify({
-        path: entry.name,
-        is_dir: entry.isDirectory(),
-        size: stat.size,
-        modified_at: stat.mtime.toISOString()
-      }));
-    } catch (e) {}
-  }
-} catch (e) {}
-`, { PATH: pathB64 }));
-    const infos = [];
-    for (const line of result.output.trim().split("\n")) {
-      if (!line) continue;
-      try {
-        const data = JSON.parse(line);
-        infos.push({
-          path: data.path,
-          is_dir: data.is_dir,
-          size: data.size,
-          modified_at: data.modified_at
-        });
-      } catch {}
-    }
-    return infos;
-  }
-  /**
-  * Read file content with line numbers.
-  */
-  async read(filePath, offset = 0, limit = require_agent.DEFAULT_READ_LIMIT) {
-    const pathB64 = toBase64(filePath);
-    const script = `
-const fs = require("fs");
-const filePath = Buffer.from("__PATH__", "base64").toString("utf-8");
-const offset = __OFFSET__;
-const limit = __LIMIT__;
-
-if (!fs.existsSync(filePath)) {
-  console.error("Error: File not found");
-  process.exit(1);
-}
-
-const stat = fs.statSync(filePath);
-if (stat.size === 0) {
-  console.log("${require_agent.SYSTEM_REMINDER_FILE_EMPTY}");
-  process.exit(0);
-}
-
-const content = fs.readFileSync(filePath, "utf-8");
-const lines = content.split("\\n");
-const selected = lines.slice(offset, offset + limit);
-
-for (let i = 0; i < selected.length; i++) {
-  const lineNum = (offset + i + 1).toString().padStart(6, " ");
-  console.log(lineNum + "\\t" + selected[i]);
-}
-`;
-    const result = await this.execute(buildNodeScript(script, {
-      PATH: pathB64,
-      OFFSET: String(offset),
-      LIMIT: String(limit)
-    }));
-    if (result.exitCode !== 0) {
-      if (result.output.includes("Error: File not found")) return require_agent.FILE_NOT_FOUND(filePath);
-      return result.output.trim();
-    }
-    return result.output.trimEnd();
-  }
-  /**
-  * Read raw file data.
-  */
-  async readRaw(filePath) {
-    const pathB64 = toBase64(filePath);
-    const result = await this.execute(buildNodeScript(`
-const fs = require("fs");
-const filePath = Buffer.from("__PATH__", "base64").toString("utf-8");
-
-if (!fs.existsSync(filePath)) {
-  console.error("Error: File not found");
-  process.exit(1);
-}
-
-const stat = fs.statSync(filePath);
-const content = fs.readFileSync(filePath, "utf-8");
-
-console.log(JSON.stringify({
-  content: content.split("\\n"),
-  created_at: stat.birthtime.toISOString(),
-  modified_at: stat.mtime.toISOString()
-}));
-`, { PATH: pathB64 }));
-    if (result.exitCode !== 0) throw new Error(`File '${filePath}' not found`);
-    try {
-      const data = JSON.parse(result.output.trim());
-      return {
-        content: data.content,
-        created_at: data.created_at,
-        modified_at: data.modified_at
-      };
-    } catch {
-      throw new Error(`Failed to parse file data for '${filePath}'`);
-    }
-  }
-  /**
-  * Write content to a new file.
-  */
-  async write(filePath, content) {
-    const pathB64 = toBase64(filePath);
-    const contentB64 = toBase64(content);
-    const result = await this.execute(buildNodeScript(`
-const fs = require("fs");
-const path = require("path");
-
-const filePath = Buffer.from("__PATH__", "base64").toString("utf-8");
-const content = Buffer.from("__CONTENT__", "base64").toString("utf-8");
-
-if (fs.existsSync(filePath)) {
-  console.error("Error: File already exists");
-  process.exit(1);
-}
-
-const dir = path.dirname(filePath);
-if (dir && dir !== ".") {
-  fs.mkdirSync(dir, { recursive: true });
-}
-
-fs.writeFileSync(filePath, content, "utf-8");
-`, {
-      PATH: pathB64,
-      CONTENT: contentB64
-    }));
-    if (result.exitCode !== 0) {
-      if (result.output.includes("already exists")) return {
-        success: false,
-        error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.`
-      };
-      return {
-        success: false,
-        error: result.output.trim() || `Failed to write '${filePath}'`
-      };
-    }
-    return {
-      success: true,
-      path: filePath
-    };
-  }
-  /**
-  * Edit a file by replacing string occurrences.
-  */
-  async edit(filePath, oldString, newString, replaceAll = false) {
-    const pathB64 = toBase64(filePath);
-    const oldB64 = toBase64(oldString);
-    const newB64 = toBase64(newString);
-    const result = await this.execute(buildNodeScript(`
-const fs = require("fs");
-
-const filePath = Buffer.from("__PATH__", "base64").toString("utf-8");
-const oldStr = Buffer.from("__OLD__", "base64").toString("utf-8");
-const newStr = Buffer.from("__NEW__", "base64").toString("utf-8");
-const replaceAll = __REPLACE_ALL__;
-
-if (!fs.existsSync(filePath)) {
-  console.error("Error: File not found");
-  process.exit(1);
-}
-
-let content = fs.readFileSync(filePath, "utf-8");
-const count = content.split(oldStr).length - 1;
-
-if (count === 0) {
-  process.exit(2);
-}
-if (count > 1 && !replaceAll) {
-  process.exit(3);
-}
-
-if (replaceAll) {
-  content = content.split(oldStr).join(newStr);
-} else {
-  content = content.replace(oldStr, newStr);
-}
-
-fs.writeFileSync(filePath, content, "utf-8");
-console.log(count);
-`, {
-      PATH: pathB64,
-      OLD: oldB64,
-      NEW: newB64,
-      REPLACE_ALL: String(replaceAll)
-    }));
-    if (result.exitCode === 1) return {
-      success: false,
-      error: require_agent.FILE_NOT_FOUND(filePath)
-    };
-    if (result.exitCode === 2) return {
-      success: false,
-      error: require_agent.STRING_NOT_FOUND(filePath, oldString)
-    };
-    if (result.exitCode === 3) return {
-      success: false,
-      error: `Error: String '${oldString}' appears multiple times. Use replaceAll=true to replace all occurrences.`
-    };
-    return {
-      success: true,
-      path: filePath,
-      occurrences: parseInt(result.output.trim(), 10) || 1
-    };
-  }
-  /**
-  * Search for pattern in files.
-  */
-  async grepRaw(pattern, path = "/", glob = null) {
-    const patternB64 = toBase64(pattern);
-    const pathB64 = toBase64(path);
-    const globB64 = glob ? toBase64(glob) : toBase64("**/*");
-    const result = await this.execute(buildNodeScript(`
-const fs = require("fs");
-const path = require("path");
-
-const pattern = Buffer.from("__PATTERN__", "base64").toString("utf-8");
-const basePath = Buffer.from("__PATH__", "base64").toString("utf-8");
-const fileGlob = Buffer.from("__GLOB__", "base64").toString("utf-8");
-
-function walkDir(dir, baseDir) {
-  const results = [];
-  try {
-    const entries = fs.readdirSync(dir, { withFileTypes: true });
-    for (const entry of entries) {
-      const fullPath = path.join(dir, entry.name);
-      const relativePath = path.relative(baseDir, fullPath);
-
-      if (entry.isDirectory()) {
-        results.push(...walkDir(fullPath, baseDir));
-      } else {
-        results.push(relativePath);
-      }
-    }
-  } catch (e) {}
-  return results;
-}
-
-function matchGlob(filepath, pattern) {
-  if (!pattern || pattern === "**/*") return true;
-  const regex = pattern
-    .replace(/\\./g, "\\\\.")
-    .replace(/\\*\\*/g, "<<<GLOBSTAR>>>")
-    .replace(/\\*/g, "[^/]*")
-    .replace(/<<<GLOBSTAR>>>/g, ".*")
-    .replace(/\\?/g, ".");
-  return new RegExp("^" + regex + "$").test(filepath);
-}
-
-const allFiles = walkDir(basePath, basePath);
-const files = allFiles.filter(f => matchGlob(f, fileGlob)).sort();
-
-for (const file of files) {
-  try {
-    const fullPath = path.join(basePath, file);
-    const content = fs.readFileSync(fullPath, "utf-8");
-    const lines = content.split("\\n");
-
-    for (let i = 0; i < lines.length; i++) {
-      if (lines[i].includes(pattern)) {
-        console.log(JSON.stringify({
-          path: file,
-          line: i + 1,
-          text: lines[i]
-        }));
-      }
-    }
-  } catch (e) {}
-}
-`, {
-      PATTERN: patternB64,
-      PATH: pathB64,
-      GLOB: globB64
-    }));
-    const matches = [];
-    for (const line of result.output.trim().split("\n")) {
-      if (!line) continue;
-      try {
-        const data = JSON.parse(line);
-        matches.push({
-          path: data.path,
-          line: data.line,
-          text: data.text
-        });
-      } catch {}
-    }
-    return matches;
-  }
-  /**
-  * Find files matching glob pattern.
-  */
-  async globInfo(pattern, path = "/") {
-    const pathB64 = toBase64(path);
-    const patternB64 = toBase64(pattern);
-    const result = await this.execute(buildNodeScript(`
-const fs = require("fs");
-const path = require("path");
-
-const basePath = Buffer.from("__PATH__", "base64").toString("utf-8");
-const pattern = Buffer.from("__PATTERN__", "base64").toString("utf-8");
-
-function walkDir(dir, baseDir) {
-  const results = [];
-  try {
-    const entries = fs.readdirSync(dir, { withFileTypes: true });
-    for (const entry of entries) {
-      const fullPath = path.join(dir, entry.name);
-      const relativePath = path.relative(baseDir, fullPath);
-
-      if (entry.isDirectory()) {
-        results.push(...walkDir(fullPath, baseDir));
-      } else {
-        results.push(relativePath);
-      }
-    }
-  } catch (e) {}
-  return results;
-}
-
-function matchGlob(filepath, pattern) {
-  const regex = pattern
-    .replace(/\\./g, "\\\\.")
-    .replace(/\\*\\*/g, "<<<GLOBSTAR>>>")
-    .replace(/\\*/g, "[^/]*")
-    .replace(/<<<GLOBSTAR>>>/g, ".*")
-    .replace(/\\?/g, ".");
-  return new RegExp("^" + regex + "$").test(filepath);
-}
-
-const allFiles = walkDir(basePath, basePath);
-const matches = allFiles.filter(f => matchGlob(f, pattern)).sort();
-
-for (const m of matches) {
-  try {
-    const fullPath = path.join(basePath, m);
-    const stat = fs.statSync(fullPath);
-    console.log(JSON.stringify({
-      path: m,
-      is_dir: stat.isDirectory(),
-      size: stat.size,
-      modified_at: stat.mtime.toISOString()
-    }));
-  } catch (e) {}
-}
-`, {
-      PATH: pathB64,
-      PATTERN: patternB64
-    }));
-    const infos = [];
-    for (const line of result.output.trim().split("\n")) {
-      if (!line) continue;
-      try {
-        const data = JSON.parse(line);
-        infos.push({
-          path: data.path,
-          is_dir: data.is_dir,
-          size: data.size,
-          modified_at: data.modified_at
-        });
-      } catch {}
-    }
-    return infos;
-  }
-};
-
-//#endregion
-//#region src/backends/local-sandbox.ts
-/**
-* LocalSandbox: Execute commands locally using child_process.
-*
-* Useful for local development and testing without cloud sandboxes.
-* All file operations are inherited from BaseSandbox and executed
-* via shell commands in the local filesystem.
-*/
-/**
-* Local sandbox that executes commands using Node.js child_process.
-*
-* All commands are executed in a bash shell with the specified working directory.
-* Inherits all file operations (read, write, edit, ls, grep, glob) from BaseSandbox.
-*
-* @example Basic usage
-* ```typescript
-* import { LocalSandbox } from 'deepagentsdk';
-*
-* const sandbox = new LocalSandbox({ cwd: './workspace' });
-*
-* // Execute commands
-* const result = await sandbox.execute('ls -la');
-* console.log(result.output);
-*
-* // File operations
-* await sandbox.write('./src/index.ts', 'console.log("hello")');
-* const content = await sandbox.read('./src/index.ts');
-* ```
-*
-* @example With timeout and environment
-* ```typescript
-* const sandbox = new LocalSandbox({
-*   cwd: './workspace',
-*   timeout: 60000, // 60 seconds
-*   env: {
-*     NODE_ENV: 'development',
-*     DEBUG: '*',
-*   },
-* });
-* ```
-*
-* @example Error handling
-* ```typescript
-* const result = await sandbox.execute('npm test');
-* if (result.exitCode !== 0) {
-*   console.error('Tests failed:', result.output);
-* }
-* ```
-*/
-var LocalSandbox = class extends BaseSandbox {
-  cwd;
-  timeout;
-  env;
-  maxOutputSize;
-  _id;
-  /**
-  * Create a new LocalSandbox instance.
-  *
-  * @param options - Configuration options for the sandbox
-  */
-  constructor(options = {}) {
-    super();
-    this.cwd = options.cwd || process.cwd();
-    this.timeout = options.timeout || 3e4;
-    this.env = options.env || {};
-    this.maxOutputSize = options.maxOutputSize || 1024 * 1024;
-    this._id = `local-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
-  }
-  /**
-  * Unique identifier for this sandbox instance.
-  * Format: `local-{timestamp}-{random}`
-  */
-  get id() {
-    return this._id;
-  }
-  /**
-  * Execute a shell command in the local filesystem.
-  *
-  * Commands are executed using bash with the configured working directory
-  * and environment variables. Output is captured from both stdout and stderr.
-  *
-  * @param command - Shell command to execute
-  * @returns ExecuteResponse with output, exit code, and truncation status
-  *
-  * @example
-  * ```typescript
-  * const result = await sandbox.execute('echo "Hello" && ls -la');
-  * console.log(result.output);
-  * console.log('Exit code:', result.exitCode);
-  * ```
-  */
-  async execute(command) {
-    return new Promise((resolve) => {
-      const child = (0, child_process.spawn)("bash", ["-c", command], {
-        cwd: this.cwd,
-        env: {
-          ...process.env,
-          ...this.env
-        },
-        timeout: this.timeout
-      });
-      let output = "";
-      let truncated = false;
-      child.stdout.on("data", (data) => {
-        if (output.length < this.maxOutputSize) output += data.toString();
-        else truncated = true;
-      });
-      child.stderr.on("data", (data) => {
-        if (output.length < this.maxOutputSize) output += data.toString();
-        else truncated = true;
-      });
-      child.on("close", (code) => {
-        resolve({
-          output,
-          exitCode: code,
-          truncated
-        });
-      });
-      child.on("error", (err) => {
-        resolve({
-          output: `Error: ${err.message}`,
-          exitCode: 1,
-          truncated: false
-        });
-      });
-    });
-  }
-};
-
-//#endregion
-//#region src/utils/model-parser.ts
-/**
-* Utility to parse model strings into LanguageModel instances.
-* Provides backward compatibility for CLI and other string-based model specifications.
-*/
-/**
-* Parse a model string into a LanguageModel instance.
-*
-* Supports formats like:
-* - "anthropic/claude-sonnet-4-20250514"
-* - "openai/gpt-4o"
-* - "claude-sonnet-4-20250514" (defaults to Anthropic)
-*
-* @param modelString - The model string to parse
-* @returns A LanguageModel instance
-*
-* @example
-* ```typescript
-* const model = parseModelString("anthropic/claude-sonnet-4-20250514");
-* const agent = createDeepAgent({ model });
-* ```
-*/
-function parseModelString(modelString) {
-  const [provider, modelName] = modelString.split("/");
-  if (provider === "anthropic") return (0, _ai_sdk_anthropic.anthropic)(modelName || "claude-sonnet-4-20250514");
-  else if (provider === "openai") return (0, _ai_sdk_openai.openai)(modelName || "gpt-5-mini");
-  return (0, _ai_sdk_anthropic.anthropic)(modelString);
-}
-
-//#endregion
-//#region src/checkpointer/file-saver.ts
-/**
-* File-based checkpoint saver for local development.
-*/
-/**
-* File-based checkpoint saver.
-*
-* Stores checkpoints as JSON files in a directory. Each thread gets
-* its own file named `{threadId}.json`.
-*
-* @example
-* ```typescript
-* const saver = new FileSaver({ dir: './.checkpoints' });
-* const agent = createDeepAgent({
-*   model: anthropic('claude-sonnet-4-20250514'),
-*   checkpointer: saver,
-* });
-* ```
-*/
-var FileSaver = class {
-  dir;
-  constructor(options) {
-    this.dir = options.dir;
-    if (!(0, node_fs.existsSync)(this.dir)) (0, node_fs.mkdirSync)(this.dir, { recursive: true });
-  }
-  getFilePath(threadId) {
-    const safeId = threadId.replace(/[^a-zA-Z0-9_-]/g, "_");
-    return (0, node_path.join)(this.dir, `${safeId}.json`);
-  }
-  async save(checkpoint) {
-    const filePath = this.getFilePath(checkpoint.threadId);
-    const data = {
-      ...checkpoint,
-      updatedAt: (/* @__PURE__ */ new Date()).toISOString()
-    };
-    (0, node_fs.writeFileSync)(filePath, JSON.stringify(data, null, 2), "utf-8");
-  }
-  async load(threadId) {
-    const filePath = this.getFilePath(threadId);
-    if (!(0, node_fs.existsSync)(filePath)) return;
-    try {
-      const content = (0, node_fs.readFileSync)(filePath, "utf-8");
-      return JSON.parse(content);
-    } catch {
-      return;
-    }
-  }
-  async list() {
-    if (!(0, node_fs.existsSync)(this.dir)) return [];
-    return (0, node_fs.readdirSync)(this.dir).filter((f) => f.endsWith(".json")).map((f) => f.replace(".json", ""));
-  }
-  async delete(threadId) {
-    const filePath = this.getFilePath(threadId);
-    if ((0, node_fs.existsSync)(filePath)) (0, node_fs.unlinkSync)(filePath);
-  }
-  async exists(threadId) {
-    return (0, node_fs.existsSync)(this.getFilePath(threadId));
-  }
-};
-
-//#endregion
-Object.defineProperty(exports, 'BaseSandbox', {
-  enumerable: true,
-  get: function () {
-    return BaseSandbox;
-  }
-});
-Object.defineProperty(exports, 'FileSaver', {
-  enumerable: true,
-  get: function () {
-    return FileSaver;
-  }
-});
-Object.defineProperty(exports, 'LocalSandbox', {
-  enumerable: true,
-  get: function () {
-    return LocalSandbox;
-  }
-});
-Object.defineProperty(exports, 'parseModelString', {
-  enumerable: true,
-  get: function () {
-    return parseModelString;
-  }
-});
-//# sourceMappingURL=file-saver-C6O2LAvg.cjs.map
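The key technique in the removed bundle is how `buildNodeScript()` avoids shell quoting problems: user-supplied values are base64-encoded, substituted into `__PLACEHOLDER__` slots inside a `node -e '...'` script, and decoded again with `Buffer.from(..., "base64")` in the child process, so quotes, newlines, and `$` in paths or file content cannot escape the single-quoted command. Below is a minimal standalone sketch of that round-trip; it is illustrative only and not an export of the package, and the `render` helper name is invented here.

```typescript
// Sketch (assumption: Node.js runtime with Buffer available).
// The template script decodes its base64 placeholder back to the original value.
const template = `
const value = Buffer.from("__VALUE__", "base64").toString("utf-8");
console.log(value.length);
`;

// Substitute base64 text into __KEY__ placeholders and wrap in `node -e '...'`,
// mirroring what the removed buildNodeScript() helper does.
function render(script: string, args: Record<string, string>): string {
  let result = script;
  for (const [key, value] of Object.entries(args)) {
    result = result.replace(new RegExp(`__${key}__`, "g"), value);
  }
  return `node -e '${result}'`;
}

// A value that would be hazardous to interpolate into a shell command directly.
const risky = `it's got "quotes", a $VAR, and a\nnewline`;
const command = render(template, {
  VALUE: Buffer.from(risky, "utf-8").toString("base64"),
});
console.log(command);
```

Because the base64 alphabet contains no shell metacharacters, the rendered command can be handed to any `execute()` backend, whether it runs `bash -c` locally or forwards the string to a remote sandbox API.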