@cuylabs/agent-core 0.5.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +85 -372
- package/dist/{builder-RcTZuYnO.d.ts → builder-BRvqCcIk.d.ts} +2 -2
- package/dist/{resolver-DOfZ-xuk.d.ts → capability-resolver-CgRGsWVX.d.ts} +1 -1
- package/dist/{chunk-IMGQOTU2.js → chunk-3HNO5SVI.js} +286 -690
- package/dist/chunk-5K7AQVOU.js +619 -0
- package/dist/{chunk-QAQADS4X.js → chunk-BNSHUWCV.js} +1 -0
- package/dist/{chunk-OTUGSCED.js → chunk-CDTV2UYU.js} +159 -1
- package/dist/chunk-IEFIQENH.js +73 -0
- package/dist/chunk-N7P4PN3O.js +84 -0
- package/dist/{chunk-QWFMX226.js → chunk-QGOGIP7T.js} +148 -15
- package/dist/chunk-VNQBHPCT.js +398 -0
- package/dist/{chunk-X635CM2F.js → chunk-ZPMACVZK.js} +1 -1
- package/dist/context/index.js +1 -1
- package/dist/host/index.d.ts +45 -0
- package/dist/host/index.js +8 -0
- package/dist/{index-p0kOsVsE.d.ts → index-C33hlD6H.d.ts} +12 -7
- package/dist/{index-tmhaADz5.d.ts → index-CfBGYrpd.d.ts} +121 -2
- package/dist/index.d.ts +107 -126
- package/dist/index.js +322 -597
- package/dist/inference/index.d.ts +59 -0
- package/dist/inference/index.js +25 -0
- package/dist/middleware/index.d.ts +8 -4
- package/dist/middleware/index.js +5 -3
- package/dist/models/index.d.ts +104 -2
- package/dist/models/index.js +40 -6
- package/dist/prompt/index.d.ts +10 -6
- package/dist/reasoning/index.d.ts +54 -8
- package/dist/reasoning/index.js +2 -3
- package/dist/{registry-CuRWWtcT.d.ts → registry-BDLIHOQB.d.ts} +1 -1
- package/dist/{runner-C7aMP_x3.d.ts → runner-DSKaEz3z.d.ts} +290 -7
- package/dist/runtime/index.d.ts +41 -7
- package/dist/runtime/index.js +15 -6
- package/dist/scope/index.d.ts +10 -0
- package/dist/scope/index.js +14 -0
- package/dist/{session-manager-Uawm2Le7.d.ts → session-manager-B_CWGTsl.d.ts} +1 -1
- package/dist/skill/index.d.ts +7 -5
- package/dist/storage/index.d.ts +2 -2
- package/dist/sub-agent/index.d.ts +12 -8
- package/dist/tool/index.d.ts +8 -4
- package/dist/tool/index.js +4 -3
- package/dist/{tool-pFAnJc5Y.d.ts → tool-Db1Ue-1U.d.ts} +1 -1
- package/dist/{tool-DYp6-cC3.d.ts → tool-HUtkiVBx.d.ts} +5 -99
- package/dist/tracking/index.d.ts +3 -1
- package/dist/types-9jGQUjqW.d.ts +29 -0
- package/dist/types-CHiPh8U2.d.ts +100 -0
- package/dist/types-CqDZTh4d.d.ts +335 -0
- package/dist/types-FRpzzg_9.d.ts +355 -0
- package/package.json +19 -8
- package/dist/capabilities/index.d.ts +0 -97
- package/dist/capabilities/index.js +0 -46
- package/dist/chunk-6TDTQJ4P.js +0 -116
- package/dist/chunk-DWYX7ASF.js +0 -26
- package/dist/chunk-FG4MD5MU.js +0 -54
- package/dist/config-D2xeGEHK.d.ts +0 -52
- package/dist/identifiers-BLUxFqV_.d.ts +0 -12
- package/dist/network-D76DS5ot.d.ts +0 -5
- package/dist/types-MM1JoX5T.d.ts +0 -810
|
@@ -0,0 +1,398 @@
|
|
|
1
|
+
// src/host/local.ts
|
|
2
|
+
import { spawn } from "child_process";
|
|
3
|
+
import fs from "fs/promises";
|
|
4
|
+
import { existsSync } from "fs";
|
|
5
|
+
import path from "path";
|
|
6
|
+
import process from "process";
|
|
7
|
+
// Pick the shell used to run tool commands on this machine.
// Windows gets cmd.exe with /c; everything else uses the user's login
// shell from $SHELL, falling back to /bin/bash.
function getShell() {
  const isWindows = process.platform === "win32";
  if (isWindows) {
    return { shell: "cmd.exe", args: ["/c"] };
  }
  return { shell: process.env.SHELL ?? "/bin/bash", args: ["-c"] };
}
|
|
14
|
+
// Forcefully terminate a process and all of its descendants.
// On POSIX the child was spawned detached (its own process group), so
// signalling -pid kills the whole group at once. On Windows we shell
// out to `taskkill /t /f` (tree, force). If the group kill fails we
// fall back to killing just the single pid; every error is swallowed
// because the process may simply be gone already.
async function killProcessTree(pid) {
  const posix = process.platform !== "win32";
  try {
    if (posix) {
      process.kill(-pid, "SIGKILL");
      return;
    }
    const { exec: execCb } = await import("child_process");
    execCb(`taskkill /pid ${pid} /t /f`, () => {});
  } catch {
    try {
      process.kill(pid, "SIGKILL");
    } catch {
      // Process already exited — nothing left to do.
    }
  }
}
|
|
30
|
+
/**
 * Create a ToolHost that runs everything on the local machine.
 *
 * File operations use node:fs/promises; commands run through the user's
 * shell via child_process.spawn. Relative paths are resolved against
 * `defaultCwd` (or process.cwd() when omitted).
 *
 * @param defaultCwd Working directory used to resolve relative paths and
 *                   as the default cwd for exec(). Defaults to process.cwd().
 */
function localHost(defaultCwd) {
  const cwd = defaultCwd ?? process.cwd();
  // Resolve a possibly-relative path against the host's base cwd.
  const toAbs = (p) => (path.isAbsolute(p) ? p : path.resolve(cwd, p));
  return {
    name: "local",
    // ------------------------------------------------------------------
    // File system
    // ------------------------------------------------------------------
    async readFile(filePath) {
      return fs.readFile(toAbs(filePath), "utf-8");
    },
    async readBytes(filePath, offset, length) {
      const fh = await fs.open(toAbs(filePath), "r");
      try {
        const buf = Buffer.alloc(length);
        // fh.read may return fewer bytes than requested (short read /
        // EOF); trim to what was actually read instead of handing back
        // a zero-padded buffer of the full requested length.
        const { bytesRead } = await fh.read(buf, 0, length, offset);
        return bytesRead === length ? buf : buf.subarray(0, bytesRead);
      } finally {
        await fh.close();
      }
    },
    async writeFile(filePath, content) {
      const abs = toAbs(filePath);
      // Create parent directories so writes to new paths just work.
      await fs.mkdir(path.dirname(abs), { recursive: true });
      await fs.writeFile(abs, content, "utf-8");
    },
    async exists(filePath) {
      return existsSync(toAbs(filePath));
    },
    async stat(filePath) {
      const s = await fs.stat(toAbs(filePath));
      return {
        size: s.size,
        mtime: s.mtime,
        isDirectory: s.isDirectory(),
        isFile: s.isFile()
      };
    },
    async readdir(dirPath) {
      const entries = await fs.readdir(toAbs(dirPath), { withFileTypes: true });
      return entries.map((e) => ({
        name: e.name,
        isDirectory: e.isDirectory(),
        isFile: e.isFile()
      }));
    },
    async mkdir(dirPath) {
      await fs.mkdir(toAbs(dirPath), { recursive: true });
    },
    // ------------------------------------------------------------------
    // Process execution
    // ------------------------------------------------------------------
    exec(command, options) {
      const { shell, args } = getShell();
      const execCwd = options?.cwd ?? cwd;
      const env = {
        ...process.env,
        ...options?.env,
        // Disable pagers — tools collect output, not interactive
        PAGER: "cat",
        GIT_PAGER: "cat"
      };
      return new Promise((resolve, reject) => {
        let stdout = "";
        let stderr = "";
        let timedOut = false;
        let settled = false;
        // detached on POSIX puts the child in its own process group so
        // killProcessTree can SIGKILL the whole tree via -pid.
        const child = spawn(shell, [...args, command], {
          cwd: execCwd,
          detached: process.platform !== "win32",
          env,
          stdio: ["ignore", "pipe", "pipe"]
        });
        child.stdout?.on("data", (data) => {
          stdout += data.toString();
          options?.onStdout?.(data);
        });
        child.stderr?.on("data", (data) => {
          stderr += data.toString();
          options?.onStderr?.(data);
        });
        let timer;
        if (options?.timeout && options.timeout > 0) {
          timer = setTimeout(() => {
            timedOut = true;
            if (child.pid) killProcessTree(child.pid);
          }, options.timeout);
        }
        const onAbort = () => {
          if (child.pid) killProcessTree(child.pid);
        };
        options?.signal?.addEventListener("abort", onAbort, { once: true });
        // "close" and "error" can both fire; settle exactly once and
        // always release the timer and abort listener.
        const finish = (settle, value) => {
          if (settled) return;
          settled = true;
          if (timer) clearTimeout(timer);
          options?.signal?.removeEventListener("abort", onAbort);
          settle(value);
        };
        child.on("close", (code) =>
          finish(resolve, { stdout, stderr, exitCode: code, timedOut })
        );
        child.on("error", (err) => finish(reject, err));
      });
    }
  };
}
|
|
145
|
+
|
|
146
|
+
// src/host/docker/exec.ts
|
|
147
|
+
// Lazily load the optional "dockerode" dependency. A dynamic import
// keeps dockerode out of the required dependency tree: only code paths
// that actually create a docker host pay for (and require) it.
async function loadDockerode() {
  let module;
  try {
    module = await import("dockerode");
  } catch {
    throw new Error(
      "dockerHost requires the 'dockerode' package. Install it with: npm install dockerode"
    );
  }
  return module.default ?? module;
}
|
|
157
|
+
// Turn DockerHostOptions into a concrete { docker, container, label }
// runtime triple. A string container option is looked up by name/id
// through a fresh Dockerode client; a Container-like object is used
// directly, borrowing its modem for stream demuxing.
async function resolveDockerRuntime(options) {
  const Dockerode = await loadDockerode();
  const target = options.container;
  if (typeof target === "string") {
    const docker = new Dockerode(options.dockerOptions);
    return { docker, container: docker.getContainer(target), label: target };
  }
  return {
    docker: { modem: target.modem },
    container: target,
    // Short container id (12 hex chars) as the human-readable label.
    label: target.id?.slice(0, 12) ?? "unknown"
  };
}
|
|
175
|
+
// Run one shell command inside a container via the Docker exec API and
// collect its demultiplexed stdout/stderr.
//
// Flow: create an exec instance (sh -c <command>), start it, demux the
// single attached stream into stdout/stderr sinks, then settle when the
// stream ends (exit code from exec.inspect()) or errors. Timeout and
// AbortSignal both destroy the stream; a destroyed stream surfaces as
// either "end" or "error", and both paths report exitCode: null in
// that case.
//
// NOTE(review): output is decoded with toString("utf-8"), so binary
// command output is not byte-faithful through this channel — callers
// needing raw bytes should encode in-container (e.g. base64). TODO confirm
// all current callers only expect text.
async function containerExec(runtime, command, options = {}) {
  // "KEY=value" pairs for the exec Env field; undefined values are dropped.
  const envEntries = options.env ? Object.entries(options.env).filter((entry) => entry[1] !== void 0).map(([key, value]) => `${key}=${value}`) : void 0;
  const exec = await runtime.container.exec({
    Cmd: ["sh", "-c", command],
    // Only include optional fields when actually set.
    ...options.user ? { User: options.user } : {},
    ...options.workdir ? { WorkingDir: options.workdir } : {},
    ...envEntries && envEntries.length > 0 ? { Env: envEntries } : {},
    AttachStdout: true,
    AttachStderr: true
  });
  const stream = await exec.start({});
  let timedOut = false;
  let aborted = false;
  return await new Promise((resolve, reject) => {
    const stdoutChunks = [];
    const stderrChunks = [];
    // Docker multiplexes stdout/stderr over one stream; demuxStream
    // splits it into the two writable-like sinks below.
    runtime.docker.modem.demuxStream(
      stream,
      {
        write: (chunk) => {
          stdoutChunks.push(chunk);
          options.onStdout?.(chunk);
          return true;
        },
        // Minimal Writable shim: end() is a no-op returning this.
        end: function() {
          return this;
        }
      },
      {
        write: (chunk) => {
          stderrChunks.push(chunk);
          options.onStderr?.(chunk);
          return true;
        },
        end: function() {
          return this;
        }
      }
    );
    let timer;
    if (options.timeout && options.timeout > 0) {
      timer = setTimeout(() => {
        // Destroying the stream forces an "end"/"error" below, which
        // settles the promise with exitCode: null and timedOut: true.
        timedOut = true;
        stream.destroy?.();
      }, options.timeout);
    }
    const onAbort = () => {
      aborted = true;
      stream.destroy?.();
    };
    options.signal?.addEventListener("abort", onAbort, { once: true });
    stream.on("end", async () => {
      if (timer) {
        clearTimeout(timer);
      }
      options.signal?.removeEventListener("abort", onAbort);
      try {
        // inspect() carries the process exit code once the exec finishes.
        const info = await exec.inspect();
        resolve({
          stdout: Buffer.concat(stdoutChunks).toString("utf-8"),
          stderr: Buffer.concat(stderrChunks).toString("utf-8"),
          // A timed-out/aborted run has no meaningful exit code.
          exitCode: timedOut || aborted ? null : info.ExitCode ?? 0,
          timedOut
        });
      } catch (error) {
        reject(error);
      }
    });
    stream.on("error", (error) => {
      if (timer) {
        clearTimeout(timer);
      }
      options.signal?.removeEventListener("abort", onAbort);
      // A destroy we triggered ourselves is expected: return the
      // partial output instead of failing the call.
      if (timedOut || aborted) {
        resolve({
          stdout: Buffer.concat(stdoutChunks).toString("utf-8"),
          stderr: Buffer.concat(stderrChunks).toString("utf-8"),
          exitCode: null,
          timedOut
        });
        return;
      }
      reject(error);
    });
  });
}
|
|
261
|
+
// Thin adapter from host-level exec options to containerExec options,
// applying the docker host's default user and working directory.
// Precedence for the working directory: explicit workdir > cwd > the
// host default.
async function runDockerCommand(options) {
  const { runtime, command, defaultUser, defaultWorkdir, execOptions } = options;
  const workdir = execOptions?.workdir ?? execOptions?.cwd ?? defaultWorkdir;
  return await containerExec(runtime, command, {
    user: defaultUser,
    workdir,
    env: execOptions?.env,
    timeout: execOptions?.timeout,
    signal: execOptions?.signal,
    onStdout: execOptions?.onStdout,
    onStderr: execOptions?.onStderr
  });
}
|
|
273
|
+
|
|
274
|
+
// src/host/docker/shell.ts
|
|
275
|
+
// POSIX single-quote escaping for shell arguments: wrap in single
// quotes, and render each embedded quote as '\'' (close quote, escaped
// literal quote, reopen). Safe for any content except NUL bytes.
function sq(value) {
  const escaped = value.split("'").join(`'\\''`);
  return `'${escaped}'`;
}
|
|
278
|
+
// Resolve a container-side path: absolute paths pass through untouched,
// relative paths are joined onto the host's default working directory
// (inserting a "/" only when the workdir doesn't already end in one).
function resolveDockerPath(path2, defaultWorkdir) {
  if (path2.startsWith("/")) {
    return path2;
  }
  const sep = defaultWorkdir.endsWith("/") ? "" : "/";
  return `${defaultWorkdir}${sep}${path2}`;
}
|
|
285
|
+
|
|
286
|
+
// src/host/docker/host.ts
|
|
287
|
+
/**
 * Create a ToolHost backed by a running Docker container.
 *
 * Every operation is implemented as a shell command executed inside the
 * container (cat, dd, stat, find, mkdir, ...), so the container needs a
 * POSIX `sh` plus coreutils and `base64` for file content transfer.
 *
 * @param options Container reference (name/id string or Container-like
 *                object) plus optional user, workdir, and Dockerode
 *                constructor options.
 * @returns Promise for the ToolHost (resolving the container is async).
 */
async function dockerHost(options) {
  const runtime = await resolveDockerRuntime(options);
  const defaultUser = options.user;
  const defaultWorkdir = options.workdir ?? "/";
  // Run one shell command in the container with host defaults applied.
  async function run(command, execOptions) {
    return await runDockerCommand({
      runtime,
      command,
      defaultUser,
      defaultWorkdir,
      execOptions
    });
  }
  return {
    name: `docker:${runtime.label}`,
    async readFile(filePath) {
      const absPath = resolveDockerPath(filePath, defaultWorkdir);
      const result = await run(`cat ${sq(absPath)}`);
      if (result.exitCode !== 0) {
        throw new Error(`readFile failed (${absPath}): ${result.stderr.trim()}`);
      }
      return result.stdout;
    },
    async readBytes(filePath, offset, length) {
      const absPath = resolveDockerPath(filePath, defaultWorkdir);
      // base64-encode inside the container so arbitrary bytes survive
      // the utf-8 text channel used for exec output. (Decoding the
      // already-utf-8-decoded stream as "binary" corrupted any
      // non-ASCII byte.) Node's base64 decoder ignores the newlines
      // base64 inserts when wrapping lines.
      const result = await run(
        `dd if=${sq(absPath)} bs=1 skip=${offset} count=${length} 2>/dev/null | base64`
      );
      if (result.exitCode !== 0) {
        throw new Error(
          `readBytes failed (${absPath}): ${result.stderr.trim()}`
        );
      }
      return Buffer.from(result.stdout, "base64");
    },
    async writeFile(filePath, content) {
      const absPath = resolveDockerPath(filePath, defaultWorkdir);
      const dir = absPath.substring(0, absPath.lastIndexOf("/")) || "/";
      // Ensure the parent directory exists before writing.
      await run(`mkdir -p ${sq(dir)}`);
      // base64 round-trip keeps arbitrary content intact through the
      // shell command line.
      const encoded = Buffer.from(content, "utf-8").toString("base64");
      const result = await run(`echo ${sq(encoded)} | base64 -d > ${sq(absPath)}`);
      if (result.exitCode !== 0) {
        throw new Error(
          `writeFile failed (${absPath}): ${result.stderr.trim()}`
        );
      }
    },
    async exists(filePath) {
      const absPath = resolveDockerPath(filePath, defaultWorkdir);
      const result = await run(`test -e ${sq(absPath)}`);
      return result.exitCode === 0;
    },
    async stat(filePath) {
      const absPath = resolveDockerPath(filePath, defaultWorkdir);
      // GNU stat (-c) first, BSD stat (-f) as the fallback; both print
      // "size mtime-seconds type".
      const result = await run(
        `stat -c '%s %Y %F' ${sq(absPath)} 2>/dev/null || stat -f '%z %m %HT' ${sq(absPath)}`
      );
      if (result.exitCode !== 0) {
        throw new Error(`stat failed (${absPath}): ${result.stderr.trim()}`);
      }
      const parts = result.stdout.trim().split(/\s+/);
      if (parts.length < 3) {
        throw new Error(
          `stat: unexpected output format for ${absPath}: ${result.stdout}`
        );
      }
      const size = parseInt(parts[0], 10);
      const mtimeSec = parseInt(parts[1], 10);
      // The file-type string may itself contain spaces ("regular file").
      const typeStr = parts.slice(2).join(" ").toLowerCase();
      return {
        size,
        mtime: new Date(mtimeSec * 1e3),
        isDirectory: typeStr.includes("directory"),
        isFile: typeStr.includes("regular") || typeStr.includes("file")
      };
    },
    async readdir(dirPath) {
      const absPath = resolveDockerPath(dirPath, defaultWorkdir);
      // GNU find prints "name<TAB>type" directly; the shell loop covers
      // BusyBox/BSD find where -printf is unavailable (it prints the
      // same tab-separated format).
      const result = await run(
        `find ${sq(absPath)} -maxdepth 1 -mindepth 1 -printf '%f\\t%y\\n' 2>/dev/null || for f in ${sq(absPath)}/*; do [ -e "$f" ] || continue; n=$(basename "$f"); if [ -d "$f" ]; then printf '%s\\td\\n' "$n"; else printf '%s\\tf\\n' "$n"; fi; done`
      );
      if (result.exitCode !== 0) {
        throw new Error(
          `readdir failed (${absPath}): ${result.stderr.trim()}`
        );
      }
      return result.stdout.trim().split("\n").filter(Boolean).map((line) => {
        // Entries are TAB-separated (splitting on a space left `type`
        // undefined for every entry and a tab embedded in the name).
        // Names containing a literal tab would still mis-parse.
        const [name, type] = line.split("\t");
        return {
          name,
          isDirectory: type === "d",
          isFile: type === "f"
        };
      });
    },
    async mkdir(dirPath) {
      const absPath = resolveDockerPath(dirPath, defaultWorkdir);
      const result = await run(`mkdir -p ${sq(absPath)}`);
      if (result.exitCode !== 0) {
        throw new Error(`mkdir failed (${absPath}): ${result.stderr.trim()}`);
      }
    },
    async exec(command, execOptions) {
      return await run(command, execOptions);
    }
  };
}
|
|
394
|
+
|
|
395
|
+
export {
|
|
396
|
+
localHost,
|
|
397
|
+
dockerHost
|
|
398
|
+
};
|
package/dist/context/index.js
CHANGED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { T as ToolHost } from '../types-CHiPh8U2.js';
|
|
2
|
+
export { D as DirEntry, E as ExecOptions, a as ExecResult, F as FileStat } from '../types-CHiPh8U2.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* LocalHost — executes tools on the local machine.
|
|
6
|
+
*
|
|
7
|
+
* Default ToolHost implementation. Uses Node's `child_process.spawn`
|
|
8
|
+
* for commands and `node:fs/promises` for file operations.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Create a ToolHost that runs everything on the local machine.
|
|
13
|
+
*
|
|
14
|
+
* @param defaultCwd Working directory for commands when none is specified.
|
|
15
|
+
* Defaults to `process.cwd()`.
|
|
16
|
+
*/
|
|
17
|
+
declare function localHost(defaultCwd?: string): ToolHost;
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Configuration for creating a Docker-backed `ToolHost`.
|
|
21
|
+
*/
|
|
22
|
+
interface DockerHostOptions {
|
|
23
|
+
/**
|
|
24
|
+
* The container to connect to. Either:
|
|
25
|
+
* - A string container name or ID
|
|
26
|
+
* - A Dockerode `Container`-like object
|
|
27
|
+
*/
|
|
28
|
+
container: string | {
|
|
29
|
+
id: string;
|
|
30
|
+
modem?: unknown;
|
|
31
|
+
};
|
|
32
|
+
/** User to run commands as inside the container. */
|
|
33
|
+
user?: string;
|
|
34
|
+
/** Default working directory inside the container. */
|
|
35
|
+
workdir?: string;
|
|
36
|
+
/**
|
|
37
|
+
* Dockerode constructor options.
|
|
38
|
+
* Only used when `container` is a string.
|
|
39
|
+
*/
|
|
40
|
+
dockerOptions?: Record<string, unknown>;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
declare function dockerHost(options: DockerHostOptions): Promise<ToolHost>;
|
|
44
|
+
|
|
45
|
+
export { type DockerHostOptions, ToolHost, dockerHost, localHost };
|
|
@@ -1,15 +1,20 @@
|
|
|
1
1
|
import { LanguageModel } from 'ai';
|
|
2
2
|
import { R as ReasoningLevel } from './types-CQaXbRsS.js';
|
|
3
|
-
import { T as Tool } from './tool-
|
|
4
|
-
import {
|
|
5
|
-
import {
|
|
6
|
-
import {
|
|
3
|
+
import { T as Tool } from './tool-Db1Ue-1U.js';
|
|
4
|
+
import { h as AgentMiddleware, d as StreamProvider, P as PromptConfig, A as AgentEvent, M as MiddlewareRunner } from './runner-DSKaEz3z.js';
|
|
5
|
+
import { S as SessionManager, f as SessionContext, i as SessionInfo } from './session-manager-B_CWGTsl.js';
|
|
6
|
+
import { T as ToolHost } from './types-CHiPh8U2.js';
|
|
7
7
|
import { T as TokenUsage, M as Message } from './messages-BYWGn8TY.js';
|
|
8
8
|
import { a as MCPManager } from './types-VQgymC1N.js';
|
|
9
9
|
import { U as UndoResult, T as TurnChangeTracker } from './tracker-DClqYqTj.js';
|
|
10
|
-
import { P as PromptBuilder } from './builder-
|
|
11
|
-
import {
|
|
10
|
+
import { P as PromptBuilder } from './builder-BRvqCcIk.js';
|
|
11
|
+
import { P as PendingIntervention, g as InterventionController } from './types-FRpzzg_9.js';
|
|
12
|
+
import { s as DoomLoopHandler, o as AgentTurnStepRuntimeConfig } from './types-CqDZTh4d.js';
|
|
12
13
|
|
|
14
|
+
/**
|
|
15
|
+
* Tools whose outputs should never be pruned automatically.
|
|
16
|
+
*/
|
|
17
|
+
declare const PRUNE_PROTECTED_TOOLS: readonly ["skill"];
|
|
13
18
|
/**
|
|
14
19
|
* Configuration for automatic context compaction.
|
|
15
20
|
*/
|
|
@@ -1064,4 +1069,4 @@ declare class SubAgentTracker {
|
|
|
1064
1069
|
*/
|
|
1065
1070
|
declare function createSubAgentTools(parent: Agent, config: SubAgentToolConfig): Tool.AnyInfo[];
|
|
1066
1071
|
|
|
1067
|
-
export { type AppliedPreset as A, type CompactionConfig as C, DEFAULT_MAX_CONCURRENT as D, type Preset as P, type SubAgentCompletedResult as S, type TracingConfig as T,
|
|
1072
|
+
export { type AppliedPreset as A, type CompactionConfig as C, DEFAULT_MAX_CONCURRENT as D, type Preset as P, type SubAgentCompletedResult as S, type TracingConfig as T, type AgentConfig as a, Agent as b, type AgentProfile as c, DEFAULT_MAX_SPAWN_DEPTH as d, DEFAULT_SESSION_TITLE_PREFIX as e, PRUNE_PROTECTED_TOOLS as f, type SubAgentHandle as g, type SubAgentStatus as h, type SubAgentToolConfig as i, SubAgentTracker as j, type SubAgentUsage as k, createAgent as l, createSubAgentTools as m };
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { h as AgentMiddleware } from './runner-DSKaEz3z.js';
|
|
2
2
|
import { TelemetrySettings } from 'ai';
|
|
3
3
|
|
|
4
4
|
/**
|
|
@@ -195,4 +195,123 @@ declare function otelMiddleware(config?: OtelMiddlewareConfig): AgentMiddleware;
|
|
|
195
195
|
|
|
196
196
|
declare function createTelemetryConfig(config: TelemetryConfig): TelemetryConfigResult;
|
|
197
197
|
|
|
198
|
-
|
|
198
|
+
/**
|
|
199
|
+
* Prompt Cache Middleware — Type definitions.
|
|
200
|
+
*
|
|
201
|
+
* Defines the configuration for provider-specific prompt caching.
|
|
202
|
+
* Currently supports Anthropic's `cache_control` with ephemeral breakpoints.
|
|
203
|
+
*/
|
|
204
|
+
/**
|
|
205
|
+
* Supported cache TTL values for Anthropic prompt caching.
|
|
206
|
+
*
|
|
207
|
+
* - `"5m"` — 5-minute cache (default, ~90% input cost reduction)
|
|
208
|
+
* - `"1h"` — 1-hour cache (higher write cost, but longer retention)
|
|
209
|
+
*/
|
|
210
|
+
type CacheTTL = "5m" | "1h";
|
|
211
|
+
/**
|
|
212
|
+
* Configuration for the prompt cache middleware.
|
|
213
|
+
*
|
|
214
|
+
* @example
|
|
215
|
+
* ```typescript
|
|
216
|
+
* // Default — auto-detect Anthropic, 5-minute TTL
|
|
217
|
+
* promptCacheMiddleware()
|
|
218
|
+
*
|
|
219
|
+
* // Custom TTL
|
|
220
|
+
* promptCacheMiddleware({ ttl: "1h" })
|
|
221
|
+
*
|
|
222
|
+
* // Custom breakpoint strategy
|
|
223
|
+
* promptCacheMiddleware({
|
|
224
|
+
* messageBreakpoints: 2, // cache system + 2 message boundaries
|
|
225
|
+
* })
|
|
226
|
+
* ```
|
|
227
|
+
*/
|
|
228
|
+
interface PromptCacheConfig {
|
|
229
|
+
/**
|
|
230
|
+
* Cache TTL. Defaults to `"5m"` (Anthropic's default).
|
|
231
|
+
*
|
|
232
|
+
* - `"5m"` — 5 minutes. 1.25× write, 0.1× read.
|
|
233
|
+
* - `"1h"` — 1 hour. Higher write cost, longer retention.
|
|
234
|
+
*/
|
|
235
|
+
ttl?: CacheTTL;
|
|
236
|
+
/**
|
|
237
|
+
* Number of cache breakpoints to place on conversation messages.
|
|
238
|
+
*
|
|
239
|
+
* Anthropic allows up to 4 breakpoints total. The system prompt always
|
|
240
|
+
* gets one. This controls how many additional breakpoints are placed
|
|
241
|
+
* on conversation messages (counting from the end of stable content).
|
|
242
|
+
*
|
|
243
|
+
* Defaults to `1` (system + 1 message boundary = 2 breakpoints).
|
|
244
|
+
*
|
|
245
|
+
* Set to `0` to only cache the system prompt.
|
|
246
|
+
*/
|
|
247
|
+
messageBreakpoints?: number;
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
/**
|
|
251
|
+
* Prompt Cache Middleware — Anthropic prompt caching via `model.input`.
|
|
252
|
+
*
|
|
253
|
+
* Reduces input token costs by marking stable content (system prompt,
|
|
254
|
+
* older conversation messages) with Anthropic `cache_control` breakpoints.
|
|
255
|
+
*
|
|
256
|
+
* How it works:
|
|
257
|
+
*
|
|
258
|
+
* 1. The middleware runs before each `streamText()` call via `model.input`
|
|
259
|
+
* 2. It detects if the model is Anthropic (via provider string or model ID)
|
|
260
|
+
* 3. For Anthropic models, it sets `cache_control: { type: "ephemeral" }`
|
|
261
|
+
* on the system prompt and on stable conversation messages
|
|
262
|
+
* 4. The AI SDK's Anthropic provider translates these into API-level
|
|
263
|
+
* `cache_control` fields that Anthropic uses for prefix caching
|
|
264
|
+
*
|
|
265
|
+
* Cost impact:
|
|
266
|
+
* - First call per cache region: 1.25× write cost (25% premium)
|
|
267
|
+
* - Subsequent calls hitting cache: 0.1× read cost (90% savings)
|
|
268
|
+
* - Cache TTL: 5 minutes (default) or 1 hour
|
|
269
|
+
* - Net savings on multi-turn conversations: ~75%
|
|
270
|
+
*
|
|
271
|
+
* Anthropic limits: max 4 cache breakpoints per request.
|
|
272
|
+
* This middleware uses 1 for system + up to `messageBreakpoints` for messages.
|
|
273
|
+
*
|
|
274
|
+
* @example
|
|
275
|
+
* ```typescript
|
|
276
|
+
* import { promptCacheMiddleware } from "@cuylabs/agent-core/middleware";
|
|
277
|
+
*
|
|
278
|
+
* const agent = createAgent({
|
|
279
|
+
* model: anthropic("claude-sonnet-4-20250514"),
|
|
280
|
+
* middleware: [promptCacheMiddleware()],
|
|
281
|
+
* });
|
|
282
|
+
* ```
|
|
283
|
+
*/
|
|
284
|
+
|
|
285
|
+
/**
|
|
286
|
+
* Create a prompt cache middleware for Anthropic models.
|
|
287
|
+
*
|
|
288
|
+
* Automatically detects Anthropic models and adds `cache_control`
|
|
289
|
+
* breakpoints to the system prompt and stable conversation messages.
|
|
290
|
+
* Non-Anthropic models are passed through unchanged.
|
|
291
|
+
*
|
|
292
|
+
* @param config - Optional configuration (TTL, breakpoint count)
|
|
293
|
+
* @returns An `AgentMiddleware` that enables Anthropic prompt caching
|
|
294
|
+
*
|
|
295
|
+
* @example
|
|
296
|
+
* ```typescript
|
|
297
|
+
* import { anthropic } from "@ai-sdk/anthropic";
|
|
298
|
+
* import { createAgent } from "@cuylabs/agent-core";
|
|
299
|
+
* import { promptCacheMiddleware } from "@cuylabs/agent-core/middleware";
|
|
300
|
+
*
|
|
301
|
+
* const agent = createAgent({
|
|
302
|
+
* model: anthropic("claude-sonnet-4-20250514"),
|
|
303
|
+
* middleware: [promptCacheMiddleware()],
|
|
304
|
+
* });
|
|
305
|
+
*
|
|
306
|
+
* // With custom config
|
|
307
|
+
* const agent2 = createAgent({
|
|
308
|
+
* model: anthropic("claude-sonnet-4-20250514"),
|
|
309
|
+
* middleware: [
|
|
310
|
+
* promptCacheMiddleware({ ttl: "1h", messageBreakpoints: 2 }),
|
|
311
|
+
* ],
|
|
312
|
+
* });
|
|
313
|
+
* ```
|
|
314
|
+
*/
|
|
315
|
+
declare function promptCacheMiddleware(config?: PromptCacheConfig): AgentMiddleware;
|
|
316
|
+
|
|
317
|
+
export { type ApprovalConfig as A, type CacheTTL as C, type OtelMiddlewareConfig as O, type PromptCacheConfig as P, type RiskLevel as R, type TelemetryConfig as T, type ApprovalRule as a, type ApprovalAction as b, type ApprovalMiddlewareConfig as c, type ApprovalRequest as d, type TelemetryConfigResult as e, approvalMiddleware as f, createTelemetryConfig as g, otelMiddleware as o, promptCacheMiddleware as p };
|