@easynet/agent-tool-buildin 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -0
- package/dist/CoreAdapter.d.ts +37 -0
- package/dist/CoreAdapter.d.ts.map +1 -0
- package/dist/CoreToolsModule.d.ts +48 -0
- package/dist/CoreToolsModule.d.ts.map +1 -0
- package/dist/chunk-BUSYA2B4.js +9 -0
- package/dist/chunk-BUSYA2B4.js.map +1 -0
- package/dist/context.d.ts +9 -0
- package/dist/context.d.ts.map +1 -0
- package/dist/exec/runCommand.d.ts +9 -0
- package/dist/exec/runCommand.d.ts.map +1 -0
- package/dist/fs/deletePath.d.ts +8 -0
- package/dist/fs/deletePath.d.ts.map +1 -0
- package/dist/fs/listDir.d.ts +8 -0
- package/dist/fs/listDir.d.ts.map +1 -0
- package/dist/fs/readText.d.ts +8 -0
- package/dist/fs/readText.d.ts.map +1 -0
- package/dist/fs/searchText.d.ts +8 -0
- package/dist/fs/searchText.d.ts.map +1 -0
- package/dist/fs/sha256.d.ts +8 -0
- package/dist/fs/sha256.d.ts.map +1 -0
- package/dist/fs/writeText.d.ts +8 -0
- package/dist/fs/writeText.d.ts.map +1 -0
- package/dist/http/downloadFile.d.ts +8 -0
- package/dist/http/downloadFile.d.ts.map +1 -0
- package/dist/http/duckduckgoSearch.d.ts +9 -0
- package/dist/http/duckduckgoSearch.d.ts.map +1 -0
- package/dist/http/fetchJson.d.ts +8 -0
- package/dist/http/fetchJson.d.ts.map +1 -0
- package/dist/http/fetchPageMainContent.d.ts +9 -0
- package/dist/http/fetchPageMainContent.d.ts.map +1 -0
- package/dist/http/fetchText.d.ts +8 -0
- package/dist/http/fetchText.d.ts.map +1 -0
- package/dist/http/head.d.ts +8 -0
- package/dist/http/head.d.ts.map +1 -0
- package/dist/index.cjs +3840 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1791 -0
- package/dist/index.js.map +1 -0
- package/dist/jmespath-6W7SK7AH.js +1514 -0
- package/dist/jmespath-6W7SK7AH.js.map +1 -0
- package/dist/mustache-CS7KHA4H.js +467 -0
- package/dist/mustache-CS7KHA4H.js.map +1 -0
- package/dist/security/sandbox.d.ts +9 -0
- package/dist/security/sandbox.d.ts.map +1 -0
- package/dist/security/ssrf.d.ts +12 -0
- package/dist/security/ssrf.d.ts.map +1 -0
- package/dist/types.d.ts +55 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/util/hashText.d.ts +8 -0
- package/dist/util/hashText.d.ts.map +1 -0
- package/dist/util/jsonSelect.d.ts +8 -0
- package/dist/util/jsonSelect.d.ts.map +1 -0
- package/dist/util/now.d.ts +8 -0
- package/dist/util/now.d.ts.map +1 -0
- package/dist/util/templateRender.d.ts +8 -0
- package/dist/util/templateRender.d.ts.map +1 -0
- package/dist/util/truncate.d.ts +8 -0
- package/dist/util/truncate.d.ts.map +1 -0
- package/package.json +48 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1791 @@
|
|
|
1
|
+
import "./chunk-BUSYA2B4.js";
|
|
2
|
+
|
|
3
|
+
// context.ts
|
|
4
|
+
import { AsyncLocalStorage } from "async_hooks";
|
|
5
|
+
var storage = new AsyncLocalStorage();
|
|
6
|
+
/**
 * Return the builtin tool context bound to the current async execution
 * scope. Throws when no context is installed, i.e. when a handler is
 * called directly instead of through CoreAdapter.invoke.
 */
function getBuiltinContext() {
  const current = storage.getStore();
  if (!current) {
    throw new Error("Builtin context not set; invoke only through CoreAdapter.");
  }
  return current;
}
|
|
11
|
+
/**
 * Execute `fn` with `ctx` installed as the builtin context for the
 * duration of the (possibly async) call chain.
 */
function runWithBuiltinContext(ctx, fn) {
  const invoke = () => fn();
  return storage.run(ctx, invoke);
}
|
|
14
|
+
|
|
15
|
+
// CoreAdapter.ts
|
|
16
|
+
// CoreAdapter.ts — dispatches core tool invocations to registered handlers.
var CoreAdapter = class {
  kind = "core";
  handlers = new Map();
  config;

  constructor(config) {
    this.config = config;
  }

  /** Register a handler for a specific core tool name. */
  registerHandler(toolName, handler) {
    this.handlers.set(toolName, handler);
  }

  /** Unregister a handler; returns true when one was actually removed. */
  unregisterHandler(toolName) {
    return this.handlers.delete(toolName);
  }

  /** List registered core tool names. */
  getRegisteredTools() {
    return [...this.handlers.keys()];
  }

  /**
   * Dispatch to the handler registered under `spec.name`, running it with
   * a builtin context built from the execution context and adapter config.
   * Returns { result, raw: { evidence } } from the handler's output.
   */
  async invoke(spec, args, ctx) {
    const handler = this.handlers.get(spec.name);
    if (!handler) {
      const known = this.getRegisteredTools().join(", ");
      throw new Error(
        `Core tool handler not found: ${spec.name}. Available: [${known}]`
      );
    }
    const coreCtx = { execCtx: ctx, config: this.config };
    const output = await runWithBuiltinContext(coreCtx, () => handler(args));
    return { result: output.result, raw: { evidence: output.evidence } };
  }
};
|
|
65
|
+
|
|
66
|
+
// CoreToolsModule.ts
|
|
67
|
+
import { createToolSpec } from "@easynet/agent-tool/core";
|
|
68
|
+
|
|
69
|
+
// types.ts
|
|
70
|
+
// types.ts — baseline configuration shared by the builtin core tools.
// Sizes are in bytes; durations are in milliseconds. Callers may override
// any field when constructing the adapter.
var DEFAULT_CORE_TOOLS_CONFIG = {
  // fs.readText refuses files larger than this (5 MiB).
  maxReadBytes: 5 * 1024 * 1024,
  // HTTP fetch helpers cap response bodies at this size (5 MiB).
  maxHttpBytes: 5 * 1024 * 1024,
  // Presumably caps http.downloadFile (handler not in view) — confirm. 100 MiB.
  maxDownloadBytes: 100 * 1024 * 1024,
  // SSRF guard: loopback, RFC1918 private ranges, link-local, and the
  // IPv6 loopback/unique-local/link-local ranges. Consulted by validateUrl.
  blockedCidrs: [
    "127.0.0.0/8",
    "10.0.0.0/8",
    "172.16.0.0/12",
    "192.168.0.0/16",
    "169.254.0.0/16",
    "::1/128",
    "fc00::/7",
    "fe80::/10"
  ],
  // Default HTTP request timeout (15 s), used when args.timeoutMs is absent.
  defaultTimeoutMs: 15e3,
  // User-Agent applied when the caller supplies none.
  httpUserAgent: "agent-tool-core/1.0",
  // NOTE(review): not referenced by any handler in view — confirm consumer.
  enableAutoWriteLargeResponses: false,
  // Presumably the exec.runCommand allow-list (handler not in view) — a set
  // of read-only / low-risk shell utilities. Confirm against runCommand.
  allowedCommands: [
    "cat",
    "echo",
    "env",
    "find",
    "grep",
    "head",
    "ls",
    "pwd",
    "tail",
    "wc",
    "whoami"
  ],
  // Presumably caps captured command output at 1 MiB — confirm in runCommand.
  maxCommandOutputBytes: 1024 * 1024,
  // Presumably the per-command kill timeout (10 s) — confirm in runCommand.
  commandTimeoutMs: 1e4
};
|
|
103
|
+
|
|
104
|
+
// fs/readText.ts
|
|
105
|
+
import { readFile, stat } from "fs/promises";
|
|
106
|
+
|
|
107
|
+
// security/sandbox.ts
|
|
108
|
+
import { resolve, normalize, dirname, basename } from "path";
|
|
109
|
+
import { realpath, access } from "fs/promises";
|
|
110
|
+
import { createTaggedError } from "@easynet/agent-tool";
|
|
111
|
+
// security/sandbox.ts
/**
 * Resolve `inputPath` relative to `sandboxRoot` and verify the result stays
 * inside the sandbox. Symlinks are followed via realpath so a link cannot
 * escape the root; when the target does not exist yet (e.g. a file about to
 * be written), the parent directory is canonicalized instead so the write
 * location is still validated.
 *
 * Throws a tagged PATH_OUTSIDE_SANDBOX error on escape attempts.
 * Returns the canonical absolute path on success.
 */
async function resolveSandboxedPath(inputPath, sandboxRoot) {
  // Canonicalize the sandbox root itself; fall back to a lexical normalize
  // if the root directory does not exist yet.
  let normalizedRoot;
  try {
    normalizedRoot = await realpath(resolve(sandboxRoot));
  } catch {
    normalizedRoot = normalize(resolve(sandboxRoot));
  }
  const resolved = resolve(normalizedRoot, inputPath);
  let real;
  try {
    // Existing target: canonicalize fully (follows symlinks).
    await access(resolved);
    real = await realpath(resolved);
  } catch {
    // Missing target: canonicalize the parent directory and re-attach the
    // final component so "new file in real dir" paths are still checked.
    const parentDir = dirname(resolved);
    let realParent;
    try {
      await access(parentDir);
      realParent = await realpath(parentDir);
    } catch {
      realParent = normalize(parentDir);
    }
    real = resolve(realParent, basename(resolved));
  }
  if (!isWithinRoot(real, normalizedRoot)) {
    throw createTaggedError(
      "PATH_OUTSIDE_SANDBOX",
      `Path "${inputPath}" resolves to "${real}" which is outside sandbox "${normalizedRoot}"`,
      { inputPath, resolvedPath: real, sandboxRoot: normalizedRoot }
    );
  }
  return real;
}
|
|
143
|
+
/**
 * True when `path` equals `root` or lies beneath it after normalization.
 * NOTE(review): uses "/" as the separator, so this presumably targets
 * POSIX paths only — confirm before relying on it on Windows.
 */
function isWithinRoot(path, root) {
  const p = normalize(path);
  const r = normalize(root);
  if (p === r) {
    return true;
  }
  return p.startsWith(`${r}/`);
}
|
|
148
|
+
|
|
149
|
+
// fs/readText.ts
|
|
150
|
+
import { createTaggedError as createTaggedError2 } from "@easynet/agent-tool";
|
|
151
|
+
// fs/readText.ts — read a UTF-8 text file from inside the sandbox.
var readTextHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const limit = args.maxBytes ?? ctx.config.maxReadBytes;
  const resolvedPath = await resolveSandboxedPath(args.path, ctx.config.sandboxRoot);
  const fileStat = await stat(resolvedPath);
  // Refuse oversized files up front rather than truncating silently.
  if (fileStat.size > limit) {
    throw createTaggedError2(
      "FILE_TOO_LARGE",
      `File size ${fileStat.size} bytes exceeds limit of ${limit} bytes`,
      { path: resolvedPath, size: fileStat.size, limit }
    );
  }
  const text = await readFile(resolvedPath, "utf-8");
  return {
    result: {
      path: resolvedPath,
      text,
      bytes: fileStat.size
    },
    evidence: [{
      type: "file",
      ref: resolvedPath,
      summary: `Read ${fileStat.size} bytes from ${resolvedPath}`,
      createdAt: new Date().toISOString()
    }]
  };
});
|
|
181
|
+
|
|
182
|
+
// fs/writeText.ts
|
|
183
|
+
import { writeFile, mkdir } from "fs/promises";
|
|
184
|
+
import { createHash } from "crypto";
|
|
185
|
+
import { dirname as dirname2 } from "path";
|
|
186
|
+
// fs/writeText.ts — write UTF-8 text inside the sandbox, with overwrite guard.
var writeTextHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const text = args.text;
  const overwrite = args.overwrite ?? false;
  const mkdirp = args.mkdirp ?? true;
  const resolvedPath = await resolveSandboxedPath(args.path, ctx.config.sandboxRoot);
  if (!overwrite) {
    // Probe for an existing file; only an accessible existing path blocks the write.
    const { access: probe } = await import("fs/promises");
    let exists = false;
    try {
      await probe(resolvedPath);
      exists = true;
    } catch {
      // Path is absent or unreadable; proceed and let the write itself fail if needed.
    }
    if (exists) {
      throw new Error(
        `File already exists: ${resolvedPath}. Set overwrite=true to allow overwriting.`
      );
    }
  }
  if (mkdirp) {
    await mkdir(dirname2(resolvedPath), { recursive: true });
  }
  await writeFile(resolvedPath, text, "utf-8");
  const bytes = Buffer.byteLength(text, "utf-8");
  const sha256 = createHash("sha256").update(text).digest("hex");
  return {
    result: {
      path: resolvedPath,
      bytes,
      sha256
    },
    evidence: [{
      type: "file",
      ref: resolvedPath,
      summary: `Wrote ${bytes} bytes to ${resolvedPath} (sha256: ${sha256.slice(0, 12)}...)`,
      createdAt: new Date().toISOString()
    }]
  };
});
|
|
229
|
+
|
|
230
|
+
// fs/listDir.ts
|
|
231
|
+
import { readdir, stat as stat2 } from "fs/promises";
|
|
232
|
+
import { resolve as resolve2, join } from "path";
|
|
233
|
+
// fs/listDir.ts — list directory entries inside the sandbox.
var listDirHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const resolvedPath = await resolveSandboxedPath(args.path, ctx.config.sandboxRoot);
  const entries = [];
  let truncated = false;
  await walkDir(resolvedPath, "", entries, {
    maxEntries: args.maxEntries ?? 2e3,
    includeHidden: args.includeHidden ?? false,
    recursive: args.recursive ?? false,
    maxDepth: args.maxDepth ?? 5,
    currentDepth: 0,
    onTruncate: () => {
      truncated = true;
    }
  });
  const suffix = truncated ? " (truncated)" : "";
  return {
    result: {
      path: resolvedPath,
      entries,
      totalEntries: entries.length,
      truncated
    },
    evidence: [{
      type: "tool",
      ref: `core/fs.listDir:${resolvedPath}`,
      summary: `Listed ${entries.length} entries in ${resolvedPath}${suffix}`,
      createdAt: new Date().toISOString()
    }]
  };
});
|
|
270
|
+
/**
 * Recursively collect directory entries under `basePath`, honoring
 * maxEntries / includeHidden / recursive / maxDepth from `options`.
 * Entry names are reported relative to `basePath`; entries whose stat
 * fails keep size 0 and an empty mtime. Calls options.onTruncate() and
 * stops once the entry cap is reached.
 */
async function walkDir(basePath, relativePath, entries, options) {
  const atCapacity = () => entries.length >= options.maxEntries;
  if (atCapacity()) {
    options.onTruncate();
    return;
  }
  const fullPath = relativePath ? resolve2(basePath, relativePath) : basePath;
  const dirents = await readdir(fullPath, { withFileTypes: true });
  for (const dirent of dirents) {
    if (atCapacity()) {
      options.onTruncate();
      return;
    }
    if (dirent.name.startsWith(".") && !options.includeHidden) {
      continue;
    }
    const entryPath = join(fullPath, dirent.name);
    const entryRelative = relativePath ? join(relativePath, dirent.name) : dirent.name;
    // Symlink takes precedence over directory/file classification.
    const entryType = dirent.isSymbolicLink() ? "symlink"
      : dirent.isDirectory() ? "directory"
      : dirent.isFile() ? "file"
      : "other";
    let size = 0;
    let mtime = "";
    try {
      const st = await stat2(entryPath);
      size = st.size;
      mtime = st.mtime.toISOString();
    } catch {
      // Entry vanished or is unreadable; keep zero/empty metadata.
    }
    entries.push({ name: entryRelative, type: entryType, size, mtime });
    const descend = options.recursive
      && entryType === "directory"
      && options.currentDepth < options.maxDepth;
    if (descend) {
      await walkDir(basePath, entryRelative, entries, {
        ...options,
        currentDepth: options.currentDepth + 1
      });
    }
  }
}
|
|
319
|
+
|
|
320
|
+
// fs/searchText.ts
|
|
321
|
+
import { readdir as readdir2, stat as stat3 } from "fs/promises";
|
|
322
|
+
import { createReadStream } from "fs";
|
|
323
|
+
import { createInterface } from "readline";
|
|
324
|
+
import { join as join2, relative } from "path";
|
|
325
|
+
// fs/searchText.ts — case-insensitive regex search across text files under a
// sandboxed root, bounded by maxFiles / maxMatches.
var searchTextHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const query = args.query;
  const glob = args.glob ?? "**/*.{md,txt,log,json,ts,js,py,java,scala}";
  const maxMatches = args.maxMatches ?? 100;
  const maxFiles = args.maxFiles ?? 500;
  const resolvedRoot = await resolveSandboxedPath(args.root, ctx.config.sandboxRoot);
  // Interpret the query as a regex; fall back to a literal match when invalid.
  let regex;
  try {
    regex = new RegExp(query, "i");
  } catch {
    regex = new RegExp(escapeRegExp(query), "i");
  }
  const files = [];
  await collectFiles(resolvedRoot, files, {
    maxFiles,
    extensions: parseGlobExtensions(glob)
  });
  const matches = [];
  let filesScanned = 0;
  let truncated = false;
  for (const filePath of files) {
    if (matches.length >= maxMatches) {
      truncated = true;
      break;
    }
    filesScanned += 1;
    await searchFile(filePath, resolvedRoot, regex, matches, maxMatches);
  }
  if (matches.length >= maxMatches) {
    truncated = true;
  }
  const suffix = truncated ? " (truncated)" : "";
  return {
    result: {
      root: resolvedRoot,
      query,
      matches,
      totalMatches: matches.length,
      filesScanned,
      truncated
    },
    evidence: [{
      type: "tool",
      ref: `core/fs.searchText:${resolvedRoot}`,
      summary: `Found ${matches.length} matches in ${filesScanned} files under ${resolvedRoot}${suffix}`,
      createdAt: new Date().toISOString()
    }]
  };
});
|
|
375
|
+
/**
 * Depth-first file collection under `dirPath`, skipping dot-directories and
 * node_modules, filtering by extension when `options.extensions` is
 * non-empty, and stopping at `options.maxFiles`. Unreadable directories are
 * skipped silently.
 */
async function collectFiles(dirPath, files, options) {
  if (files.length >= options.maxFiles) return;
  let dirents;
  try {
    dirents = await readdir2(dirPath, { withFileTypes: true });
  } catch {
    return;
  }
  for (const dirent of dirents) {
    if (files.length >= options.maxFiles) return;
    const fullPath = join2(dirPath, dirent.name);
    if (dirent.isDirectory()) {
      const skip = dirent.name.startsWith(".") || dirent.name === "node_modules";
      if (!skip) {
        await collectFiles(fullPath, files, options);
      }
    } else if (dirent.isFile()) {
      let keep = true;
      if (options.extensions.size > 0) {
        const ext = getExtension(dirent.name);
        keep = Boolean(ext) && options.extensions.has(ext);
      }
      if (keep) {
        files.push(fullPath);
      }
    }
  }
}
|
|
398
|
+
/**
 * Scan one file line-by-line for `regex`, appending matches (root-relative
 * file name, 1-based line number, 200-char excerpt) to `matches`. Skips
 * missing files and files larger than 1 MiB; stops reading once `matches`
 * reaches `maxMatches`.
 */
async function searchFile(filePath, root, regex, matches, maxMatches) {
  const fileStat = await stat3(filePath).catch(() => null);
  if (fileStat === null || fileStat.size > 1024 * 1024) return;
  const stream = createReadStream(filePath, { encoding: "utf-8" });
  const rl = createInterface({ input: stream, crlfDelay: Infinity });
  let lineNo = 0;
  for await (const line of rl) {
    lineNo += 1;
    if (matches.length >= maxMatches) {
      stream.destroy();
      break;
    }
    if (!regex.test(line)) {
      continue;
    }
    matches.push({
      file: relative(root, filePath),
      lineNo,
      excerpt: line.slice(0, 200)
    });
  }
}
|
|
419
|
+
/**
 * Extract the extension list from a simple glob. Supports "*.{a,b,c}" brace
 * groups and single "*.ext" patterns; anything else yields an empty set,
 * meaning "no extension filter".
 */
function parseGlobExtensions(glob) {
  const extensions = new Set();
  const brace = /\*\.\{([^}]+)\}/.exec(glob);
  if (brace !== null) {
    brace[1].split(",").forEach((ext) => extensions.add(ext.trim()));
    return extensions;
  }
  const single = /\*\.(\w+)/.exec(glob);
  if (single !== null) {
    extensions.add(single[1]);
  }
  return extensions;
}
|
|
434
|
+
/**
 * Return the extension after the final dot, or null when there is no dot
 * or the name is a dotfile like ".gitignore".
 */
function getExtension(filename) {
  const idx = filename.lastIndexOf(".");
  return idx > 0 ? filename.slice(idx + 1) : null;
}
|
|
439
|
+
/** Escape regex metacharacters so `str` can be matched literally. */
function escapeRegExp(str) {
  const metachars = /[.*+?^${}()|[\]\\]/g;
  return str.replace(metachars, "\\$&");
}
|
|
442
|
+
|
|
443
|
+
// fs/sha256.ts
|
|
444
|
+
import { createReadStream as createReadStream2 } from "fs";
|
|
445
|
+
import { stat as stat4 } from "fs/promises";
|
|
446
|
+
import { createHash as createHash2 } from "crypto";
|
|
447
|
+
// fs/sha256.ts — stream-hash a sandboxed file with SHA-256.
var sha256Handler = (async (args) => {
  const ctx = getBuiltinContext();
  const resolvedPath = await resolveSandboxedPath(args.path, ctx.config.sandboxRoot);
  const fileStat = await stat4(resolvedPath);
  // Stream the file through the hasher so large files never load fully into memory.
  const hash = await new Promise((done, fail) => {
    const hasher = createHash2("sha256");
    createReadStream2(resolvedPath)
      .on("data", (chunk) => hasher.update(chunk))
      .on("end", () => done(hasher.digest("hex")))
      .on("error", fail);
  });
  return {
    result: {
      sha256: hash,
      path: resolvedPath,
      bytes: fileStat.size
    },
    evidence: [{
      type: "file",
      ref: resolvedPath,
      summary: `SHA-256 of ${resolvedPath} (${fileStat.size} bytes): ${hash.slice(0, 16)}...`,
      createdAt: new Date().toISOString()
    }]
  };
});
|
|
475
|
+
|
|
476
|
+
// fs/deletePath.ts
|
|
477
|
+
import { rm, unlink, rmdir, stat as stat5 } from "fs/promises";
|
|
478
|
+
// fs/deletePath.ts — delete a sandboxed file or directory; requires confirm=true
// and refuses to remove the sandbox root itself.
var deletePathHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const recursive = args.recursive ?? false;
  if (!args.confirm) {
    throw new Error(
      "Deletion not confirmed. Set confirm=true to proceed with deletion."
    );
  }
  const resolvedPath = await resolveSandboxedPath(args.path, ctx.config.sandboxRoot);
  // Canonicalize the sandbox root so the comparison below cannot be dodged
  // via a symlinked root.
  let realSandboxRoot;
  try {
    const { realpath: rp } = await import("fs/promises");
    realSandboxRoot = await rp(ctx.config.sandboxRoot);
  } catch {
    realSandboxRoot = ctx.config.sandboxRoot;
  }
  if (resolvedPath === realSandboxRoot) {
    throw new Error("Cannot delete the sandbox root directory.");
  }
  const fileStat = await stat5(resolvedPath);
  const isDirectory = fileStat.isDirectory();
  if (!isDirectory) {
    await unlink(resolvedPath);
  } else if (recursive) {
    await rm(resolvedPath, { recursive: true, force: true });
  } else {
    await rmdir(resolvedPath);
  }
  const kindLabel = isDirectory ? "directory" : "file";
  return {
    result: {
      path: resolvedPath,
      deleted: true,
      type: kindLabel
    },
    evidence: [{
      type: "file",
      ref: resolvedPath,
      summary: `Deleted ${kindLabel}: ${resolvedPath}${recursive ? " (recursive)" : ""}`,
      createdAt: new Date().toISOString()
    }]
  };
});
|
|
526
|
+
|
|
527
|
+
// security/ssrf.ts
|
|
528
|
+
import { lookup } from "dns/promises";
|
|
529
|
+
import { createTaggedError as createTaggedError3 } from "@easynet/agent-tool";
|
|
530
|
+
// security/ssrf.ts
/**
 * SSRF guard for outbound HTTP. Validates that `url` parses, uses http/https,
 * has a hostname on the allow-list, and does not resolve (via DNS) to an IP
 * inside any blocked CIDR. Throws a tagged HTTP_DISALLOWED_HOST error on any
 * failure; returns the parsed URL on success.
 *
 * NOTE(review): only the first resolved address from lookup() is checked, so
 * a host with multiple A records (or later re-resolution by fetch) could in
 * principle bypass the CIDR check — confirm whether that risk is acceptable.
 */
async function validateUrl(url, allowedHosts, blockedCidrs) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    throw createTaggedError3(
      "HTTP_DISALLOWED_HOST",
      `Invalid URL: ${url}`,
      { url }
    );
  }
  // Only plain HTTP(S); rejects file:, ftp:, data:, etc.
  if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
    throw createTaggedError3(
      "HTTP_DISALLOWED_HOST",
      `Protocol not allowed: ${parsed.protocol}. Only http: and https: are supported.`,
      { url, protocol: parsed.protocol }
    );
  }
  const hostname = parsed.hostname;
  if (!isHostAllowed(hostname, allowedHosts)) {
    throw createTaggedError3(
      "HTTP_DISALLOWED_HOST",
      `Host "${hostname}" is not in the allowed hosts list`,
      { url, hostname, allowedHosts }
    );
  }
  try {
    // Resolve and reject hosts pointing at blocked (private/loopback) ranges.
    const { address } = await lookup(hostname);
    if (isIpInBlockedCidrs(address, blockedCidrs)) {
      throw createTaggedError3(
        "HTTP_DISALLOWED_HOST",
        `Host "${hostname}" resolves to blocked IP: ${address}`,
        { url, hostname, resolvedIp: address }
      );
    }
  } catch (err) {
    // NOTE(review): assumes createTaggedError attaches a `kind` property to
    // the Error — confirm against @easynet/agent-tool; if the tag lives under
    // another property, the blocked-IP error below gets relabeled as a DNS
    // failure (still thrown, but with the wrong message/details).
    if (err instanceof Error && err.kind === "HTTP_DISALLOWED_HOST") {
      throw err;
    }
    throw createTaggedError3(
      "HTTP_DISALLOWED_HOST",
      `DNS resolution failed for host "${hostname}": ${err instanceof Error ? err.message : String(err)}`,
      { url, hostname }
    );
  }
  return parsed;
}
|
|
577
|
+
/**
 * Allow-list check for hostnames. Plain entries must match exactly;
 * "*.example.com" entries match any subdomain of example.com as well as
 * the bare "example.com" itself.
 */
function isHostAllowed(hostname, allowedHosts) {
  return allowedHosts.some((pattern) => {
    if (!pattern.startsWith("*.")) {
      return hostname === pattern;
    }
    const bare = pattern.slice(2);
    // pattern.slice(1) keeps the leading dot, so "evilexample.com" cannot
    // sneak past an "*.example.com" entry.
    return hostname === bare || hostname.endsWith(pattern.slice(1));
  });
}
|
|
590
|
+
/**
 * True when `ip` falls inside any CIDR in `cidrs`. IPv4 and IPv4-mapped
 * IPv6 addresses ("::ffff:a.b.c.d") are compared against the IPv4 CIDRs;
 * IPv6 addresses are compared against the IPv6 CIDRs.
 *
 * BUG FIX: previously a plain IPv6 address (e.g. "::1") made normalizeIp()
 * return null and the function returned false immediately, so the IPv6
 * blocked ranges ("::1/128", "fc00::/7", "fe80::/10") were never consulted
 * and IPv6 loopback/private targets were never blocked.
 */
function isIpInBlockedCidrs(ip, cidrs) {
  const v4 = normalizeIp(ip);
  const isV6 = ip.includes(":");
  // Neither a recognizable IPv4 nor an IPv6 literal: nothing to match against.
  if (v4 === null && !isV6) return false;
  for (const cidr of cidrs) {
    if (cidr.includes(":")) {
      // IPv6 CIDR: only meaningful for IPv6 (including IPv4-mapped) input.
      if (isV6 && isIpv6InCidr(ip, cidr)) return true;
    } else {
      // IPv4 CIDR: compare against the normalized dotted-quad form.
      if (v4 !== null && isIpv4InCidr(v4, cidr)) return true;
    }
  }
  return false;
}
|
|
603
|
+
/**
 * Reduce `ip` to a dotted-quad IPv4 string when possible: strips the
 * "::ffff:" IPv4-mapped prefix, passes plain IPv4 through unchanged, and
 * returns null for anything else (including plain IPv6 literals).
 */
function normalizeIp(ip) {
  const MAPPED_PREFIX = "::ffff:";
  if (ip.startsWith(MAPPED_PREFIX)) {
    return ip.slice(MAPPED_PREFIX.length);
  }
  return /^\d+\.\d+\.\d+\.\d+$/.test(ip) ? ip : null;
}
|
|
612
|
+
/** True when dotted-quad `ip` falls inside IPv4 CIDR `cidr` (e.g. "10.0.0.0/8"). */
function isIpv4InCidr(ip, cidr) {
  const [cidrIp, prefixStr] = cidr.split("/");
  if (!cidrIp || !prefixStr) return false;
  const prefix = Number.parseInt(prefixStr, 10);
  if (Number.isNaN(prefix) || prefix < 0 || prefix > 32) return false;
  const ipNum = ipv4ToNum(ip);
  const cidrNum = ipv4ToNum(cidrIp);
  if (ipNum === null || cidrNum === null) return false;
  // Build the network mask; >>> 0 keeps it an unsigned 32-bit value.
  const mask = prefix === 0 ? 0 : (~0 << (32 - prefix)) >>> 0;
  return (ipNum & mask) === (cidrNum & mask);
}
|
|
623
|
+
/** Parse a dotted-quad IPv4 string into an unsigned 32-bit integer, or null. */
function ipv4ToNum(ip) {
  const parts = ip.split(".");
  if (parts.length !== 4) return null;
  let acc = 0;
  for (const part of parts) {
    const octet = Number.parseInt(part, 10);
    if (Number.isNaN(octet) || octet < 0 || octet > 255) return null;
    acc = (acc << 8) | octet;
  }
  // >>> 0 converts the signed 32-bit accumulator to unsigned.
  return acc >>> 0;
}
|
|
634
|
+
/** True when IPv6 `ip` falls inside IPv6 CIDR `cidr` (e.g. "fc00::/7"). */
function isIpv6InCidr(ip, cidr) {
  const [cidrIp, prefixStr] = cidr.split("/");
  if (!cidrIp || !prefixStr) return false;
  const prefix = Number.parseInt(prefixStr, 10);
  if (Number.isNaN(prefix)) return false;
  const ipBytes = expandIpv6(ip);
  const cidrBytes = expandIpv6(cidrIp);
  if (!ipBytes || !cidrBytes) return false;
  // Compare the whole bytes covered by the prefix first...
  const wholeBytes = Math.floor(prefix / 8);
  for (let i = 0; i < wholeBytes && i < 16; i++) {
    if (ipBytes[i] !== cidrBytes[i]) return false;
  }
  // ...then any leftover high-order bits of the next byte.
  const leftoverBits = prefix % 8;
  if (leftoverBits > 0 && wholeBytes < 16) {
    const mask = (~0 << (8 - leftoverBits)) & 255;
    if ((ipBytes[wholeBytes] & mask) !== (cidrBytes[wholeBytes] & mask)) {
      return false;
    }
  }
  return true;
}
|
|
653
|
+
/**
 * Expand an IPv6 literal to its 16-byte array form, handling a single "::"
 * compression and stripping any "%zone" suffix. Returns null when the
 * address is malformed. NOTE(review): an embedded IPv4 tail such as
 * "::ffff:1.2.3.4" is not handled here — confirm callers strip it first.
 */
function expandIpv6(ip) {
  const zoneIdx = ip.indexOf("%");
  const bare = zoneIdx === -1 ? ip : ip.slice(0, zoneIdx);
  const halves = bare.split("::");
  if (halves.length > 2) return null;
  // Turn a colon-separated group list into pairs of bytes.
  const toBytes = (segment) => {
    if (!segment) return [];
    return segment.split(":").flatMap((hex) => {
      const word = parseInt(hex || "0", 16);
      return [(word >> 8) & 255, word & 255];
    });
  };
  if (halves.length === 1) {
    // No "::" compression: must already be all eight groups.
    const full = toBytes(halves[0]);
    return full.length === 16 ? full : null;
  }
  const head = toBytes(halves[0]);
  const tail = toBytes(halves[1]);
  if (head.length + tail.length > 16) return null;
  const bytes = new Array(16).fill(0);
  head.forEach((b, i) => {
    bytes[i] = b;
  });
  tail.forEach((b, i) => {
    bytes[16 - tail.length + i] = b;
  });
  return bytes;
}
|
|
678
|
+
|
|
679
|
+
// http/fetchText.ts
|
|
680
|
+
import { createTaggedError as createTaggedError4 } from "@easynet/agent-tool";
|
|
681
|
+
// http/fetchText.ts
/**
 * Fetch a URL as text with SSRF validation, a hard timeout, and a response
 * size cap. Tagged errors: HTTP_TIMEOUT on abort, UPSTREAM_ERROR on other
 * fetch failures, HTTP_TOO_LARGE when the declared or actual body size
 * exceeds maxBytes. Non-2xx statuses are returned, not thrown.
 */
var fetchTextHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const url = args.url;
  const method = args.method ?? "GET";
  const headers = args.headers ?? {};
  const body = args.body ?? void 0;
  const timeoutMs = args.timeoutMs ?? ctx.config.defaultTimeoutMs;
  const maxBytes = args.maxBytes ?? ctx.config.maxHttpBytes;
  // SSRF guard: allow-listed hosts only, blocked-CIDR DNS check.
  await validateUrl(url, ctx.config.allowedHosts, ctx.config.blockedCidrs);
  // Apply the configured User-Agent unless the caller set one (either casing).
  if (!headers["User-Agent"] && !headers["user-agent"]) {
    headers["User-Agent"] = ctx.config.httpUserAgent;
  }
  // Enforce the timeout by aborting the fetch; the timer is cleared in finally.
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  let response;
  try {
    response = await fetch(url, {
      method,
      headers,
      body: body ?? void 0,
      signal: controller.signal
    });
  } catch (err) {
    if (err instanceof Error && err.name === "AbortError") {
      throw createTaggedError4(
        "HTTP_TIMEOUT",
        `Request to ${url} timed out after ${timeoutMs}ms`,
        { url, timeoutMs }
      );
    }
    throw createTaggedError4(
      "UPSTREAM_ERROR",
      `Fetch failed for ${url}: ${err instanceof Error ? err.message : String(err)}`,
      { url }
    );
  } finally {
    clearTimeout(timer);
  }
  // Fast-path rejection when the server declares an oversized body up front.
  const contentLength = response.headers.get("content-length");
  if (contentLength && parseInt(contentLength, 10) > maxBytes) {
    throw createTaggedError4(
      "HTTP_TOO_LARGE",
      `Response Content-Length ${contentLength} exceeds limit of ${maxBytes} bytes`,
      { url, contentLength: parseInt(contentLength, 10), limit: maxBytes }
    );
  }
  // Stream the body with an enforced byte cap (covers missing/lying Content-Length).
  const text = await readResponseWithLimit(response, maxBytes, url);
  const bytes = Buffer.byteLength(text, "utf-8");
  const responseHeaders = {};
  response.headers.forEach((value, key) => {
    responseHeaders[key] = value;
  });
  return {
    result: {
      url,
      status: response.status,
      headers: responseHeaders,
      text,
      bytes
    },
    evidence: [
      {
        type: "url",
        ref: url,
        summary: `${method} ${url} \u2192 ${response.status} (${bytes} bytes)`,
        createdAt: (/* @__PURE__ */ new Date()).toISOString()
      }
    ]
  };
});
|
|
751
|
+
/**
 * Read a fetch Response body as UTF-8 text, throwing a tagged
 * HTTP_TOO_LARGE error once more than `maxBytes` raw bytes have been
 * consumed. Falls back to response.text() when there is no body stream.
 */
async function readResponseWithLimit(response, maxBytes, url) {
  if (!response.body) {
    return response.text();
  }
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  const pieces = [];
  let consumed = 0;
  try {
    for (;;) {
      const { done, value } = await reader.read();
      if (done) break;
      consumed += value.byteLength;
      if (consumed > maxBytes) {
        reader.cancel();
        throw createTaggedError4(
          "HTTP_TOO_LARGE",
          `Response body exceeded limit of ${maxBytes} bytes while reading from ${url}`,
          { url, bytesRead: consumed, limit: maxBytes }
        );
      }
      // stream:true carries split multi-byte sequences across chunk boundaries.
      pieces.push(decoder.decode(value, { stream: true }));
    }
    pieces.push(decoder.decode());
  } finally {
    reader.releaseLock();
  }
  return pieces.join("");
}
|
|
780
|
+
|
|
781
|
+
// http/fetchJson.ts
|
|
782
|
+
import { createTaggedError as createTaggedError5 } from "@easynet/agent-tool";
|
|
783
|
+
// Tool handler: core/http.fetchJson — fetch a URL and return its body parsed
// as JSON. Enforces the sandbox URL policy (allowed hosts / blocked CIDRs),
// a request timeout via AbortController, and a response-size ceiling.
// Tagged errors: HTTP_TIMEOUT, UPSTREAM_ERROR, HTTP_TOO_LARGE.
var fetchJsonHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const url = args.url;
  const method = args.method ?? "GET";
  const headers = args.headers ?? {};
  const body = args.body ?? void 0;
  // Caller overrides win; otherwise use the module-wide configured limits.
  const timeoutMs = args.timeoutMs ?? ctx.config.defaultTimeoutMs;
  const maxBytes = args.maxBytes ?? ctx.config.maxHttpBytes;
  // SSRF guard: rejects hosts outside the allow-list or inside blocked CIDRs.
  await validateUrl(url, ctx.config.allowedHosts, ctx.config.blockedCidrs);
  // Only inject defaults when the caller supplied neither header casing.
  if (!headers["Accept"] && !headers["accept"]) {
    headers["Accept"] = "application/json";
  }
  if (!headers["User-Agent"] && !headers["user-agent"]) {
    headers["User-Agent"] = ctx.config.httpUserAgent;
  }
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  let response;
  try {
    response = await fetch(url, {
      method,
      headers,
      body: body ?? void 0,
      signal: controller.signal
    });
  } catch (err) {
    // Distinguish our own timeout abort from genuine network failures.
    if (err instanceof Error && err.name === "AbortError") {
      throw createTaggedError5(
        "HTTP_TIMEOUT",
        `Request to ${url} timed out after ${timeoutMs}ms`,
        { url, timeoutMs }
      );
    }
    throw createTaggedError5(
      "UPSTREAM_ERROR",
      `Fetch failed for ${url}: ${err instanceof Error ? err.message : String(err)}`,
      { url }
    );
  } finally {
    clearTimeout(timer);
  }
  // Cheap pre-check: reject on the advertised Content-Length before reading.
  const contentLength = response.headers.get("content-length");
  if (contentLength && parseInt(contentLength, 10) > maxBytes) {
    throw createTaggedError5(
      "HTTP_TOO_LARGE",
      `Response Content-Length ${contentLength} exceeds limit of ${maxBytes} bytes`,
      { url, contentLength: parseInt(contentLength, 10), limit: maxBytes }
    );
  }
  // NOTE(review): unlike fetchText (which streams via readResponseWithLimit),
  // this reads the whole body BEFORE the size check, so a chunked response
  // without Content-Length is fully buffered in memory first — confirm
  // whether streaming enforcement was intended here too.
  const text = await response.text();
  const bytes = Buffer.byteLength(text, "utf-8");
  if (bytes > maxBytes) {
    throw createTaggedError5(
      "HTTP_TOO_LARGE",
      `Response body ${bytes} bytes exceeds limit of ${maxBytes} bytes`,
      { url, bytes, limit: maxBytes }
    );
  }
  let json;
  try {
    json = JSON.parse(text);
  } catch {
    // Non-JSON upstream payload: surface a short preview to aid debugging.
    throw createTaggedError5(
      "UPSTREAM_ERROR",
      `Failed to parse JSON response from ${url}: ${text.slice(0, 200)}`,
      { url, status: response.status, textPreview: text.slice(0, 500) }
    );
  }
  return {
    result: {
      url,
      status: response.status,
      json,
      bytes
    },
    evidence: [
      {
        type: "url",
        ref: url,
        summary: `${method} ${url} \u2192 ${response.status} JSON (${bytes} bytes)`,
        createdAt: (/* @__PURE__ */ new Date()).toISOString()
      }
    ]
  };
});
|
|
868
|
+
|
|
869
|
+
// http/downloadFile.ts
|
|
870
|
+
import { writeFile as writeFile2, mkdir as mkdir2 } from "fs/promises";
|
|
871
|
+
import { createHash as createHash3 } from "crypto";
|
|
872
|
+
import { dirname as dirname3 } from "path";
|
|
873
|
+
import { createTaggedError as createTaggedError6 } from "@easynet/agent-tool";
|
|
874
|
+
// Tool handler: core/http.downloadFile — stream a URL into a file inside the
// sandbox. Enforces the URL policy, a timeout, and an incremental byte limit
// while hashing the content (SHA-256). Tagged errors: HTTP_TIMEOUT,
// UPSTREAM_ERROR, HTTP_TOO_LARGE. A plain Error is thrown when the
// destination exists and overwrite was not requested.
var downloadFileHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const url = args.url;
  const destPath = args.destPath;
  const headers = args.headers ?? {};
  const timeoutMs = args.timeoutMs ?? ctx.config.defaultTimeoutMs;
  const maxBytes = args.maxBytes ?? ctx.config.maxDownloadBytes;
  const overwrite = args.overwrite ?? false;
  // SSRF guard + path containment inside the sandbox root.
  await validateUrl(url, ctx.config.allowedHosts, ctx.config.blockedCidrs);
  const resolvedDest = await resolveSandboxedPath(destPath, ctx.config.sandboxRoot);
  if (!overwrite) {
    // Refuse to clobber an existing file unless the caller opted in.
    // (Rewritten from a throw-inside-try + empty-catch-branch construction
    // to a straightforward existence flag.)
    const { access: access2 } = await import("fs/promises");
    let destExists = false;
    try {
      await access2(resolvedDest);
      destExists = true;
    } catch {
      // access() rejected — destination absent, safe to write.
    }
    if (destExists) {
      throw new Error(
        `File already exists: ${resolvedDest}. Set overwrite=true to allow overwriting.`
      );
    }
  }
  if (!headers["User-Agent"] && !headers["user-agent"]) {
    headers["User-Agent"] = ctx.config.httpUserAgent;
  }
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  let response;
  try {
    response = await fetch(url, {
      method: "GET",
      headers,
      signal: controller.signal
    });
  } catch (err) {
    if (err instanceof Error && err.name === "AbortError") {
      throw createTaggedError6(
        "HTTP_TIMEOUT",
        `Download from ${url} timed out after ${timeoutMs}ms`,
        { url, timeoutMs }
      );
    }
    throw createTaggedError6(
      "UPSTREAM_ERROR",
      `Download failed for ${url}: ${err instanceof Error ? err.message : String(err)}`,
      { url }
    );
  } finally {
    clearTimeout(timer);
  }
  // Cheap pre-check on the advertised size before streaming anything.
  const contentLength = response.headers.get("content-length");
  if (contentLength && parseInt(contentLength, 10) > maxBytes) {
    throw createTaggedError6(
      "HTTP_TOO_LARGE",
      `Download Content-Length ${contentLength} exceeds limit of ${maxBytes} bytes`,
      { url, contentLength: parseInt(contentLength, 10), limit: maxBytes }
    );
  }
  if (!response.body) {
    throw createTaggedError6("UPSTREAM_ERROR", `No response body from ${url}`, { url });
  }
  // Stream the body so the byte limit is enforced incrementally while the
  // hash is computed over exactly the bytes that will be written.
  const reader = response.body.getReader();
  const chunks = [];
  let totalBytes = 0;
  const hasher = createHash3("sha256");
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      totalBytes += value.byteLength;
      if (totalBytes > maxBytes) {
        // cancel() returns a promise; attach a no-op rejection handler so
        // it cannot surface as an unhandled rejection while we throw.
        reader.cancel().catch(() => {});
        throw createTaggedError6(
          "HTTP_TOO_LARGE",
          `Download from ${url} exceeded limit of ${maxBytes} bytes (received ${totalBytes})`,
          { url, bytesRead: totalBytes, limit: maxBytes }
        );
      }
      chunks.push(value);
      hasher.update(value);
    }
  } finally {
    reader.releaseLock();
  }
  const sha256 = hasher.digest("hex");
  // Create parent directories on demand, then write atomically-enough for
  // a tool invocation (single writeFile call).
  await mkdir2(dirname3(resolvedDest), { recursive: true });
  const buffer = Buffer.concat(chunks);
  await writeFile2(resolvedDest, buffer);
  return {
    result: {
      destPath: resolvedDest,
      bytes: totalBytes,
      sha256,
      status: response.status,
      url
    },
    evidence: [
      {
        type: "url",
        ref: url,
        summary: `Downloaded ${totalBytes} bytes from ${url}`,
        createdAt: new Date().toISOString()
      },
      {
        type: "file",
        ref: resolvedDest,
        summary: `Saved to ${resolvedDest} (sha256: ${sha256.slice(0, 12)}...)`,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
988
|
+
|
|
989
|
+
// http/head.ts
|
|
990
|
+
import { createTaggedError as createTaggedError7 } from "@easynet/agent-tool";
|
|
991
|
+
// Tool handler: core/http.head — issue a HEAD request and return the response
// status plus headers (no body). Enforces the sandbox URL policy and a
// timeout. Tagged errors: HTTP_TIMEOUT, UPSTREAM_ERROR.
var headHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const { url } = args;
  const headers = args.headers ?? {};
  const timeoutMs = args.timeoutMs ?? ctx.config.defaultTimeoutMs;
  // SSRF guard before touching the network.
  await validateUrl(url, ctx.config.allowedHosts, ctx.config.blockedCidrs);
  if (!headers["User-Agent"] && !headers["user-agent"]) {
    headers["User-Agent"] = ctx.config.httpUserAgent;
  }
  const aborter = new AbortController();
  const timeoutHandle = setTimeout(() => aborter.abort(), timeoutMs);
  let response;
  try {
    response = await fetch(url, {
      method: "HEAD",
      headers,
      signal: aborter.signal
    });
  } catch (err) {
    const isTimeout = err instanceof Error && err.name === "AbortError";
    if (isTimeout) {
      throw createTaggedError7(
        "HTTP_TIMEOUT",
        `HEAD request to ${url} timed out after ${timeoutMs}ms`,
        { url, timeoutMs }
      );
    }
    const reason = err instanceof Error ? err.message : String(err);
    throw createTaggedError7(
      "UPSTREAM_ERROR",
      `HEAD request failed for ${url}: ${reason}`,
      { url }
    );
  } finally {
    clearTimeout(timeoutHandle);
  }
  // Headers is iterable over [name, value] pairs (lower-cased names) — the
  // same entries forEach would visit.
  const responseHeaders = Object.fromEntries(response.headers);
  return {
    result: {
      url,
      status: response.status,
      headers: responseHeaders
    },
    evidence: [
      {
        type: "url",
        ref: url,
        summary: `HEAD ${url} \u2192 ${response.status}`,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
1045
|
+
|
|
1046
|
+
// http/duckduckgoSearch.ts
|
|
1047
|
+
import { createTaggedError as createTaggedError8 } from "@easynet/agent-tool";
|
|
1048
|
+
var DUCKDUCKGO_API = "https://api.duckduckgo.com/";
// Tool handler: core/http.duckduckgoSearch — query the DuckDuckGo Instant
// Answer API (no API key required; api.duckduckgo.com must be in
// allowedHosts). Tagged errors: DUCKDUCKGO_INVALID, HTTP_TIMEOUT,
// UPSTREAM_ERROR, HTTP_TOO_LARGE.
var duckduckgoSearchHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const query = args.query?.trim();
  if (!query) {
    throw createTaggedError8("DUCKDUCKGO_INVALID", "query is required", {});
  }
  const timeoutMs = args.timeoutMs ?? ctx.config.defaultTimeoutMs;
  const maxResults = args.maxResults ?? 10;
  const url = `${DUCKDUCKGO_API}?q=${encodeURIComponent(query)}&format=json`;
  // SSRF guard — same policy as all other HTTP handlers.
  await validateUrl(url, ctx.config.allowedHosts, ctx.config.blockedCidrs);
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  let response;
  try {
    response = await fetch(url, {
      method: "GET",
      headers: { "User-Agent": ctx.config.httpUserAgent },
      signal: controller.signal
    });
  } catch (err) {
    if (err instanceof Error && err.name === "AbortError") {
      throw createTaggedError8(
        "HTTP_TIMEOUT",
        `DuckDuckGo search timed out after ${timeoutMs}ms`,
        { query, timeoutMs }
      );
    }
    throw createTaggedError8(
      "UPSTREAM_ERROR",
      `DuckDuckGo search failed: ${err instanceof Error ? err.message : String(err)}`,
      { query }
    );
  } finally {
    // Consistency fix: clear the timer in finally (as fetchText/fetchJson/
    // downloadFile do) instead of duplicating clearTimeout in the catch
    // block and after the try.
    clearTimeout(timer);
  }
  const maxBytes = ctx.config.maxHttpBytes;
  const text = await response.text();
  const bytes = Buffer.byteLength(text, "utf-8");
  if (bytes > maxBytes) {
    throw createTaggedError8(
      "HTTP_TOO_LARGE",
      `DuckDuckGo response ${bytes} bytes exceeds limit of ${maxBytes} bytes`,
      { query, bytes, limit: maxBytes }
    );
  }
  let raw;
  try {
    raw = JSON.parse(text);
  } catch {
    throw createTaggedError8(
      "UPSTREAM_ERROR",
      `DuckDuckGo returned invalid JSON`,
      { query, textPreview: text.slice(0, 200) }
    );
  }
  // Direct results (often empty for general-knowledge queries).
  const results = [];
  if (Array.isArray(raw.Results)) {
    for (const r of raw.Results.slice(0, maxResults)) {
      if (r.FirstURL) {
        results.push({
          url: r.FirstURL,
          title: r.Text ?? r.FirstURL,
          snippet: r.Text
        });
      }
    }
  }
  // RelatedTopics usually carry most of the useful links.
  const relatedTopics = [];
  if (Array.isArray(raw.RelatedTopics)) {
    for (const t of raw.RelatedTopics.slice(0, maxResults)) {
      const text2 = t.Text ?? t.Result;
      if (text2) {
        relatedTopics.push({ text: text2, url: t.FirstURL });
      }
    }
  }
  const result = {
    query,
    abstract: raw.Abstract ?? raw.AbstractText ?? void 0,
    abstractUrl: raw.AbstractURL ?? void 0,
    abstractSource: raw.AbstractSource ?? void 0,
    heading: raw.Heading ?? void 0,
    results,
    relatedTopics
  };
  return {
    result,
    evidence: [
      {
        type: "url",
        ref: url,
        summary: `DuckDuckGo search: "${query}" \u2192 ${results.length} results`,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
1146
|
+
|
|
1147
|
+
// http/fetchPageMainContent.ts
|
|
1148
|
+
import { parse } from "node-html-parser";
|
|
1149
|
+
import { createTaggedError as createTaggedError9 } from "@easynet/agent-tool";
|
|
1150
|
+
// CSS selectors tried in priority order to locate a page's primary content.
var MAIN_SELECTORS = [
  "main",
  "article",
  "[role='main']",
  "#content",
  "#main",
  ".content",
  ".main-content",
  ".article-body",
  ".post-content",
  ".entry-content"
];
// Extract the document title and a whitespace-collapsed text rendering of
// the main content area from an HTML string. Falls back to <body> (with
// boilerplate tags stripped) and finally to the whole document.
function extractMainContent(html) {
  const root = parse(html, { comment: false });
  const collapse = (s) => s.trim().replace(/\s+/g, " ");
  const titleNode = root.querySelector("title");
  const title = titleNode ? collapse(titleNode.textContent ?? "") : "";
  // First selector that matches wins.
  let mainEl = null;
  for (const selector of MAIN_SELECTORS) {
    const candidate = root.querySelector(selector);
    if (candidate) {
      mainEl = candidate;
      break;
    }
  }
  if (!mainEl) {
    const body = root.querySelector("body");
    if (body) {
      // Strip non-content boilerplate before rendering the body text.
      for (const tag of ["script", "style", "nav", "header", "footer", "aside", "noscript", "iframe"]) {
        for (const el of body.querySelectorAll(tag)) {
          el.remove();
        }
      }
      mainEl = body;
    } else {
      mainEl = root;
    }
  }
  return { title, mainContent: collapse(mainEl.textContent ?? "") };
}
|
|
1188
|
+
// Tool handler: core/http.fetchPageMainContent — fetch a page and return only
// its main textual content (title + main/article/body text, boilerplate
// stripped). Non-HTML responses are returned verbatim. Tagged errors:
// HTTP_TIMEOUT, UPSTREAM_ERROR, HTTP_TOO_LARGE.
var fetchPageMainContentHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const url = args.url;
  const timeoutMs = args.timeoutMs ?? ctx.config.defaultTimeoutMs;
  const maxBytes = args.maxBytes ?? ctx.config.maxHttpBytes;
  // SSRF guard — same policy as all other HTTP handlers.
  await validateUrl(url, ctx.config.allowedHosts, ctx.config.blockedCidrs);
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  let response;
  try {
    response = await fetch(url, {
      method: "GET",
      headers: { "User-Agent": ctx.config.httpUserAgent },
      signal: controller.signal
    });
  } catch (err) {
    if (err instanceof Error && err.name === "AbortError") {
      throw createTaggedError9(
        "HTTP_TIMEOUT",
        `Request to ${url} timed out after ${timeoutMs}ms`,
        { url, timeoutMs }
      );
    }
    throw createTaggedError9(
      "UPSTREAM_ERROR",
      `Fetch failed for ${url}: ${err instanceof Error ? err.message : String(err)}`,
      { url }
    );
  } finally {
    // Consistency fix: dispose of the timer in finally (as the other HTTP
    // handlers do) instead of duplicating clearTimeout in catch + after try.
    clearTimeout(timer);
  }
  // Cheap pre-check on the advertised size before reading the body.
  const contentLength = response.headers.get("content-length");
  if (contentLength && parseInt(contentLength, 10) > maxBytes) {
    throw createTaggedError9(
      "HTTP_TOO_LARGE",
      `Response Content-Length ${contentLength} exceeds limit of ${maxBytes} bytes`,
      { url, contentLength: parseInt(contentLength, 10), limit: maxBytes }
    );
  }
  const rawText = await response.text();
  const rawBytes = Buffer.byteLength(rawText, "utf-8");
  if (rawBytes > maxBytes) {
    throw createTaggedError9(
      "HTTP_TOO_LARGE",
      `Response body ${rawBytes} bytes exceeds limit of ${maxBytes} bytes`,
      { url, bytes: rawBytes, limit: maxBytes }
    );
  }
  const contentType = (response.headers.get("content-type") ?? "").toLowerCase();
  const isHtml = contentType.includes("text/html");
  let title;
  let mainContent;
  if (isHtml && rawText.trim().length > 0) {
    try {
      const extracted = extractMainContent(rawText);
      title = extracted.title || void 0;
      mainContent = extracted.mainContent;
    } catch {
      // Parser failure: degrade gracefully to the raw document text.
      mainContent = rawText;
    }
  } else {
    // Non-HTML (JSON, plain text, ...) is passed through untouched.
    mainContent = rawText;
  }
  const bytes = Buffer.byteLength(mainContent, "utf-8");
  return {
    result: {
      url,
      status: response.status,
      title,
      mainContent,
      bytes,
      isHtml
    },
    evidence: [
      {
        type: "url",
        ref: url,
        summary: `${url} \u2192 ${response.status} main content (${bytes} bytes)`,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
1271
|
+
|
|
1272
|
+
// util/jsonSelect.ts
|
|
1273
|
+
// Tool handler: core/util.json.select — evaluate a JMESPath expression
// against a JSON value. Requires the optional jmespath package (loaded
// lazily so the module works without it until this tool is invoked).
var jsonSelectHandler = (async (args) => {
  const { json, path } = args;
  let jmespath;
  try {
    jmespath = await import("./jmespath-6W7SK7AH.js");
  } catch {
    throw new Error(
      "jmespath package is required for core/util.json.select. Install it with: npm install jmespath"
    );
  }
  let value;
  try {
    value = jmespath.search(json, path);
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new Error(`JMESPath expression error: ${detail}`);
  }
  // Evidence shows at most 100 chars of the selected value.
  const preview =
    typeof value === "object"
      ? JSON.stringify(value).slice(0, 100)
      : String(value).slice(0, 100);
  return {
    result: { value },
    evidence: [
      {
        type: "tool",
        ref: "core/util.json.select",
        summary: `Selected "${path}" from JSON \u2192 ${preview}`,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
1304
|
+
|
|
1305
|
+
// util/truncate.ts
|
|
1306
|
+
// Tool handler: core/util.text.truncate — cap a string at maxChars,
// appending a suffix marker when content was cut.
//
// Bug fix: the original computed text.slice(0, maxChars - suffix.length);
// when maxChars < suffix.length that index is negative, so slice() counted
// from the END of the string and the result could EXCEED maxChars. Now the
// suffix is only used when it fits; otherwise the text is hard-cut to
// maxChars. Behavior for all maxChars >= suffix.length is unchanged.
var truncateHandler = (async (args) => {
  const text = args.text;
  const maxChars = args.maxChars;
  const suffix = args.suffix ?? "...";
  const originalLength = text.length;
  if (text.length <= maxChars) {
    // Nothing to do — return the input unchanged.
    return {
      result: { text, truncated: false, originalLength },
      evidence: [
        {
          type: "tool",
          ref: "core/util.text.truncate",
          summary: `Text not truncated (${originalLength} chars <= ${maxChars} max)`,
          createdAt: new Date().toISOString()
        }
      ]
    };
  }
  // Reserve room for the suffix; if it cannot fit, cut to maxChars without
  // it so the result never exceeds the requested limit.
  const keep = maxChars - suffix.length;
  const truncatedText =
    keep >= 0 ? text.slice(0, keep) + suffix : text.slice(0, maxChars);
  return {
    result: { text: truncatedText, truncated: true, originalLength },
    evidence: [
      {
        type: "tool",
        ref: "core/util.text.truncate",
        summary: `Truncated ${originalLength} chars to ${truncatedText.length} chars`,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
1337
|
+
|
|
1338
|
+
// util/hashText.ts
|
|
1339
|
+
import { createHash as createHash4 } from "crypto";
|
|
1340
|
+
// Tool handler: core/util.hash.sha256Text — compute the SHA-256 hex digest
// of a UTF-8 string.
var hashTextHandler = (async (args) => {
  const { text } = args;
  const hasher = createHash4("sha256");
  hasher.update(text, "utf-8");
  const sha256 = hasher.digest("hex");
  const summary = `SHA-256 of ${text.length} chars: ${sha256.slice(0, 16)}...`;
  return {
    result: { sha256 },
    evidence: [
      {
        type: "tool",
        ref: "core/util.hash.sha256Text",
        summary,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
1355
|
+
|
|
1356
|
+
// util/now.ts
|
|
1357
|
+
// Tool handler: core/util.time.now — report the current time as an ISO
// string, epoch milliseconds, and a human-readable rendering in the
// requested timezone. An unknown timezone makes Intl throw, in which case
// the formatted field degrades to the ISO string.
var nowHandler = (async (args) => {
  const timezone = args.timezone ?? "UTC";
  const now = new Date();
  let formatted;
  try {
    const formatter = new Intl.DateTimeFormat("en-US", {
      timeZone: timezone,
      year: "numeric",
      month: "2-digit",
      day: "2-digit",
      hour: "2-digit",
      minute: "2-digit",
      second: "2-digit",
      hour12: false,
      timeZoneName: "short"
    });
    formatted = formatter.format(now);
  } catch {
    // Invalid timeZone option throws RangeError — fall back to ISO.
    formatted = now.toISOString();
  }
  const iso = now.toISOString();
  return {
    result: {
      iso,
      epochMs: now.getTime(),
      timezone,
      formatted
    },
    evidence: [
      {
        type: "tool",
        ref: "core/util.time.now",
        summary: `Current time: ${iso} (${timezone})`,
        createdAt: iso
      }
    ]
  };
});
|
|
1393
|
+
|
|
1394
|
+
// util/templateRender.ts
|
|
1395
|
+
// Tool handler: core/util.template.render — render a Mustache template with
// the supplied data. Requires the optional mustache package (loaded lazily
// so the module works without it until this tool is invoked).
var templateRenderHandler = (async (args) => {
  const { template, data } = args;
  let renderFn;
  try {
    const mod = await import("./mustache-CS7KHA4H.js");
    // Handle both default-export and namespace shapes of the bundled module.
    const mustache = mod.default ?? mod;
    renderFn = mustache.render.bind(mustache);
  } catch {
    throw new Error(
      "mustache package is required for core/util.template.render. Install it with: npm install mustache"
    );
  }
  let text;
  try {
    text = renderFn(template, data);
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new Error(`Template rendering error: ${detail}`);
  }
  return {
    result: { text },
    evidence: [
      {
        type: "tool",
        ref: "core/util.template.render",
        summary: `Rendered template (${template.length} chars) \u2192 ${text.length} chars output`,
        createdAt: new Date().toISOString()
      }
    ]
  };
});
|
|
1428
|
+
|
|
1429
|
+
// exec/runCommand.ts
|
|
1430
|
+
import { spawn } from "child_process";
|
|
1431
|
+
import { resolve as pathResolve } from "path";
|
|
1432
|
+
import { createTaggedError as createTaggedError10 } from "@easynet/agent-tool";
|
|
1433
|
+
// Tool handler: core/exec.runCommand — run an allow-listed executable inside
// the sandbox with no shell, capped output, and a hard timeout.
// Validation order: exec enabled -> command present -> single bare executable
// name -> name in allowedCommands. Tagged errors: EXEC_DISABLED,
// EXEC_INVALID, EXEC_NOT_ALLOWED, EXEC_OUTPUT_TOO_LARGE, EXEC_SPAWN_ERROR.
// A timeout does NOT reject: it resolves with timedOut=true and exitCode=null.
var runCommandHandler = (async (args) => {
  const ctx = getBuiltinContext();
  const { allowedCommands, maxCommandOutputBytes, commandTimeoutMs } = ctx.config;
  // Empty allow-list means exec is switched off entirely.
  if (!allowedCommands.length) {
    throw createTaggedError10(
      "EXEC_DISABLED",
      "Exec is disabled: allowedCommands is empty",
      {}
    );
  }
  const rawCommand = args.command?.trim();
  if (!rawCommand) {
    throw createTaggedError10("EXEC_INVALID", "command is required", {});
  }
  // Reject anything that is not a bare executable name: a path component
  // (baseName !== rawCommand after stripping directories) or shell
  // metacharacters / whitespace.
  const baseName = rawCommand.replace(/^.*\//, "").trim();
  if (baseName !== rawCommand || /[;&|$`\s]/.test(rawCommand)) {
    throw createTaggedError10(
      "EXEC_INVALID",
      "command must be a single executable name (no path, no shell chars)",
      { command: rawCommand }
    );
  }
  if (!allowedCommands.includes(baseName)) {
    throw createTaggedError10(
      "EXEC_NOT_ALLOWED",
      `Command "${baseName}" is not in allowedCommands`,
      { command: baseName, allowed: allowedCommands }
    );
  }
  const cmdArgs = Array.isArray(args.args) ? args.args : [];
  const timeoutMs = args.timeoutMs ?? commandTimeoutMs;
  // Working directory defaults to the sandbox root; an explicit cwd is
  // re-resolved so it cannot escape the sandbox.
  let cwd = pathResolve(ctx.config.sandboxRoot);
  if (args.cwd != null && args.cwd !== "") {
    cwd = await resolveSandboxedPath(args.cwd, ctx.config.sandboxRoot);
  }
  // new Promise is required here to adapt the child_process callback API.
  return new Promise((resolvePromise, rejectPromise) => {
    // shell:false + args array prevents shell injection; stdin is ignored.
    const proc = spawn(baseName, cmdArgs, {
      cwd,
      shell: false,
      stdio: ["ignore", "pipe", "pipe"],
      env: { ...process.env }
    });
    let stdout = "";
    let stderr = "";
    // Combined stdout+stderr byte budget.
    let totalBytes = 0;
    const append = (chunk, dest) => {
      const len = Buffer.byteLength(chunk, "utf-8");
      if (totalBytes + len > maxCommandOutputBytes) {
        // Kill the process and reject; the subsequent 'close' event's
        // resolvePromise is a no-op because the promise already settled.
        proc.kill("SIGKILL");
        rejectPromise(
          createTaggedError10(
            "EXEC_OUTPUT_TOO_LARGE",
            `Command output exceeded ${maxCommandOutputBytes} bytes`,
            { maxBytes: maxCommandOutputBytes }
          )
        );
        return;
      }
      totalBytes += len;
      if (dest === "stdout") stdout += chunk;
      else stderr += chunk;
    };
    proc.stdout?.setEncoding("utf-8");
    proc.stderr?.setEncoding("utf-8");
    proc.stdout?.on("data", (chunk) => append(chunk, "stdout"));
    proc.stderr?.on("data", (chunk) => append(chunk, "stderr"));
    // On timeout the handler RESOLVES with partial output and timedOut=true.
    const timeout = setTimeout(() => {
      proc.kill("SIGKILL");
      resolvePromise({
        result: {
          stdout,
          stderr,
          exitCode: null,
          timedOut: true,
          signal: "SIGKILL"
        },
        evidence: [
          {
            type: "exec",
            summary: `Command "${baseName}" timed out after ${timeoutMs}ms`,
            createdAt: (/* @__PURE__ */ new Date()).toISOString()
          }
        ]
      });
    }, timeoutMs);
    // 'error' fires when the spawn itself fails (e.g. executable not found).
    proc.on("error", (err) => {
      clearTimeout(timeout);
      rejectPromise(
        createTaggedError10("EXEC_SPAWN_ERROR", err.message, { command: baseName })
      );
    });
    // Normal completion path: exitCode is null when killed by a signal.
    proc.on("close", (code, signal) => {
      clearTimeout(timeout);
      resolvePromise({
        result: {
          stdout,
          stderr,
          exitCode: code,
          timedOut: false,
          signal: signal ?? void 0
        },
        evidence: [
          {
            type: "exec",
            ref: baseName,
            summary: `Ran ${baseName} (exit ${code ?? signal})`,
            createdAt: (/* @__PURE__ */ new Date()).toISOString()
          }
        ]
      });
    });
  });
});
|
|
1546
|
+
|
|
1547
|
+
// CoreToolsModule.ts
// Static metadata for every built-in core tool.
// ORDER MATTERS: an entry's position is its index into CORE_TOOL_HANDLERS and
// into the createToolSpec(CORE_TOOL_MANIFEST[i]) spec definitions below.
// Layout: [0-5] filesystem, [6-11] HTTP, [12-16] utils, [17] exec.
var CORE_TOOL_MANIFEST = [
  // Filesystem
  // [0]
  {
    name: "core/fs.readText",
    kind: "core",
    description: "Read a UTF-8 text file from the sandbox",
    tags: ["filesystem", "read", "core"],
    capabilities: ["read:fs"],
    sideEffect: "none"
  },
  // [1]
  {
    name: "core/fs.writeText",
    kind: "core",
    description: "Write UTF-8 text to a file in the sandbox",
    tags: ["filesystem", "write", "core"],
    capabilities: ["write:fs"],
    sideEffect: "local_write"
  },
  // [2]
  {
    name: "core/fs.listDir",
    kind: "core",
    description: "List directory contents in the sandbox",
    tags: ["filesystem", "read", "core"],
    capabilities: ["read:fs"],
    sideEffect: "none"
  },
  // [3]
  {
    name: "core/fs.searchText",
    kind: "core",
    description: "Search for text patterns in files within the sandbox",
    tags: ["filesystem", "search", "core"],
    capabilities: ["read:fs"],
    sideEffect: "none"
  },
  // [4]
  {
    name: "core/fs.sha256",
    kind: "core",
    description: "Compute SHA-256 hash of a file in the sandbox",
    tags: ["filesystem", "hash", "core"],
    capabilities: ["read:fs"],
    sideEffect: "none"
  },
  // [5] — only tool flagged destructive; carries the danger capability.
  {
    name: "core/fs.deletePath",
    kind: "core",
    description: "Delete a file or directory in the sandbox (dangerous, requires explicit confirmation)",
    tags: ["filesystem", "delete", "dangerous", "core"],
    capabilities: ["danger:destructive", "write:fs"],
    sideEffect: "destructive"
  },
  // HTTP
  // [6]
  {
    name: "core/http.fetchText",
    kind: "core",
    description: "Fetch a URL and return the response as text",
    tags: ["http", "network", "core"],
    capabilities: ["network"],
    sideEffect: "none"
  },
  // [7]
  {
    name: "core/http.fetchJson",
    kind: "core",
    description: "Fetch a URL and return the response as parsed JSON",
    tags: ["http", "network", "json", "core"],
    capabilities: ["network"],
    sideEffect: "none"
  },
  // [8] — network read plus sandbox write, hence both capabilities.
  {
    name: "core/http.downloadFile",
    kind: "core",
    description: "Download a file from a URL to the sandbox",
    tags: ["http", "network", "download", "core"],
    capabilities: ["network", "write:fs"],
    sideEffect: "local_write"
  },
  // [9]
  {
    name: "core/http.head",
    kind: "core",
    description: "Send a HEAD request to get response headers without body",
    tags: ["http", "network", "core"],
    capabilities: ["network"],
    sideEffect: "none"
  },
  // [10]
  {
    name: "core/http.duckduckgoSearch",
    kind: "core",
    description: "Search DuckDuckGo via Instant Answer API (no API key). Add api.duckduckgo.com to allowedHosts.",
    tags: ["http", "search", "duckduckgo", "core"],
    capabilities: ["network"],
    sideEffect: "none"
  },
  // [11]
  {
    name: "core/http.fetchPageMainContent",
    kind: "core",
    description: "Fetch a URL and return only the main content (main/article/body text). Strips nav, header, footer, scripts.",
    tags: ["http", "network", "html", "main-content", "core"],
    capabilities: ["network"],
    sideEffect: "none"
  },
  // Utils — pure helpers, no capabilities required.
  // [12]
  {
    name: "core/util.json.select",
    kind: "core",
    description: "Select fields from JSON data using JMESPath expressions",
    tags: ["util", "json", "core"],
    capabilities: [],
    sideEffect: "none"
  },
  // [13]
  {
    name: "core/util.text.truncate",
    kind: "core",
    description: "Truncate text to a maximum character length with a suffix marker",
    tags: ["util", "text", "core"],
    capabilities: [],
    sideEffect: "none"
  },
  // [14]
  {
    name: "core/util.hash.sha256Text",
    kind: "core",
    description: "Compute SHA-256 hash of a text string",
    tags: ["util", "hash", "core"],
    capabilities: [],
    sideEffect: "none"
  },
  // [15]
  {
    name: "core/util.time.now",
    kind: "core",
    description: "Get the current time in various formats",
    tags: ["util", "time", "core"],
    capabilities: [],
    sideEffect: "none"
  },
  // [16]
  {
    name: "core/util.template.render",
    kind: "core",
    description: "Render a Mustache template with data",
    tags: ["util", "template", "core"],
    capabilities: [],
    sideEffect: "none"
  },
  // Exec
  // [17]
  {
    name: "core/exec.runCommand",
    kind: "core",
    description: "Run a Linux command in the sandbox (allowlist, timeout, no shell). Command name only; args as array.",
    tags: ["exec", "shell", "linux", "core"],
    capabilities: ["exec"],
    sideEffect: "local_write"
  }
];
|
1698
|
+
// Handler implementations in the SAME order as CORE_TOOL_MANIFEST:
// registerCoreTools pairs CORE_TOOL_HANDLERS[i] with CORE_TOOL_MANIFEST[i]
// positionally. Keep both arrays in lockstep when adding or removing tools.
var CORE_TOOL_HANDLERS = [
  readTextHandler, // [0]  core/fs.readText
  writeTextHandler, // [1]  core/fs.writeText
  listDirHandler, // [2]  core/fs.listDir
  searchTextHandler, // [3]  core/fs.searchText
  sha256Handler, // [4]  core/fs.sha256
  deletePathHandler, // [5]  core/fs.deletePath
  fetchTextHandler, // [6]  core/http.fetchText
  fetchJsonHandler, // [7]  core/http.fetchJson
  downloadFileHandler, // [8]  core/http.downloadFile
  headHandler, // [9]  core/http.head
  duckduckgoSearchHandler, // [10] core/http.duckduckgoSearch
  fetchPageMainContentHandler, // [11] core/http.fetchPageMainContent
  jsonSelectHandler, // [12] core/util.json.select
  truncateHandler, // [13] core/util.text.truncate
  hashTextHandler, // [14] core/util.hash.sha256Text
  nowHandler, // [15] core/util.time.now
  templateRenderHandler, // [16] core/util.template.render
  runCommandHandler // [17] core/exec.runCommand
];
|
|
1718
|
+
// One exported ToolSpec per manifest entry. Each index MUST match the entry's
// position in CORE_TOOL_MANIFEST ([0-5] fs, [6-11] http, [12-16] util,
// [17] exec). Declarations are listed in manifest order so the correspondence
// is visible at a glance.
//
// BUGFIX: indices 10-17 were shifted — duckduckgoSearchSpec/
// fetchPageMainContentSpec pointed at entries [16]/[17] (template.render /
// runCommand) while the util/exec specs pointed two slots too low, so eight
// exported specs carried the wrong name, description, and capabilities.
var readTextSpec = createToolSpec(CORE_TOOL_MANIFEST[0]);
var writeTextSpec = createToolSpec(CORE_TOOL_MANIFEST[1]);
var listDirSpec = createToolSpec(CORE_TOOL_MANIFEST[2]);
var searchTextSpec = createToolSpec(CORE_TOOL_MANIFEST[3]);
var sha256Spec = createToolSpec(CORE_TOOL_MANIFEST[4]);
var deletePathSpec = createToolSpec(CORE_TOOL_MANIFEST[5]);
var fetchTextSpec = createToolSpec(CORE_TOOL_MANIFEST[6]);
var fetchJsonSpec = createToolSpec(CORE_TOOL_MANIFEST[7]);
var downloadFileSpec = createToolSpec(CORE_TOOL_MANIFEST[8]);
var headSpec = createToolSpec(CORE_TOOL_MANIFEST[9]);
var duckduckgoSearchSpec = createToolSpec(CORE_TOOL_MANIFEST[10]); // was [16]
var fetchPageMainContentSpec = createToolSpec(CORE_TOOL_MANIFEST[11]); // was [17]
var jsonSelectSpec = createToolSpec(CORE_TOOL_MANIFEST[12]); // was [10]
var truncateSpec = createToolSpec(CORE_TOOL_MANIFEST[13]); // was [11]
var hashTextSpec = createToolSpec(CORE_TOOL_MANIFEST[14]); // was [12]
var nowSpec = createToolSpec(CORE_TOOL_MANIFEST[15]); // was [13]
var templateRenderSpec = createToolSpec(CORE_TOOL_MANIFEST[16]); // was [14]
var runCommandSpec = createToolSpec(CORE_TOOL_MANIFEST[17]); // was [15]
|
|
1736
|
+
// Maps a tool group key to the spec-name prefix used for group filtering in
// registerCoreTools (e.g. "fs" -> "core/fs.").
var CORE_GROUP_PREFIX = Object.fromEntries(
  ["fs", "http", "util", "exec"].map((group) => [group, `core/${group}.`])
);
|
|
1742
|
+
/**
 * Register the built-in core tools on a registry and return a configured
 * CoreAdapter that dispatches each registered tool to its handler.
 *
 * @param {object} registry - Target registry; receives one spec per tool via register().
 * @param {object} [userConfig] - Overrides merged over DEFAULT_CORE_TOOLS_CONFIG.
 * @param {object} [options] - Optional filters:
 *   - only: exact tool names to register (takes precedence over groups)
 *   - groups: group keys ("fs" | "http" | "util" | "exec")
 * @returns {CoreAdapter} adapter with handlers registered for every selected tool.
 */
function registerCoreTools(registry, userConfig, options) {
  const config = {
    ...DEFAULT_CORE_TOOLS_CONFIG,
    ...userConfig
  };
  const adapter = new CoreAdapter(config);
  const onlySet = options?.only?.length ? new Set(options.only) : null;
  // Drop unknown group keys: previously an unknown key produced `undefined`,
  // which String-coerces to "undefined" inside startsWith and could never
  // match legitimately, but was a silent footgun.
  const allowedPrefixes = !onlySet && options?.groups?.length
    ? options.groups.map((g) => CORE_GROUP_PREFIX[g]).filter((p) => p !== undefined)
    : null;
  // Invariant: CORE_TOOL_HANDLERS[i] implements CORE_TOOL_MANIFEST[i].
  for (let i = 0; i < CORE_TOOL_MANIFEST.length; i++) {
    const spec = createToolSpec(CORE_TOOL_MANIFEST[i]);
    if (onlySet && !onlySet.has(spec.name)) continue;
    if (allowedPrefixes && !allowedPrefixes.some((p) => spec.name.startsWith(p))) {
      continue;
    }
    const handler = CORE_TOOL_HANDLERS[i];
    registry.register(spec);
    adapter.registerHandler(spec.name, handler);
  }
  return adapter;
}
|
|
1762
|
+
// Public API (alphabetical): the CoreAdapter class, default configuration,
// one ToolSpec per built-in tool, the registerCoreTools entry point, and
// helpers (getBuiltinContext / runWithBuiltinContext, isIpInBlockedCidrs,
// resolveSandboxedPath, validateUrl).
// NOTE(review): nowHandler is the only per-tool handler exported, while every
// tool has an exported spec — confirm this asymmetry is intentional.
export {
  CoreAdapter,
  DEFAULT_CORE_TOOLS_CONFIG,
  deletePathSpec,
  downloadFileSpec,
  duckduckgoSearchSpec,
  fetchJsonSpec,
  fetchPageMainContentSpec,
  fetchTextSpec,
  getBuiltinContext,
  hashTextSpec,
  headSpec,
  isIpInBlockedCidrs,
  jsonSelectSpec,
  listDirSpec,
  nowHandler,
  nowSpec,
  readTextSpec,
  registerCoreTools,
  resolveSandboxedPath,
  runCommandSpec,
  runWithBuiltinContext,
  searchTextSpec,
  sha256Spec,
  templateRenderSpec,
  truncateSpec,
  validateUrl,
  writeTextSpec
};
|
|
1791
|
+
//# sourceMappingURL=index.js.map
|