agentool 0.0.1 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. package/README.md +627 -52
  2. package/dist/ask-user/index.cjs +8 -0
  3. package/dist/ask-user/index.d.cts +68 -0
  4. package/dist/ask-user/index.d.ts +68 -0
  5. package/dist/ask-user/index.js +8 -0
  6. package/dist/bash/index.cjs +8 -0
  7. package/dist/bash/index.d.cts +63 -0
  8. package/dist/bash/index.d.ts +63 -0
  9. package/dist/bash/index.js +8 -0
  10. package/dist/chunk-3EPGFWZV.cjs +30 -0
  11. package/dist/chunk-3VO6NETR.cjs +79 -0
  12. package/dist/chunk-4YI2H55A.js +142 -0
  13. package/dist/chunk-5NW4OGRI.cjs +99 -0
  14. package/dist/chunk-6MDPYALY.js +196 -0
  15. package/dist/chunk-6PQLFDGT.js +117 -0
  16. package/dist/chunk-7QL4BQCH.js +40 -0
  17. package/dist/chunk-CAEVLIQB.cjs +117 -0
  18. package/dist/chunk-CGTPF6IS.js +90 -0
  19. package/dist/chunk-EA3YV7ZG.js +79 -0
  20. package/dist/chunk-FAEGCFTO.js +136 -0
  21. package/dist/chunk-FV2R5FFQ.cjs +102 -0
  22. package/dist/chunk-FW3UJ622.cjs +59 -0
  23. package/dist/chunk-G3ITTPGX.js +99 -0
  24. package/dist/chunk-HDKXSKMO.js +30 -0
  25. package/dist/chunk-HZAQRHBT.js +99 -0
  26. package/dist/chunk-I3ONDY7P.js +46 -0
  27. package/dist/chunk-I6KFFQPV.cjs +58 -0
  28. package/dist/chunk-IMZQ7ELK.cjs +196 -0
  29. package/dist/chunk-JCTBB7H2.cjs +40 -0
  30. package/dist/chunk-K77GC2QI.js +59 -0
  31. package/dist/chunk-LPV5CN2K.js +58 -0
  32. package/dist/chunk-MF7CJVIZ.js +40 -0
  33. package/dist/chunk-MIYA7TNR.cjs +123 -0
  34. package/dist/chunk-MJCAXASI.js +123 -0
  35. package/dist/chunk-MXFW3XY6.cjs +73 -0
  36. package/dist/chunk-ONBH74ZV.cjs +90 -0
  37. package/dist/chunk-OXLQ7QVL.cjs +40 -0
  38. package/dist/chunk-QEJV2KZ4.cjs +159 -0
  39. package/dist/chunk-QZ5GS6HW.cjs +46 -0
  40. package/dist/chunk-S6QEY7UY.js +73 -0
  41. package/dist/chunk-SUSAPI5W.cjs +142 -0
  42. package/dist/chunk-TBVHHF3H.cjs +47 -0
  43. package/dist/chunk-U2YMJM25.cjs +115 -0
  44. package/dist/chunk-VLNDEVKS.js +102 -0
  45. package/dist/chunk-XKG2A3EW.js +159 -0
  46. package/dist/chunk-XLD2Y3SS.cjs +136 -0
  47. package/dist/chunk-Y7KOKDFP.js +115 -0
  48. package/dist/chunk-YPPPGGLA.cjs +99 -0
  49. package/dist/chunk-ZHCMEQJJ.js +47 -0
  50. package/dist/context-compaction/index.cjs +8 -0
  51. package/dist/context-compaction/index.d.cts +77 -0
  52. package/dist/context-compaction/index.d.ts +77 -0
  53. package/dist/context-compaction/index.js +8 -0
  54. package/dist/diff/index.cjs +9 -0
  55. package/dist/diff/index.d.cts +72 -0
  56. package/dist/diff/index.d.ts +72 -0
  57. package/dist/diff/index.js +9 -0
  58. package/dist/edit/index.cjs +10 -0
  59. package/dist/edit/index.d.cts +53 -0
  60. package/dist/edit/index.d.ts +53 -0
  61. package/dist/edit/index.js +10 -0
  62. package/dist/glob/index.cjs +10 -0
  63. package/dist/glob/index.d.cts +47 -0
  64. package/dist/glob/index.d.ts +47 -0
  65. package/dist/glob/index.js +10 -0
  66. package/dist/grep/index.cjs +10 -0
  67. package/dist/grep/index.d.cts +50 -0
  68. package/dist/grep/index.d.ts +50 -0
  69. package/dist/grep/index.js +10 -0
  70. package/dist/http-request/index.cjs +8 -0
  71. package/dist/http-request/index.d.cts +60 -0
  72. package/dist/http-request/index.d.ts +60 -0
  73. package/dist/http-request/index.js +8 -0
  74. package/dist/index.cjs +102 -0
  75. package/dist/index.d.cts +18 -0
  76. package/dist/index.d.ts +18 -0
  77. package/dist/index.js +102 -0
  78. package/dist/lsp/index.cjs +10 -0
  79. package/dist/lsp/index.d.cts +38 -0
  80. package/dist/lsp/index.d.ts +38 -0
  81. package/dist/lsp/index.js +10 -0
  82. package/dist/memory/index.cjs +9 -0
  83. package/dist/memory/index.d.cts +63 -0
  84. package/dist/memory/index.d.ts +63 -0
  85. package/dist/memory/index.js +9 -0
  86. package/dist/multi-edit/index.cjs +11 -0
  87. package/dist/multi-edit/index.d.cts +72 -0
  88. package/dist/multi-edit/index.d.ts +72 -0
  89. package/dist/multi-edit/index.js +11 -0
  90. package/dist/read/index.cjs +10 -0
  91. package/dist/read/index.d.cts +67 -0
  92. package/dist/read/index.d.ts +67 -0
  93. package/dist/read/index.js +10 -0
  94. package/dist/sleep/index.cjs +8 -0
  95. package/dist/sleep/index.d.cts +60 -0
  96. package/dist/sleep/index.d.ts +60 -0
  97. package/dist/sleep/index.js +8 -0
  98. package/dist/task/index.cjs +8 -0
  99. package/dist/task/index.d.cts +67 -0
  100. package/dist/task/index.d.ts +67 -0
  101. package/dist/task/index.js +8 -0
  102. package/dist/types-3QPDuCXN.d.cts +45 -0
  103. package/dist/types-3QPDuCXN.d.ts +45 -0
  104. package/dist/web-fetch/index.cjs +8 -0
  105. package/dist/web-fetch/index.d.cts +56 -0
  106. package/dist/web-fetch/index.d.ts +56 -0
  107. package/dist/web-fetch/index.js +8 -0
  108. package/dist/write/index.cjs +10 -0
  109. package/dist/write/index.d.cts +47 -0
  110. package/dist/write/index.d.ts +47 -0
  111. package/dist/write/index.js +10 -0
  112. package/package.json +145 -20
  113. package/dist/core/index.d.ts +0 -20
  114. package/dist/core/index.js +0 -1
  115. package/dist/core/index.js.map +0 -1
@@ -0,0 +1,115 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }// src/web-fetch/index.ts
2
+ var _ai = require('ai');
3
+ var _zod = require('zod');
4
+
5
// src/shared/fetch.ts
// Output text is clipped at 100,000 chars; body download is capped at 10 MiB.
var MAX_MARKDOWN_LENGTH = 1e5;
var DEFAULT_TIMEOUT_MS = 3e4;
var DEFAULT_MAX_CONTENT_LENGTH = 10 * 1024 * 1024;
// Lazily-created singleton promise for the turndown HTML-to-markdown converter.
var turndownPromise;
function getTurndownService() {
  // `??=` caches the dynamic require so turndown is loaded at most once.
  return turndownPromise ??= Promise.resolve().then(() => _interopRequireWildcard(require("turndown"))).then((m) => {
    const Turndown = m.default;
    return new Turndown();
  });
}
16
/**
 * Combine a hard timeout with an optional caller-supplied abort signal.
 * The timeout always applies; when a user signal is given, either source
 * aborting cancels the request.
 */
function composeSignal(timeoutMs, userSignal) {
  const timeoutSignal = AbortSignal.timeout(timeoutMs);
  if (userSignal) {
    return AbortSignal.any([timeoutSignal, userSignal]);
  }
  return timeoutSignal;
}
23
/**
 * Fetch `url`, stream the body under a byte cap, and convert HTML to markdown.
 * Returns { content, contentType, statusCode, byteLength, truncated }.
 * Throws when the body exceeds maxContentLength or the fetch aborts/times out.
 */
async function fetchUrl(url, options) {
  // Transpiled `options?.timeout ?? DEFAULT_TIMEOUT_MS` (and friends below).
  const timeoutMs = _nullishCoalesce(_optionalChain([options, 'optionalAccess', _ => _.timeout]), () => ( DEFAULT_TIMEOUT_MS));
  const maxBytes = _nullishCoalesce(_optionalChain([options, 'optionalAccess', _2 => _2.maxContentLength]), () => ( DEFAULT_MAX_CONTENT_LENGTH));
  const signal = composeSignal(timeoutMs, _optionalChain([options, 'optionalAccess', _3 => _3.signal]));
  const headers = {
    Accept: "text/markdown, text/html, */*"
  };
  if (_optionalChain([options, 'optionalAccess', _4 => _4.userAgent])) {
    headers["User-Agent"] = options.userAgent;
  }
  const response = await fetch(url, { signal, headers });
  // Stream the body so the request can be cancelled as soon as it exceeds the cap.
  const chunks = [];
  let totalBytes = 0;
  if (response.body) {
    const reader = response.body.getReader();
    for (; ; ) {
      const { done, value } = await reader.read();
      if (done) break;
      totalBytes += value.byteLength;
      if (totalBytes > maxBytes) {
        await reader.cancel();
        throw new Error(
          `Response body exceeds maxContentLength (${maxBytes} bytes)`
        );
      }
      chunks.push(value);
    }
  }
  const rawBuffer = Buffer.concat(chunks);
  const byteLength = rawBuffer.length;
  const contentType = _nullishCoalesce(response.headers.get("content-type"), () => ( ""));
  // NOTE(review): body is always decoded as UTF-8; any charset in Content-Type is ignored.
  const textContent = rawBuffer.toString("utf-8");
  let content;
  if (contentType.includes("text/html")) {
    const td = await getTurndownService();
    content = td.turndown(textContent);
  } else {
    content = textContent;
  }
  // Clip the (possibly converted) text; byteLength still reports the raw size.
  let truncated = false;
  if (content.length > MAX_MARKDOWN_LENGTH) {
    content = content.slice(0, MAX_MARKDOWN_LENGTH);
    truncated = true;
  }
  return {
    content,
    contentType,
    statusCode: response.status,
    byteLength,
    truncated
  };
}
75
+
76
// src/web-fetch/index.ts
/**
 * Build the `web-fetch` AI tool. `config` may supply timeout, maxContentLength,
 * and userAgent. Failures are returned as "Error [web-fetch]: ..." strings so
 * the model always receives text instead of a thrown error.
 */
function createWebFetch(config = {}) {
  return _ai.tool.call(void 0, {
    description: "Fetch a URL and return its content. HTML pages are automatically converted to markdown for easier reading. JSON and other text content is returned as-is. Content is truncated at 100,000 characters to manage context size.",
    inputSchema: _zod.z.object({
      url: _zod.z.string().describe("The URL to fetch"),
      // NOTE(review): `prompt` is accepted by the schema but never read in execute().
      prompt: _zod.z.string().optional().describe(
        "Optional context about what to extract from the page"
      )
    }),
    execute: async ({ url }) => {
      try {
        const result = await fetchUrl(url, {
          timeout: _nullishCoalesce(config.timeout, () => ( 3e4)),
          maxContentLength: config.maxContentLength,
          userAgent: config.userAgent
        });
        // Render a small header (URL/status/content-type) above the body text.
        const parts = [];
        parts.push(`URL: ${url}`);
        parts.push(`Status: ${result.statusCode}`);
        parts.push(`Content-Type: ${result.contentType}`);
        if (result.truncated) {
          parts.push("(Content truncated to 100,000 characters)");
        }
        parts.push("");
        parts.push(result.content);
        return parts.join("\n");
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [web-fetch]: Failed to fetch ${url}: ${msg}`;
      }
    }
  });
}
110
+ var webFetch = createWebFetch();
111
+
112
+
113
+
114
+
115
+ exports.createWebFetch = createWebFetch; exports.webFetch = webFetch;
@@ -0,0 +1,102 @@
1
+ import {
2
+ containsPathTraversal
3
+ } from "./chunk-I3ONDY7P.js";
4
+
5
+ // src/memory/index.ts
6
+ import { tool } from "ai";
7
+ import { z } from "zod";
8
+ import { mkdir, readFile, readdir, unlink, writeFile } from "fs/promises";
9
+ import { join } from "path";
10
/**
 * Validate a memory key before it is used as a filename.
 * Returns { ok: cleanedKey } on success or { err: message } on rejection.
 */
function sanitizeKey(key) {
  // Reject empty or whitespace-only keys outright.
  const isBlank = !key || key.trim() === "";
  if (isBlank) {
    return { err: "Error [memory]: Key must not be empty." };
  }
  // Refuse anything that could walk outside the memory directory.
  if (containsPathTraversal(key)) {
    return { err: "Error [memory]: Key contains path traversal and was rejected." };
  }
  // Strip leading dots so keys cannot create hidden files.
  const stripped = key.replace(/^\.+/, "");
  return stripped === ""
    ? { err: "Error [memory]: Key must not be empty after stripping leading dots." }
    : { ok: stripped };
}
23
/**
 * Build the `memory` AI tool: a file-backed key/value store rooted at
 * `<cwd>/.agentool/memory` (overridable via config.memoryDir). Each key maps
 * to a `<key>.md` file. All failures are returned as "Error [memory]: ..."
 * strings rather than thrown, so the model always receives text.
 */
function createMemory(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  const memoryDir = config.memoryDir ?? join(cwd, ".agentool", "memory");
  return tool({
    description: "File-based key-value memory store. Use this to persist notes, context, or any text data across conversations. Supports write, read, list, and delete operations.",
    inputSchema: z.object({
      action: z.enum(["read", "write", "list", "delete"]).describe(
        "The operation to perform: read, write, list, or delete"
      ),
      key: z.string().optional().describe(
        "The memory key (required for read, write, delete)"
      ),
      content: z.string().optional().describe(
        "The content to store (required for write)"
      )
    }),
    execute: async ({ action, key, content }) => {
      try {
        // `list` needs no key; every other action validates the key first.
        if (action === "list") {
          return await listKeys(memoryDir);
        }
        const result = sanitizeKey(key);
        if ("err" in result) return result.err;
        const safeKey = result.ok;
        switch (action) {
          case "write":
            return await writeEntry(memoryDir, safeKey, content);
          case "read":
            return await readEntry(memoryDir, safeKey);
          case "delete":
            return await deleteEntry(memoryDir, safeKey);
          default:
            return `Error [memory]: Unknown action "${String(action)}".`;
        }
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [memory]: ${msg}`;
      }
    }
  });
}
64
/**
 * Store `content` under `key` as `<dir>/<key>.md`, creating the memory
 * directory on first use. An empty string is valid content; only a missing
 * (null/undefined) content is rejected.
 */
async function writeEntry(dir, key, content) {
  // `content == null` matches null and undefined but still allows "",
  // replacing the original convoluted `!content && content !== ""` check.
  if (content == null) {
    return "Error [memory]: Content is required for write action.";
  }
  await mkdir(dir, { recursive: true });
  await writeFile(join(dir, `${key}.md`), content, "utf-8");
  return `Saved memory "${key}".`;
}
/** Read the entry for `key`; any read failure is reported as "not found". */
async function readEntry(dir, key) {
  try {
    return await readFile(join(dir, `${key}.md`), "utf-8");
  } catch {
    return `Error [memory]: Key "${key}" not found.`;
  }
}
/**
 * List stored keys, one per line. A missing directory or read error is
 * reported the same as an empty store.
 */
async function listKeys(dir) {
  try {
    const files = await readdir(dir);
    const keys = files.filter((f) => f.endsWith(".md")).map((f) => f.slice(0, -3));
    if (keys.length === 0) return "No memory entries found.";
    return keys.join("\n");
  } catch {
    return "No memory entries found.";
  }
}
/** Delete the entry for `key`; a missing file is reported as "not found". */
async function deleteEntry(dir, key) {
  try {
    await unlink(join(dir, `${key}.md`));
    return `Deleted memory "${key}".`;
  } catch {
    return `Error [memory]: Key "${key}" not found.`;
  }
}
97
+ var memory = createMemory();
98
+
99
+ export {
100
+ createMemory,
101
+ memory
102
+ };
@@ -0,0 +1,159 @@
1
+ import {
2
+ executeRipgrep
3
+ } from "./chunk-MJCAXASI.js";
4
+ import {
5
+ expandPath,
6
+ toRelativePath
7
+ } from "./chunk-I3ONDY7P.js";
8
+
9
+ // src/grep/index.ts
10
+ import { stat } from "fs/promises";
11
+ import { tool } from "ai";
12
+ import { z } from "zod";
13
// Version-control metadata directories that are always excluded from searches.
var VCS_DIRS = [".git", ".svn", ".hg", ".bzr", ".jj", ".sl"];
// Default cap on returned entries when head_limit is not specified.
var DEFAULT_HEAD_LIMIT = 250;
/**
 * Slice `items` by offset/limit. A limit of 0 means "unlimited".
 * Returns { items, appliedLimit } where appliedLimit is the cap that was
 * actually hit, or undefined when nothing was cut off.
 */
function applyHeadLimit(items, limit, offset = 0) {
  if (limit === 0) {
    return { items: items.slice(offset), appliedLimit: void 0 };
  }
  const cap = limit ?? DEFAULT_HEAD_LIMIT;
  const remaining = items.length - offset;
  const wasTruncated = remaining > cap;
  return {
    items: items.slice(offset, offset + cap),
    appliedLimit: wasTruncated ? cap : void 0
  };
}
/**
 * Build the trailing "[Results truncated. ...]" note, or "" when neither a
 * limit was applied nor an offset used.
 */
function truncationSuffix(appliedLimit, offset) {
  const notes = [];
  if (appliedLimit !== void 0) notes.push(`limit: ${appliedLimit}`);
  if (offset > 0) notes.push(`offset: ${offset}`);
  if (notes.length === 0) return "";
  return `\n\n[Results truncated. ${notes.join(", ")}]`;
}
30
/**
 * Rewrite the path portion of a ripgrep output line (everything before the
 * first ":", or the last ":" when `last` is true, as used for count mode)
 * into a cwd-relative path. Lines without a usable separator pass through.
 */
function relativizeLine(line, baseCwd, last = false) {
  const sep = last ? line.lastIndexOf(":") : line.indexOf(":");
  if (sep <= 0) {
    return line;
  }
  const relPath = toRelativePath(line.substring(0, sep), baseCwd);
  return relPath + line.substring(sep);
}
/**
 * Split a user-supplied glob filter into individual patterns. Whitespace
 * separates patterns; comma-separated lists are expanded unless the token
 * uses brace expansion (e.g. "*.{js,ts}"), which must stay intact.
 */
function parseGlobPatterns(globFilter) {
  const patterns = [];
  for (const token of globFilter.split(/\s+/)) {
    const hasBraceExpansion = token.includes("{") && token.includes("}");
    if (hasBraceExpansion) {
      patterns.push(token);
    } else {
      for (const piece of token.split(",")) {
        if (piece) patterns.push(piece);
      }
    }
  }
  return patterns;
}
48
/**
 * Build the `grep` AI tool around ripgrep. Supports three output modes:
 * "content" (matching lines), "files_with_matches" (paths, newest first),
 * and "count" (per-file match counts plus a total). Errors are returned as
 * "Error [grep]: ..." strings rather than thrown.
 */
function createGrep(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  return tool({
    description: 'Search file contents using ripgrep. Supports regex patterns, context lines, and three output modes: "content" (matching lines), "files_with_matches" (file paths), and "count" (match counts).',
    inputSchema: z.object({
      pattern: z.string().describe("Regex pattern to search for"),
      path: z.string().optional().describe("File or directory to search in"),
      glob: z.string().optional().describe('Glob pattern to filter files (e.g. "*.js")'),
      output_mode: z.enum(["content", "files_with_matches", "count"]).optional().describe('Output mode. Defaults to "files_with_matches".'),
      "-B": z.number().optional().describe("Lines of context before each match"),
      "-A": z.number().optional().describe("Lines of context after each match"),
      "-C": z.number().optional().describe("Lines of context around each match"),
      context: z.number().optional().describe("Context lines (alias for -C)"),
      "-n": z.boolean().optional().describe("Show line numbers (content mode, default true)"),
      "-i": z.boolean().optional().describe("Case insensitive search"),
      type: z.string().optional().describe('File type filter (e.g. "js", "py")'),
      head_limit: z.number().optional().describe("Max entries. Default 250, 0 = unlimited."),
      offset: z.number().optional().describe("Skip first N entries. Default 0."),
      multiline: z.boolean().optional().describe("Enable multiline matching")
    }),
    execute: async (input) => {
      try {
        const {
          pattern,
          path,
          glob: globFilter,
          type: typeFilter,
          output_mode: outputMode = "files_with_matches",
          "-B": ctxBefore,
          "-A": ctxAfter,
          "-C": ctxC,
          context: ctxAlias,
          "-n": showLineNumbers = true,
          "-i": caseInsensitive = false,
          head_limit: headLimit,
          offset = 0,
          multiline = false
        } = input;
        const absolutePath = path ? expandPath(path, cwd) : cwd;
        // Assemble ripgrep argv: search hidden files, but skip VCS metadata dirs.
        const args = ["--hidden"];
        for (const dir of VCS_DIRS) args.push("--glob", `!${dir}`);
        args.push("--max-columns", "500");
        if (multiline) args.push("-U", "--multiline-dotall");
        if (caseInsensitive) args.push("-i");
        if (outputMode === "files_with_matches") args.push("-l");
        else if (outputMode === "count") args.push("-c");
        if (showLineNumbers && outputMode === "content") args.push("-n");
        // Context flags only make sense in content mode; `context` and -C win over -B/-A.
        if (outputMode === "content") {
          if (ctxAlias !== void 0) args.push("-C", ctxAlias.toString());
          else if (ctxC !== void 0) args.push("-C", ctxC.toString());
          else {
            if (ctxBefore !== void 0) args.push("-B", ctxBefore.toString());
            if (ctxAfter !== void 0) args.push("-A", ctxAfter.toString());
          }
        }
        // A pattern starting with "-" would be parsed as a flag; pass it via -e.
        if (pattern.startsWith("-")) args.push("-e", pattern);
        else args.push(pattern);
        if (typeFilter) args.push("--type", typeFilter);
        if (globFilter) {
          for (const gp of parseGlobPatterns(globFilter)) args.push("--glob", gp);
        }
        const results = await executeRipgrep(args, absolutePath);
        if (results.length === 0) return "No matches found";
        if (outputMode === "content") {
          const { items: items2, appliedLimit: appliedLimit2 } = applyHeadLimit(results, headLimit, offset);
          const lines = items2.map((l) => relativizeLine(l, cwd));
          return lines.join("\n") + truncationSuffix(appliedLimit2, offset);
        }
        if (outputMode === "count") {
          const { items: items2, appliedLimit: appliedLimit2 } = applyHeadLimit(results, headLimit, offset);
          // Count mode lines end with ":<n>"; use the last ":" so Windows-style
          // path separators in the prefix are not mistaken for the count.
          const lines = items2.map((l) => relativizeLine(l, cwd, true));
          let totalMatches = 0;
          let fileCount = 0;
          for (const line of lines) {
            const idx = line.lastIndexOf(":");
            if (idx > 0) {
              const n = parseInt(line.substring(idx + 1), 10);
              if (!isNaN(n)) {
                totalMatches += n;
                fileCount += 1;
              }
            }
          }
          return lines.join("\n") + `

Total: ${totalMatches} matches in ${fileCount} files` + truncationSuffix(appliedLimit2, offset);
        }
        // files_with_matches: sort newest-first by mtime, ties broken by name.
        // allSettled tolerates files deleted between search and stat (mtime 0).
        const stats = await Promise.allSettled(results.map((f) => stat(f)));
        const sorted = results.map((fp, i) => {
          const r = stats[i];
          const mt = r.status === "fulfilled" ? r.value.mtimeMs ?? 0 : 0;
          return [fp, mt];
        }).sort((a, b) => {
          const d = b[1] - a[1];
          return d !== 0 ? d : a[0].localeCompare(b[0]);
        }).map((e) => e[0]);
        const { items, appliedLimit } = applyHeadLimit(sorted, headLimit, offset);
        const relative = items.map((f) => toRelativePath(f, cwd));
        return relative.join("\n") + truncationSuffix(appliedLimit, offset);
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [grep]: Failed to search: ${msg}`;
      }
    }
  });
}
154
+ var grep = createGrep();
155
+
156
+ export {
157
+ createGrep,
158
+ grep
159
+ };
@@ -0,0 +1,136 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }// src/task/index.ts
2
+ var _ai = require('ai');
3
+ var _zod = require('zod');
4
+ var _promises = require('fs/promises');
5
+ var _path = require('path');
6
+ var _crypto = require('crypto');
7
/** Generate a short task id: 4 random bytes rendered as 8 hex characters. */
function generateId() {
  return _crypto.randomBytes(4).toString("hex");
}
/**
 * Load the task array from `filePath`. A missing or unreadable file (this
 * includes corrupt JSON) yields an empty list.
 */
async function loadTasks(filePath) {
  try {
    const raw = await _promises.readFile(filePath, "utf-8");
    return JSON.parse(raw);
  } catch (e) {
    return [];
  }
}
/** Persist the task array as pretty-printed JSON, creating parent dirs. */
async function saveTasks(filePath, tasks) {
  const parentDir = _path.dirname(filePath);
  await _promises.mkdir(parentDir, { recursive: true });
  const json = JSON.stringify(tasks, null, 2);
  await _promises.writeFile(filePath, json, "utf-8");
}
/** Render one task as a multi-line "Field: value" block. */
function formatTask(t) {
  const fields = [
    `ID: ${t.id}`,
    `Subject: ${t.subject}`,
    `Description: ${t.description}`,
    `Status: ${t.status}`,
    `Created: ${t.createdAt}`,
    `Updated: ${t.updatedAt}`
  ];
  return fields.join("\n");
}
32
/**
 * Build the `task` AI tool: a JSON-file task tracker stored at
 * `<cwd>/.agentool/tasks.json` (overridable via config.tasksFile).
 * All failures are returned as "Error [task]: ..." strings rather than thrown.
 */
function createTask(config = {}) {
  const cwd = _nullishCoalesce(config.cwd, () => ( process.cwd()));
  const tasksFile = _nullishCoalesce(config.tasksFile, () => ( _path.join.call(void 0, cwd, ".agentool", "tasks.json")));
  return _ai.tool.call(void 0, {
    description: "JSON file-based task tracker. Use this to create, get, update, list, and delete tasks. Each task has an id, subject, description, status, and timestamps.",
    inputSchema: _zod.z.object({
      action: _zod.z.enum(["create", "get", "update", "list", "delete"]).describe(
        "The operation to perform"
      ),
      id: _zod.z.string().optional().describe(
        "Task id (required for get, update, delete)"
      ),
      subject: _zod.z.string().optional().describe(
        "Task subject (required for create, optional for update)"
      ),
      description: _zod.z.string().optional().describe(
        "Task description (required for create, optional for update)"
      ),
      status: _zod.z.enum(["pending", "in_progress", "completed"]).optional().describe(
        "Task status (optional for create/update, defaults to pending)"
      )
    }),
    execute: async ({ action, id, subject, description, status }) => {
      try {
        // Each action delegates to a helper; argument validation happens there.
        switch (action) {
          case "create":
            return await createEntry(tasksFile, subject, description, status);
          case "list":
            return await listEntries(tasksFile);
          case "get":
            return await getEntry(tasksFile, id);
          case "update":
            return await updateEntry(tasksFile, id, subject, description, status);
          case "delete":
            return await deleteEntry(tasksFile, id);
          default:
            return `Error [task]: Unknown action "${String(action)}".`;
        }
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [task]: ${msg}`;
      }
    }
  });
}
77
/**
 * Create a task and persist it. Subject and description are mandatory;
 * status defaults to "pending". Returns a confirmation plus the rendered task.
 */
async function createEntry(filePath, subject, description, status) {
  if (!subject) return "Error [task]: Subject is required for create action.";
  if (!description) return "Error [task]: Description is required for create action.";
  const tasks = await loadTasks(filePath);
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const entry = {
    id: generateId(),
    subject,
    description,
    status: _nullishCoalesce(status, () => ( "pending")),
    createdAt: now,
    updatedAt: now
  };
  tasks.push(entry);
  // NOTE(review): load-then-save is not safe against concurrent writers.
  await saveTasks(filePath, tasks);
  return `Created task ${entry.id}.
${formatTask(entry)}`;
}
/** List all tasks, separated by "---" lines; empty store reports as such. */
async function listEntries(filePath) {
  const tasks = await loadTasks(filePath);
  if (tasks.length === 0) return "No tasks found.";
  return tasks.map(formatTask).join("\n---\n");
}
/** Fetch and render a single task by id. */
async function getEntry(filePath, id) {
  if (!id) return "Error [task]: ID is required for get action.";
  const tasks = await loadTasks(filePath);
  const found = tasks.find((t) => t.id === id);
  if (!found) return `Error [task]: Task "${id}" not found.`;
  return formatTask(found);
}
/**
 * Update any subset of subject/description/status on an existing task and
 * refresh its updatedAt timestamp. Falsy fields (e.g. "") are left unchanged.
 */
async function updateEntry(filePath, id, subject, description, status) {
  if (!id) return "Error [task]: ID is required for update action.";
  const tasks = await loadTasks(filePath);
  const idx = tasks.findIndex((t) => t.id === id);
  if (idx === -1) return `Error [task]: Task "${id}" not found.`;
  const entry = tasks[idx];
  if (subject) entry.subject = subject;
  if (description) entry.description = description;
  if (status) entry.status = status;
  entry.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
  tasks[idx] = entry;
  await saveTasks(filePath, tasks);
  return `Updated task ${id}.
${formatTask(entry)}`;
}
/** Remove a task by id and persist the shortened list. */
async function deleteEntry(filePath, id) {
  if (!id) return "Error [task]: ID is required for delete action.";
  const tasks = await loadTasks(filePath);
  const idx = tasks.findIndex((t) => t.id === id);
  if (idx === -1) return `Error [task]: Task "${id}" not found.`;
  tasks.splice(idx, 1);
  await saveTasks(filePath, tasks);
  return `Deleted task "${id}".`;
}
131
+ var task = createTask();
132
+
133
+
134
+
135
+
136
+ exports.createTask = createTask; exports.task = task;
@@ -0,0 +1,115 @@
1
+ // src/web-fetch/index.ts
2
+ import { tool } from "ai";
3
+ import { z } from "zod";
4
+
5
// src/shared/fetch.ts
// Output text is clipped at 100,000 chars; body download is capped at 10 MiB.
var MAX_MARKDOWN_LENGTH = 1e5;
var DEFAULT_TIMEOUT_MS = 3e4;
var DEFAULT_MAX_CONTENT_LENGTH = 10 * 1024 * 1024;
// Lazily-created singleton promise for the turndown HTML-to-markdown converter.
var turndownPromise;
function getTurndownService() {
  // `??=` caches the dynamic import so turndown is loaded at most once.
  return turndownPromise ??= import("turndown").then((m) => {
    const Turndown = m.default;
    return new Turndown();
  });
}
16
/**
 * Merge the mandatory timeout signal with an optional user-supplied one.
 * Aborting either source aborts the combined signal.
 */
function composeSignal(timeoutMs, userSignal) {
  const timeoutSignal = AbortSignal.timeout(timeoutMs);
  return userSignal ? AbortSignal.any([timeoutSignal, userSignal]) : timeoutSignal;
}
23
/**
 * Fetch `url`, stream the body under a byte cap, and convert HTML to markdown.
 * Returns { content, contentType, statusCode, byteLength, truncated }.
 * Throws when the body exceeds maxContentLength or the fetch aborts/times out.
 */
async function fetchUrl(url, options) {
  const timeoutMs = options?.timeout ?? DEFAULT_TIMEOUT_MS;
  const maxBytes = options?.maxContentLength ?? DEFAULT_MAX_CONTENT_LENGTH;
  const signal = composeSignal(timeoutMs, options?.signal);
  const headers = {
    Accept: "text/markdown, text/html, */*"
  };
  if (options?.userAgent) {
    headers["User-Agent"] = options.userAgent;
  }
  const response = await fetch(url, { signal, headers });
  // Stream the body so the request can be cancelled as soon as it exceeds the cap.
  const chunks = [];
  let totalBytes = 0;
  if (response.body) {
    const reader = response.body.getReader();
    for (; ; ) {
      const { done, value } = await reader.read();
      if (done) break;
      totalBytes += value.byteLength;
      if (totalBytes > maxBytes) {
        await reader.cancel();
        throw new Error(
          `Response body exceeds maxContentLength (${maxBytes} bytes)`
        );
      }
      chunks.push(value);
    }
  }
  const rawBuffer = Buffer.concat(chunks);
  const byteLength = rawBuffer.length;
  const contentType = response.headers.get("content-type") ?? "";
  // NOTE(review): body is always decoded as UTF-8; any charset in Content-Type is ignored.
  const textContent = rawBuffer.toString("utf-8");
  let content;
  if (contentType.includes("text/html")) {
    const td = await getTurndownService();
    content = td.turndown(textContent);
  } else {
    content = textContent;
  }
  // Clip the (possibly converted) text; byteLength still reports the raw size.
  let truncated = false;
  if (content.length > MAX_MARKDOWN_LENGTH) {
    content = content.slice(0, MAX_MARKDOWN_LENGTH);
    truncated = true;
  }
  return {
    content,
    contentType,
    statusCode: response.status,
    byteLength,
    truncated
  };
}
75
+
76
// src/web-fetch/index.ts
/**
 * Build the `web-fetch` AI tool. `config` may supply timeout, maxContentLength,
 * and userAgent. Failures are returned as "Error [web-fetch]: ..." strings so
 * the model always receives text instead of a thrown error.
 */
function createWebFetch(config = {}) {
  return tool({
    description: "Fetch a URL and return its content. HTML pages are automatically converted to markdown for easier reading. JSON and other text content is returned as-is. Content is truncated at 100,000 characters to manage context size.",
    inputSchema: z.object({
      url: z.string().describe("The URL to fetch"),
      // NOTE(review): `prompt` is accepted by the schema but never read in execute().
      prompt: z.string().optional().describe(
        "Optional context about what to extract from the page"
      )
    }),
    execute: async ({ url }) => {
      try {
        const result = await fetchUrl(url, {
          timeout: config.timeout ?? 3e4,
          maxContentLength: config.maxContentLength,
          userAgent: config.userAgent
        });
        // Render a small header (URL/status/content-type) above the body text.
        const parts = [];
        parts.push(`URL: ${url}`);
        parts.push(`Status: ${result.statusCode}`);
        parts.push(`Content-Type: ${result.contentType}`);
        if (result.truncated) {
          parts.push("(Content truncated to 100,000 characters)");
        }
        parts.push("");
        parts.push(result.content);
        return parts.join("\n");
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [web-fetch]: Failed to fetch ${url}: ${msg}`;
      }
    }
  });
}
110
+ var webFetch = createWebFetch();
111
+
112
+ export {
113
+ createWebFetch,
114
+ webFetch
115
+ };