agentool 0.0.1 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +738 -52
- package/dist/ask-user/index.cjs +8 -0
- package/dist/ask-user/index.d.cts +68 -0
- package/dist/ask-user/index.d.ts +68 -0
- package/dist/ask-user/index.js +8 -0
- package/dist/bash/index.cjs +8 -0
- package/dist/bash/index.d.cts +63 -0
- package/dist/bash/index.d.ts +63 -0
- package/dist/bash/index.js +8 -0
- package/dist/chunk-3EPGFWZV.cjs +30 -0
- package/dist/chunk-3VO6NETR.cjs +79 -0
- package/dist/chunk-44AFQ2B7.js +30 -0
- package/dist/chunk-4HIATLKI.js +112 -0
- package/dist/chunk-4HXAKPQH.cjs +36 -0
- package/dist/chunk-4YI2H55A.js +142 -0
- package/dist/chunk-56CL4JCW.cjs +53 -0
- package/dist/chunk-5NW4OGRI.cjs +99 -0
- package/dist/chunk-5O55DKOB.cjs +112 -0
- package/dist/chunk-5TDZF4IM.cjs +197 -0
- package/dist/chunk-6DJSWTWQ.cjs +40 -0
- package/dist/chunk-6PQLFDGT.js +117 -0
- package/dist/chunk-ACGW44YT.js +47 -0
- package/dist/chunk-CAEVLIQB.cjs +117 -0
- package/dist/chunk-CGTPF6IS.js +90 -0
- package/dist/chunk-E6NBEYZD.js +51 -0
- package/dist/chunk-EA3YV7ZG.js +79 -0
- package/dist/chunk-ECYT46FP.js +40 -0
- package/dist/chunk-FV2R5FFQ.cjs +102 -0
- package/dist/chunk-FW3UJ622.cjs +59 -0
- package/dist/chunk-G3ITTPGX.js +99 -0
- package/dist/chunk-HDKXSKMO.js +30 -0
- package/dist/chunk-HNP7JDQC.cjs +159 -0
- package/dist/chunk-HNUL2CID.cjs +34 -0
- package/dist/chunk-HZAQRHBT.js +99 -0
- package/dist/chunk-I3ONDY7P.js +46 -0
- package/dist/chunk-I6KFFQPV.cjs +58 -0
- package/dist/chunk-IEX4NOVN.cjs +48 -0
- package/dist/chunk-IRRNYFI5.js +48 -0
- package/dist/chunk-K77GC2QI.js +59 -0
- package/dist/chunk-L5JH4I77.cjs +51 -0
- package/dist/chunk-LK6SQH2G.cjs +30 -0
- package/dist/chunk-LPV5CN2K.js +58 -0
- package/dist/chunk-LTE5NG4D.js +53 -0
- package/dist/chunk-MF7CJVIZ.js +40 -0
- package/dist/chunk-MIYA7TNR.cjs +123 -0
- package/dist/chunk-MJCAXASI.js +123 -0
- package/dist/chunk-OM2UFTGS.cjs +47 -0
- package/dist/chunk-ONBH74ZV.cjs +90 -0
- package/dist/chunk-OXLQ7QVL.cjs +40 -0
- package/dist/chunk-P6Z5XFDS.js +73 -0
- package/dist/chunk-QZ5GS6HW.cjs +46 -0
- package/dist/chunk-S7IVHOA6.js +75 -0
- package/dist/chunk-SUSAPI5W.cjs +142 -0
- package/dist/chunk-TMW3XKKJ.js +34 -0
- package/dist/chunk-UDIG7332.js +159 -0
- package/dist/chunk-VLNDEVKS.js +102 -0
- package/dist/chunk-VXZ4RKJI.js +36 -0
- package/dist/chunk-XAQGZ374.js +197 -0
- package/dist/chunk-YPPPGGLA.cjs +99 -0
- package/dist/chunk-ZBLQV6UO.cjs +73 -0
- package/dist/chunk-ZFQZWXOI.cjs +75 -0
- package/dist/context-compaction/index.cjs +8 -0
- package/dist/context-compaction/index.d.cts +77 -0
- package/dist/context-compaction/index.d.ts +77 -0
- package/dist/context-compaction/index.js +8 -0
- package/dist/diff/index.cjs +9 -0
- package/dist/diff/index.d.cts +72 -0
- package/dist/diff/index.d.ts +72 -0
- package/dist/diff/index.js +9 -0
- package/dist/edit/index.cjs +10 -0
- package/dist/edit/index.d.cts +53 -0
- package/dist/edit/index.d.ts +53 -0
- package/dist/edit/index.js +10 -0
- package/dist/glob/index.cjs +10 -0
- package/dist/glob/index.d.cts +47 -0
- package/dist/glob/index.d.ts +47 -0
- package/dist/glob/index.js +10 -0
- package/dist/grep/index.cjs +10 -0
- package/dist/grep/index.d.cts +50 -0
- package/dist/grep/index.d.ts +50 -0
- package/dist/grep/index.js +10 -0
- package/dist/http-request/index.cjs +8 -0
- package/dist/http-request/index.d.cts +60 -0
- package/dist/http-request/index.d.ts +60 -0
- package/dist/http-request/index.js +8 -0
- package/dist/index.cjs +133 -0
- package/dist/index.d.cts +23 -0
- package/dist/index.d.ts +23 -0
- package/dist/index.js +133 -0
- package/dist/lsp/index.cjs +10 -0
- package/dist/lsp/index.d.cts +35 -0
- package/dist/lsp/index.d.ts +35 -0
- package/dist/lsp/index.js +10 -0
- package/dist/memory/index.cjs +9 -0
- package/dist/memory/index.d.cts +63 -0
- package/dist/memory/index.d.ts +63 -0
- package/dist/memory/index.js +9 -0
- package/dist/multi-edit/index.cjs +11 -0
- package/dist/multi-edit/index.d.cts +72 -0
- package/dist/multi-edit/index.d.ts +72 -0
- package/dist/multi-edit/index.js +11 -0
- package/dist/read/index.cjs +10 -0
- package/dist/read/index.d.cts +67 -0
- package/dist/read/index.d.ts +67 -0
- package/dist/read/index.js +10 -0
- package/dist/sleep/index.cjs +8 -0
- package/dist/sleep/index.d.cts +60 -0
- package/dist/sleep/index.d.ts +60 -0
- package/dist/sleep/index.js +8 -0
- package/dist/task-create/index.cjs +9 -0
- package/dist/task-create/index.d.cts +19 -0
- package/dist/task-create/index.d.ts +19 -0
- package/dist/task-create/index.js +9 -0
- package/dist/task-get/index.cjs +9 -0
- package/dist/task-get/index.d.cts +15 -0
- package/dist/task-get/index.d.ts +15 -0
- package/dist/task-get/index.js +9 -0
- package/dist/task-list/index.cjs +9 -0
- package/dist/task-list/index.d.cts +11 -0
- package/dist/task-list/index.d.ts +11 -0
- package/dist/task-list/index.js +9 -0
- package/dist/task-update/index.cjs +9 -0
- package/dist/task-update/index.d.cts +31 -0
- package/dist/task-update/index.d.ts +31 -0
- package/dist/task-update/index.js +9 -0
- package/dist/tool-search/index.cjs +8 -0
- package/dist/tool-search/index.d.cts +18 -0
- package/dist/tool-search/index.d.ts +18 -0
- package/dist/tool-search/index.js +8 -0
- package/dist/types-3QPDuCXN.d.cts +45 -0
- package/dist/types-3QPDuCXN.d.ts +45 -0
- package/dist/web-fetch/index.cjs +8 -0
- package/dist/web-fetch/index.d.cts +54 -0
- package/dist/web-fetch/index.d.ts +54 -0
- package/dist/web-fetch/index.js +8 -0
- package/dist/web-search/index.cjs +8 -0
- package/dist/web-search/index.d.cts +21 -0
- package/dist/web-search/index.d.ts +21 -0
- package/dist/web-search/index.js +8 -0
- package/dist/write/index.cjs +10 -0
- package/dist/write/index.d.cts +47 -0
- package/dist/write/index.d.ts +47 -0
- package/dist/write/index.js +10 -0
- package/package.json +170 -20
- package/dist/core/index.d.ts +0 -20
- package/dist/core/index.js +0 -1
- package/dist/core/index.js.map +0 -1
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import {
|
|
2
|
+
containsPathTraversal
|
|
3
|
+
} from "./chunk-I3ONDY7P.js";
|
|
4
|
+
|
|
5
|
+
// src/memory/index.ts
|
|
6
|
+
import { tool } from "ai";
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { mkdir, readFile, readdir, unlink, writeFile } from "fs/promises";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
// Validate a memory key and normalise it for use as a file name.
// Returns { ok: cleanedKey } on success or { err: message } on failure.
function sanitizeKey(key) {
  const isBlank = !key || key.trim() === "";
  if (isBlank) {
    return { err: "Error [memory]: Key must not be empty." };
  }
  if (containsPathTraversal(key)) {
    return { err: "Error [memory]: Key contains path traversal and was rejected." };
  }
  // Strip leading dots so keys cannot name hidden files.
  const stripped = key.replace(/^\.+/, "");
  return stripped === ""
    ? { err: "Error [memory]: Key must not be empty after stripping leading dots." }
    : { ok: stripped };
}
|
|
23
|
+
// Build the memory tool. Entries are stored as "<key>.md" files under
// config.memoryDir (default: <cwd>/.agentool/memory).
function createMemory(config = {}) {
  const baseDir = config.memoryDir ?? join(config.cwd ?? process.cwd(), ".agentool", "memory");
  return tool({
    description: "File-based key-value memory store. Use this to persist notes, context, or any text data across conversations. Supports write, read, list, and delete operations.",
    inputSchema: z.object({
      action: z.enum(["read", "write", "list", "delete"]).describe(
        "The operation to perform: read, write, list, or delete"
      ),
      key: z.string().optional().describe(
        "The memory key (required for read, write, delete)"
      ),
      content: z.string().optional().describe(
        "The content to store (required for write)"
      )
    }),
    execute: async ({ action, key, content }) => {
      try {
        // "list" needs no key, so handle it before key validation.
        if (action === "list") return await listKeys(baseDir);
        const checked = sanitizeKey(key);
        if ("err" in checked) return checked.err;
        if (action === "write") return await writeEntry(baseDir, checked.ok, content);
        if (action === "read") return await readEntry(baseDir, checked.ok);
        if (action === "delete") return await deleteEntry(baseDir, checked.ok);
        return `Error [memory]: Unknown action "${String(action)}".`;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [memory]: ${msg}`;
      }
    }
  });
}
|
|
64
|
+
// Persist one memory entry as "<key>.md" inside dir, creating dir if needed.
// Empty-string content is an allowed (deliberate) value.
async function writeEntry(dir, key, content) {
  const contentMissing = !content && content !== "";
  if (contentMissing) {
    return "Error [memory]: Content is required for write action.";
  }
  await mkdir(dir, { recursive: true });
  const target = join(dir, `${key}.md`);
  await writeFile(target, content, "utf-8");
  return `Saved memory "${key}".`;
}
|
|
72
|
+
// Read one memory entry; any read failure is reported as key-not-found.
async function readEntry(dir, key) {
  const target = join(dir, `${key}.md`);
  try {
    return await readFile(target, "utf-8");
  } catch {
    return `Error [memory]: Key "${key}" not found.`;
  }
}
|
|
79
|
+
// List stored keys, one per line; a missing/unreadable directory is
// treated the same as an empty store.
async function listKeys(dir) {
  try {
    const names = await readdir(dir);
    const keys = [];
    for (const name of names) {
      if (name.endsWith(".md")) keys.push(name.slice(0, -3));
    }
    return keys.length === 0 ? "No memory entries found." : keys.join("\n");
  } catch {
    return "No memory entries found.";
  }
}
|
|
89
|
+
// Remove one memory entry; any unlink failure is reported as key-not-found.
async function deleteEntry(dir, key) {
  const target = join(dir, `${key}.md`);
  try {
    await unlink(target);
    return `Deleted memory "${key}".`;
  } catch {
    return `Error [memory]: Key "${key}" not found.`;
  }
}
|
|
97
|
+
var memory = createMemory();
|
|
98
|
+
|
|
99
|
+
export {
|
|
100
|
+
createMemory,
|
|
101
|
+
memory
|
|
102
|
+
};
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import {
|
|
2
|
+
formatTask,
|
|
3
|
+
loadTasks
|
|
4
|
+
} from "./chunk-E6NBEYZD.js";
|
|
5
|
+
|
|
6
|
+
// src/task-get/index.ts
|
|
7
|
+
import { tool } from "ai";
|
|
8
|
+
import { z } from "zod";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
// Build the task-get tool: looks up a single task by ID in the tasks file
// (default: <cwd>/.agentool/tasks.json).
function createTaskGet(config = {}) {
  const tasksFile = config.tasksFile ?? join(config.cwd ?? process.cwd(), ".agentool", "tasks.json");
  return tool({
    description: "Retrieve a task by its ID to see full details.",
    inputSchema: z.object({
      taskId: z.string().describe("The ID of the task to retrieve")
    }),
    execute: async ({ taskId }) => {
      try {
        const allTasks = await loadTasks(tasksFile);
        for (const task of allTasks) {
          if (task.id === taskId) return formatTask(task);
        }
        return `Error [task-get]: Task "${taskId}" not found.`;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [task-get]: ${msg}`;
      }
    }
  });
}
|
|
31
|
+
var taskGet = createTaskGet();
|
|
32
|
+
|
|
33
|
+
export {
|
|
34
|
+
createTaskGet,
|
|
35
|
+
taskGet
|
|
36
|
+
};
|
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
// src/lsp/index.ts
|
|
2
|
+
import { tool } from "ai";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
import { spawn } from "child_process";
|
|
5
|
+
import { readFile } from "fs/promises";
|
|
6
|
+
import { pathToFileURL } from "url";
|
|
7
|
+
import { resolve, extname } from "path";
|
|
8
|
+
var nextId = 1;
|
|
9
|
+
// Frame a JSON-RPC message with the LSP "Content-Length" header.
function encodeJsonRpc(msg) {
  const body = JSON.stringify(msg);
  const header = `Content-Length: ${Buffer.byteLength(body)}\r\n\r\n`;
  return Buffer.from(header + body);
}
|
|
15
|
+
// Split a byte buffer into zero or more Content-Length-framed JSON-RPC
// messages. Incomplete trailing frames stop the scan; bodies that are not
// valid JSON are skipped.
function parseJsonRpcResponses(buffer) {
  const messages = [];
  let cursor = 0;
  while (cursor < buffer.length) {
    const headerEnd = buffer.indexOf("\r\n\r\n", cursor);
    if (headerEnd === -1) break;
    const header = buffer.subarray(cursor, headerEnd).toString();
    const lengthMatch = /Content-Length:\s*(\d+)/i.exec(header);
    if (!lengthMatch) break;
    const bodyStart = headerEnd + 4;
    const bodyEnd = bodyStart + parseInt(lengthMatch[1], 10);
    if (bodyEnd > buffer.length) break;
    try {
      messages.push(JSON.parse(buffer.subarray(bodyStart, bodyEnd).toString()));
    } catch {
      // malformed body: skip this frame and keep scanning
    }
    cursor = bodyEnd;
  }
  return messages;
}
|
|
34
|
+
// Map a tool-facing operation name onto its LSP wire method.
// All three call-hierarchy operations start with a "prepare" request; the
// incoming/outgoing follow-up request is issued separately by the caller.
// Unknown operations pass through unchanged so raw LSP methods still work.
//
// Fix: the previous object-literal lookup (`map[operation] ?? operation`)
// fell through to Object.prototype for keys like "toString"/"constructor",
// returning a function instead of the operation string. A switch has no
// prototype chain and cannot misfire that way.
function operationToMethod(operation) {
  switch (operation) {
    case "goToDefinition": return "textDocument/definition";
    case "findReferences": return "textDocument/references";
    case "hover": return "textDocument/hover";
    case "documentSymbol": return "textDocument/documentSymbol";
    case "workspaceSymbol": return "workspace/symbol";
    case "goToImplementation": return "textDocument/implementation";
    case "prepareCallHierarchy":
    case "incomingCalls":
    case "outgoingCalls":
      return "textDocument/prepareCallHierarchy";
    default:
      return operation;
  }
}
|
|
48
|
+
// Build LSP request params for an operation. workspaceSymbol takes only a
// query; documentSymbol needs only the document; every other operation is
// positional (line/char are 0-based at this point).
function buildRequestParams(op, uri, line, char) {
  if (op === "workspaceSymbol") {
    return { query: "" };
  }
  const textDocument = { uri };
  if (op === "documentSymbol") {
    return { textDocument };
  }
  const position = { line, character: char };
  return op === "findReferences"
    ? { textDocument, position, context: { includeDeclaration: true } }
    : { textDocument, position };
}
|
|
56
|
+
// Write a JSON-RPC request to the server's stdin; returns the request id
// so the caller can wait for the matching response.
function sendRequest(proc, method, params) {
  const id = nextId++;
  proc.stdin.write(encodeJsonRpc({ jsonrpc: "2.0", id, method, params }));
  return id;
}
|
|
62
|
+
// Write a JSON-RPC notification (no id, so no response is expected).
function sendNotification(proc, method, params) {
  proc.stdin.write(encodeJsonRpc({ jsonrpc: "2.0", method, params }));
}
|
|
66
|
+
// Wait for the framed response whose id matches, accumulating stdout bytes
// until a complete frame with that id arrives. Rejects on timeout or a
// stream error. Listeners and the timer are always removed via cleanup().
// NOTE(review): the byte buffer is local to each call, so frames arriving
// while no waiter is attached are dropped — confirm servers reply in order.
function waitForResponse(proc, id, timeoutMs) {
  return new Promise((resolveP, reject) => {
    let received = Buffer.alloc(0);
    const onData = (chunk) => {
      received = Buffer.concat([received, chunk]);
      for (const msg of parseJsonRpcResponses(received)) {
        const rpc = msg;
        if (rpc.id !== id) continue;
        cleanup();
        resolveP(rpc.error ? { error: rpc.error } : { result: rpc.result });
        return;
      }
    };
    const onErr = (e) => {
      cleanup();
      reject(e);
    };
    const timer = setTimeout(() => {
      cleanup();
      reject(new Error(`LSP request timed out after ${timeoutMs}ms`));
    }, timeoutMs);
    function cleanup() {
      clearTimeout(timer);
      proc.stdout.off("data", onData);
      proc.stdout.off("error", onErr);
    }
    proc.stdout.on("data", onData);
    proc.stdout.on("error", onErr);
  });
}
|
|
97
|
+
// Politely stop the server (shutdown request, then exit notification);
// the finally-kill guarantees the process dies even if the handshake fails.
async function shutdownServer(proc, ms) {
  try {
    const shutdownId = sendRequest(proc, "shutdown", {});
    await waitForResponse(proc, shutdownId, Math.min(ms, 5e3));
    sendNotification(proc, "exit", {});
  } catch {
    // best effort only; the kill below is the real guarantee
  } finally {
    proc.kill();
  }
}
|
|
107
|
+
// Spawn the configured language server, run a single LSP operation against
// one file, and return the JSON-serialized result (or an error string).
// params.line / params.character are already 0-based here.
async function executeLspOperation(serverConfig, params, timeoutMs) {
  const absolutePath = resolve(params.cwd, params.filePath);
  const uri = pathToFileURL(absolutePath).href;
  const proc = spawn(serverConfig.command, serverConfig.args ?? [], {
    cwd: params.cwd,
    stdio: ["pipe", "pipe", "pipe"]
  });
  // Spawn failures (e.g. command not found) surface through this promise,
  // raced against every response wait below.
  const spawnError = new Promise((_, reject) => {
    proc.on("error", (err) => reject(err));
  });
  const rpc = (id) => Promise.race([waitForResponse(proc, id, timeoutMs), spawnError]);
  try {
    await rpc(sendRequest(proc, "initialize", {
      processId: process.pid,
      capabilities: {},
      rootUri: pathToFileURL(params.cwd).href
    }));
    sendNotification(proc, "initialized", {});
    const text = await readFile(absolutePath, "utf-8");
    const languageId = extname(absolutePath).replace(".", "") || "plaintext";
    sendNotification(proc, "textDocument/didOpen", {
      textDocument: { uri, languageId, version: 1, text }
    });
    const method = operationToMethod(params.operation);
    const reqParams = buildRequestParams(params.operation, uri, params.line ?? 0, params.character ?? 0);
    let response = await rpc(sendRequest(proc, method, reqParams));
    if (response.error) {
      return `Error [lsp]: Server error: ${response.error.message} (code ${response.error.code})`;
    }
    // Call-hierarchy operations need a second request using the item
    // returned by the "prepare" step.
    const wantsCalls = params.operation === "incomingCalls" || params.operation === "outgoingCalls";
    if (wantsCalls && Array.isArray(response.result) && response.result.length > 0) {
      const followUp = params.operation === "incomingCalls" ? "callHierarchy/incomingCalls" : "callHierarchy/outgoingCalls";
      response = await rpc(sendRequest(proc, followUp, { item: response.result[0] }));
      if (response.error) {
        return `Error [lsp]: Server error: ${response.error.message} (code ${response.error.code})`;
      }
    }
    await shutdownServer(proc, timeoutMs);
    return JSON.stringify(response.result, null, 2) ?? "null";
  } catch (error) {
    proc.kill();
    throw error;
  }
}
|
|
147
|
+
// Every operation the lsp tool accepts; order is preserved because it feeds
// the z.enum() in createLsp's input schema.
const LSP_OPERATIONS = [
  "goToDefinition",
  "findReferences",
  "hover",
  "documentSymbol",
  "workspaceSymbol",
  "goToImplementation",
  "prepareCallHierarchy",
  "incomingCalls",
  "outgoingCalls"
];
|
|
158
|
+
// Build the lsp tool around executeLspOperation.
// Improvement: `line`/`character` were previously required even for
// documentSymbol/workspaceSymbol, which use no position at all. They are now
// optional (backward compatible — existing callers keep working) and default
// to 1:1 before the 1-based→0-based conversion.
function createLsp(config = {}) {
  // Default per-request timeout: 30 s.
  const timeoutMs = config.timeout ?? 3e4;
  return tool({
    description: "Perform language server operations like go-to-definition, find-references, and hover. Requires LSP server configuration. Supports 9 operations: goToDefinition, findReferences, hover, documentSymbol, workspaceSymbol, goToImplementation, prepareCallHierarchy, incomingCalls, outgoingCalls.",
    inputSchema: z.object({
      operation: z.enum(LSP_OPERATIONS).describe("The LSP operation to perform"),
      filePath: z.string().describe("Path to the file"),
      line: z.number().int().positive().optional().describe("The line number (1-based, as shown in editors; optional for documentSymbol/workspaceSymbol)"),
      character: z.number().int().positive().optional().describe("The character offset (1-based, as shown in editors; optional for documentSymbol/workspaceSymbol)")
    }),
    execute: async ({ operation, filePath, line, character }) => {
      if (!config.servers || Object.keys(config.servers).length === 0) {
        return 'Error [lsp]: No LSP servers configured. Provide server configuration via createLsp({ servers: { ".ts": { command: "typescript-language-server", args: ["--stdio"] } } })';
      }
      // extname(".bashrc") is "", so fall back to treating the last
      // dot-separated segment as the extension (covers dotfiles).
      const ext = extname(filePath) || "." + filePath.split(".").pop();
      const serverConfig = config.servers[ext];
      if (!serverConfig) {
        const available = Object.keys(config.servers).join(", ");
        return `Error [lsp]: No LSP server configured for ${ext} files. Available: ${available}`;
      }
      try {
        // Convert 1-based editor coordinates to the 0-based LSP protocol;
        // omitted coordinates default to line 1, column 1.
        return await executeLspOperation(
          serverConfig,
          { operation, filePath, line: (line ?? 1) - 1, character: (character ?? 1) - 1, cwd: config.cwd ?? process.cwd() },
          timeoutMs
        );
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [lsp]: ${operation} failed for ${filePath}: ${msg}`;
      }
    }
  });
}
|
|
191
|
+
var lsp = createLsp();
|
|
192
|
+
|
|
193
|
+
export {
|
|
194
|
+
executeLspOperation,
|
|
195
|
+
createLsp,
|
|
196
|
+
lsp
|
|
197
|
+
};
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
|
|
2
|
+
|
|
3
|
+
var _chunkMIYA7TNRcjs = require('./chunk-MIYA7TNR.cjs');
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
var _chunkQZ5GS6HWcjs = require('./chunk-QZ5GS6HW.cjs');
|
|
7
|
+
|
|
8
|
+
// src/glob/index.ts
|
|
9
|
+
var _ai = require('ai');
|
|
10
|
+
var _zod = require('zod');
|
|
11
|
+
|
|
12
|
+
// src/shared/glob.ts
|
|
13
|
+
var _path = require('path');
|
|
14
|
+
// Split a glob pattern into a static base directory and the remaining
// relative pattern (everything from the first glob metacharacter onward).
function extractGlobBaseDirectory(pattern) {
  const firstMeta = pattern.match(/[*?[{]/);
  if (!firstMeta || firstMeta.index === void 0) {
    // No glob characters at all: treat the pattern as a literal path.
    return {
      baseDir: _path.dirname(pattern),
      relativePattern: _path.basename(pattern)
    };
  }
  const staticPrefix = pattern.slice(0, firstMeta.index);
  // Accept both "/" and the platform separator in the static prefix.
  const lastSep = Math.max(
    staticPrefix.lastIndexOf("/"),
    staticPrefix.lastIndexOf(_path.sep)
  );
  if (lastSep === -1) {
    return { baseDir: "", relativePattern: pattern };
  }
  const relativePattern = pattern.slice(lastSep + 1);
  let baseDir = staticPrefix.slice(0, lastSep);
  if (baseDir === "" && lastSep === 0) {
    // Pattern like "/src/**": keep the filesystem root as the base.
    baseDir = "/";
  }
  return { baseDir, relativePattern };
}
|
|
37
|
+
// Run ripgrep in --files mode to find paths matching `pattern`, rooted at
// `cwd`. Absolute patterns are split into base directory + relative pattern
// first. Results are returned as absolute paths, paginated by
// options.offset/options.limit (defaults 0/100), with a truncation flag.
async function glob(pattern, cwd, options) {
  const limit = options?.limit ?? 100;
  const offset = options?.offset ?? 0;
  let searchDir = cwd;
  let searchPattern = pattern;
  if (_path.isAbsolute(pattern)) {
    const { baseDir, relativePattern } = extractGlobBaseDirectory(pattern);
    if (baseDir) {
      searchDir = baseDir;
      searchPattern = relativePattern;
    }
  }
  const rgArgs = [
    "--files",
    "--glob",
    searchPattern,
    "--sort=modified",
    "--hidden",
    "--no-ignore"
  ];
  const allPaths = await _chunkMIYA7TNRcjs.executeRipgrep.call(void 0, rgArgs, searchDir, {
    signal: options?.signal
  });
  const absolutePaths = allPaths.map(
    (p) => _path.isAbsolute(p) ? p : _path.join(searchDir, p)
  );
  return {
    files: absolutePaths.slice(offset, offset + limit),
    truncated: absolutePaths.length > offset + limit
  };
}
|
|
67
|
+
|
|
68
|
+
// src/glob/index.ts
|
|
69
|
+
// Build the glob tool around the ripgrep-backed glob() helper above.
function createGlob(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  return _ai.tool.call(void 0, {
    description: 'Find files matching a glob pattern. Returns absolute file paths sorted by modification time (newest first). Supports patterns like "**/*.ts", "src/**/*.js", or "*.json".',
    inputSchema: _zod.z.object({
      pattern: _zod.z.string().describe("Glob pattern to match files against"),
      path: _zod.z.string().optional().describe("Directory to search in. Defaults to the working directory.")
    }),
    execute: async ({ pattern, path }) => {
      try {
        const searchDir = path ? _chunkQZ5GS6HWcjs.expandPath.call(void 0, path, cwd) : cwd;
        const { files, truncated } = await glob(pattern, searchDir);
        if (files.length === 0) {
          return "No files found";
        }
        const header = truncated
          ? `Found ${files.length}+ files (results truncated)`
          : `Found ${files.length} files`;
        return `${header}\n${files.join("\n")}`;
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        return `Error [glob]: Failed to search for files: ${message}`;
      }
    }
  });
}
|
|
94
|
+
// Default instance; exported under the public name `glob` (the inner
// helper function keeps that local name, hence the distinct binding).
const defaultGlob = createGlob();

exports.createGlob = createGlob;
exports.glob = defaultGlob;
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true});
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
var _chunk5NW4OGRIcjs = require('./chunk-5NW4OGRI.cjs');
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
var _chunkQZ5GS6HWcjs = require('./chunk-QZ5GS6HW.cjs');
|
|
9
|
+
|
|
10
|
+
// src/edit/index.ts
|
|
11
|
+
var _promises = require('fs/promises');
|
|
12
|
+
var _ai = require('ai');
|
|
13
|
+
var _zod = require('zod');
|
|
14
|
+
// Build the edit tool: exact-string find/replace in a single file, with
// curly-quote fallback matching via findActualString/preserveQuoteStyle.
// Fix: the uniqueness counter previously advanced by 1 past each match
// (`pos = idx + 1`), so overlapping matches were double-counted — e.g.
// old_string "aa" in "aaa" counted 2 occurrences and wrongly demanded
// replace_all. The counter now advances past the whole match, matching the
// non-overlapping semantics of the actual replacement (a >=1 step guards
// against an infinite loop on a zero-length match).
function createEdit(config = {}) {
  return _ai.tool.call(void 0, {
    description: "Perform an exact string replacement in a file. Locates old_string in the file and replaces it with new_string. Supports curly-quote fallback matching. When replace_all is false (default), old_string must appear exactly once.",
    inputSchema: _zod.z.object({
      file_path: _zod.z.string().describe("The absolute path to the file to modify"),
      old_string: _zod.z.string().describe("The exact string to find and replace"),
      new_string: _zod.z.string().describe("The replacement string"),
      replace_all: _zod.z.boolean().default(false).optional().describe("Replace all occurrences (default: false)")
    }),
    execute: async ({ file_path, old_string, new_string, replace_all }) => {
      try {
        const resolved = _chunkQZ5GS6HWcjs.expandPath.call(void 0, file_path, config.cwd);
        let content;
        try {
          content = await _promises.readFile.call(void 0, resolved, "utf-8");
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          return `Error [edit]: Cannot read file "${resolved}": ${msg}`;
        }
        if (old_string === new_string) {
          return "Error [edit]: old_string and new_string are identical \u2014 nothing to change.";
        }
        // findActualString applies the curly-quote fallback matching.
        const actualOld = _chunk5NW4OGRIcjs.findActualString.call(void 0, content, old_string);
        if (actualOld === null) {
          const preview = content.slice(0, 200);
          return `Error [edit]: old_string not found in "${resolved}". File starts with:\n${preview}`;
        }
        if (!replace_all) {
          // Count non-overlapping occurrences to enforce uniqueness.
          let count = 0;
          let pos = 0;
          while (pos < content.length) {
            const idx = content.indexOf(actualOld, pos);
            if (idx === -1) break;
            count++;
            pos = idx + Math.max(actualOld.length, 1);
          }
          if (count > 1) {
            return `Error [edit]: old_string appears ${count} times in "${resolved}". Use replace_all to replace every occurrence, or provide a more specific string.`;
          }
        }
        const styledNew = _chunk5NW4OGRIcjs.preserveQuoteStyle.call(void 0, old_string, actualOld, new_string);
        const updated = _chunk5NW4OGRIcjs.applyEditToFile.call(void 0, content, actualOld, styledNew, replace_all);
        await _promises.writeFile.call(void 0, resolved, updated, "utf-8");
        const snippet = styledNew.length > 0 ? styledNew.slice(0, 200) : "(deletion)";
        return `Successfully edited "${resolved}". Replacement snippet:\n${snippet}`;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [edit]: ${msg}`;
      }
    }
  });
}
|
|
68
|
+
// Default instance built with default configuration.
const edit = createEdit();

exports.createEdit = createEdit;
exports.edit = edit;
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
var _chunkL5JH4I77cjs = require('./chunk-L5JH4I77.cjs');
|
|
6
|
+
|
|
7
|
+
// src/task-update/index.ts
|
|
8
|
+
var _ai = require('ai');
|
|
9
|
+
var _zod = require('zod');
|
|
10
|
+
var _path = require('path');
|
|
11
|
+
// Build the task-update tool: applies a partial update to one task record
// in the tasks file (default: <cwd>/.agentool/tasks.json).
function createTaskUpdate(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  const tasksFile = config.tasksFile ?? _path.join(cwd, ".agentool", "tasks.json");
  return _ai.tool.call(void 0, {
    description: "Update a task by its ID. Can change status, subject, description, owner, metadata, and dependency relationships.",
    inputSchema: _zod.z.object({
      taskId: _zod.z.string().describe("The ID of the task to update"),
      subject: _zod.z.string().optional().describe("New subject for the task"),
      description: _zod.z.string().optional().describe("New description"),
      status: _zod.z.enum(["pending", "in_progress", "completed", "deleted"]).optional().describe("New status for the task"),
      owner: _zod.z.string().optional().describe("New owner for the task"),
      activeForm: _zod.z.string().optional().describe("Present continuous form shown in spinner when in_progress"),
      addBlocks: _zod.z.array(_zod.z.string()).optional().describe("Task IDs that this task blocks"),
      addBlockedBy: _zod.z.array(_zod.z.string()).optional().describe("Task IDs that block this task"),
      metadata: _zod.z.record(_zod.z.string(), _zod.z.unknown()).optional().describe("Metadata keys to merge. Set key to null to delete.")
    }),
    execute: async (input) => {
      try {
        const tasks = await _chunkL5JH4I77cjs.loadTasks.call(void 0, tasksFile);
        const idx = tasks.findIndex((t) => t.id === input.taskId);
        if (idx === -1) return `Error [task-update]: Task "${input.taskId}" not found.`;
        const entry = tasks[idx];
        // Scalar fields: only overwrite when explicitly provided.
        if (input.subject !== void 0) entry.subject = input.subject;
        if (input.description !== void 0) entry.description = input.description;
        if (input.status !== void 0) entry.status = input.status;
        if (input.owner !== void 0) entry.owner = input.owner;
        if (input.activeForm !== void 0) entry.activeForm = input.activeForm;
        // Dependency lists grow append-only, skipping duplicates.
        for (const id of input.addBlocks ?? []) {
          if (!entry.blocks.includes(id)) entry.blocks.push(id);
        }
        for (const id of input.addBlockedBy ?? []) {
          if (!entry.blockedBy.includes(id)) entry.blockedBy.push(id);
        }
        // Metadata merges shallowly; a null value deletes the key.
        if (input.metadata) {
          if (!entry.metadata) entry.metadata = {};
          for (const [key, value] of Object.entries(input.metadata)) {
            if (value === null) delete entry.metadata[key];
            else entry.metadata[key] = value;
          }
        }
        entry.updatedAt = new Date().toISOString();
        tasks[idx] = entry;
        await _chunkL5JH4I77cjs.saveTasks.call(void 0, tasksFile, tasks);
        return `Updated task ${input.taskId}.\n${_chunkL5JH4I77cjs.formatTask.call(void 0, entry)}`;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [task-update]: ${msg}`;
      }
    }
  });
}
|
|
70
|
+
// Default tool instance bound to process.cwd() and .agentool/tasks.json.
var taskUpdate = createTaskUpdate();

exports.createTaskUpdate = createTaskUpdate; exports.taskUpdate = taskUpdate;
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

// Entry-point facade: re-export the context-compaction implementation
// from the shared bundler chunk.
const _compactionChunk = require('../chunk-FW3UJ622.cjs');

exports.contextCompaction = _compactionChunk.contextCompaction;
exports.createContextCompaction = _compactionChunk.createContextCompaction;
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import * as ai from 'ai';
|
|
2
|
+
import { B as BaseToolConfig } from '../types-3QPDuCXN.cjs';
|
|
3
|
+
|
|
4
|
+
/**
 * Configuration for the context compaction tool.
 * Extends {@link BaseToolConfig} with summarization options.
 *
 * @example
 * ```typescript
 * import { createContextCompaction } from 'agentool/context-compaction';
 *
 * const compactor = createContextCompaction({
 *   summarize: async (msgs) => `Summary of ${msgs.length} messages`,
 *   maxTokens: 2048,
 * });
 * ```
 */
interface ContextCompactionConfig extends BaseToolConfig {
    /**
     * Function that summarizes messages into a shorter form.
     * Receives the conversation as role/content pairs and resolves to the
     * condensed replacement text.
     * Consumer must provide this for compaction to work.
     */
    summarize?: (messages: Array<{
        role: string;
        content: string;
    }>) => Promise<string>;
    /** Maximum tokens target. Defaults to 4096. */
    maxTokens?: number;
}
|
|
30
|
+
/**
 * Create a context compaction tool with the given configuration.
 * Summarizes conversation history to reduce context size when it
 * exceeds the token budget (estimated as maxTokens * 4 characters).
 *
 * @param config - Configuration including the summarize callback and token budget
 * @returns An AI SDK tool that compacts conversation messages
 *
 * @example
 * ```typescript
 * import { createContextCompaction } from 'agentool/context-compaction';
 *
 * const compactor = createContextCompaction({
 *   summarize: async (msgs) => {
 *     // Call your LLM to summarize
 *     return 'Condensed summary of the conversation';
 *   },
 *   maxTokens: 4096,
 * });
 * ```
 */
declare function createContextCompaction(config?: ContextCompactionConfig): ai.Tool<{
    messages: {
        content: string;
        role: string;
    }[];
    maxTokens?: number | undefined;
}, string>;
|
|
58
|
+
/**
 * Default context compaction tool instance with no summarize function.
 * Configure with {@link createContextCompaction} for full functionality.
 *
 * @example
 * ```typescript
 * import { contextCompaction } from 'agentool/context-compaction';
 * // Use directly — will return error if messages exceed budget
 * // since no summarize function is configured.
 * ```
 */
declare const contextCompaction: ai.Tool<{
    messages: {
        content: string;
        role: string;
    }[];
    maxTokens?: number | undefined;
}, string>;
|
|
76
|
+
|
|
77
|
+
export { type ContextCompactionConfig, contextCompaction, createContextCompaction };
|