agentool 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +627 -52
- package/dist/ask-user/index.cjs +8 -0
- package/dist/ask-user/index.d.cts +68 -0
- package/dist/ask-user/index.d.ts +68 -0
- package/dist/ask-user/index.js +8 -0
- package/dist/bash/index.cjs +8 -0
- package/dist/bash/index.d.cts +63 -0
- package/dist/bash/index.d.ts +63 -0
- package/dist/bash/index.js +8 -0
- package/dist/chunk-3EPGFWZV.cjs +30 -0
- package/dist/chunk-3VO6NETR.cjs +79 -0
- package/dist/chunk-4YI2H55A.js +142 -0
- package/dist/chunk-5NW4OGRI.cjs +99 -0
- package/dist/chunk-6MDPYALY.js +196 -0
- package/dist/chunk-6PQLFDGT.js +117 -0
- package/dist/chunk-7QL4BQCH.js +40 -0
- package/dist/chunk-CAEVLIQB.cjs +117 -0
- package/dist/chunk-CGTPF6IS.js +90 -0
- package/dist/chunk-EA3YV7ZG.js +79 -0
- package/dist/chunk-FAEGCFTO.js +136 -0
- package/dist/chunk-FV2R5FFQ.cjs +102 -0
- package/dist/chunk-FW3UJ622.cjs +59 -0
- package/dist/chunk-G3ITTPGX.js +99 -0
- package/dist/chunk-HDKXSKMO.js +30 -0
- package/dist/chunk-HZAQRHBT.js +99 -0
- package/dist/chunk-I3ONDY7P.js +46 -0
- package/dist/chunk-I6KFFQPV.cjs +58 -0
- package/dist/chunk-IMZQ7ELK.cjs +196 -0
- package/dist/chunk-JCTBB7H2.cjs +40 -0
- package/dist/chunk-K77GC2QI.js +59 -0
- package/dist/chunk-LPV5CN2K.js +58 -0
- package/dist/chunk-MF7CJVIZ.js +40 -0
- package/dist/chunk-MIYA7TNR.cjs +123 -0
- package/dist/chunk-MJCAXASI.js +123 -0
- package/dist/chunk-MXFW3XY6.cjs +73 -0
- package/dist/chunk-ONBH74ZV.cjs +90 -0
- package/dist/chunk-OXLQ7QVL.cjs +40 -0
- package/dist/chunk-QEJV2KZ4.cjs +159 -0
- package/dist/chunk-QZ5GS6HW.cjs +46 -0
- package/dist/chunk-S6QEY7UY.js +73 -0
- package/dist/chunk-SUSAPI5W.cjs +142 -0
- package/dist/chunk-TBVHHF3H.cjs +47 -0
- package/dist/chunk-U2YMJM25.cjs +115 -0
- package/dist/chunk-VLNDEVKS.js +102 -0
- package/dist/chunk-XKG2A3EW.js +159 -0
- package/dist/chunk-XLD2Y3SS.cjs +136 -0
- package/dist/chunk-Y7KOKDFP.js +115 -0
- package/dist/chunk-YPPPGGLA.cjs +99 -0
- package/dist/chunk-ZHCMEQJJ.js +47 -0
- package/dist/context-compaction/index.cjs +8 -0
- package/dist/context-compaction/index.d.cts +77 -0
- package/dist/context-compaction/index.d.ts +77 -0
- package/dist/context-compaction/index.js +8 -0
- package/dist/diff/index.cjs +9 -0
- package/dist/diff/index.d.cts +72 -0
- package/dist/diff/index.d.ts +72 -0
- package/dist/diff/index.js +9 -0
- package/dist/edit/index.cjs +10 -0
- package/dist/edit/index.d.cts +53 -0
- package/dist/edit/index.d.ts +53 -0
- package/dist/edit/index.js +10 -0
- package/dist/glob/index.cjs +10 -0
- package/dist/glob/index.d.cts +47 -0
- package/dist/glob/index.d.ts +47 -0
- package/dist/glob/index.js +10 -0
- package/dist/grep/index.cjs +10 -0
- package/dist/grep/index.d.cts +50 -0
- package/dist/grep/index.d.ts +50 -0
- package/dist/grep/index.js +10 -0
- package/dist/http-request/index.cjs +8 -0
- package/dist/http-request/index.d.cts +60 -0
- package/dist/http-request/index.d.ts +60 -0
- package/dist/http-request/index.js +8 -0
- package/dist/index.cjs +102 -0
- package/dist/index.d.cts +18 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.js +102 -0
- package/dist/lsp/index.cjs +10 -0
- package/dist/lsp/index.d.cts +38 -0
- package/dist/lsp/index.d.ts +38 -0
- package/dist/lsp/index.js +10 -0
- package/dist/memory/index.cjs +9 -0
- package/dist/memory/index.d.cts +63 -0
- package/dist/memory/index.d.ts +63 -0
- package/dist/memory/index.js +9 -0
- package/dist/multi-edit/index.cjs +11 -0
- package/dist/multi-edit/index.d.cts +72 -0
- package/dist/multi-edit/index.d.ts +72 -0
- package/dist/multi-edit/index.js +11 -0
- package/dist/read/index.cjs +10 -0
- package/dist/read/index.d.cts +67 -0
- package/dist/read/index.d.ts +67 -0
- package/dist/read/index.js +10 -0
- package/dist/sleep/index.cjs +8 -0
- package/dist/sleep/index.d.cts +60 -0
- package/dist/sleep/index.d.ts +60 -0
- package/dist/sleep/index.js +8 -0
- package/dist/task/index.cjs +8 -0
- package/dist/task/index.d.cts +67 -0
- package/dist/task/index.d.ts +67 -0
- package/dist/task/index.js +8 -0
- package/dist/types-3QPDuCXN.d.cts +45 -0
- package/dist/types-3QPDuCXN.d.ts +45 -0
- package/dist/web-fetch/index.cjs +8 -0
- package/dist/web-fetch/index.d.cts +56 -0
- package/dist/web-fetch/index.d.ts +56 -0
- package/dist/web-fetch/index.js +8 -0
- package/dist/write/index.cjs +10 -0
- package/dist/write/index.d.cts +47 -0
- package/dist/write/index.d.ts +47 -0
- package/dist/write/index.js +10 -0
- package/package.json +145 -20
- package/dist/core/index.d.ts +0 -20
- package/dist/core/index.js +0 -1
- package/dist/core/index.js.map +0 -1
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }// src/lsp/index.ts
|
|
2
|
+
var _ai = require('ai');
|
|
3
|
+
var _zod = require('zod');
|
|
4
|
+
var _child_process = require('child_process');
|
|
5
|
+
var _promises = require('fs/promises');
|
|
6
|
+
var _url = require('url');
|
|
7
|
+
var _path = require('path');
|
|
8
|
+
var nextId = 1;
// Frame a JSON-RPC message with an LSP base-protocol Content-Length header.
function encodeJsonRpc(msg) {
  const payload = JSON.stringify(msg);
  const header = `Content-Length: ${Buffer.byteLength(payload)}\r\n\r\n`;
  return Buffer.from(header + payload);
}
|
|
15
|
+
// Scan `buffer` for complete Content-Length-framed JSON-RPC messages and
// return the parsed bodies. Incomplete trailing frames are left unparsed;
// bodies that fail JSON.parse are skipped using their declared length.
function parseJsonRpcResponses(buffer) {
  const messages = [];
  let cursor = 0;
  while (cursor < buffer.length) {
    const headerEnd = buffer.indexOf("\r\n\r\n", cursor);
    if (headerEnd === -1) break;
    const header = buffer.subarray(cursor, headerEnd).toString();
    const lengthMatch = /Content-Length:\s*(\d+)/i.exec(header);
    if (!lengthMatch) break;
    const bodyStart = headerEnd + 4;
    const bodyEnd = bodyStart + parseInt(lengthMatch[1], 10);
    if (bodyEnd > buffer.length) break;
    try {
      messages.push(JSON.parse(buffer.subarray(bodyStart, bodyEnd).toString()));
    } catch (parseErr) {
      // Malformed body: ignore it but still advance past its declared length.
    }
    cursor = bodyEnd;
  }
  return messages;
}
|
|
34
|
+
// Translate a tool-level operation name into its LSP protocol method.
// Unknown operations pass through unchanged so callers can send raw methods.
function operationToMethod(operation) {
  const methodByOperation = {
    goToDefinition: "textDocument/definition",
    findReferences: "textDocument/references",
    hover: "textDocument/hover",
    documentSymbol: "textDocument/documentSymbol",
    workspaceSymbol: "workspace/symbol",
    goToImplementation: "textDocument/implementation",
    // Call-hierarchy queries begin with a "prepare" request; the actual
    // incoming/outgoing call request is issued by the caller afterwards.
    incomingCalls: "textDocument/prepareCallHierarchy",
    outgoingCalls: "textDocument/prepareCallHierarchy"
  };
  return methodByOperation[operation] ?? operation;
}
|
|
47
|
+
// Assemble the request params for an LSP operation. workspaceSymbol takes a
// query; documentSymbol only needs the document; everything else is
// position-based, with findReferences adding its includeDeclaration context.
function buildRequestParams(op, uri, line, char, query) {
  if (op === "workspaceSymbol") {
    return { query: query ?? "" };
  }
  const textDocument = { uri };
  if (op === "documentSymbol") {
    return { textDocument };
  }
  const position = { line, character: char };
  return op === "findReferences"
    ? { textDocument, position, context: { includeDeclaration: true } }
    : { textDocument, position };
}
|
|
55
|
+
// Write a JSON-RPC request to the server's stdin and hand back the id the
// caller should match the response against.
function sendRequest(proc, method, params) {
  const requestId = nextId++;
  proc.stdin.write(encodeJsonRpc({ jsonrpc: "2.0", id: requestId, method, params }));
  return requestId;
}
|
|
61
|
+
// Fire-and-forget JSON-RPC notification: no id, so no response is expected.
function sendNotification(proc, method, params) {
  proc.stdin.write(encodeJsonRpc({ jsonrpc: "2.0", method, params }));
}
|
|
65
|
+
// Wait for the JSON-RPC response whose id matches `id` on the server's
// stdout. Resolves with { result } or { error } (mirroring the response),
// rejects on a stdout "error" event or after `timeoutMs`. The data/error
// listeners and the timer are always removed once the promise settles.
function waitForResponse(proc, id, timeoutMs) {
  return new Promise((resolveP, reject) => {
    let buf = Buffer.alloc(0);
    const timer = setTimeout(() => {
      cleanup();
      reject(new Error(`LSP request timed out after ${timeoutMs}ms`));
    }, timeoutMs);
    // Detach everything this waiter installed; safe to call more than once.
    const cleanup = () => {
      clearTimeout(timer);
      proc.stdout.off("data", onData);
      proc.stdout.off("error", onErr);
    };
    function onData(chunk) {
      // Accumulate and re-scan the whole buffer on each chunk. Quadratic in
      // the worst case, but sessions here are short-lived and low-volume.
      buf = Buffer.concat([buf, chunk]);
      for (const msg of parseJsonRpcResponses(buf)) {
        const rpc = msg;
        if (rpc.id === id) {
          cleanup();
          resolveP(rpc.error ? { error: rpc.error } : { result: rpc.result });
          return;
        }
      }
    }
    function onErr(e) {
      cleanup();
      reject(e);
    }
    proc.stdout.on("data", onData);
    proc.stdout.on("error", onErr);
  });
}
|
|
96
|
+
// Gracefully stop the language server: send the LSP shutdown request, wait
// briefly for its reply, then send the exit notification. The process is
// killed in all cases, even when the handshake fails.
async function shutdownServer(proc, ms) {
  try {
    const shutdownId = sendRequest(proc, "shutdown", {});
    // Never wait longer than 5s for shutdown, even with a larger timeout.
    await waitForResponse(proc, shutdownId, Math.min(ms, 5e3));
    sendNotification(proc, "exit", {});
  } catch (e3) {
    // Best-effort: swallow handshake errors and fall through to kill().
  } finally {
    proc.kill();
  }
}
|
|
106
|
+
// Run one LSP operation end-to-end against a freshly spawned stdio server:
// initialize, open the target document, issue the request (plus the
// follow-up call-hierarchy request when needed), then shut the server down.
// Returns the result JSON-stringified, or an "Error [lsp]: ..." string for
// server-reported errors. Throws on spawn/transport/timeout failures
// (killing the process first).
async function executeLspOperation(serverConfig, params, timeoutMs) {
  const absolutePath = _path.resolve.call(void 0, params.cwd, params.filePath);
  const uri = _url.pathToFileURL.call(void 0, absolutePath).href;
  const proc = _child_process.spawn.call(void 0, serverConfig.command, _nullishCoalesce(serverConfig.args, () => ( [])), {
    cwd: params.cwd,
    stdio: ["pipe", "pipe", "pipe"]
  });
  // Rejects if the process fails to spawn (e.g. command not found); raced
  // against every request below. NOTE(review): this promise is never
  // settled on success, so a late "error" event could surface as an
  // unhandled rejection — confirm whether that matters for callers.
  const spawnError = new Promise((_, reject) => {
    proc.on("error", (err) => reject(err));
  });
  try {
    const initId = sendRequest(proc, "initialize", {
      processId: process.pid,
      capabilities: {},
      rootUri: _url.pathToFileURL.call(void 0, params.cwd).href
    });
    const rpc = (id) => Promise.race([waitForResponse(proc, id, timeoutMs), spawnError]);
    await rpc(initId);
    sendNotification(proc, "initialized", {});
    const content = await _promises.readFile.call(void 0, absolutePath, "utf-8");
    // Language id is derived from the file extension; servers that need a
    // proper language id (e.g. "typescript" vs "ts") may ignore it.
    const langId = _path.extname.call(void 0, absolutePath).replace(".", "") || "plaintext";
    sendNotification(proc, "textDocument/didOpen", {
      textDocument: { uri, languageId: langId, version: 1, text: content }
    });
    const method = operationToMethod(params.operation);
    const reqParams = buildRequestParams(params.operation, uri, _nullishCoalesce(params.line, () => ( 0)), _nullishCoalesce(params.character, () => ( 0)), params.query);
    let response = await rpc(sendRequest(proc, method, reqParams));
    if (response.error) return `Error [lsp]: Server error: ${response.error.message} (code ${response.error.code})`;
    // Call-hierarchy operations are two-step: the prepare request above
    // returns items; the first item is fed into the actual calls request.
    if ((params.operation === "incomingCalls" || params.operation === "outgoingCalls") && Array.isArray(response.result) && response.result.length > 0) {
      const cm = params.operation === "incomingCalls" ? "callHierarchy/incomingCalls" : "callHierarchy/outgoingCalls";
      response = await rpc(sendRequest(proc, cm, { item: response.result[0] }));
      if (response.error) return `Error [lsp]: Server error: ${response.error.message} (code ${response.error.code})`;
    }
    await shutdownServer(proc, timeoutMs);
    // JSON.stringify(undefined) yields undefined, hence the "null" fallback.
    return _nullishCoalesce(JSON.stringify(response.result, null, 2), () => ( "null"));
  } catch (error) {
    proc.kill();
    throw error;
  }
}
|
|
146
|
+
// Operations accepted by the lsp tool's input schema; each is mapped to an
// LSP protocol method by operationToMethod().
var LSP_OPERATIONS = [
  "goToDefinition",
  "findReferences",
  "hover",
  "documentSymbol",
  "workspaceSymbol",
  "goToImplementation",
  "incomingCalls",
  "outgoingCalls"
];
|
|
156
|
+
// Build the lsp tool. `config.servers` maps a file extension (e.g. ".ts")
// to { command, args } for a stdio language server; `config.timeout` (ms,
// default 30000) bounds each request; `config.cwd` is the workspace root
// (defaults to process.cwd()). All failures are returned as
// "Error [lsp]: ..." strings rather than thrown.
function createLsp(config = {}) {
  const timeoutMs = _nullishCoalesce(config.timeout, () => ( 3e4));
  return _ai.tool.call(void 0, {
    description: "Perform language server operations like go-to-definition, find-references, and hover. Requires LSP server configuration. Supports 8 operations: goToDefinition, findReferences, hover, documentSymbol, workspaceSymbol, goToImplementation, incomingCalls, outgoingCalls.",
    inputSchema: _zod.z.object({
      operation: _zod.z.enum(LSP_OPERATIONS).describe("The LSP operation to perform"),
      filePath: _zod.z.string().describe("Path to the file"),
      line: _zod.z.number().optional().describe("Line number (0-indexed)"),
      character: _zod.z.number().optional().describe("Character offset (0-indexed)"),
      query: _zod.z.string().optional().describe("Search query for workspaceSymbol")
    }),
    execute: async ({ operation, filePath, line, character, query }) => {
      if (!config.servers || Object.keys(config.servers).length === 0) {
        return 'Error [lsp]: No LSP servers configured. Provide server configuration via createLsp({ servers: { ".ts": { command: "typescript-language-server", args: ["--stdio"] } } })';
      }
      // Server selection is by extension. NOTE(review): when extname()
      // returns "" the fallback is "." + the last dot-separated segment,
      // which for a dotless path is "." + the whole name — confirm intended.
      const ext = _path.extname.call(void 0, filePath) || "." + filePath.split(".").pop();
      const serverConfig = config.servers[ext];
      if (!serverConfig) {
        const available = Object.keys(config.servers).join(", ");
        return `Error [lsp]: No LSP server configured for ${ext} files. Available: ${available}`;
      }
      try {
        return await executeLspOperation(
          serverConfig,
          { operation, filePath, line, character, query, cwd: _nullishCoalesce(config.cwd, () => ( process.cwd())) },
          timeoutMs
        );
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [lsp]: ${operation} failed for ${filePath}: ${msg}`;
      }
    }
  });
}
|
|
190
|
+
// Default instance with no servers configured; every call returns the
// "No LSP servers configured" error until createLsp() is used with config.
var lsp = createLsp();




exports.executeLspOperation = executeLspOperation; exports.createLsp = createLsp; exports.lsp = lsp;
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
var _chunkCAEVLIQBcjs = require('./chunk-CAEVLIQB.cjs');
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
var _chunkQZ5GS6HWcjs = require('./chunk-QZ5GS6HW.cjs');
|
|
8
|
+
|
|
9
|
+
// src/write/index.ts
|
|
10
|
+
var _ai = require('ai');
|
|
11
|
+
var _zod = require('zod');
|
|
12
|
+
// Build the write tool. `config.cwd` is the base for resolving relative
// paths (defaults to the process working directory).
function createWrite(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  return _ai.tool.call(void 0, {
    description: "Write text content to a file, creating parent directories as needed. If the file exists it is overwritten. Use this to create new files or replace existing file contents.",
    inputSchema: _zod.z.object({
      file_path: _zod.z.string().describe("Path to the file to write"),
      content: _zod.z.string().describe("Text content to write to the file")
    }),
    execute: async ({ file_path, content }) => {
      try {
        const targetPath = _chunkQZ5GS6HWcjs.expandPath.call(void 0, file_path, cwd);
        // Check existence first so the success message can say which it was.
        const alreadyExisted = await _chunkCAEVLIQBcjs.pathExists.call(void 0, targetPath);
        await _chunkCAEVLIQBcjs.writeTextContent.call(void 0, targetPath, content);
        const byteCount = Buffer.byteLength(content, "utf-8");
        return `${alreadyExisted ? "Updated" : "Created"} file: ${targetPath} (${byteCount} bytes)`;
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        return `Error [write]: Failed to write file: ${message}`;
      }
    }
  });
}
// Default instance rooted at the current working directory.
var write = createWrite();




exports.createWrite = createWrite; exports.write = write;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// src/context-compaction/index.ts
|
|
2
|
+
import { tool, zodSchema } from "ai";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
// Shape of a single conversation message in the compaction payload.
var messageSchema = z.object({
  role: z.string().describe("Message role (system, user, assistant)"),
  content: z.string().describe("Message content")
});
// Input schema for the context-compaction tool.
var parametersSchema = z.object({
  messages: z.array(messageSchema).describe("The conversation messages to compact"),
  maxTokens: z.number().optional().describe(
    "Target maximum tokens (default: config.maxTokens or 4096)"
  )
});
|
|
15
|
+
// Build the context-compaction tool. `config.maxTokens` sets the default
// token budget; `config.summarize(messages)` must return the summary text.
function createContextCompaction(config = {}) {
  return tool({
    description: "Compact conversation history by summarizing older messages to reduce context size. Requires a summarize function in config. Returns compacted messages where total chars < maxTokens * 4.",
    inputSchema: zodSchema(parametersSchema),
    execute: async ({
      messages,
      maxTokens: inputMaxTokens
    }) => {
      // Rough chars-per-token heuristic: budget = tokens * 4.
      const tokenLimit = inputMaxTokens ?? config.maxTokens ?? 4096;
      const charBudget = tokenLimit * 4;
      let totalChars = 0;
      for (const message of messages) {
        totalChars += message.content.length;
      }
      if (totalChars <= charBudget) {
        return JSON.stringify({
          compacted: false,
          messages,
          reason: "Already within token budget"
        });
      }
      if (!config.summarize) {
        return "Error [context-compaction]: No summarize function configured. Provide a summarize callback in the tool config.";
      }
      try {
        const summary = await config.summarize(messages);
        return JSON.stringify({
          compacted: true,
          messages: [{ role: "system", content: summary }],
          originalCount: messages.length
        });
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [context-compaction]: Summarization failed: ${msg}`;
      }
    }
  });
}
|
|
54
|
+
// Default instance: no summarize callback configured, so over-budget calls
// return the "No summarize function configured" error string.
var contextCompaction = createContextCompaction();

export {
  createContextCompaction,
  contextCompaction
};
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
// src/http-request/index.ts
|
|
2
|
+
import { tool } from "ai";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
// Build the http-request tool. `config.timeout` (ms, default 30000) bounds
// each request; `config.defaultHeaders` are merged beneath per-call headers.
// Failures are returned as "Error [http-request]: ..." strings, not thrown.
function createHttpRequest(config = {}) {
  return tool({
    description: "Make an HTTP request to a URL. Returns the response status, headers, and body. Use this for API interactions, webhook calls, and service health checks. Unlike web-fetch, this returns raw response data without markdown conversion.",
    inputSchema: z.object({
      method: z.enum(["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD"]).describe("HTTP method"),
      url: z.string().describe("The URL to send the request to"),
      headers: z.record(z.string()).optional().describe("Request headers as key-value pairs"),
      body: z.string().optional().describe("Request body (for POST, PUT, PATCH)"),
      timeout: z.number().optional().describe("Request timeout in milliseconds (default: 30000)")
    }),
    execute: async ({ method, url, headers, body, timeout }) => {
      // Computed once up front (the original recomputed it in the catch).
      const timeoutMs = timeout ?? config.timeout ?? 3e4;
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
      try {
        // Per-call headers override configured defaults key-by-key.
        const mergedHeaders = { ...config.defaultHeaders, ...headers };
        const response = await fetch(url, {
          method,
          headers: mergedHeaders,
          // Only methods that may carry a body get one.
          body: ["POST", "PUT", "PATCH"].includes(method) ? body : void 0,
          signal: controller.signal
        });
        const responseBody = await response.text();
        const responseHeaders = {};
        response.headers.forEach((value, key) => {
          responseHeaders[key] = value;
        });
        return JSON.stringify(
          {
            status: response.status,
            statusText: response.statusText,
            headers: responseHeaders,
            body: responseBody
          },
          null,
          2
        );
      } catch (error) {
        if (error instanceof Error && error.name === "AbortError") {
          return `Error [http-request]: Request timed out after ${timeoutMs}ms for ${method} ${url}. Try increasing the timeout or verify the server is responsive.`;
        }
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [http-request]: ${method} ${url} failed: ${msg}. Verify the URL is correct and the server is reachable.`;
      } finally {
        // Fix: the original only cleared the timer on the success path, so a
        // failed fetch/text() leaked the timer and kept the event loop alive
        // for up to timeoutMs. finally guarantees cleanup on every path.
        clearTimeout(timeoutId);
      }
    }
  });
}
|
|
53
|
+
// Default instance: no default headers, 30s default timeout.
var httpRequest = createHttpRequest();

export {
  createHttpRequest,
  httpRequest
};
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
// src/sleep/index.ts
|
|
2
|
+
import { tool, zodSchema } from "ai";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
// Build the sleep tool. `config.maxDuration` (ms) caps a single sleep;
// defaults to 300000 (5 minutes). Requested durations are clamped to
// [0, maxDuration] rather than rejected.
function createSleep(config = {}) {
  const maxDuration = config.maxDuration ?? 3e5;
  return tool({
    description: "Pause execution for a specified duration. Useful for rate limiting, polling intervals, or waiting for external processes. Maximum duration is 300 seconds (5 minutes).",
    inputSchema: zodSchema(
      z.object({
        durationMs: z.number().describe("Duration to sleep in milliseconds"),
        reason: z.string().optional().describe("Optional reason for the sleep")
      })
    ),
    execute: async ({ durationMs, reason }) => {
      try {
        const boundedMs = Math.max(0, Math.min(durationMs, maxDuration));
        const startedAt = Date.now();
        await new Promise((resolve) => setTimeout(resolve, boundedMs));
        const elapsed = Date.now() - startedAt;
        const summary = [`Slept for ${elapsed}ms`];
        if (reason) {
          summary.push(`Reason: ${reason}`);
        }
        // Tell the caller when their request was shortened by the cap
        // (or lengthened from a negative value to 0).
        if (boundedMs !== durationMs) {
          summary.push(
            `(clamped from ${durationMs}ms to ${boundedMs}ms, max: ${maxDuration}ms)`
          );
        }
        return summary.join(". ");
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        return `Sleep failed: ${message}`;
      }
    }
  });
}
|
|
35
|
+
// Default instance with the standard 5-minute cap.
var sleep = createSleep();

export {
  createSleep,
  sleep
};
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }// src/shared/ripgrep.ts
|
|
2
|
+
var _child_process = require('child_process');
|
|
3
|
+
var MAX_BUFFER_SIZE = 2e7;
|
|
4
|
+
var DEFAULT_TIMEOUT = 2e4;
|
|
5
|
+
var INSTALL_URL = "https://github.com/BurntSushi/ripgrep#installation";
|
|
6
|
+
var RipgrepNotFoundError = class extends Error {
|
|
7
|
+
constructor(message) {
|
|
8
|
+
super(
|
|
9
|
+
_nullishCoalesce(message, () => ( `ripgrep (rg) not found on PATH. Install it from: ${INSTALL_URL}`))
|
|
10
|
+
);
|
|
11
|
+
this.name = "RipgrepNotFoundError";
|
|
12
|
+
}
|
|
13
|
+
};
|
|
14
|
+
var RipgrepTimeoutError = class extends Error {
|
|
15
|
+
/**
|
|
16
|
+
* Lines captured from stdout before the process was killed.
|
|
17
|
+
* May be empty if no output arrived before the timeout.
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
constructor(message, partialResults) {
|
|
21
|
+
super(message);
|
|
22
|
+
this.name = "RipgrepTimeoutError";
|
|
23
|
+
this.partialResults = partialResults;
|
|
24
|
+
}
|
|
25
|
+
};
|
|
26
|
+
// Locate the ripgrep binary on PATH using the platform-appropriate lookup
// command ("where" on Windows, "which" elsewhere — the original hard-coded
// "which", which does not exist on Windows). Throws RipgrepNotFoundError
// when rg (or the lookup command itself) cannot be found.
function findRg() {
  const locator = process.platform === "win32" ? "where" : "which";
  try {
    const result = _child_process.execFileSync.call(void 0, locator, ["rg"], {
      encoding: "utf-8",
      timeout: 5e3
    });
    // "where" can print several matches, one per line; use the first.
    const rgPath = result.trim().split(/\r?\n/)[0];
    if (!rgPath) {
      throw new RipgrepNotFoundError();
    }
    return rgPath;
  } catch (error) {
    if (error instanceof RipgrepNotFoundError) {
      throw error;
    }
    // Lookup command failed or exited non-zero: treat as "rg not found".
    throw new RipgrepNotFoundError();
  }
}
|
|
44
|
+
// EAGAIN appears when the OS briefly refuses to spawn more threads; the
// caller reacts by retrying rg single-threaded (-j 1).
function isEagainError(stderr) {
  return stderr.includes("Resource temporarily unavailable") || stderr.includes("os error 11");
}
// Split raw rg stdout into trimmed, non-empty lines (handles CRLF output).
function parseStdout(stdout) {
  const lines = [];
  for (const raw of stdout.trim().split("\n")) {
    const line = raw.replace(/\r$/, "");
    if (line) {
      lines.push(line);
    }
  }
  return lines;
}
|
|
50
|
+
// Run ripgrep with `args` against `target` and resolve with matching lines.
// Exit code 1 (no matches) resolves to []. A transient EAGAIN failure is
// retried once single-threaded (-j 1). On timeout or stdout-buffer
// overflow, partial output is resolved with the possibly-truncated last
// line dropped; a timeout that produced no output rejects with
// RipgrepTimeoutError. `options` may carry { timeout, signal }.
async function executeRipgrep(args, target, options) {
  const rgPath = findRg();
  const timeout = _nullishCoalesce(_optionalChain([options, 'optionalAccess', _ => _.timeout]), () => ( DEFAULT_TIMEOUT));
  return new Promise((resolve, reject) => {
    // Shared completion handler for both the first attempt and the retry.
    const handleResult = (error, stdout, stderr, isRetry) => {
      if (!error) {
        resolve(parseStdout(stdout));
        return;
      }
      // rg exits 1 when the search ran fine but found nothing.
      if (error.code === 1) {
        resolve([]);
        return;
      }
      const CRITICAL_CODES = ["ENOENT", "EACCES", "EPERM"];
      if (CRITICAL_CODES.includes(error.code)) {
        reject(error);
        return;
      }
      // Thread-spawn EAGAIN: retry once with a single worker thread.
      if (!isRetry && isEagainError(stderr)) {
        _child_process.execFile.call(void 0,
          rgPath,
          ["-j", "1", ...args, target],
          {
            maxBuffer: MAX_BUFFER_SIZE,
            signal: _optionalChain([options, 'optionalAccess', _2 => _2.signal]),
            timeout,
            killSignal: "SIGKILL"
          },
          (retryErr, retryStdout, retryStderr) => {
            handleResult(retryErr, retryStdout, retryStderr, true);
          }
        );
        return;
      }
      const hasOutput = stdout && stdout.trim().length > 0;
      const isTimeout = error.signal === "SIGTERM" || error.signal === "SIGKILL" || error.code === "ABORT_ERR";
      const isBufferOverflow = error.code === "ERR_CHILD_PROCESS_STDIO_MAXBUFFER";
      let lines = [];
      if (hasOutput) {
        lines = parseStdout(stdout);
        // The last line may have been cut mid-write when the process was
        // killed, so drop it from partial results.
        if (lines.length > 0 && (isTimeout || isBufferOverflow)) {
          lines = lines.slice(0, -1);
        }
      }
      if (isTimeout && lines.length === 0) {
        reject(
          new RipgrepTimeoutError(
            `Ripgrep search timed out after ${timeout / 1e3} seconds. Try a more specific path or pattern.`,
            lines
          )
        );
        return;
      }
      resolve(lines);
    };
    _child_process.execFile.call(void 0,
      rgPath,
      [...args, target],
      {
        maxBuffer: MAX_BUFFER_SIZE,
        signal: _optionalChain([options, 'optionalAccess', _3 => _3.signal]),
        timeout,
        killSignal: "SIGKILL"
      },
      (error, stdout, stderr) => {
        handleResult(error, stdout, stderr, false);
      }
    );
  });
}



exports.executeRipgrep = executeRipgrep;
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
// src/shared/ripgrep.ts
import { execFile, execFileSync } from "child_process";
// Cap on captured stdout (20 MB) before execFile fails with MAXBUFFER.
var MAX_BUFFER_SIZE = 2e7;
// Default per-search timeout: 20 seconds.
var DEFAULT_TIMEOUT = 2e4;
var INSTALL_URL = "https://github.com/BurntSushi/ripgrep#installation";
// Thrown when the rg binary cannot be located on PATH.
var RipgrepNotFoundError = class extends Error {
  constructor(message) {
    super(
      message ?? `ripgrep (rg) not found on PATH. Install it from: ${INSTALL_URL}`
    );
    this.name = "RipgrepNotFoundError";
  }
};
// Thrown when a search times out with no usable partial output.
var RipgrepTimeoutError = class extends Error {
  /**
   * Lines captured from stdout before the process was killed.
   * May be empty if no output arrived before the timeout.
   */
  partialResults;
  constructor(message, partialResults) {
    super(message);
    this.name = "RipgrepTimeoutError";
    this.partialResults = partialResults;
  }
};
|
|
26
|
+
// Locate the ripgrep binary on PATH using the platform-appropriate lookup
// command ("where" on Windows, "which" elsewhere — the original hard-coded
// "which", which does not exist on Windows). Throws RipgrepNotFoundError
// when rg (or the lookup command itself) cannot be found.
function findRg() {
  const locator = process.platform === "win32" ? "where" : "which";
  try {
    const result = execFileSync(locator, ["rg"], {
      encoding: "utf-8",
      timeout: 5e3
    });
    // "where" can print several matches, one per line; use the first.
    const rgPath = result.trim().split(/\r?\n/)[0];
    if (!rgPath) {
      throw new RipgrepNotFoundError();
    }
    return rgPath;
  } catch (error) {
    if (error instanceof RipgrepNotFoundError) {
      throw error;
    }
    // Lookup command failed or exited non-zero: treat as "rg not found".
    throw new RipgrepNotFoundError();
  }
}
|
|
44
|
+
// EAGAIN appears when the OS briefly refuses to spawn more threads; the
// caller reacts by retrying rg single-threaded (-j 1).
function isEagainError(stderr) {
  return stderr.includes("Resource temporarily unavailable") || stderr.includes("os error 11");
}
// Split raw rg stdout into trimmed, non-empty lines (handles CRLF output).
function parseStdout(stdout) {
  const lines = [];
  for (const raw of stdout.trim().split("\n")) {
    const line = raw.replace(/\r$/, "");
    if (line) {
      lines.push(line);
    }
  }
  return lines;
}
|
|
50
|
+
// Run ripgrep with `args` against `target` and resolve with matching lines.
// Exit code 1 (no matches) resolves to []. A transient EAGAIN failure is
// retried once single-threaded (-j 1). On timeout or stdout-buffer
// overflow, partial output is resolved with the possibly-truncated last
// line dropped; a timeout that produced no output rejects with
// RipgrepTimeoutError. `options` may carry { timeout, signal }.
async function executeRipgrep(args, target, options) {
  const rgPath = findRg();
  const timeout = options?.timeout ?? DEFAULT_TIMEOUT;
  return new Promise((resolve, reject) => {
    // Shared completion handler for both the first attempt and the retry.
    const handleResult = (error, stdout, stderr, isRetry) => {
      if (!error) {
        resolve(parseStdout(stdout));
        return;
      }
      // rg exits 1 when the search ran fine but found nothing.
      if (error.code === 1) {
        resolve([]);
        return;
      }
      const CRITICAL_CODES = ["ENOENT", "EACCES", "EPERM"];
      if (CRITICAL_CODES.includes(error.code)) {
        reject(error);
        return;
      }
      // Thread-spawn EAGAIN: retry once with a single worker thread.
      if (!isRetry && isEagainError(stderr)) {
        execFile(
          rgPath,
          ["-j", "1", ...args, target],
          {
            maxBuffer: MAX_BUFFER_SIZE,
            signal: options?.signal,
            timeout,
            killSignal: "SIGKILL"
          },
          (retryErr, retryStdout, retryStderr) => {
            handleResult(retryErr, retryStdout, retryStderr, true);
          }
        );
        return;
      }
      const hasOutput = stdout && stdout.trim().length > 0;
      const isTimeout = error.signal === "SIGTERM" || error.signal === "SIGKILL" || error.code === "ABORT_ERR";
      const isBufferOverflow = error.code === "ERR_CHILD_PROCESS_STDIO_MAXBUFFER";
      let lines = [];
      if (hasOutput) {
        lines = parseStdout(stdout);
        // The last line may have been cut mid-write when the process was
        // killed, so drop it from partial results.
        if (lines.length > 0 && (isTimeout || isBufferOverflow)) {
          lines = lines.slice(0, -1);
        }
      }
      if (isTimeout && lines.length === 0) {
        reject(
          new RipgrepTimeoutError(
            `Ripgrep search timed out after ${timeout / 1e3} seconds. Try a more specific path or pattern.`,
            lines
          )
        );
        return;
      }
      resolve(lines);
    };
    execFile(
      rgPath,
      [...args, target],
      {
        maxBuffer: MAX_BUFFER_SIZE,
        signal: options?.signal,
        timeout,
        killSignal: "SIGKILL"
      },
      (error, stdout, stderr) => {
        handleResult(error, stdout, stderr, false);
      }
    );
  });
}

export {
  executeRipgrep
};
|