agentool 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +627 -52
- package/dist/ask-user/index.cjs +8 -0
- package/dist/ask-user/index.d.cts +68 -0
- package/dist/ask-user/index.d.ts +68 -0
- package/dist/ask-user/index.js +8 -0
- package/dist/bash/index.cjs +8 -0
- package/dist/bash/index.d.cts +63 -0
- package/dist/bash/index.d.ts +63 -0
- package/dist/bash/index.js +8 -0
- package/dist/chunk-3EPGFWZV.cjs +30 -0
- package/dist/chunk-3VO6NETR.cjs +79 -0
- package/dist/chunk-4YI2H55A.js +142 -0
- package/dist/chunk-5NW4OGRI.cjs +99 -0
- package/dist/chunk-6MDPYALY.js +196 -0
- package/dist/chunk-6PQLFDGT.js +117 -0
- package/dist/chunk-7QL4BQCH.js +40 -0
- package/dist/chunk-CAEVLIQB.cjs +117 -0
- package/dist/chunk-CGTPF6IS.js +90 -0
- package/dist/chunk-EA3YV7ZG.js +79 -0
- package/dist/chunk-FAEGCFTO.js +136 -0
- package/dist/chunk-FV2R5FFQ.cjs +102 -0
- package/dist/chunk-FW3UJ622.cjs +59 -0
- package/dist/chunk-G3ITTPGX.js +99 -0
- package/dist/chunk-HDKXSKMO.js +30 -0
- package/dist/chunk-HZAQRHBT.js +99 -0
- package/dist/chunk-I3ONDY7P.js +46 -0
- package/dist/chunk-I6KFFQPV.cjs +58 -0
- package/dist/chunk-IMZQ7ELK.cjs +196 -0
- package/dist/chunk-JCTBB7H2.cjs +40 -0
- package/dist/chunk-K77GC2QI.js +59 -0
- package/dist/chunk-LPV5CN2K.js +58 -0
- package/dist/chunk-MF7CJVIZ.js +40 -0
- package/dist/chunk-MIYA7TNR.cjs +123 -0
- package/dist/chunk-MJCAXASI.js +123 -0
- package/dist/chunk-MXFW3XY6.cjs +73 -0
- package/dist/chunk-ONBH74ZV.cjs +90 -0
- package/dist/chunk-OXLQ7QVL.cjs +40 -0
- package/dist/chunk-QEJV2KZ4.cjs +159 -0
- package/dist/chunk-QZ5GS6HW.cjs +46 -0
- package/dist/chunk-S6QEY7UY.js +73 -0
- package/dist/chunk-SUSAPI5W.cjs +142 -0
- package/dist/chunk-TBVHHF3H.cjs +47 -0
- package/dist/chunk-U2YMJM25.cjs +115 -0
- package/dist/chunk-VLNDEVKS.js +102 -0
- package/dist/chunk-XKG2A3EW.js +159 -0
- package/dist/chunk-XLD2Y3SS.cjs +136 -0
- package/dist/chunk-Y7KOKDFP.js +115 -0
- package/dist/chunk-YPPPGGLA.cjs +99 -0
- package/dist/chunk-ZHCMEQJJ.js +47 -0
- package/dist/context-compaction/index.cjs +8 -0
- package/dist/context-compaction/index.d.cts +77 -0
- package/dist/context-compaction/index.d.ts +77 -0
- package/dist/context-compaction/index.js +8 -0
- package/dist/diff/index.cjs +9 -0
- package/dist/diff/index.d.cts +72 -0
- package/dist/diff/index.d.ts +72 -0
- package/dist/diff/index.js +9 -0
- package/dist/edit/index.cjs +10 -0
- package/dist/edit/index.d.cts +53 -0
- package/dist/edit/index.d.ts +53 -0
- package/dist/edit/index.js +10 -0
- package/dist/glob/index.cjs +10 -0
- package/dist/glob/index.d.cts +47 -0
- package/dist/glob/index.d.ts +47 -0
- package/dist/glob/index.js +10 -0
- package/dist/grep/index.cjs +10 -0
- package/dist/grep/index.d.cts +50 -0
- package/dist/grep/index.d.ts +50 -0
- package/dist/grep/index.js +10 -0
- package/dist/http-request/index.cjs +8 -0
- package/dist/http-request/index.d.cts +60 -0
- package/dist/http-request/index.d.ts +60 -0
- package/dist/http-request/index.js +8 -0
- package/dist/index.cjs +102 -0
- package/dist/index.d.cts +18 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.js +102 -0
- package/dist/lsp/index.cjs +10 -0
- package/dist/lsp/index.d.cts +38 -0
- package/dist/lsp/index.d.ts +38 -0
- package/dist/lsp/index.js +10 -0
- package/dist/memory/index.cjs +9 -0
- package/dist/memory/index.d.cts +63 -0
- package/dist/memory/index.d.ts +63 -0
- package/dist/memory/index.js +9 -0
- package/dist/multi-edit/index.cjs +11 -0
- package/dist/multi-edit/index.d.cts +72 -0
- package/dist/multi-edit/index.d.ts +72 -0
- package/dist/multi-edit/index.js +11 -0
- package/dist/read/index.cjs +10 -0
- package/dist/read/index.d.cts +67 -0
- package/dist/read/index.d.ts +67 -0
- package/dist/read/index.js +10 -0
- package/dist/sleep/index.cjs +8 -0
- package/dist/sleep/index.d.cts +60 -0
- package/dist/sleep/index.d.ts +60 -0
- package/dist/sleep/index.js +8 -0
- package/dist/task/index.cjs +8 -0
- package/dist/task/index.d.cts +67 -0
- package/dist/task/index.d.ts +67 -0
- package/dist/task/index.js +8 -0
- package/dist/types-3QPDuCXN.d.cts +45 -0
- package/dist/types-3QPDuCXN.d.ts +45 -0
- package/dist/web-fetch/index.cjs +8 -0
- package/dist/web-fetch/index.d.cts +56 -0
- package/dist/web-fetch/index.d.ts +56 -0
- package/dist/web-fetch/index.js +8 -0
- package/dist/write/index.cjs +10 -0
- package/dist/write/index.d.cts +47 -0
- package/dist/write/index.d.ts +47 -0
- package/dist/write/index.js +10 -0
- package/package.json +145 -20
- package/dist/core/index.d.ts +0 -20
- package/dist/core/index.js +0 -1
- package/dist/core/index.js.map +0 -1
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
// src/lsp/index.ts
|
|
2
|
+
import { tool } from "ai";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
import { spawn } from "child_process";
|
|
5
|
+
import { readFile } from "fs/promises";
|
|
6
|
+
import { pathToFileURL } from "url";
|
|
7
|
+
import { resolve, extname } from "path";
|
|
8
|
+
// Monotonically increasing id shared by every JSON-RPC request in this module.
var nextId = 1;

// Frame a JSON-RPC message with the LSP base-protocol Content-Length header.
function encodeJsonRpc(msg) {
  const payload = JSON.stringify(msg);
  return Buffer.from(`Content-Length: ${Buffer.byteLength(payload)}\r\n\r\n${payload}`);
}

// Extract every complete JSON-RPC message from a raw stdout buffer.
// An incomplete trailing frame is left unparsed; a malformed JSON body is
// skipped but framing still advances past it.
function parseJsonRpcResponses(buffer) {
  const messages = [];
  let cursor = 0;
  for (;;) {
    const headerEnd = buffer.indexOf("\r\n\r\n", cursor);
    if (headerEnd === -1) break;
    const header = buffer.subarray(cursor, headerEnd).toString();
    const lengthMatch = /Content-Length:\s*(\d+)/i.exec(header);
    if (!lengthMatch) break;
    const bodyStart = headerEnd + 4;
    const bodyEnd = bodyStart + Number.parseInt(lengthMatch[1], 10);
    if (bodyEnd > buffer.length) break;
    try {
      messages.push(JSON.parse(buffer.subarray(bodyStart, bodyEnd).toString()));
    } catch {
      // Malformed body: skip it, keep scanning subsequent frames.
    }
    cursor = bodyEnd;
  }
  return messages;
}
|
|
34
|
+
/**
 * Map a tool operation name to its LSP wire method.
 *
 * Fix: the previous object-literal lookup (`map[operation] ?? operation`)
 * resolved inherited Object.prototype keys, so an operation named e.g.
 * "toString" returned a function instead of the string itself. A switch
 * only matches own, intended keys; unknown operations pass through as-is.
 */
function operationToMethod(operation) {
  switch (operation) {
    case "goToDefinition":
      return "textDocument/definition";
    case "findReferences":
      return "textDocument/references";
    case "hover":
      return "textDocument/hover";
    case "documentSymbol":
      return "textDocument/documentSymbol";
    case "workspaceSymbol":
      return "workspace/symbol";
    case "goToImplementation":
      return "textDocument/implementation";
    // Call-hierarchy queries are two-step: both start with prepareCallHierarchy.
    case "incomingCalls":
    case "outgoingCalls":
      return "textDocument/prepareCallHierarchy";
    default:
      return operation;
  }
}

/**
 * Build the request params object for an LSP operation.
 * workspaceSymbol needs only a query; documentSymbol needs only the document;
 * findReferences additionally asks for the declaration; everything else is a
 * plain textDocument + position request.
 */
function buildRequestParams(op, uri, line, char, query) {
  if (op === "workspaceSymbol") return { query: query ?? "" };
  const textDocument = { uri };
  if (op === "documentSymbol") return { textDocument };
  const position = { line, character: char };
  return op === "findReferences"
    ? { textDocument, position, context: { includeDeclaration: true } }
    : { textDocument, position };
}
|
|
55
|
+
// Write a JSON-RPC request to the server's stdin and return its id so the
// caller can wait for the matching response.
function sendRequest(proc, method, params) {
  const id = nextId++;
  proc.stdin.write(encodeJsonRpc({ jsonrpc: "2.0", id, method, params }));
  return id;
}

// Write a JSON-RPC notification (no id, so no response is expected).
function sendNotification(proc, method, params) {
  proc.stdin.write(encodeJsonRpc({ jsonrpc: "2.0", method, params }));
}
|
|
65
|
+
/**
 * Wait for the JSON-RPC response with a given id on the server's stdout.
 * Accumulates raw chunks and reparses the whole buffer on each data event
 * (server notifications with other ids are simply ignored). Rejects on
 * stream error or after timeoutMs. Listeners and the timer are always
 * detached before settling.
 */
function waitForResponse(proc, id, timeoutMs) {
  return new Promise((resolveP, reject) => {
    let accumulated = Buffer.alloc(0);

    const timer = setTimeout(() => {
      detach();
      reject(new Error(`LSP request timed out after ${timeoutMs}ms`));
    }, timeoutMs);

    const detach = () => {
      clearTimeout(timer);
      proc.stdout.off("data", handleData);
      proc.stdout.off("error", handleError);
    };

    const handleData = (chunk) => {
      accumulated = Buffer.concat([accumulated, chunk]);
      const match = parseJsonRpcResponses(accumulated).find((m) => m.id === id);
      if (match) {
        detach();
        resolveP(match.error ? { error: match.error } : { result: match.result });
      }
    };

    const handleError = (err) => {
      detach();
      reject(err);
    };

    proc.stdout.on("data", handleData);
    proc.stdout.on("error", handleError);
  });
}
|
|
96
|
+
/**
 * Politely shut down an LSP server (shutdown request, then exit notification),
 * capping the wait at 5s. Best effort: any failure is swallowed, and the
 * process is always killed afterwards.
 */
async function shutdownServer(proc, ms) {
  try {
    const requestId = sendRequest(proc, "shutdown", {});
    await waitForResponse(proc, requestId, Math.min(ms, 5e3));
    sendNotification(proc, "exit", {});
  } catch {
    // Ignore: the process is killed below regardless.
  } finally {
    proc.kill();
  }
}
|
|
106
|
+
/**
 * Spawn the configured LSP server, run the initialize handshake, open the
 * target document, execute one operation, and return the result as pretty
 * JSON (or an "Error [lsp]: ..." string on a server-reported error).
 *
 * Fix: the `spawnError` promise used in Promise.race could reject AFTER all
 * races had settled (a late "error" event on the child), which surfaced as
 * an unhandled promise rejection and can crash the Node process. A no-op
 * `.catch` marks the rejection as handled while the races still observe it.
 *
 * @param {{command: string, args?: string[]}} serverConfig - server launch spec
 * @param {{operation: string, filePath: string, line?: number, character?: number, query?: string, cwd: string}} params
 * @param {number} timeoutMs - per-request timeout
 * @throws propagates spawn/read/timeout errors after killing the child
 */
async function executeLspOperation(serverConfig, params, timeoutMs) {
  const absolutePath = resolve(params.cwd, params.filePath);
  const uri = pathToFileURL(absolutePath).href;
  const proc = spawn(serverConfig.command, serverConfig.args ?? [], {
    cwd: params.cwd,
    stdio: ["pipe", "pipe", "pipe"]
  });
  const spawnError = new Promise((_, reject) => {
    proc.on("error", (err) => reject(err));
  });
  // Prevent an unhandled rejection if the child errors once nothing is racing.
  spawnError.catch(() => {});
  try {
    const initId = sendRequest(proc, "initialize", {
      processId: process.pid,
      capabilities: {},
      rootUri: pathToFileURL(params.cwd).href
    });
    // Every wait races against a spawn failure so a bad command fails fast.
    const rpc = (id) => Promise.race([waitForResponse(proc, id, timeoutMs), spawnError]);
    await rpc(initId);
    sendNotification(proc, "initialized", {});
    const content = await readFile(absolutePath, "utf-8");
    // Best-effort language id: the bare extension, or "plaintext" without one.
    const langId = extname(absolutePath).replace(".", "") || "plaintext";
    sendNotification(proc, "textDocument/didOpen", {
      textDocument: { uri, languageId: langId, version: 1, text: content }
    });
    const method = operationToMethod(params.operation);
    const reqParams = buildRequestParams(params.operation, uri, params.line ?? 0, params.character ?? 0, params.query);
    let response = await rpc(sendRequest(proc, method, reqParams));
    if (response.error) return `Error [lsp]: Server error: ${response.error.message} (code ${response.error.code})`;
    // Call-hierarchy operations are two-step: prepareCallHierarchy returned
    // items; resolve incoming/outgoing calls for the first one.
    if ((params.operation === "incomingCalls" || params.operation === "outgoingCalls") && Array.isArray(response.result) && response.result.length > 0) {
      const callMethod = params.operation === "incomingCalls" ? "callHierarchy/incomingCalls" : "callHierarchy/outgoingCalls";
      response = await rpc(sendRequest(proc, callMethod, { item: response.result[0] }));
      if (response.error) return `Error [lsp]: Server error: ${response.error.message} (code ${response.error.code})`;
    }
    await shutdownServer(proc, timeoutMs);
    // JSON.stringify(undefined) is undefined, not "undefined" — normalize.
    return JSON.stringify(response.result, null, 2) ?? "null";
  } catch (error) {
    proc.kill();
    throw error;
  }
}
|
|
146
|
+
// Every operation the lsp tool accepts.
const LSP_OPERATIONS = [
  "goToDefinition",
  "findReferences",
  "hover",
  "documentSymbol",
  "workspaceSymbol",
  "goToImplementation",
  "incomingCalls",
  "outgoingCalls"
];

/**
 * Build the `lsp` tool. Servers are keyed by file extension (e.g. ".ts").
 * Without any configured server the tool returns an explanatory error string
 * instead of throwing.
 */
function createLsp(config = {}) {
  const timeoutMs = config.timeout ?? 3e4;
  return tool({
    description: "Perform language server operations like go-to-definition, find-references, and hover. Requires LSP server configuration. Supports 8 operations: goToDefinition, findReferences, hover, documentSymbol, workspaceSymbol, goToImplementation, incomingCalls, outgoingCalls.",
    inputSchema: z.object({
      operation: z.enum(LSP_OPERATIONS).describe("The LSP operation to perform"),
      filePath: z.string().describe("Path to the file"),
      line: z.number().optional().describe("Line number (0-indexed)"),
      character: z.number().optional().describe("Character offset (0-indexed)"),
      query: z.string().optional().describe("Search query for workspaceSymbol")
    }),
    execute: async ({ operation, filePath, line, character, query }) => {
      const servers = config.servers;
      if (!servers || Object.keys(servers).length === 0) {
        return 'Error [lsp]: No LSP servers configured. Provide server configuration via createLsp({ servers: { ".ts": { command: "typescript-language-server", args: ["--stdio"] } } })';
      }
      // Fall back to the substring after the last dot when extname yields "".
      const ext = extname(filePath) || "." + filePath.split(".").pop();
      const serverConfig = servers[ext];
      if (!serverConfig) {
        return `Error [lsp]: No LSP server configured for ${ext} files. Available: ${Object.keys(servers).join(", ")}`;
      }
      try {
        return await executeLspOperation(
          serverConfig,
          { operation, filePath, line, character, query, cwd: config.cwd ?? process.cwd() },
          timeoutMs
        );
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [lsp]: ${operation} failed for ${filePath}: ${msg}`;
      }
    }
  });
}

// Default instance: no servers configured until the caller provides them.
const lsp = createLsp();
|
|
191
|
+
|
|
192
|
+
export {
|
|
193
|
+
executeLspOperation,
|
|
194
|
+
createLsp,
|
|
195
|
+
lsp
|
|
196
|
+
};
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
// src/shared/file.ts
|
|
2
|
+
import { createReadStream } from "fs";
|
|
3
|
+
import { mkdir, readFile, stat, writeFile } from "fs/promises";
|
|
4
|
+
import { dirname } from "path";
|
|
5
|
+
// Files below this size (10 MiB) are read whole; larger ones are streamed.
var FAST_PATH_MAX_SIZE = 10 * 1024 * 1024;

/**
 * Prefix each line of `content` with a line number starting at `startLine`.
 * Splits on LF or CRLF; empty content yields an empty string.
 */
function addLineNumbers({ content, startLine }) {
  if (!content) {
    return "";
  }
  return content
    .split(/\r?\n/)
    .map((line, i) => `${startLine + i} ${line}`)
    .join("\n");
}
|
|
16
|
+
/**
 * Write UTF-8 text to a file, creating any missing parent directories first.
 */
async function writeTextContent(filePath, content) {
  const parent = dirname(filePath);
  await mkdir(parent, { recursive: true });
  await writeFile(filePath, content, { encoding: "utf-8" });
}

/**
 * Whether a path exists (file or directory). Never throws: any stat failure
 * (missing path, permissions) is reported as false.
 */
async function pathExists(path) {
  try {
    await stat(path);
    return true;
  } catch {
    return false;
  }
}
|
|
28
|
+
/**
 * Read a slice of lines from a file. Small regular files (< FAST_PATH_MAX_SIZE)
 * are read whole; anything else is streamed line by line. Directories raise an
 * EISDIR-style error.
 */
async function readFileInRange(filePath, offset = 0, maxLines) {
  const stats = await stat(filePath);
  if (stats.isDirectory()) {
    throw new Error(
      `EISDIR: illegal operation on a directory, read '${filePath}'`
    );
  }
  const useFastPath = stats.isFile() && stats.size < FAST_PATH_MAX_SIZE;
  if (useFastPath) {
    const raw = await readFile(filePath, { encoding: "utf-8" });
    return readFileInRangeFast(raw, offset, maxLines);
  }
  return readFileInRangeStreaming(filePath, offset, maxLines);
}
|
|
41
|
+
/**
 * In-memory line slicing: strip a leading UTF-8 BOM (U+FEFF), drop all CRs,
 * split on LF, and return the [offset, offset+maxLines) window together with
 * its line count and the file's total line count.
 */
function readFileInRangeFast(raw, offset, maxLines) {
  const text = raw.charCodeAt(0) === 0xfeff ? raw.slice(1) : raw;
  const allLines = text.replace(/\r/g, "").split("\n");
  const end = maxLines === void 0 ? allLines.length : offset + maxLines;
  const selected = allLines.slice(offset, end);
  return {
    content: selected.join("\n"),
    lineCount: selected.length,
    totalLines: allLines.length
  };
}
|
|
53
|
+
/**
 * Streamed line slicing for large files. Reads the whole file (the total line
 * count is needed), but only materializes lines inside [offset, offset+maxLines).
 * Strips a leading BOM; a trailing CR is trimmed only from selected lines.
 */
function readFileInRangeStreaming(filePath, offset, maxLines) {
  return new Promise((resolvePromise, reject) => {
    const lastWanted = maxLines === void 0 ? Infinity : offset + maxLines;
    const selected = [];
    let lineIndex = 0;
    let carry = "";
    let sawFirstChunk = false;

    const pushIfInRange = (line) => {
      if (lineIndex >= offset && lineIndex < lastWanted) {
        selected.push(line.endsWith("\r") ? line.slice(0, -1) : line);
      }
    };

    const stream = createReadStream(filePath, {
      encoding: "utf8",
      highWaterMark: 512 * 1024
    });

    stream.on("data", (raw) => {
      let chunk = String(raw);
      if (!sawFirstChunk) {
        sawFirstChunk = true;
        if (chunk.charCodeAt(0) === 0xfeff) {
          chunk = chunk.slice(1);
        }
      }
      const data = carry.length > 0 ? carry + chunk : chunk;
      carry = "";
      let from = 0;
      for (let nl = data.indexOf("\n", from); nl !== -1; nl = data.indexOf("\n", from)) {
        pushIfInRange(data.slice(from, nl));
        lineIndex++;
        from = nl + 1;
      }
      // Keep the unterminated tail only if its line could still be selected.
      if (from < data.length && lineIndex >= offset && lineIndex < lastWanted) {
        carry = data.slice(from);
      }
    });

    stream.once("end", () => {
      if (carry.length > 0) {
        pushIfInRange(carry);
      }
      lineIndex++;
      resolvePromise({
        content: selected.join("\n"),
        lineCount: selected.length,
        totalLines: lineIndex
      });
    });

    stream.once("error", reject);
  });
}
|
|
111
|
+
|
|
112
|
+
export {
|
|
113
|
+
addLineNumbers,
|
|
114
|
+
writeTextContent,
|
|
115
|
+
pathExists,
|
|
116
|
+
readFileInRange
|
|
117
|
+
};
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import {
|
|
2
|
+
pathExists,
|
|
3
|
+
writeTextContent
|
|
4
|
+
} from "./chunk-6PQLFDGT.js";
|
|
5
|
+
import {
|
|
6
|
+
expandPath
|
|
7
|
+
} from "./chunk-I3ONDY7P.js";
|
|
8
|
+
|
|
9
|
+
// src/write/index.ts
|
|
10
|
+
import { tool } from "ai";
|
|
11
|
+
import { z } from "zod";
|
|
12
|
+
/**
 * Build the `write` tool: writes text to a file (creating parents), reporting
 * whether the file was created or updated and how many bytes were written.
 */
function createWrite(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  return tool({
    description: "Write text content to a file, creating parent directories as needed. If the file exists it is overwritten. Use this to create new files or replace existing file contents.",
    inputSchema: z.object({
      file_path: z.string().describe("Path to the file to write"),
      content: z.string().describe("Text content to write to the file")
    }),
    execute: async ({ file_path, content }) => {
      try {
        const absolutePath = expandPath(file_path, cwd);
        // Check existence before writing so the verb reflects prior state.
        const alreadyThere = await pathExists(absolutePath);
        await writeTextContent(absolutePath, content);
        const byteCount = Buffer.byteLength(content, "utf-8");
        return `${alreadyThere ? "Updated" : "Created"} file: ${absolutePath} (${byteCount} bytes)`;
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        return `Error [write]: Failed to write file: ${message}`;
      }
    }
  });
}

// Default instance bound to the current working directory.
const write = createWrite();
|
|
36
|
+
|
|
37
|
+
export {
|
|
38
|
+
createWrite,
|
|
39
|
+
write
|
|
40
|
+
};
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/shared/file.ts
|
|
2
|
+
var _fs = require('fs');
|
|
3
|
+
var _promises = require('fs/promises');
|
|
4
|
+
var _path = require('path');
|
|
5
|
+
var FAST_PATH_MAX_SIZE = 10 * 1024 * 1024;
|
|
6
|
+
/**
 * Prefix each line of `content` with a line number starting at `startLine`.
 * Splits on LF or CRLF; empty content yields an empty string.
 */
function addLineNumbers({ content, startLine }) {
  if (!content) {
    return "";
  }
  return content
    .split(/\r?\n/)
    .map((line, i) => `${startLine + i} ${line}`)
    .join("\n");
}

/**
 * Write UTF-8 text to a file, creating any missing parent directories first.
 */
async function writeTextContent(filePath, content) {
  await _promises.mkdir(_path.dirname(filePath), { recursive: true });
  await _promises.writeFile(filePath, content, { encoding: "utf-8" });
}

/**
 * Whether a path exists. Never throws: any stat failure is reported as false.
 */
async function pathExists(path) {
  try {
    await _promises.stat(path);
    return true;
  } catch (e) {
    return false;
  }
}
|
|
28
|
+
/**
 * Read a slice of lines from a file. Small regular files (< FAST_PATH_MAX_SIZE)
 * are read whole; anything else is streamed. Directories raise an EISDIR-style
 * error.
 */
async function readFileInRange(filePath, offset = 0, maxLines) {
  const stats = await _promises.stat(filePath);
  if (stats.isDirectory()) {
    throw new Error(
      `EISDIR: illegal operation on a directory, read '${filePath}'`
    );
  }
  if (stats.isFile() && stats.size < FAST_PATH_MAX_SIZE) {
    const raw = await _promises.readFile(filePath, { encoding: "utf-8" });
    return readFileInRangeFast(raw, offset, maxLines);
  }
  return readFileInRangeStreaming(filePath, offset, maxLines);
}

/**
 * In-memory line slicing: strip a leading UTF-8 BOM, drop all CRs, split on
 * LF, and return the [offset, offset+maxLines) window plus counts.
 */
function readFileInRangeFast(raw, offset, maxLines) {
  const text = raw.charCodeAt(0) === 0xfeff ? raw.slice(1) : raw;
  const allLines = text.replace(/\r/g, "").split("\n");
  const end = maxLines === void 0 ? allLines.length : offset + maxLines;
  const selected = allLines.slice(offset, end);
  return {
    content: selected.join("\n"),
    lineCount: selected.length,
    totalLines: allLines.length
  };
}
|
|
53
|
+
/**
 * Streamed line slicing for large files. Reads the whole file (the total line
 * count is needed), but only materializes lines inside [offset, offset+maxLines).
 * Strips a leading BOM; a trailing CR is trimmed only from selected lines.
 */
function readFileInRangeStreaming(filePath, offset, maxLines) {
  return new Promise((resolve, reject) => {
    const lastWanted = maxLines === void 0 ? Infinity : offset + maxLines;
    const selected = [];
    let lineIndex = 0;
    let carry = "";
    let sawFirstChunk = false;

    const pushIfInRange = (line) => {
      if (lineIndex >= offset && lineIndex < lastWanted) {
        selected.push(line.endsWith("\r") ? line.slice(0, -1) : line);
      }
    };

    const stream = _fs.createReadStream(filePath, {
      encoding: "utf8",
      highWaterMark: 512 * 1024
    });

    stream.on("data", (raw) => {
      let chunk = String(raw);
      if (!sawFirstChunk) {
        sawFirstChunk = true;
        if (chunk.charCodeAt(0) === 0xfeff) {
          chunk = chunk.slice(1);
        }
      }
      const data = carry.length > 0 ? carry + chunk : chunk;
      carry = "";
      let from = 0;
      for (let nl = data.indexOf("\n", from); nl !== -1; nl = data.indexOf("\n", from)) {
        pushIfInRange(data.slice(from, nl));
        lineIndex++;
        from = nl + 1;
      }
      // Keep the unterminated tail only if its line could still be selected.
      if (from < data.length && lineIndex >= offset && lineIndex < lastWanted) {
        carry = data.slice(from);
      }
    });

    stream.once("end", () => {
      if (carry.length > 0) {
        pushIfInRange(carry);
      }
      lineIndex++;
      resolve({
        content: selected.join("\n"),
        lineCount: selected.length,
        totalLines: lineIndex
      });
    });

    stream.once("error", reject);
  });
}
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
exports.addLineNumbers = addLineNumbers; exports.writeTextContent = writeTextContent; exports.pathExists = pathExists; exports.readFileInRange = readFileInRange;
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import {
|
|
2
|
+
expandPath
|
|
3
|
+
} from "./chunk-I3ONDY7P.js";
|
|
4
|
+
|
|
5
|
+
// src/diff/index.ts
|
|
6
|
+
import { readFile as readFile2 } from "fs/promises";
|
|
7
|
+
import { tool } from "ai";
|
|
8
|
+
import { z } from "zod";
|
|
9
|
+
|
|
10
|
+
// src/shared/diff.ts
|
|
11
|
+
import { readFile } from "fs/promises";
|
|
12
|
+
import { createTwoFilesPatch } from "diff";
|
|
13
|
+
/**
 * Produce a unified diff between two strings. Identical inputs short-circuit
 * to "No differences found." without invoking the diff library.
 * Options: context (default 3), oldLabel (default "a"), newLabel (default "b").
 */
function diffStrings(oldContent, newContent, options) {
  if (oldContent === newContent) {
    return "No differences found.";
  }
  return createTwoFilesPatch(
    options?.oldLabel ?? "a",
    options?.newLabel ?? "b",
    oldContent,
    newContent,
    void 0,
    void 0,
    { context: options?.context ?? 3 }
  );
}

/**
 * Produce a unified diff between two files, labeling each side with its path.
 * Both files are read concurrently.
 */
async function diffFiles(oldFilePath, newFilePath, options) {
  const [oldContent, newContent] = await Promise.all([
    readFile(oldFilePath, "utf-8"),
    readFile(newFilePath, "utf-8")
  ]);
  return diffStrings(oldContent, newContent, {
    ...options,
    oldLabel: oldFilePath,
    newLabel: newFilePath
  });
}
|
|
41
|
+
|
|
42
|
+
// src/diff/index.ts
|
|
43
|
+
/**
 * Build the `diff` tool. Three input modes: two file paths, two content
 * strings, or one file path compared against provided content.
 */
function createDiff(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  return tool({
    description: "Generate a unified diff between two files or two strings. Provide file_path + other_file_path to compare files, or old_content + new_content to compare strings. You can also provide file_path with old_content or new_content to compare a file against provided content.",
    inputSchema: z.object({
      file_path: z.string().optional().describe("Path to the first file (absolute or relative to cwd)"),
      other_file_path: z.string().optional().describe("Path to the second file (absolute or relative to cwd)"),
      old_content: z.string().optional().describe("The original content string"),
      new_content: z.string().optional().describe("The modified content string")
    }),
    execute: async ({ file_path, other_file_path, old_content, new_content }) => {
      try {
        // Mode 1: two files on disk.
        if (file_path && other_file_path) {
          return await diffFiles(expandPath(file_path, cwd), expandPath(other_file_path, cwd));
        }
        // Mode 2: two literal strings (only when no file path is given).
        if (old_content !== void 0 && new_content !== void 0 && !file_path) {
          return diffStrings(old_content, new_content);
        }
        // Mode 3: one file compared against provided content.
        if (file_path && (old_content !== void 0 || new_content !== void 0)) {
          const resolvedPath = expandPath(file_path, cwd);
          const fileContent = await readFile2(resolvedPath, "utf-8");
          if (old_content !== void 0) {
            return diffStrings(old_content, fileContent, {
              oldLabel: "provided",
              newLabel: resolvedPath
            });
          }
          return diffStrings(fileContent, new_content, {
            oldLabel: resolvedPath,
            newLabel: "provided"
          });
        }
        return "Error [diff]: Insufficient parameters. Provide either: (1) file_path + other_file_path, (2) old_content + new_content, or (3) file_path + old_content/new_content.";
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [diff]: ${msg}`;
      }
    }
  });
}

// Default instance bound to the current working directory.
const diff = createDiff();
|
|
86
|
+
|
|
87
|
+
export {
|
|
88
|
+
createDiff,
|
|
89
|
+
diff
|
|
90
|
+
};
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import {
|
|
2
|
+
applyEditToFile,
|
|
3
|
+
findActualString,
|
|
4
|
+
preserveQuoteStyle
|
|
5
|
+
} from "./chunk-G3ITTPGX.js";
|
|
6
|
+
import {
|
|
7
|
+
writeTextContent
|
|
8
|
+
} from "./chunk-6PQLFDGT.js";
|
|
9
|
+
import {
|
|
10
|
+
expandPath
|
|
11
|
+
} from "./chunk-I3ONDY7P.js";
|
|
12
|
+
|
|
13
|
+
// src/multi-edit/index.ts
|
|
14
|
+
import { readFile } from "fs/promises";
|
|
15
|
+
import { tool } from "ai";
|
|
16
|
+
import { z } from "zod";
|
|
17
|
+
/**
 * Count non-overlapping occurrences of `search` within `text`.
 * An empty search string counts as zero occurrences.
 */
function countOccurrences(text, search) {
  if (search.length === 0) return 0;
  let total = 0;
  for (
    let at = text.indexOf(search);
    at !== -1;
    at = text.indexOf(search, at + search.length)
  ) {
    total++;
  }
  return total;
}
|
|
27
|
+
/**
 * Build the `multi-edit` tool: applies an ordered list of find/replace edits
 * to one file atomically. All validation happens in-memory before any write,
 * so a failed edit leaves the file untouched. Each old_string must match
 * exactly one location in the (progressively edited) content.
 */
function createMultiEdit(config = {}) {
  const cwd = config.cwd ?? process.cwd();
  return tool({
    description: "Atomically apply multiple text edits to a single file. All edits succeed together or none are applied (rollback on failure). Each edit replaces one occurrence of old_string with new_string. Edits are applied sequentially in the order provided.",
    inputSchema: z.object({
      file_path: z.string().describe("Path to the file to edit (absolute or relative to cwd)"),
      edits: z.array(
        z.object({
          old_string: z.string().describe("The exact string to find and replace"),
          new_string: z.string().describe("The replacement string")
        })
      ).describe("Ordered list of edits to apply atomically")
    }),
    execute: async ({ file_path, edits }) => {
      try {
        if (edits.length === 0) {
          return "No edits provided. File unchanged.";
        }
        const resolvedPath = expandPath(file_path, cwd);
        let content = await readFile(resolvedPath, "utf-8");
        for (const [i, edit] of edits.entries()) {
          const actualOldString = findActualString(content, edit.old_string);
          if (actualOldString === null) {
            return `Error [multi-edit]: Edit ${i + 1}/${edits.length} failed \u2014 old_string not found in file. No edits were applied. File: ${resolvedPath}`;
          }
          const occurrences = countOccurrences(content, actualOldString);
          if (occurrences > 1) {
            return `Error [multi-edit]: Edit ${i + 1}/${edits.length} failed \u2014 old_string matches ${occurrences} locations (must be unique). No edits were applied. File: ${resolvedPath}`;
          }
          // Keep the file's quote style even if the model's edit used another.
          const styledNewString = preserveQuoteStyle(
            edit.old_string,
            actualOldString,
            edit.new_string
          );
          content = applyEditToFile(content, actualOldString, styledNewString);
        }
        await writeTextContent(resolvedPath, content);
        return `Successfully applied ${edits.length} edit${edits.length === 1 ? "" : "s"} to ${resolvedPath}`;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [multi-edit]: ${msg}`;
      }
    }
  });
}

// Default instance bound to the current working directory.
const multiEdit = createMultiEdit();
|
|
75
|
+
|
|
76
|
+
export {
|
|
77
|
+
createMultiEdit,
|
|
78
|
+
multiEdit
|
|
79
|
+
};
|