agentool 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +627 -52
- package/dist/ask-user/index.cjs +8 -0
- package/dist/ask-user/index.d.cts +68 -0
- package/dist/ask-user/index.d.ts +68 -0
- package/dist/ask-user/index.js +8 -0
- package/dist/bash/index.cjs +8 -0
- package/dist/bash/index.d.cts +63 -0
- package/dist/bash/index.d.ts +63 -0
- package/dist/bash/index.js +8 -0
- package/dist/chunk-3EPGFWZV.cjs +30 -0
- package/dist/chunk-3VO6NETR.cjs +79 -0
- package/dist/chunk-4YI2H55A.js +142 -0
- package/dist/chunk-5NW4OGRI.cjs +99 -0
- package/dist/chunk-6MDPYALY.js +196 -0
- package/dist/chunk-6PQLFDGT.js +117 -0
- package/dist/chunk-7QL4BQCH.js +40 -0
- package/dist/chunk-CAEVLIQB.cjs +117 -0
- package/dist/chunk-CGTPF6IS.js +90 -0
- package/dist/chunk-EA3YV7ZG.js +79 -0
- package/dist/chunk-FAEGCFTO.js +136 -0
- package/dist/chunk-FV2R5FFQ.cjs +102 -0
- package/dist/chunk-FW3UJ622.cjs +59 -0
- package/dist/chunk-G3ITTPGX.js +99 -0
- package/dist/chunk-HDKXSKMO.js +30 -0
- package/dist/chunk-HZAQRHBT.js +99 -0
- package/dist/chunk-I3ONDY7P.js +46 -0
- package/dist/chunk-I6KFFQPV.cjs +58 -0
- package/dist/chunk-IMZQ7ELK.cjs +196 -0
- package/dist/chunk-JCTBB7H2.cjs +40 -0
- package/dist/chunk-K77GC2QI.js +59 -0
- package/dist/chunk-LPV5CN2K.js +58 -0
- package/dist/chunk-MF7CJVIZ.js +40 -0
- package/dist/chunk-MIYA7TNR.cjs +123 -0
- package/dist/chunk-MJCAXASI.js +123 -0
- package/dist/chunk-MXFW3XY6.cjs +73 -0
- package/dist/chunk-ONBH74ZV.cjs +90 -0
- package/dist/chunk-OXLQ7QVL.cjs +40 -0
- package/dist/chunk-QEJV2KZ4.cjs +159 -0
- package/dist/chunk-QZ5GS6HW.cjs +46 -0
- package/dist/chunk-S6QEY7UY.js +73 -0
- package/dist/chunk-SUSAPI5W.cjs +142 -0
- package/dist/chunk-TBVHHF3H.cjs +47 -0
- package/dist/chunk-U2YMJM25.cjs +115 -0
- package/dist/chunk-VLNDEVKS.js +102 -0
- package/dist/chunk-XKG2A3EW.js +159 -0
- package/dist/chunk-XLD2Y3SS.cjs +136 -0
- package/dist/chunk-Y7KOKDFP.js +115 -0
- package/dist/chunk-YPPPGGLA.cjs +99 -0
- package/dist/chunk-ZHCMEQJJ.js +47 -0
- package/dist/context-compaction/index.cjs +8 -0
- package/dist/context-compaction/index.d.cts +77 -0
- package/dist/context-compaction/index.d.ts +77 -0
- package/dist/context-compaction/index.js +8 -0
- package/dist/diff/index.cjs +9 -0
- package/dist/diff/index.d.cts +72 -0
- package/dist/diff/index.d.ts +72 -0
- package/dist/diff/index.js +9 -0
- package/dist/edit/index.cjs +10 -0
- package/dist/edit/index.d.cts +53 -0
- package/dist/edit/index.d.ts +53 -0
- package/dist/edit/index.js +10 -0
- package/dist/glob/index.cjs +10 -0
- package/dist/glob/index.d.cts +47 -0
- package/dist/glob/index.d.ts +47 -0
- package/dist/glob/index.js +10 -0
- package/dist/grep/index.cjs +10 -0
- package/dist/grep/index.d.cts +50 -0
- package/dist/grep/index.d.ts +50 -0
- package/dist/grep/index.js +10 -0
- package/dist/http-request/index.cjs +8 -0
- package/dist/http-request/index.d.cts +60 -0
- package/dist/http-request/index.d.ts +60 -0
- package/dist/http-request/index.js +8 -0
- package/dist/index.cjs +102 -0
- package/dist/index.d.cts +18 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.js +102 -0
- package/dist/lsp/index.cjs +10 -0
- package/dist/lsp/index.d.cts +38 -0
- package/dist/lsp/index.d.ts +38 -0
- package/dist/lsp/index.js +10 -0
- package/dist/memory/index.cjs +9 -0
- package/dist/memory/index.d.cts +63 -0
- package/dist/memory/index.d.ts +63 -0
- package/dist/memory/index.js +9 -0
- package/dist/multi-edit/index.cjs +11 -0
- package/dist/multi-edit/index.d.cts +72 -0
- package/dist/multi-edit/index.d.ts +72 -0
- package/dist/multi-edit/index.js +11 -0
- package/dist/read/index.cjs +10 -0
- package/dist/read/index.d.cts +67 -0
- package/dist/read/index.d.ts +67 -0
- package/dist/read/index.js +10 -0
- package/dist/sleep/index.cjs +8 -0
- package/dist/sleep/index.d.cts +60 -0
- package/dist/sleep/index.d.ts +60 -0
- package/dist/sleep/index.js +8 -0
- package/dist/task/index.cjs +8 -0
- package/dist/task/index.d.cts +67 -0
- package/dist/task/index.d.ts +67 -0
- package/dist/task/index.js +8 -0
- package/dist/types-3QPDuCXN.d.cts +45 -0
- package/dist/types-3QPDuCXN.d.ts +45 -0
- package/dist/web-fetch/index.cjs +8 -0
- package/dist/web-fetch/index.d.cts +56 -0
- package/dist/web-fetch/index.d.ts +56 -0
- package/dist/web-fetch/index.js +8 -0
- package/dist/write/index.cjs +10 -0
- package/dist/write/index.d.cts +47 -0
- package/dist/write/index.d.ts +47 -0
- package/dist/write/index.js +10 -0
- package/package.json +145 -20
- package/dist/core/index.d.ts +0 -20
- package/dist/core/index.js +0 -1
- package/dist/core/index.js.map +0 -1
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
// src/task/index.ts
|
|
2
|
+
import { tool } from "ai";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
import { mkdir, readFile, writeFile } from "fs/promises";
|
|
5
|
+
import { dirname, join } from "path";
|
|
6
|
+
import { randomBytes } from "crypto";
|
|
7
|
+
// -- task tool helpers ------------------------------------------------------

/** Produce a short random hex id (8 characters) for a new task. */
function generateId() {
  const ID_BYTES = 4;
  return randomBytes(ID_BYTES).toString("hex");
}

/**
 * Read the task list from disk.
 * Any failure (missing file, unreadable file, malformed JSON) yields an
 * empty list so callers treat "no store yet" and "store present" uniformly.
 */
async function loadTasks(filePath) {
  try {
    const raw = await readFile(filePath, "utf-8");
    return JSON.parse(raw);
  } catch {
    return [];
  }
}

/** Persist the task list as pretty-printed JSON, creating parent dirs. */
async function saveTasks(filePath, tasks) {
  await mkdir(dirname(filePath), { recursive: true });
  const serialized = JSON.stringify(tasks, null, 2);
  await writeFile(filePath, serialized, "utf-8");
}

/** Render one task as a multi-line, human-readable summary. */
function formatTask(t) {
  const lines = [];
  lines.push(`ID: ${t.id}`);
  lines.push(`Subject: ${t.subject}`);
  lines.push(`Description: ${t.description}`);
  lines.push(`Status: ${t.status}`);
  lines.push(`Created: ${t.createdAt}`);
  lines.push(`Updated: ${t.updatedAt}`);
  return lines.join("\n");
}
|
|
32
|
+
/**
 * Build the "task" AI tool: a JSON-file-backed task tracker supporting
 * create / get / update / list / delete actions.
 *
 * config.cwd       - base directory (defaults to process.cwd())
 * config.tasksFile - storage path (defaults to <cwd>/.agentool/tasks.json)
 */
function createTask(config = {}) {
  const workingDir = config.cwd ?? process.cwd();
  const tasksFile = config.tasksFile ?? join(workingDir, ".agentool", "tasks.json");
  return tool({
    description: "JSON file-based task tracker. Use this to create, get, update, list, and delete tasks. Each task has an id, subject, description, status, and timestamps.",
    inputSchema: z.object({
      action: z.enum(["create", "get", "update", "list", "delete"]).describe(
        "The operation to perform"
      ),
      id: z.string().optional().describe(
        "Task id (required for get, update, delete)"
      ),
      subject: z.string().optional().describe(
        "Task subject (required for create, optional for update)"
      ),
      description: z.string().optional().describe(
        "Task description (required for create, optional for update)"
      ),
      status: z.enum(["pending", "in_progress", "completed"]).optional().describe(
        "Task status (optional for create/update, defaults to pending)"
      )
    }),
    execute: async ({ action, id, subject, description, status }) => {
      try {
        // Dispatch to the per-action helper; unknown actions are reported
        // as a tool-level error string rather than thrown.
        if (action === "create") return await createEntry(tasksFile, subject, description, status);
        if (action === "list") return await listEntries(tasksFile);
        if (action === "get") return await getEntry(tasksFile, id);
        if (action === "update") return await updateEntry(tasksFile, id, subject, description, status);
        if (action === "delete") return await deleteEntry(tasksFile, id);
        return `Error [task]: Unknown action "${String(action)}".`;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [task]: ${msg}`;
      }
    }
  });
}
|
|
77
|
+
/**
 * Create a new task entry. Subject and description are mandatory;
 * status defaults to "pending". Returns a confirmation string with the
 * formatted task, or an error string when required input is missing.
 */
async function createEntry(filePath, subject, description, status) {
  if (!subject) return "Error [task]: Subject is required for create action.";
  if (!description) return "Error [task]: Description is required for create action.";
  const tasks = await loadTasks(filePath);
  const timestamp = new Date().toISOString();
  const entry = {
    id: generateId(),
    subject,
    description,
    status: status ?? "pending",
    createdAt: timestamp,
    updatedAt: timestamp
  };
  await saveTasks(filePath, [...tasks, entry]);
  return `Created task ${entry.id}.\n${formatTask(entry)}`;
}

/** List every stored task, separated by "---" dividers. */
async function listEntries(filePath) {
  const tasks = await loadTasks(filePath);
  return tasks.length === 0 ? "No tasks found." : tasks.map(formatTask).join("\n---\n");
}

/** Look up a single task by id and return its formatted summary. */
async function getEntry(filePath, id) {
  if (!id) return "Error [task]: ID is required for get action.";
  const tasks = await loadTasks(filePath);
  const found = tasks.find((entry) => entry.id === id);
  return found ? formatTask(found) : `Error [task]: Task "${id}" not found.`;
}
|
|
107
|
+
async function updateEntry(filePath, id, subject, description, status) {
|
|
108
|
+
if (!id) return "Error [task]: ID is required for update action.";
|
|
109
|
+
const tasks = await loadTasks(filePath);
|
|
110
|
+
const idx = tasks.findIndex((t) => t.id === id);
|
|
111
|
+
if (idx === -1) return `Error [task]: Task "${id}" not found.`;
|
|
112
|
+
const entry = tasks[idx];
|
|
113
|
+
if (subject) entry.subject = subject;
|
|
114
|
+
if (description) entry.description = description;
|
|
115
|
+
if (status) entry.status = status;
|
|
116
|
+
entry.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
117
|
+
tasks[idx] = entry;
|
|
118
|
+
await saveTasks(filePath, tasks);
|
|
119
|
+
return `Updated task ${id}.
|
|
120
|
+
${formatTask(entry)}`;
|
|
121
|
+
}
|
|
122
|
+
async function deleteEntry(filePath, id) {
|
|
123
|
+
if (!id) return "Error [task]: ID is required for delete action.";
|
|
124
|
+
const tasks = await loadTasks(filePath);
|
|
125
|
+
const idx = tasks.findIndex((t) => t.id === id);
|
|
126
|
+
if (idx === -1) return `Error [task]: Task "${id}" not found.`;
|
|
127
|
+
tasks.splice(idx, 1);
|
|
128
|
+
await saveTasks(filePath, tasks);
|
|
129
|
+
return `Deleted task "${id}".`;
|
|
130
|
+
}
|
|
131
|
+
var task = createTask();
|
|
132
|
+
|
|
133
|
+
export {
|
|
134
|
+
createTask,
|
|
135
|
+
task
|
|
136
|
+
};
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }
|
|
2
|
+
|
|
3
|
+
var _chunkQZ5GS6HWcjs = require('./chunk-QZ5GS6HW.cjs');
|
|
4
|
+
|
|
5
|
+
// src/memory/index.ts
|
|
6
|
+
var _ai = require('ai');
|
|
7
|
+
var _zod = require('zod');
|
|
8
|
+
var _promises = require('fs/promises');
|
|
9
|
+
var _path = require('path');
|
|
10
|
+
/**
 * Validate and normalize a memory key.
 * Rejects empty/blank keys and anything containing a ".." traversal
 * segment, then strips leading dots (so a key cannot name a hidden file).
 * Returns { ok: cleanedKey } on success or { err: message } on failure.
 */
function sanitizeKey(key) {
  const isBlank = !key || key.trim() === "";
  if (isBlank) {
    return { err: "Error [memory]: Key must not be empty." };
  }
  if (_chunkQZ5GS6HWcjs.containsPathTraversal.call(void 0, key)) {
    return { err: "Error [memory]: Key contains path traversal and was rejected." };
  }
  const stripped = key.replace(/^\.+/, "");
  return stripped === ""
    ? { err: "Error [memory]: Key must not be empty after stripping leading dots." }
    : { ok: stripped };
}
|
|
23
|
+
/**
 * Build the "memory" AI tool: a file-per-key store of markdown notes.
 *
 * config.cwd       - base directory (defaults to process.cwd())
 * config.memoryDir - storage directory (defaults to <cwd>/.agentool/memory)
 */
function createMemory(config = {}) {
  const workingDir = config.cwd ?? process.cwd();
  const memoryDir = config.memoryDir ?? _path.join(workingDir, ".agentool", "memory");
  return _ai.tool({
    description: "File-based key-value memory store. Use this to persist notes, context, or any text data across conversations. Supports write, read, list, and delete operations.",
    inputSchema: _zod.z.object({
      action: _zod.z.enum(["read", "write", "list", "delete"]).describe(
        "The operation to perform: read, write, list, or delete"
      ),
      key: _zod.z.string().optional().describe(
        "The memory key (required for read, write, delete)"
      ),
      content: _zod.z.string().optional().describe(
        "The content to store (required for write)"
      )
    }),
    execute: async ({ action, key, content }) => {
      try {
        // "list" needs no key; every other action validates the key first.
        if (action === "list") return await listKeys(memoryDir);
        const checked = sanitizeKey(key);
        if ("err" in checked) return checked.err;
        const safeKey = checked.ok;
        if (action === "write") return await writeEntry(memoryDir, safeKey, content);
        if (action === "read") return await readEntry(memoryDir, safeKey);
        if (action === "delete") return await deleteEntry(memoryDir, safeKey);
        return `Error [memory]: Unknown action "${String(action)}".`;
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [memory]: ${msg}`;
      }
    }
  });
}
|
|
64
|
+
/**
 * Store content under <dir>/<key>.md, creating the directory on demand.
 * An empty string is valid content; only undefined/null is rejected.
 */
async function writeEntry(dir, key, content) {
  const missingContent = !content && content !== "";
  if (missingContent) {
    return "Error [memory]: Content is required for write action.";
  }
  await _promises.mkdir(dir, { recursive: true });
  const target = _path.join(dir, `${key}.md`);
  await _promises.writeFile(target, content, "utf-8");
  return `Saved memory "${key}".`;
}

/** Read the note stored under key; reports a not-found error string otherwise. */
async function readEntry(dir, key) {
  const target = _path.join(dir, `${key}.md`);
  try {
    return await _promises.readFile(target, "utf-8");
  } catch {
    return `Error [memory]: Key "${key}" not found.`;
  }
}

/** List all stored keys (files with a .md suffix), one per line. */
async function listKeys(dir) {
  try {
    const entries = await _promises.readdir(dir);
    const keys = entries
      .filter((name) => name.endsWith(".md"))
      .map((name) => name.slice(0, -".md".length));
    return keys.length === 0 ? "No memory entries found." : keys.join("\n");
  } catch {
    // A missing directory simply means nothing has been stored yet.
    return "No memory entries found.";
  }
}

/** Delete the note stored under key; reports not-found as an error string. */
async function deleteEntry(dir, key) {
  try {
    await _promises.unlink(_path.join(dir, `${key}.md`));
    return `Deleted memory "${key}".`;
  } catch {
    return `Error [memory]: Key "${key}" not found.`;
  }
}
|
|
97
|
+
// Default memory tool instance rooted at the current working directory.
const memory = createMemory();

exports.createMemory = createMemory;
exports.memory = memory;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }// src/context-compaction/index.ts
|
|
2
|
+
var _ai = require('ai');
|
|
3
|
+
var _zod = require('zod');
|
|
4
|
+
// Input schema for the compaction tool: the message list plus an
// optional token ceiling.
var parametersSchema = _zod.z.object({
  messages: _zod.z.array(
    _zod.z.object({
      role: _zod.z.string().describe("Message role (system, user, assistant)"),
      content: _zod.z.string().describe("Message content")
    })
  ).describe("The conversation messages to compact"),
  maxTokens: _zod.z.number().optional().describe(
    "Target maximum tokens (default: config.maxTokens or 4096)"
  )
});

/**
 * Build the "context-compaction" AI tool. When the conversation exceeds
 * the character budget (maxTokens * 4 — a rough 4-chars-per-token
 * heuristic) the history is replaced with a single system message
 * produced by the config.summarize callback.
 */
function createContextCompaction(config = {}) {
  return _ai.tool({
    description: "Compact conversation history by summarizing older messages to reduce context size. Requires a summarize function in config. Returns compacted messages where total chars < maxTokens * 4.",
    inputSchema: _ai.zodSchema(parametersSchema),
    execute: async ({ messages, maxTokens: inputMaxTokens }) => {
      // Per-call maxTokens wins over the configured default.
      const maxTokens = inputMaxTokens ?? config.maxTokens ?? 4096;
      const charBudget = maxTokens * 4;
      let totalChars = 0;
      for (const message of messages) {
        totalChars += message.content.length;
      }
      if (totalChars <= charBudget) {
        return JSON.stringify({
          compacted: false,
          messages,
          reason: "Already within token budget"
        });
      }
      if (!config.summarize) {
        return "Error [context-compaction]: No summarize function configured. Provide a summarize callback in the tool config.";
      }
      try {
        const summary = await config.summarize(messages);
        return JSON.stringify({
          compacted: true,
          messages: [{ role: "system", content: summary }],
          originalCount: messages.length
        });
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [context-compaction]: Summarization failed: ${msg}`;
      }
    }
  });
}

// Default instance; summarization stays disabled until configured.
var contextCompaction = createContextCompaction();

exports.createContextCompaction = createContextCompaction;
exports.contextCompaction = contextCompaction;
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
// src/shared/edit-helpers.ts
//
// Helpers for fuzzy string replacement in files whose text may use
// typographic ("curly") quotes where the caller supplied straight ones.

var LEFT_SINGLE_CURLY_QUOTE = "\u2018";
var RIGHT_SINGLE_CURLY_QUOTE = "\u2019";
var LEFT_DOUBLE_CURLY_QUOTE = "\u201C";
var RIGHT_DOUBLE_CURLY_QUOTE = "\u201D";

/** Map every curly single/double quote to its straight ASCII equivalent. */
function normalizeQuotes(str) {
  return str
    .replaceAll(LEFT_SINGLE_CURLY_QUOTE, "'")
    .replaceAll(RIGHT_SINGLE_CURLY_QUOTE, "'")
    .replaceAll(LEFT_DOUBLE_CURLY_QUOTE, '"')
    .replaceAll(RIGHT_DOUBLE_CURLY_QUOTE, '"');
}

/**
 * Locate searchString in fileContent, tolerating curly-vs-straight quote
 * mismatches. Returns the exact substring as it appears in the file
 * (quote normalization is length-preserving, so indices line up), or
 * null when there is no match even after normalization.
 */
function findActualString(fileContent, searchString) {
  if (fileContent.includes(searchString)) {
    return searchString;
  }
  const foundAt = normalizeQuotes(fileContent).indexOf(normalizeQuotes(searchString));
  if (foundAt === -1) {
    return null;
  }
  return fileContent.substring(foundAt, foundAt + searchString.length);
}

/**
 * When the matched file text used curly quotes, rewrite straight quotes
 * in newString to curly ones so the replacement keeps the file's
 * typographic style. Returns newString unchanged otherwise.
 */
function preserveQuoteStyle(oldString, actualOldString, newString) {
  if (oldString === actualOldString) {
    return newString;
  }
  const usesCurlyDoubles =
    actualOldString.includes(LEFT_DOUBLE_CURLY_QUOTE) ||
    actualOldString.includes(RIGHT_DOUBLE_CURLY_QUOTE);
  const usesCurlySingles =
    actualOldString.includes(LEFT_SINGLE_CURLY_QUOTE) ||
    actualOldString.includes(RIGHT_SINGLE_CURLY_QUOTE);
  if (!usesCurlyDoubles && !usesCurlySingles) {
    return newString;
  }
  let styled = newString;
  if (usesCurlyDoubles) {
    styled = applyCurlyDoubleQuotes(styled);
  }
  if (usesCurlySingles) {
    styled = applyCurlySingleQuotes(styled);
  }
  return styled;
}

/**
 * Replace oldString with newString inside originalContent. When
 * newString is empty (a deletion) and the old text is followed by a
 * newline, that newline is removed too so no blank line is left behind.
 * The replacement uses a callback so "$" sequences in newString stay
 * literal instead of being treated as replacement patterns.
 */
function applyEditToFile(originalContent, oldString, newString, replaceAll = false) {
  const substitute = (content, search) =>
    replaceAll
      ? content.replaceAll(search, () => newString)
      : content.replace(search, () => newString);
  if (newString !== "") {
    return substitute(originalContent, oldString);
  }
  const eatNewline = !oldString.endsWith("\n") && originalContent.includes(oldString + "\n");
  return eatNewline
    ? substitute(originalContent, oldString + "\n")
    : substitute(originalContent, oldString);
}

/**
 * Heuristic: a quote at `index` opens (rather than closes) when it sits
 * at the start of the string or follows whitespace, an opening bracket,
 * or a dash.
 * NOTE(review): the original compared against two space-like characters;
 * both render as U+0020 in this source — confirm whether one was
 * originally a tab or non-breaking space upstream.
 */
function isOpeningContext(chars, index) {
  if (index === 0) {
    return true;
  }
  const before = chars[index - 1];
  return (
    before === " " ||
    before === "\n" ||
    before === "\r" ||
    before === "(" ||
    before === "[" ||
    before === "{" ||
    before === "\u2014" || // em dash
    before === "\u2013" // en dash
  );
}

/** Convert straight double quotes to contextually curly ones. */
function applyCurlyDoubleQuotes(str) {
  const chars = [...str];
  return chars
    .map((ch, i) => {
      if (ch !== '"') return ch;
      return isOpeningContext(chars, i) ? LEFT_DOUBLE_CURLY_QUOTE : RIGHT_DOUBLE_CURLY_QUOTE;
    })
    .join("");
}

/**
 * Convert straight single quotes to contextually curly ones. An
 * apostrophe between two letters (e.g. don't) always becomes a right
 * single quote; otherwise the opening-context heuristic decides.
 */
function applyCurlySingleQuotes(str) {
  const chars = [...str];
  const letter = /\p{L}/u; // non-global, so .test carries no state
  return chars
    .map((ch, i) => {
      if (ch !== "'") return ch;
      const before = i > 0 ? chars[i - 1] : void 0;
      const after = i + 1 < chars.length ? chars[i + 1] : void 0;
      const apostrophe =
        before !== void 0 && letter.test(before) && after !== void 0 && letter.test(after);
      if (apostrophe) {
        return RIGHT_SINGLE_CURLY_QUOTE;
      }
      return isOpeningContext(chars, i) ? LEFT_SINGLE_CURLY_QUOTE : RIGHT_SINGLE_CURLY_QUOTE;
    })
    .join("");
}

export {
  findActualString,
  preserveQuoteStyle,
  applyEditToFile
};
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
// src/ask-user/index.ts
|
|
2
|
+
import { tool } from "ai";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
/**
 * Build the "ask-user" AI tool: relay a question to a human via the
 * config.onQuestion callback and return their answer.
 * Without a configured callback the tool reports an error string
 * instead of blocking.
 */
function createAskUser(config = {}) {
  return tool({
    description: "Ask the user a question and wait for their response. Use this when you need clarification, confirmation, or additional information from the user before proceeding.",
    inputSchema: z.object({
      question: z.string().describe("The question to ask the user"),
      options: z.array(z.string()).optional().describe("Optional list of suggested response options")
    }),
    execute: async ({ question, options }) => {
      if (!config.onQuestion) {
        return "Error [ask-user]: No onQuestion callback configured. Provide an onQuestion function in the tool config to enable user interaction.";
      }
      try {
        return await config.onQuestion(question, options);
      } catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [ask-user]: Failed to get user response: ${msg}`;
      }
    }
  });
}

// Default instance; interactive only once an onQuestion callback is supplied.
var askUser = createAskUser();

export {
  createAskUser,
  askUser
};
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import {
|
|
2
|
+
executeRipgrep
|
|
3
|
+
} from "./chunk-MJCAXASI.js";
|
|
4
|
+
import {
|
|
5
|
+
expandPath
|
|
6
|
+
} from "./chunk-I3ONDY7P.js";
|
|
7
|
+
|
|
8
|
+
// src/glob/index.ts
|
|
9
|
+
import { tool } from "ai";
|
|
10
|
+
import { z } from "zod";
|
|
11
|
+
|
|
12
|
+
// src/shared/glob.ts
|
|
13
|
+
import { isAbsolute, join, basename, dirname, sep } from "path";
|
|
14
|
+
/**
 * Split a glob pattern into a static base directory and the remaining
 * glob-relative pattern, so a search can be rooted at the deepest
 * directory that contains no wildcards.
 * Returns { baseDir, relativePattern }; baseDir is "" when the pattern
 * has no static directory prefix, and "/" for root-anchored patterns
 * like "/*.js".
 */
function extractGlobBaseDirectory(pattern) {
  const firstGlobChar = pattern.match(/[*?[{]/);
  if (!firstGlobChar || firstGlobChar.index === void 0) {
    // No wildcards at all: treat it as directory + literal filename.
    return { baseDir: dirname(pattern), relativePattern: basename(pattern) };
  }
  const staticPrefix = pattern.slice(0, firstGlobChar.index);
  // Accept both "/" and the platform separator within the prefix.
  const lastSepIndex = Math.max(
    staticPrefix.lastIndexOf("/"),
    staticPrefix.lastIndexOf(sep)
  );
  if (lastSepIndex === -1) {
    return { baseDir: "", relativePattern: pattern };
  }
  const relativePattern = pattern.slice(lastSepIndex + 1);
  let baseDir = staticPrefix.slice(0, lastSepIndex);
  if (baseDir === "" && lastSepIndex === 0) {
    baseDir = "/";
  }
  return { baseDir, relativePattern };
}
|
|
37
|
+
/**
 * Find files matching `pattern` using ripgrep's --files mode.
 * Absolute patterns are re-rooted at their static prefix directory;
 * relative ones search under cwd. Results are absolute paths sorted by
 * modification time, paged by options.limit (default 100) and
 * options.offset (default 0); `truncated` reports whether more remain.
 */
async function glob(pattern, cwd, options) {
  const limit = options?.limit ?? 100;
  const offset = options?.offset ?? 0;
  let searchDir = cwd;
  let searchPattern = pattern;
  if (isAbsolute(pattern)) {
    const { baseDir, relativePattern } = extractGlobBaseDirectory(pattern);
    if (baseDir) {
      searchDir = baseDir;
      searchPattern = relativePattern;
    }
  }
  // --hidden/--no-ignore: include dotfiles and bypass .gitignore rules.
  const rgArgs = [
    "--files",
    "--glob",
    searchPattern,
    "--sort=modified",
    "--hidden",
    "--no-ignore"
  ];
  const matches = await executeRipgrep(rgArgs, searchDir, { signal: options?.signal });
  const absolutePaths = matches.map((p) => (isAbsolute(p) ? p : join(searchDir, p)));
  const truncated = absolutePaths.length > offset + limit;
  return { files: absolutePaths.slice(offset, offset + limit), truncated };
}
|
|
67
|
+
|
|
68
|
+
// src/glob/index.ts
|
|
69
|
+
/**
 * Build the "glob" AI tool: find files by glob pattern, newest first.
 * config.cwd - default search directory (defaults to process.cwd()).
 */
function createGlob(config = {}) {
  const defaultDir = config.cwd ?? process.cwd();
  return tool({
    description: 'Find files matching a glob pattern. Returns absolute file paths sorted by modification time (newest first). Supports patterns like "**/*.ts", "src/**/*.js", or "*.json".',
    inputSchema: z.object({
      pattern: z.string().describe("Glob pattern to match files against"),
      path: z.string().optional().describe("Directory to search in. Defaults to the working directory.")
    }),
    execute: async ({ pattern, path }) => {
      try {
        const searchDir = path ? expandPath(path, defaultDir) : defaultDir;
        const { files, truncated } = await glob(pattern, searchDir);
        if (files.length === 0) {
          return "No files found";
        }
        const header = truncated
          ? `Found ${files.length}+ files (results truncated)`
          : `Found ${files.length} files`;
        return `${header}\n${files.join("\n")}`;
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        return `Error [glob]: Failed to search for files: ${message}`;
      }
    }
  });
}

// Default instance; exported under the public name "glob".
var glob2 = createGlob();

export {
  createGlob,
  glob2 as glob
};
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
// src/shared/path.ts
//
// Path utilities shared by the file tools: tilde expansion, relative
// display paths, and a traversal check for user-supplied names.
import { homedir } from "os";
import { isAbsolute, join, normalize, relative, resolve } from "path";

/**
 * Expand a user-supplied path to an absolute, normalized one.
 * Handles "~" / "~/..." (home directory), absolute paths, and paths
 * relative to baseDir (defaulting to process.cwd()). Blank input means
 * "the base directory itself".
 * Throws TypeError for non-string input and Error on embedded NULs.
 */
function expandPath(inputPath, baseDir) {
  const root = baseDir ?? process.cwd();
  if (typeof inputPath !== "string") {
    throw new TypeError(
      `Path must be a string, received ${typeof inputPath}`
    );
  }
  if (typeof root !== "string") {
    throw new TypeError(
      `Base directory must be a string, received ${typeof root}`
    );
  }
  if (inputPath.includes("\0") || root.includes("\0")) {
    throw new Error("Path contains null bytes");
  }
  const trimmed = inputPath.trim();
  if (!trimmed) {
    return normalize(root);
  }
  if (trimmed === "~") {
    return homedir();
  }
  if (trimmed.startsWith("~/")) {
    return join(homedir(), trimmed.slice(2));
  }
  return isAbsolute(trimmed) ? normalize(trimmed) : resolve(root, trimmed);
}

/**
 * Render absolutePath relative to baseDir for display; falls back to
 * the absolute form when the path lies outside baseDir.
 */
function toRelativePath(absolutePath, baseDir) {
  const rel = relative(baseDir ?? process.cwd(), absolutePath);
  return rel.startsWith("..") ? absolutePath : rel;
}

/** True when the path contains a ".." segment (directory traversal). */
function containsPathTraversal(inputPath) {
  return /(?:^|[\\/])\.\.(?:[\\/]|$)/.test(inputPath);
}

export {
  expandPath,
  toRelativePath,
  containsPathTraversal
};
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }// src/http-request/index.ts
|
|
2
|
+
var _ai = require('ai');
|
|
3
|
+
var _zod = require('zod');
|
|
4
|
+
/**
 * Build the "http-request" AI tool: perform a raw HTTP request and return
 * the response status, headers, and body as pretty-printed JSON.
 *
 * config.timeout        - default request timeout in ms (fallback 30000)
 * config.defaultHeaders - headers merged beneath the per-call headers
 *
 * Fix: the abort timer is now disarmed in a finally block. The previous
 * revision only called clearTimeout on the success path, so a failed
 * fetch left a live timer that kept the event loop open (and later fired
 * abort on a dead controller) for up to the full timeout.
 */
function createHttpRequest(config = {}) {
  return _ai.tool({
    description: "Make an HTTP request to a URL. Returns the response status, headers, and body. Use this for API interactions, webhook calls, and service health checks. Unlike web-fetch, this returns raw response data without markdown conversion.",
    inputSchema: _zod.z.object({
      method: _zod.z.enum(["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD"]).describe("HTTP method"),
      url: _zod.z.string().describe("The URL to send the request to"),
      headers: _zod.z.record(_zod.z.string()).optional().describe("Request headers as key-value pairs"),
      body: _zod.z.string().optional().describe("Request body (for POST, PUT, PATCH)"),
      timeout: _zod.z.number().optional().describe("Request timeout in milliseconds (default: 30000)")
    }),
    execute: async ({ method, url, headers, body, timeout }) => {
      // Per-call timeout wins over the configured default.
      const timeoutMs = timeout ?? config.timeout ?? 3e4;
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
      try {
        // Per-call headers take precedence over configured defaults.
        const mergedHeaders = { ...config.defaultHeaders, ...headers };
        const response = await fetch(url, {
          method,
          headers: mergedHeaders,
          // Only body-carrying methods may send one.
          body: ["POST", "PUT", "PATCH"].includes(method) ? body : void 0,
          signal: controller.signal
        });
        // Disarm immediately so reading the body is not subject to the
        // timeout (preserves the original success-path semantics).
        clearTimeout(timeoutId);
        const responseBody = await response.text();
        const responseHeaders = {};
        response.headers.forEach((value, key) => {
          responseHeaders[key] = value;
        });
        return JSON.stringify(
          {
            status: response.status,
            statusText: response.statusText,
            headers: responseHeaders,
            body: responseBody
          },
          null,
          2
        );
      } catch (error) {
        if (error instanceof Error && error.name === "AbortError") {
          return `Error [http-request]: Request timed out after ${timeoutMs}ms for ${method} ${url}. Try increasing the timeout or verify the server is responsive.`;
        }
        const msg = error instanceof Error ? error.message : String(error);
        return `Error [http-request]: ${method} ${url} failed: ${msg}. Verify the URL is correct and the server is reachable.`;
      } finally {
        // BUG FIX: always clear — the original leaked the timer on failure.
        clearTimeout(timeoutId);
      }
    }
  });
}

// Default instance with library defaults (30s timeout, no extra headers).
var httpRequest = createHttpRequest();

exports.createHttpRequest = createHttpRequest;
exports.httpRequest = httpRequest;
|