@morphllm/morphsdk 0.2.57 → 0.2.59
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic-CaFUHxBW.d.ts +89 -0
- package/dist/{chunk-6X5UOY7B.js → chunk-2CASO3ZO.js} +46 -79
- package/dist/chunk-2CASO3ZO.js.map +1 -0
- package/dist/chunk-374N3GIA.js +118 -0
- package/dist/chunk-374N3GIA.js.map +1 -0
- package/dist/chunk-3IQIT6MC.js +65 -0
- package/dist/chunk-3IQIT6MC.js.map +1 -0
- package/dist/chunk-4VGOBA2J.js +57 -0
- package/dist/chunk-4VGOBA2J.js.map +1 -0
- package/dist/chunk-527P5X2E.js +98 -0
- package/dist/chunk-527P5X2E.js.map +1 -0
- package/dist/chunk-6N6ZYZYD.js +74 -0
- package/dist/chunk-6N6ZYZYD.js.map +1 -0
- package/dist/chunk-6Y5JB4JC.js +195 -0
- package/dist/chunk-6Y5JB4JC.js.map +1 -0
- package/dist/{chunk-TICMYDII.js → chunk-APP75CBN.js} +33 -16
- package/dist/chunk-APP75CBN.js.map +1 -0
- package/dist/{chunk-QFIHUCTF.js → chunk-FN4EP3WY.js} +19 -19
- package/dist/chunk-FN4EP3WY.js.map +1 -0
- package/dist/chunk-ILJ3J5IA.js +72 -0
- package/dist/chunk-ILJ3J5IA.js.map +1 -0
- package/dist/chunk-ISWL67SF.js +1 -0
- package/dist/chunk-KW7OEGZK.js +9 -0
- package/dist/chunk-KW7OEGZK.js.map +1 -0
- package/dist/chunk-Q5AHGIQO.js +205 -0
- package/dist/chunk-Q5AHGIQO.js.map +1 -0
- package/dist/{chunk-OXHGFHEU.js → chunk-VJU3BRET.js} +3 -3
- package/dist/chunk-VJU3BRET.js.map +1 -0
- package/dist/{chunk-TJIUA27P.js → chunk-XT5ZO6ES.js} +9 -5
- package/dist/chunk-XT5ZO6ES.js.map +1 -0
- package/dist/{chunk-LVPVVLTI.js → chunk-YV75OQTE.js} +105 -17
- package/dist/chunk-YV75OQTE.js.map +1 -0
- package/dist/{chunk-ZJIIICRA.js → chunk-ZO4PPFCZ.js} +60 -29
- package/dist/chunk-ZO4PPFCZ.js.map +1 -0
- package/dist/{client-CFoR--IU.d.ts → client-CextMMm9.d.ts} +10 -15
- package/dist/client.cjs +689 -343
- package/dist/client.cjs.map +1 -1
- package/dist/client.d.ts +3 -2
- package/dist/client.js +15 -15
- package/dist/finish-kXAcUJyB.d.ts +33 -0
- package/dist/gemini-CE80Pbdy.d.ts +117 -0
- package/dist/git/client.cjs +2 -2
- package/dist/git/client.cjs.map +1 -1
- package/dist/git/client.d.ts +1 -1
- package/dist/git/client.js +1 -1
- package/dist/git/index.cjs +2 -2
- package/dist/git/index.cjs.map +1 -1
- package/dist/git/index.js +1 -1
- package/dist/git/types.cjs.map +1 -1
- package/dist/git/types.d.ts +1 -1
- package/dist/index.cjs +702 -343
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4 -3
- package/dist/index.js +17 -16
- package/dist/openai-Fvpqln7F.d.ts +89 -0
- package/dist/tools/warp_grep/agent/config.cjs +8 -4
- package/dist/tools/warp_grep/agent/config.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/config.d.ts +7 -2
- package/dist/tools/warp_grep/agent/config.js +1 -1
- package/dist/tools/warp_grep/agent/formatter.cjs +32 -15
- package/dist/tools/warp_grep/agent/formatter.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/formatter.d.ts +1 -1
- package/dist/tools/warp_grep/agent/formatter.js +1 -1
- package/dist/tools/warp_grep/agent/parser.cjs +104 -17
- package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/parser.d.ts +3 -5
- package/dist/tools/warp_grep/agent/parser.js +1 -3
- package/dist/tools/warp_grep/agent/prompt.cjs +132 -56
- package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/prompt.d.ts +1 -1
- package/dist/tools/warp_grep/agent/prompt.js +1 -1
- package/dist/tools/warp_grep/agent/runner.cjs +459 -192
- package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/runner.d.ts +1 -0
- package/dist/tools/warp_grep/agent/runner.js +6 -8
- package/dist/tools/warp_grep/agent/types.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/types.d.ts +9 -2
- package/dist/tools/warp_grep/anthropic.cjs +650 -260
- package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
- package/dist/tools/warp_grep/anthropic.d.ts +4 -74
- package/dist/tools/warp_grep/anthropic.js +13 -15
- package/dist/tools/warp_grep/client.cjs +1593 -0
- package/dist/tools/warp_grep/client.cjs.map +1 -0
- package/dist/tools/warp_grep/client.d.ts +87 -0
- package/dist/tools/warp_grep/client.js +26 -0
- package/dist/tools/warp_grep/gemini.cjs +1587 -0
- package/dist/tools/warp_grep/gemini.cjs.map +1 -0
- package/dist/tools/warp_grep/gemini.d.ts +7 -0
- package/dist/tools/warp_grep/gemini.js +34 -0
- package/dist/tools/warp_grep/harness.cjs +556 -220
- package/dist/tools/warp_grep/harness.cjs.map +1 -1
- package/dist/tools/warp_grep/harness.d.ts +50 -119
- package/dist/tools/warp_grep/harness.js +33 -41
- package/dist/tools/warp_grep/harness.js.map +1 -1
- package/dist/tools/warp_grep/index.cjs +812 -346
- package/dist/tools/warp_grep/index.cjs.map +1 -1
- package/dist/tools/warp_grep/index.d.ts +11 -6
- package/dist/tools/warp_grep/index.js +43 -22
- package/dist/tools/warp_grep/openai.cjs +650 -258
- package/dist/tools/warp_grep/openai.cjs.map +1 -1
- package/dist/tools/warp_grep/openai.d.ts +4 -74
- package/dist/tools/warp_grep/openai.js +13 -13
- package/dist/tools/warp_grep/providers/local.cjs +66 -27
- package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
- package/dist/tools/warp_grep/providers/local.d.ts +4 -9
- package/dist/tools/warp_grep/providers/local.js +2 -2
- package/dist/tools/warp_grep/providers/remote.cjs +211 -0
- package/dist/tools/warp_grep/providers/remote.cjs.map +1 -0
- package/dist/tools/warp_grep/providers/remote.d.ts +67 -0
- package/dist/tools/warp_grep/providers/remote.js +9 -0
- package/dist/tools/warp_grep/providers/types.cjs.map +1 -1
- package/dist/tools/warp_grep/providers/types.d.ts +7 -15
- package/dist/tools/warp_grep/vercel.cjs +662 -277
- package/dist/tools/warp_grep/vercel.cjs.map +1 -1
- package/dist/tools/warp_grep/vercel.d.ts +4 -51
- package/dist/tools/warp_grep/vercel.js +16 -14
- package/dist/types-a_hxdPI6.d.ts +144 -0
- package/dist/vercel-3yjvfmVB.d.ts +66 -0
- package/package.json +12 -2
- package/dist/chunk-6X5UOY7B.js.map +0 -1
- package/dist/chunk-73RQWOQC.js +0 -16
- package/dist/chunk-73RQWOQC.js.map +0 -1
- package/dist/chunk-7OQOOB3R.js +0 -1
- package/dist/chunk-CFF636UC.js +0 -70
- package/dist/chunk-CFF636UC.js.map +0 -1
- package/dist/chunk-EK7OQPWD.js +0 -44
- package/dist/chunk-EK7OQPWD.js.map +0 -1
- package/dist/chunk-GJ5TYNRD.js +0 -107
- package/dist/chunk-GJ5TYNRD.js.map +0 -1
- package/dist/chunk-HQO45BAJ.js +0 -14
- package/dist/chunk-HQO45BAJ.js.map +0 -1
- package/dist/chunk-IMYQOKFO.js +0 -83
- package/dist/chunk-IMYQOKFO.js.map +0 -1
- package/dist/chunk-KBQWGT5L.js +0 -77
- package/dist/chunk-KBQWGT5L.js.map +0 -1
- package/dist/chunk-LVPVVLTI.js.map +0 -1
- package/dist/chunk-OXHGFHEU.js.map +0 -1
- package/dist/chunk-QFIHUCTF.js.map +0 -1
- package/dist/chunk-TICMYDII.js.map +0 -1
- package/dist/chunk-TJIUA27P.js.map +0 -1
- package/dist/chunk-WETRQJGU.js +0 -129
- package/dist/chunk-WETRQJGU.js.map +0 -1
- package/dist/chunk-ZJIIICRA.js.map +0 -1
- package/dist/core-CpkYEi_T.d.ts +0 -158
- package/dist/tools/warp_grep/tools/analyse.cjs +0 -40
- package/dist/tools/warp_grep/tools/analyse.cjs.map +0 -1
- package/dist/tools/warp_grep/tools/analyse.d.ts +0 -10
- package/dist/tools/warp_grep/tools/analyse.js +0 -8
- package/dist/tools/warp_grep/tools/finish.cjs +0 -69
- package/dist/tools/warp_grep/tools/finish.cjs.map +0 -1
- package/dist/tools/warp_grep/tools/finish.d.ts +0 -10
- package/dist/tools/warp_grep/tools/finish.js +0 -10
- package/dist/tools/warp_grep/tools/grep.cjs +0 -38
- package/dist/tools/warp_grep/tools/grep.cjs.map +0 -1
- package/dist/tools/warp_grep/tools/grep.d.ts +0 -8
- package/dist/tools/warp_grep/tools/grep.js +0 -15
- package/dist/tools/warp_grep/tools/grep.js.map +0 -1
- package/dist/tools/warp_grep/tools/read.cjs +0 -38
- package/dist/tools/warp_grep/tools/read.cjs.map +0 -1
- package/dist/tools/warp_grep/tools/read.d.ts +0 -9
- package/dist/tools/warp_grep/tools/read.js +0 -8
- package/dist/tools/warp_grep/utils/format.cjs +0 -42
- package/dist/tools/warp_grep/utils/format.cjs.map +0 -1
- package/dist/tools/warp_grep/utils/format.d.ts +0 -4
- package/dist/tools/warp_grep/utils/format.js +0 -18
- package/dist/tools/warp_grep/utils/format.js.map +0 -1
- /package/dist/{chunk-7OQOOB3R.js.map → chunk-ISWL67SF.js.map} +0 -0
- /package/dist/tools/warp_grep/{tools/analyse.js.map → client.js.map} +0 -0
- /package/dist/tools/warp_grep/{tools/finish.js.map → gemini.js.map} +0 -0
- /package/dist/tools/warp_grep/{tools/read.js.map → providers/remote.js.map} +0 -0
package/dist/{chunk-LVPVVLTI.js → chunk-YV75OQTE.js}
@@ -1,13 +1,103 @@
 // tools/warp_grep/agent/parser.ts
-var
-
-
-
+var VALID_COMMANDS = ["list_directory", "grep", "read", "finish"];
+function isValidCommand(name) {
+  return VALID_COMMANDS.includes(name);
+}
+function getXmlElementText(xml, tagName) {
+  const regex = new RegExp(`<${tagName}>([\\s\\S]*?)</${tagName}>`, "i");
+  const match = xml.match(regex);
+  return match ? match[1].trim() : null;
+}
+function parseNestedXmlTools(text) {
+  const tools = [];
+  const toolRegex = /<([a-z_][a-z0-9_]*)>([\s\S]*?)<\/\1>/gi;
+  let match;
+  while ((match = toolRegex.exec(text)) !== null) {
+    const rawToolName = match[1].toLowerCase();
+    const content = match[2];
+    if (!isValidCommand(rawToolName)) continue;
+    const toolName = rawToolName;
+    if (toolName === "list_directory") {
+      const path = getXmlElementText(content, "path");
+      const pattern = getXmlElementText(content, "pattern");
+      if (path) {
+        tools.push({ name: "list_directory", arguments: { path, pattern } });
+      }
+    } else if (toolName === "grep") {
+      const pattern = getXmlElementText(content, "pattern");
+      const subDir = getXmlElementText(content, "sub_dir");
+      const glob = getXmlElementText(content, "glob");
+      if (pattern) {
+        tools.push({
+          name: "grep",
+          arguments: {
+            pattern,
+            path: subDir || ".",
+            ...glob && { glob }
+          }
+        });
+      }
+    } else if (toolName === "read") {
+      const path = getXmlElementText(content, "path");
+      const linesStr = getXmlElementText(content, "lines");
+      if (path) {
+        const args = { path };
+        if (linesStr) {
+          const ranges = [];
+          for (const rangeStr of linesStr.split(",")) {
+            const trimmed = rangeStr.trim();
+            if (!trimmed) continue;
+            const [s, e] = trimmed.split("-").map((v) => parseInt(v.trim(), 10));
+            if (Number.isFinite(s) && Number.isFinite(e)) {
+              ranges.push([s, e]);
+            } else if (Number.isFinite(s)) {
+              ranges.push([s, s]);
+            }
+          }
+          if (ranges.length === 1) {
+            args.start = ranges[0][0];
+            args.end = ranges[0][1];
+          } else if (ranges.length > 1) {
+            args.lines = ranges;
+          }
+        }
+        tools.push({ name: "read", arguments: args });
+      }
+    } else if (toolName === "finish") {
+      const fileRegex = /<file>([\s\S]*?)<\/file>/gi;
+      const files = [];
+      let fileMatch;
+      while ((fileMatch = fileRegex.exec(content)) !== null) {
+        const fileContent = fileMatch[1];
+        const filePath = getXmlElementText(fileContent, "path");
+        const linesStr = getXmlElementText(fileContent, "lines");
+        if (filePath && linesStr) {
+          const ranges = [];
+          for (const rangeStr of linesStr.split(",")) {
+            if (rangeStr.trim() === "*") {
+              ranges.push([1, 999999]);
+            } else {
+              const [s, e] = rangeStr.split("-").map((v) => parseInt(v.trim(), 10));
+              if (Number.isFinite(s) && Number.isFinite(e)) {
+                ranges.push([s, e]);
+              }
+            }
+          }
+          if (ranges.length > 0) {
+            files.push({ path: filePath, lines: ranges });
+          }
+        }
+      }
+      if (files.length > 0) {
+        tools.push({ name: "finish", arguments: { files } });
+      }
+    }
   }
-
-
+  return tools;
+}
 function preprocessText(text) {
   let processed = text.replace(/<think>[\s\S]*?<\/think>/gi, "");
+  const nestedTools = parseNestedXmlTools(processed);
   const openingTagRegex = /<tool_call>|<tool>/gi;
   const closingTagRegex = /<\/tool_call>|<\/tool>/gi;
   const openingMatches = processed.match(openingTagRegex) || [];
@@ -44,7 +134,7 @@ function preprocessText(text) {
       }
     }
   }
-  return toolCallLines;
+  return { lines: toolCallLines, nestedTools };
 }
 var LLMResponseParser = class {
   finishSpecSplitRe = /,(?=[^,\s]+:)/;
@@ -52,8 +142,8 @@ var LLMResponseParser = class {
     if (typeof text !== "string") {
       throw new TypeError("Command text must be a string.");
     }
-    const lines = preprocessText(text);
-    const commands = [];
+    const { lines, nestedTools } = preprocessText(text);
+    const commands = [...nestedTools];
     let finishAccumulator = null;
     lines.forEach((line) => {
       if (!line || line.startsWith("#")) return;
@@ -61,8 +151,8 @@ var LLMResponseParser = class {
       if (parts.length === 0) return;
       const cmd = parts[0];
       switch (cmd) {
-        case "
-          this.
+        case "list_directory":
+          this.handleListDirectory(parts, line, commands);
           break;
         case "grep":
           this.handleGrep(parts, line, commands);
@@ -113,18 +203,17 @@ var LLMResponseParser = class {
   skip(message) {
     return { name: "_skip", arguments: { message } };
   }
-
+  handleListDirectory(parts, rawLine, commands) {
     if (parts.length < 2) {
       commands.push(this.skip(
-        `[SKIPPED] Your command "${rawLine}" is missing a path. Correct format:
+        `[SKIPPED] Your command "${rawLine}" is missing a path. Correct format: list_directory <path> [pattern]. Example: list_directory src/`
       ));
       return;
     }
    const path = parts[1];
    const pattern = parts[2]?.replace(/^"|"$/g, "") ?? null;
-    commands.push({ name: "
+    commands.push({ name: "list_directory", arguments: { path, pattern } });
  }
-  // no glob tool in MCP
  handleGrep(parts, rawLine, commands) {
    if (parts.length < 3) {
      commands.push(this.skip(
@@ -196,7 +285,6 @@ var LLMResponseParser = class {
 };

 export {
-  LLMResponseParseError,
   LLMResponseParser
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-YV75OQTE.js.map

package/dist/chunk-YV75OQTE.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../tools/warp_grep/agent/parser.ts"],"sourcesContent":["// Parses assistant lines into structured tool calls\nimport type { ToolCall } from './types.js';\n\nconst VALID_COMMANDS = ['list_directory', 'grep', 'read', 'finish'] as const;\ntype ValidCommand = typeof VALID_COMMANDS[number];\n\nfunction isValidCommand(name: string): name is ValidCommand {\n return VALID_COMMANDS.includes(name as ValidCommand);\n}\n\nfunction getXmlElementText(xml: string, tagName: string): string | null {\n const regex = new RegExp(`<${tagName}>([\\\\s\\\\S]*?)</${tagName}>`, 'i');\n const match = xml.match(regex);\n return match ? match[1].trim() : null;\n}\n\nfunction parseNestedXmlTools(text: string): ToolCall[] {\n const tools: ToolCall[] = [];\n \n // Match any XML tool tags - this allows graceful handling of unknown tools\n // that might be added in future versions (they are simply ignored)\n const toolRegex = /<([a-z_][a-z0-9_]*)>([\\s\\S]*?)<\\/\\1>/gi;\n let match;\n \n while ((match = toolRegex.exec(text)) !== null) {\n const rawToolName = match[1].toLowerCase();\n const content = match[2];\n \n // Skip unknown tools silently - enables forward compatibility\n if (!isValidCommand(rawToolName)) continue;\n \n const toolName = rawToolName;\n \n if (toolName === 'list_directory') {\n const path = getXmlElementText(content, 'path');\n const pattern = getXmlElementText(content, 'pattern');\n if (path) {\n tools.push({ name: 'list_directory', arguments: { path, pattern } });\n }\n } else if (toolName === 'grep') {\n const pattern = getXmlElementText(content, 'pattern');\n const subDir = getXmlElementText(content, 'sub_dir');\n const glob = getXmlElementText(content, 'glob');\n if (pattern) {\n tools.push({ \n name: 'grep', \n arguments: { \n pattern, \n path: subDir || '.', \n ...(glob && { glob }) \n } \n });\n }\n } else if (toolName === 'read') {\n const path = getXmlElementText(content, 'path');\n const linesStr = getXmlElementText(content, 'lines');\n if (path) {\n const args: Record<string, unknown> = { path };\n if (linesStr) {\n const ranges: Array<[number, number]> = [];\n for (const rangeStr of linesStr.split(',')) {\n const trimmed = rangeStr.trim();\n if (!trimmed) continue;\n const [s, e] = trimmed.split('-').map(v => parseInt(v.trim(), 10));\n if (Number.isFinite(s) && Number.isFinite(e)) {\n ranges.push([s, e]);\n } else if (Number.isFinite(s)) {\n // Single line like \"100\"\n ranges.push([s, s]);\n }\n }\n if (ranges.length === 1) {\n args.start = ranges[0][0];\n args.end = ranges[0][1];\n } else if (ranges.length > 1) {\n args.lines = ranges;\n }\n }\n tools.push({ name: 'read', arguments: args });\n }\n } else if (toolName === 'finish') {\n // Parse nested <file> elements\n const fileRegex = /<file>([\\s\\S]*?)<\\/file>/gi;\n const files: Array<{ path: string; lines: Array<[number, number]> }> = [];\n let fileMatch;\n \n while ((fileMatch = fileRegex.exec(content)) !== null) {\n const fileContent = fileMatch[1];\n const filePath = getXmlElementText(fileContent, 'path');\n const linesStr = getXmlElementText(fileContent, 'lines');\n \n if (filePath && linesStr) {\n const ranges: Array<[number, number]> = [];\n for (const rangeStr of linesStr.split(',')) {\n if (rangeStr.trim() === '*') {\n ranges.push([1, 999999]); // Entire file\n } else {\n const [s, e] = rangeStr.split('-').map(v => parseInt(v.trim(), 10));\n if (Number.isFinite(s) && Number.isFinite(e)) {\n ranges.push([s, e]);\n }\n }\n }\n if (ranges.length > 0) {\n files.push({ path: filePath, lines: ranges 
});\n }\n }\n }\n \n if (files.length > 0) {\n tools.push({ name: 'finish', arguments: { files } });\n }\n }\n }\n \n return tools;\n}\n\nfunction preprocessText(text: string): { lines: string[]; nestedTools: ToolCall[] } {\n let processed = text.replace(/<think>[\\s\\S]*?<\\/think>/gi, '');\n const nestedTools = parseNestedXmlTools(processed);\n const openingTagRegex = /<tool_call>|<tool>/gi;\n const closingTagRegex = /<\\/tool_call>|<\\/tool>/gi;\n \n const openingMatches = processed.match(openingTagRegex) || [];\n const closingMatches = processed.match(closingTagRegex) || [];\n \n if (openingMatches.length > closingMatches.length) {\n const lastClosingMatch = /<\\/tool_call>|<\\/tool>/gi;\n let lastClosingIndex = -1;\n let match;\n while ((match = lastClosingMatch.exec(processed)) !== null) {\n lastClosingIndex = match.index + match[0].length;\n }\n if (lastClosingIndex > 0) {\n processed = processed.slice(0, lastClosingIndex);\n }\n }\n const toolCallLines: string[] = [];\n const toolTagRegex = /<tool_call>([\\s\\S]*?)<\\/tool_call>|<tool>([\\s\\S]*?)<\\/tool>/gi;\n let tagMatch;\n \n while ((tagMatch = toolTagRegex.exec(processed)) !== null) {\n const content = (tagMatch[1] || tagMatch[2] || '').trim();\n if (content) {\n const lines = content.split(/\\r?\\n/).map(l => l.trim()).filter(l => l);\n toolCallLines.push(...lines);\n }\n }\n \n // Also extract raw tool calls (lines starting with valid commands)\n const allLines = processed.split(/\\r?\\n/).map(l => l.trim());\n for (const line of allLines) {\n if (!line) continue;\n if (line.startsWith('<')) continue;\n \n const firstWord = line.split(/\\s/)[0];\n if (VALID_COMMANDS.includes(firstWord as ValidCommand)) {\n if (!toolCallLines.includes(line)) {\n toolCallLines.push(line);\n }\n }\n }\n \n return { lines: toolCallLines, nestedTools };\n}\n\nexport class LLMResponseParser {\n private readonly finishSpecSplitRe = /,(?=[^,\\s]+:)/;\n\n parse(text: string): ToolCall[] {\n if (typeof text !== 'string') {\n throw new TypeError('Command text must be a string.');\n }\n \n // Preprocess to handle XML tags\n const { lines, nestedTools } = preprocessText(text);\n \n // Start with nested XML tools (new format)\n const commands: ToolCall[] = [...nestedTools];\n let finishAccumulator: Map<string, number[][]> | null = null;\n\n lines.forEach((line) => {\n if (!line || line.startsWith('#')) return;\n const parts = this.splitLine(line);\n if (parts.length === 0) return;\n const cmd = parts[0];\n \n switch (cmd) {\n case 'list_directory':\n this.handleListDirectory(parts, line, commands);\n break;\n case 'grep':\n this.handleGrep(parts, line, commands);\n break;\n case 'read':\n this.handleRead(parts, line, commands);\n break;\n case 'finish':\n finishAccumulator = this.handleFinish(parts, line, commands, finishAccumulator);\n break;\n default:\n // Silently ignore unknown commands\n break;\n }\n });\n\n if (finishAccumulator) {\n const map = finishAccumulator as Map<string, number[][]>;\n const entries = [...map.entries()];\n const filesPayload = entries.map(([path, ranges]) => ({\n path,\n lines: [...ranges].sort((a, b) => a[0] - b[0]) as Array<[number, number]>,\n }));\n commands.push({ name: 'finish', arguments: { files: filesPayload } });\n }\n return commands;\n }\n\n private splitLine(line: string): string[] {\n // Split by whitespace but keep quoted blocks as one\n const parts: string[] = [];\n let current = '';\n let inSingle = false;\n for (let i = 0; i < line.length; i++) {\n const ch = line[i];\n if (ch === \"'\" && line[i - 1] !== 
'\\\\') {\n inSingle = !inSingle;\n current += ch;\n } else if (!inSingle && /\\s/.test(ch)) {\n if (current) {\n parts.push(current);\n current = '';\n }\n } else {\n current += ch;\n }\n }\n if (current) parts.push(current);\n return parts;\n }\n\n /** Helper to create a _skip tool call with an error message */\n private skip(message: string): ToolCall {\n return { name: '_skip', arguments: { message } };\n }\n\n private handleListDirectory(parts: string[], rawLine: string, commands: ToolCall[]) {\n // list_directory <path> [pattern]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: list_directory <path> [pattern]. Example: list_directory src/`\n ));\n return;\n }\n const path = parts[1];\n const pattern = parts[2]?.replace(/^\"|\"$/g, '') ?? null;\n commands.push({ name: 'list_directory', arguments: { path, pattern } });\n }\n\n private handleGrep(parts: string[], rawLine: string, commands: ToolCall[]) {\n // grep '<pattern>' <path>\n if (parts.length < 3) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing arguments. ` +\n `Correct format: grep '<pattern>' <path>. Example: grep 'TODO' src/`\n ));\n return;\n }\n let pat = parts[1];\n // Be lenient: accept unquoted patterns by treating the first arg as the pattern\n if (pat.startsWith(\"'\") && pat.endsWith(\"'\")) {\n pat = pat.slice(1, -1);\n }\n // If pattern is empty after processing, skip\n if (!pat) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" has an empty pattern. ` +\n `Provide a non-empty search pattern. Example: grep 'function' src/`\n ));\n return;\n }\n commands.push({ name: 'grep', arguments: { pattern: pat, path: parts[2] } });\n }\n\n private handleRead(parts: string[], rawLine: string, commands: ToolCall[]) {\n // read <path>[:start-end]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: read <path> or read <path>:<start>-<end>. Example: read src/index.ts:1-50`\n ));\n return;\n }\n const spec = parts[1];\n const rangeIdx = spec.indexOf(':');\n if (rangeIdx === -1) {\n commands.push({ name: 'read', arguments: { path: spec } });\n return;\n }\n const filePath = spec.slice(0, rangeIdx);\n const range = spec.slice(rangeIdx + 1);\n const [s, e] = range.split('-').map(v => parseInt(v, 10));\n // If range is invalid, fallback to reading the whole file\n if (!Number.isFinite(s) || !Number.isFinite(e)) {\n commands.push({ name: 'read', arguments: { path: filePath } });\n return;\n }\n commands.push({ name: 'read', arguments: { path: filePath, start: s, end: e } });\n }\n\n private handleFinish(parts: string[], rawLine: string, commands: ToolCall[], acc: Map<string, number[][]> | null) {\n // finish file1:1-10,20-30 file2:5-7\n const map = acc ?? new Map<string, number[][]>();\n const args = parts.slice(1);\n for (const token of args) {\n const [filePath, rangesText] = token.split(':', 2);\n if (!filePath || !rangesText) {\n // Skip this malformed token, continue processing others\n commands.push(this.skip(\n `[SKIPPED] Invalid finish token \"${token}\". ` +\n `Correct format: finish <path>:<start>-<end>. 
Example: finish src/index.ts:1-50`\n ));\n continue;\n }\n const rangeSpecs = rangesText.split(',').filter(Boolean);\n for (const spec of rangeSpecs) {\n const [s, e] = spec.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {\n // Skip this invalid range, continue with others\n commands.push(this.skip(\n `[SKIPPED] Invalid range \"${spec}\" in \"${token}\". ` +\n `Ranges must be <start>-<end> where start <= end. Example: 1-50`\n ));\n continue;\n }\n const arr = map.get(filePath) ?? [];\n arr.push([s, e]);\n map.set(filePath, arr);\n }\n }\n return map;\n }\n}\n"],"mappings":";AAGA,IAAM,iBAAiB,CAAC,kBAAkB,QAAQ,QAAQ,QAAQ;AAGlE,SAAS,eAAe,MAAoC;AAC1D,SAAO,eAAe,SAAS,IAAoB;AACrD;AAEA,SAAS,kBAAkB,KAAa,SAAgC;AACtE,QAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,kBAAkB,OAAO,KAAK,GAAG;AACrE,QAAM,QAAQ,IAAI,MAAM,KAAK;AAC7B,SAAO,QAAQ,MAAM,CAAC,EAAE,KAAK,IAAI;AACnC;AAEA,SAAS,oBAAoB,MAA0B;AACrD,QAAM,QAAoB,CAAC;AAI3B,QAAM,YAAY;AAClB,MAAI;AAEJ,UAAQ,QAAQ,UAAU,KAAK,IAAI,OAAO,MAAM;AAC9C,UAAM,cAAc,MAAM,CAAC,EAAE,YAAY;AACzC,UAAM,UAAU,MAAM,CAAC;AAGvB,QAAI,CAAC,eAAe,WAAW,EAAG;AAElC,UAAM,WAAW;AAEjB,QAAI,aAAa,kBAAkB;AACjC,YAAM,OAAO,kBAAkB,SAAS,MAAM;AAC9C,YAAM,UAAU,kBAAkB,SAAS,SAAS;AACpD,UAAI,MAAM;AACR,cAAM,KAAK,EAAE,MAAM,kBAAkB,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,MACrE;AAAA,IACF,WAAW,aAAa,QAAQ;AAC9B,YAAM,UAAU,kBAAkB,SAAS,SAAS;AACpD,YAAM,SAAS,kBAAkB,SAAS,SAAS;AACnD,YAAM,OAAO,kBAAkB,SAAS,MAAM;AAC9C,UAAI,SAAS;AACX,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,WAAW;AAAA,YACT;AAAA,YACA,MAAM,UAAU;AAAA,YAChB,GAAI,QAAQ,EAAE,KAAK;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,WAAW,aAAa,QAAQ;AAC9B,YAAM,OAAO,kBAAkB,SAAS,MAAM;AAC9C,YAAM,WAAW,kBAAkB,SAAS,OAAO;AACnD,UAAI,MAAM;AACR,cAAM,OAAgC,EAAE,KAAK;AAC7C,YAAI,UAAU;AACZ,gBAAM,SAAkC,CAAC;AACzC,qBAAW,YAAY,SAAS,MAAM,GAAG,GAAG;AAC1C,kBAAM,UAAU,SAAS,KAAK;AAC9B,gBAAI,CAAC,QAAS;AACd,kBAAM,CAAC,GAAG,CAAC,IAAI,QAAQ,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,EAAE,KAAK,GAAG,EAAE,CAAC;AACjE,gBAAI,OAAO,SAAS,CAAC,KAAK,OAAO,SAAS,CAAC,GAAG;AAC5C,qBAAO,KAAK,CAAC,GAAG,CAAC,CAAC;AAAA,YACpB,WAAW,OAAO,SAAS,CAAC,GAAG;AAE7B,qBAAO,KAAK,CAAC,GAAG,CAAC,CAAC;AAAA,YACpB;AAAA,UACF;AACA,cAAI,OAAO,WAAW,GAAG;AACvB,iBAAK,QAAQ,OAAO,CAAC,EAAE,CAAC;AACxB,iBAAK,MAAM,OAAO,CAAC,EAAE,CAAC;AAAA,UACxB,WAAW,OAAO,SAAS,GAAG;AAC5B,iBAAK,QAAQ;AAAA,UACf;AAAA,QACF;AACA,cAAM,KAAK,EAAE,MAAM,QAAQ,WAAW,KAAK,CAAC;AAAA,MAC9C;AAAA,IACF,WAAW,aAAa,UAAU;AAEhC,YAAM,YAAY;AAClB,YAAM,QAAiE,CAAC;AACxE,UAAI;AAEJ,cAAQ,YAAY,UAAU,KAAK,OAAO,OAAO,MAAM;AACrD,cAAM,cAAc,UAAU,CAAC;AAC/B,cAAM,WAAW,kBAAkB,aAAa,MAAM;AACtD,cAAM,WAAW,kBAAkB,aAAa,OAAO;AAEvD,YAAI,YAAY,UAAU;AACxB,gBAAM,SAAkC,CAAC;AACzC,qBAAW,YAAY,SAAS,MAAM,GAAG,GAAG;AAC1C,gBAAI,SAAS,KAAK,MAAM,KAAK;AAC3B,qBAAO,KAAK,CAAC,GAAG,MAAM,CAAC;AAAA,YACzB,OAAO;AACL,oBAAM,CAAC,GAAG,CAAC,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,EAAE,KAAK,GAAG,EAAE,CAAC;AAClE,kBAAI,OAAO,SAAS,CAAC,KAAK,OAAO,SAAS,CAAC,GAAG;AAC5C,uBAAO,KAAK,CAAC,GAAG,CAAC,CAAC;AAAA,cACpB;AAAA,YACF;AAAA,UACF;AACA,cAAI,OAAO,SAAS,GAAG;AACrB,kBAAM,KAAK,EAAE,MAAM,UAAU,OAAO,OAAO,CAAC;AAAA,UAC9C;AAAA,QACF;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,GAAG;AACpB,cAAM,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,MAAM,EAAE,CAAC;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,MAA4D;AAClF,MAAI,YAAY,KAAK,QAAQ,8BAA8B,EAAE;AAC7D,QAAM,cAAc,oBAAoB,SAAS;AACjD,QAAM,kBAAkB;AACxB,QAAM,kBAAkB;AAExB,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAC5D,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAE5D,MAAI,eAAe,SAAS,eAAe,QAAQ;AACjD,UAAM,mBAAmB;AACzB,QAAI,mBAAmB;AACvB,QAAI;AACJ,YAAQ,QAAQ,iBAAiB,KAAK,SAAS,OAAO,MAAM;AAC1D,yBAAmB,MAAM,QAAQ,MAAM,CAAC,EAAE;AAAA,IAC5C;AACA,QAAI,mBAAmB,GAAG;AACxB,kBAAY,U
AAU,MAAM,GAAG,gBAAgB;AAAA,IACjD;AAAA,EACF;AACA,QAAM,gBAA0B,CAAC;AACjC,QAAM,eAAe;AACrB,MAAI;AAEJ,UAAQ,WAAW,aAAa,KAAK,SAAS,OAAO,MAAM;AACzD,UAAM,WAAW,SAAS,CAAC,KAAK,SAAS,CAAC,KAAK,IAAI,KAAK;AACxD,QAAI,SAAS;AACX,YAAM,QAAQ,QAAQ,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAK,CAAC;AACrE,oBAAc,KAAK,GAAG,KAAK;AAAA,IAC7B;AAAA,EACF;AAGA,QAAM,WAAW,UAAU,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAC3D,aAAW,QAAQ,UAAU;AAC3B,QAAI,CAAC,KAAM;AACX,QAAI,KAAK,WAAW,GAAG,EAAG;AAE1B,UAAM,YAAY,KAAK,MAAM,IAAI,EAAE,CAAC;AACpC,QAAI,eAAe,SAAS,SAAyB,GAAG;AACtD,UAAI,CAAC,cAAc,SAAS,IAAI,GAAG;AACjC,sBAAc,KAAK,IAAI;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,eAAe,YAAY;AAC7C;AAEO,IAAM,oBAAN,MAAwB;AAAA,EACZ,oBAAoB;AAAA,EAErC,MAAM,MAA0B;AAC9B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,UAAU,gCAAgC;AAAA,IACtD;AAGA,UAAM,EAAE,OAAO,YAAY,IAAI,eAAe,IAAI;AAGlD,UAAM,WAAuB,CAAC,GAAG,WAAW;AAC5C,QAAI,oBAAoD;AAExD,UAAM,QAAQ,CAAC,SAAS;AACtB,UAAI,CAAC,QAAQ,KAAK,WAAW,GAAG,EAAG;AACnC,YAAM,QAAQ,KAAK,UAAU,IAAI;AACjC,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,MAAM,MAAM,CAAC;AAEnB,cAAQ,KAAK;AAAA,QACX,KAAK;AACH,eAAK,oBAAoB,OAAO,MAAM,QAAQ;AAC9C;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,8BAAoB,KAAK,aAAa,OAAO,MAAM,UAAU,iBAAiB;AAC9E;AAAA,QACF;AAEE;AAAA,MACJ;AAAA,IACF,CAAC;AAED,QAAI,mBAAmB;AACrB,YAAM,MAAM;AACZ,YAAM,UAAU,CAAC,GAAG,IAAI,QAAQ,CAAC;AACjC,YAAM,eAAe,QAAQ,IAAI,CAAC,CAAC,MAAM,MAAM,OAAO;AAAA,QACpD;AAAA,QACA,OAAO,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAAA,MAC/C,EAAE;AACF,eAAS,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,OAAO,aAAa,EAAE,CAAC;AAAA,IACtE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAwB;AAExC,UAAM,QAAkB,CAAC;AACzB,QAAI,UAAU;AACd,QAAI,WAAW;AACf,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,YAAM,KAAK,KAAK,CAAC;AACjB,UAAI,OAAO,OAAO,KAAK,IAAI,CAAC,MAAM,MAAM;AACtC,mBAAW,CAAC;AACZ,mBAAW;AAAA,MACb,WAAW,CAAC,YAAY,KAAK,KAAK,EAAE,GAAG;AACrC,YAAI,SAAS;AACX,gBAAM,KAAK,OAAO;AAClB,oBAAU;AAAA,QACZ;AAAA,MACF,OAAO;AACL,mBAAW;AAAA,MACb;AAAA,IACF;AACA,QAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,WAAO;AAAA,EACT;AAAA;AAAA,EAGQ,KAAK,SAA2B;AACtC,WAAO,EAAE,MAAM,SAAS,WAAW,EAAE,QAAQ,EAAE;AAAA,EACjD;AAAA,EAEQ,oBAAoB,OAAiB,SAAiB,UAAsB;AAElF,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,MAAM,CAAC,GAAG,QAAQ,UAAU,EAAE,KAAK;AACnD,aAAS,KAAK,EAAE,MAAM,kBAAkB,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,EACxE;AAAA,EAEQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,QAAI,MAAM,MAAM,CAAC;AAEjB,QAAI,IAAI,WAAW,GAAG,KAAK,IAAI,SAAS,GAAG,GAAG;AAC5C,YAAM,IAAI,MAAM,GAAG,EAAE;AAAA,IACvB;AAEA,QAAI,CAAC,KAAK;AACR,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,SAAS,KAAK,MAAM,MAAM,CAAC,EAAE,EAAE,CAAC;AAAA,EAC7E;AAAA,EAEQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,QAAI,aAAa,IAAI;AACnB,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,KAAK,EAAE,CAAC;AACzD;AAAA,IACF;AACA,UAAM,WAAW,KAAK,MAAM,GAAG,QAAQ;AACvC,UAAM,QAAQ,KAAK,MAAM,WAAW,CAAC;AACrC,UAAM,CAAC,GAAG,CAAC,IAAI,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AAExD,QAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,GAAG;AAC9C,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,SAAS,EAAE,CAAC;AAC7D;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,UAAU,OAAO,GAAG,KAAK,EAAE,EAAE,CAAC;AAAA,EACjF;AAAA,EAEQ,aAAa,OAAiB,SAAiB,UAAsB,KA
AqC;AAEhH,UAAM,MAAM,OAAO,oBAAI,IAAwB;AAC/C,UAAM,OAAO,MAAM,MAAM,CAAC;AAC1B,eAAW,SAAS,MAAM;AACxB,YAAM,CAAC,UAAU,UAAU,IAAI,MAAM,MAAM,KAAK,CAAC;AACjD,UAAI,CAAC,YAAY,CAAC,YAAY;AAE5B,iBAAS,KAAK,KAAK;AAAA,UACjB,mCAAmC,KAAK;AAAA,QAE1C,CAAC;AACD;AAAA,MACF;AACA,YAAM,aAAa,WAAW,MAAM,GAAG,EAAE,OAAO,OAAO;AACvD,iBAAW,QAAQ,YAAY;AAC7B,cAAM,CAAC,GAAG,CAAC,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACvD,YAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,KAAK,IAAI,GAAG;AAEvD,mBAAS,KAAK,KAAK;AAAA,YACjB,4BAA4B,IAAI,SAAS,KAAK;AAAA,UAEhD,CAAC;AACD;AAAA,QACF;AACA,cAAM,MAAM,IAAI,IAAI,QAAQ,KAAK,CAAC;AAClC,YAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACf,YAAI,IAAI,UAAU,GAAG;AAAA,MACvB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
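The parser diff above adds nested XML tool calls on top of the existing line-oriented commands: any top-level <list_directory>, <grep>, <read>, or <finish> element is turned into a tool call before the line parser runs. Below is a minimal sketch of the accepted input and the calls it produces; the ToolCall alias is a local stand-in (the real type lives in the package's ./types.js), and the argument keys and range handling are read off the compiled parser shown above, not from separate documentation.

// Sketch only: tag names, argument keys, and range expansion mirror parseNestedXmlTools above.
type ToolCall = { name: string; arguments: Record<string, unknown> };

const modelResponse = `
<grep>
  <pattern>resolveUnderRepo</pattern>
  <sub_dir>tools/warp_grep</sub_dir>
</grep>
<read>
  <path>tools/warp_grep/utils/paths.ts</path>
  <lines>1-40,120</lines>
</read>
<finish>
  <file>
    <path>tools/warp_grep/utils/paths.ts</path>
    <lines>*</lines>
  </file>
</finish>`;

// Per the diff: grep defaults path to "." when sub_dir is absent, a lone line number
// becomes an [n, n] range, multiple ranges go into `lines`, and "*" expands to [1, 999999].
const expected: ToolCall[] = [
  { name: "grep", arguments: { pattern: "resolveUnderRepo", path: "tools/warp_grep" } },
  { name: "read", arguments: { path: "tools/warp_grep/utils/paths.ts", lines: [[1, 40], [120, 120]] } },
  { name: "finish", arguments: { files: [{ path: "tools/warp_grep/utils/paths.ts", lines: [[1, 999999]] }] } },
];
console.log(modelResponse.length, expected);

Unknown tags are skipped silently, which keeps the format forward compatible with tools added in later versions.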
package/dist/{chunk-ZJIIICRA.js → chunk-ZO4PPFCZ.js}
@@ -11,8 +11,9 @@ import {
   runRipgrep
 } from "./chunk-TPP2UGQP.js";
 import {
+  AGENT_CONFIG,
   DEFAULT_EXCLUDES
-} from "./chunk-
+} from "./chunk-XT5ZO6ES.js";

 // tools/warp_grep/providers/local.ts
 import fs from "fs/promises";
@@ -23,7 +24,15 @@ var LocalRipgrepProvider = class {
     this.excludes = excludes;
   }
   async grep(params) {
-
+    let abs;
+    try {
+      abs = resolveUnderRepo(this.repoRoot, params.path);
+    } catch (err) {
+      return {
+        lines: [],
+        error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`
+      };
+    }
     const stat = await fs.stat(abs).catch(() => null);
     if (!stat) return { lines: [] };
     const targetArg = abs === path.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
@@ -35,6 +44,9 @@ var LocalRipgrepProvider = class {
       "--color=never",
       "--trim",
       "--max-columns=400",
+      "-C",
+      "1",
+      ...params.glob ? ["--glob", params.glob] : [],
       ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
       params.pattern,
       targetArg || "."
@@ -59,29 +71,24 @@ Details: ${res.stderr}` : ""}`
       };
     }
     const lines = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
-
-
-
-
-
-    const args = [
-      "--no-config",
-      "--files",
-      "-g",
-      params.pattern,
-      ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
-      targetArg || "."
-    ];
-    const res = await runRipgrep(args, { cwd: this.repoRoot });
-    if (res.exitCode === -1) {
-      console.warn(`[warp_grep] ripgrep not available for glob: ${res.stderr || "execution failed"}`);
-      return { files: [] };
+    if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {
+      return {
+        lines: [],
+        error: "query not specific enough, tool tried to return too much context and failed"
+      };
     }
-
-    return { files };
+    return { lines };
   }
   async read(params) {
-
+    let abs;
+    try {
+      abs = resolveUnderRepo(this.repoRoot, params.path);
+    } catch (err) {
+      return {
+        lines: [],
+        error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`
+      };
+    }
     const stat = await fs.stat(abs).catch(() => null);
     if (!stat || !stat.isFile()) {
       return {
@@ -101,7 +108,15 @@ Details: ${res.stderr}` : ""}`
         error: `[UNREADABLE FILE] You tried to read "${params.path}" but this file is either too large or not a text file, so it cannot be read. Try a different file.`
       };
     }
-
+    let lines;
+    try {
+      lines = await readAllLines(abs);
+    } catch (err) {
+      return {
+        lines: [],
+        error: `[READ ERROR] Failed to read "${params.path}": ${err instanceof Error ? err.message : String(err)}`
+      };
+    }
     const total = lines.length;
     let s = params.start ?? 1;
     let e = Math.min(params.end ?? total, total);
@@ -114,27 +129,43 @@ Details: ${res.stderr}` : ""}`
       const content = lines[i - 1] ?? "";
       out.push(`${i}|${content}`);
     }
+    if (out.length > AGENT_CONFIG.MAX_READ_LINES) {
+      const truncated = out.slice(0, AGENT_CONFIG.MAX_READ_LINES);
+      truncated.push(`... [truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${out.length} lines]`);
+      return { lines: truncated };
+    }
     return { lines: out };
   }
-  async
-
+  async listDirectory(params) {
+    let abs;
+    try {
+      abs = resolveUnderRepo(this.repoRoot, params.path);
+    } catch {
+      return [];
+    }
     const stat = await fs.stat(abs).catch(() => null);
     if (!stat || !stat.isDirectory()) {
       return [];
     }
-    const maxResults = params.maxResults ??
-    const maxDepth = params.maxDepth ??
+    const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;
+    const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;
     const regex = params.pattern ? new RegExp(params.pattern) : null;
     const results = [];
+    let timedOut = false;
+    const startTime = Date.now();
     async function walk(dir, depth) {
+      if (Date.now() - startTime > AGENT_CONFIG.LIST_TIMEOUT_MS) {
+        timedOut = true;
+        return;
+      }
       if (depth > maxDepth || results.length >= maxResults) return;
       const entries = await fs.readdir(dir, { withFileTypes: true });
       for (const entry of entries) {
+        if (timedOut || results.length >= maxResults) break;
         const full = path.join(dir, entry.name);
         const rel = toRepoRelative(abs, full).replace(/^[.][/\\]?/, "");
         if (DEFAULT_EXCLUDES.some((ex) => rel.split(path.sep).includes(ex))) continue;
         if (regex && !regex.test(entry.name)) continue;
-        if (results.length >= maxResults) break;
         results.push({
           name: entry.name,
           path: toRepoRelative(path.resolve(""), full),
@@ -155,4 +186,4 @@ Details: ${res.stderr}` : ""}`
 export {
   LocalRipgrepProvider
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-ZO4PPFCZ.js.map

package/dist/chunk-ZO4PPFCZ.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../tools/warp_grep/providers/local.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport fssync from 'fs';\nimport path from 'path';\nimport { runRipgrep } from '../utils/ripgrep.js';\nimport { ensureWithinRepo, resolveUnderRepo, toRepoRelative, isSymlink, isTextualFile } from '../utils/paths.js';\nimport type { WarpGrepProvider, GrepResult, ReadResult, ListDirectoryEntry } from './types.js';\nimport { readAllLines } from '../utils/files.js';\nimport { DEFAULT_EXCLUDES, AGENT_CONFIG } from '../agent/config.js';\n\nexport class LocalRipgrepProvider implements WarpGrepProvider {\n constructor(private readonly repoRoot: string, private readonly excludes: string[] = DEFAULT_EXCLUDES) {}\n\n async grep(params: { pattern: string; path: string; glob?: string }): Promise<GrepResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat) return { lines: [] };\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--no-heading',\n '--with-filename',\n '--line-number',\n '--color=never',\n '--trim',\n '--max-columns=400',\n '-C', '1',\n ...(params.glob ? ['--glob', params.glob] : []),\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n params.pattern,\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n \n // Gracefully handle ripgrep not being available\n if (res.exitCode === -1) {\n return {\n lines: [],\n error: `[RIPGREP NOT AVAILABLE] ripgrep (rg) is required but failed to execute. Please install it:\\n` +\n ` • macOS: brew install ripgrep\\n` +\n ` • Ubuntu/Debian: apt install ripgrep\\n` +\n ` • Windows: choco install ripgrep\\n` +\n ` • Or visit: https://github.com/BurntSushi/ripgrep#installation\\n` +\n `Exit code: ${res.exitCode}${res.stderr ? `\\nDetails: ${res.stderr}` : ''}`,\n };\n }\n \n // Handle other ripgrep errors gracefully\n if (res.exitCode !== 0 && res.exitCode !== 1) {\n return {\n lines: [],\n error: `[RIPGREP ERROR] grep failed with exit code ${res.exitCode}${res.stderr ? `: ${res.stderr}` : ''}`,\n };\n }\n \n const lines = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return {\n lines: [],\n error: 'query not specific enough, tool tried to return too much context and failed',\n };\n }\n \n return { lines };\n }\n\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const stat = await fs.stat(abs).catch(() => null);\n \n // Gracefully handle file not found / not a file\n if (!stat || !stat.isFile()) {\n return {\n lines: [],\n error: `[FILE NOT FOUND] You tried to read \"${params.path}\" but there is no file at this path. ` +\n `Double-check the path exists and is spelled correctly.`,\n };\n }\n \n // Gracefully handle symlinks\n if (isSymlink(abs)) {\n return {\n lines: [],\n error: `[SYMLINK] You tried to read \"${params.path}\" but this is a symlink. 
` +\n `Try reading the actual file it points to instead.`,\n };\n }\n \n // Gracefully handle non-text or too-large files\n if (!isTextualFile(abs)) {\n return {\n lines: [],\n error: `[UNREADABLE FILE] You tried to read \"${params.path}\" but this file is either too large ` +\n `or not a text file, so it cannot be read. Try a different file.`,\n };\n }\n \n let lines: string[];\n try {\n lines = await readAllLines(abs);\n } catch (err) {\n return {\n lines: [],\n error: `[READ ERROR] Failed to read \"${params.path}\": ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const total = lines.length;\n let s = params.start ?? 1;\n let e = Math.min(params.end ?? total, total);\n if (s > total && total > 0) {\n // Model hallucinated range - fallback to full file\n s = 1;\n e = total;\n }\n const out: string[] = [];\n for (let i = s; i <= e; i += 1) {\n const content = lines[i - 1] ?? '';\n out.push(`${i}|${content}`);\n }\n if (out.length > AGENT_CONFIG.MAX_READ_LINES) {\n const truncated = out.slice(0, AGENT_CONFIG.MAX_READ_LINES);\n truncated.push(`... [truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${out.length} lines]`);\n return { lines: truncated };\n }\n \n return { lines: out };\n }\n\n async listDirectory(params: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }): Promise<ListDirectoryEntry[]> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch {\n // Path outside repo - return empty (graceful failure)\n return [];\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat || !stat.isDirectory()) {\n return [];\n }\n const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;\n const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;\n const regex = params.pattern ? new RegExp(params.pattern) : null;\n\n const results: ListDirectoryEntry[] = [];\n let timedOut = false;\n const startTime = Date.now();\n \n async function walk(dir: string, depth: number) {\n if (Date.now() - startTime > AGENT_CONFIG.LIST_TIMEOUT_MS) {\n timedOut = true;\n return;\n }\n if (depth > maxDepth || results.length >= maxResults) return;\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (timedOut || results.length >= maxResults) break;\n const full = path.join(dir, entry.name);\n const rel = toRepoRelative(abs, full).replace(/^[.][/\\\\]?/, '');\n if (DEFAULT_EXCLUDES.some((ex) => rel.split(path.sep).includes(ex))) continue;\n if (regex && !regex.test(entry.name)) continue;\n results.push({\n name: entry.name,\n path: toRepoRelative(path.resolve(''), full), // relative display\n type: entry.isDirectory() ? 
'dir' : 'file',\n depth,\n });\n if (entry.isDirectory()) {\n await walk(full, depth + 1);\n }\n }\n }\n await walk(abs, 0);\n return results;\n }\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA,OAAO,QAAQ;AAEf,OAAO,UAAU;AAOV,IAAM,uBAAN,MAAuD;AAAA,EAC5D,YAA6B,UAAmC,WAAqB,kBAAkB;AAA1E;AAAmC;AAAA,EAAwC;AAAA,EAExG,MAAM,KAAK,QAA+E;AACxF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,KAAM,QAAO,EAAE,OAAO,CAAC,EAAE;AAC9B,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAAM;AAAA,MACN,GAAI,OAAO,OAAO,CAAC,UAAU,OAAO,IAAI,IAAI,CAAC;AAAA,MAC7C,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AAGzD,QAAI,IAAI,aAAa,IAAI;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,aAKc,IAAI,QAAQ,GAAG,IAAI,SAAS;AAAA,WAAc,IAAI,MAAM,KAAK,EAAE;AAAA,MAClF;AAAA,IACF;AAGA,QAAI,IAAI,aAAa,KAAK,IAAI,aAAa,GAAG;AAC5C,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,8CAA8C,IAAI,QAAQ,GAAG,IAAI,SAAS,KAAK,IAAI,MAAM,KAAK,EAAE;AAAA,MACzG;AAAA,IACF;AAEA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,QAAI,MAAM,SAAS,aAAa,kBAAkB;AAChD,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAA6E;AACtF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAGhD,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,GAAG;AAC3B,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,uCAAuC,OAAO,IAAI;AAAA,MAE3D;AAAA,IACF;AAGA,QAAI,UAAU,GAAG,GAAG;AAClB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI;AAAA,MAEpD;AAAA,IACF;AAGA,QAAI,CAAC,cAAc,GAAG,GAAG;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,wCAAwC,OAAO,IAAI;AAAA,MAE5D;AAAA,IACF;AAEA,QAAI;AACJ,QAAI;AACF,cAAQ,MAAM,aAAa,GAAG;AAAA,IAChC,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1G;AAAA,IACF;AACA,UAAM,QAAQ,MAAM;AACpB,QAAI,IAAI,OAAO,SAAS;AACxB,QAAI,IAAI,KAAK,IAAI,OAAO,OAAO,OAAO,KAAK;AAC3C,QAAI,IAAI,SAAS,QAAQ,GAAG;AAE1B,UAAI;AACJ,UAAI;AAAA,IACN;AACA,UAAM,MAAgB,CAAC;AACvB,aAAS,IAAI,GAAG,KAAK,GAAG,KAAK,GAAG;AAC9B,YAAM,UAAU,MAAM,IAAI,CAAC,KAAK;AAChC,UAAI,KAAK,GAAG,CAAC,IAAI,OAAO,EAAE;AAAA,IAC5B;AACA,QAAI,IAAI,SAAS,aAAa,gBAAgB;AAC5C,YAAM,YAAY,IAAI,MAAM,GAAG,aAAa,cAAc;AAC1D,gBAAU,KAAK,2BAA2B,aAAa,cAAc,OAAO,IAAI,MAAM,SAAS;AAC/F,aAAO,EAAE,OAAO,UAAU;AAAA,IAC5B;AAEA,WAAO,EAAE,OAAO,IAAI;AAAA,EACtB;AAAA,EAEA,MAAM,cAAc,QAA0H;AAC5I,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AACA,UAAM,aAAa,OAAO,cAAc,aAAa;AACrD,UAAM,WAAW,OAAO,YAAY,aAAa;AACjD,UAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAE5D,UAAM,UAAgC,CAAC;AACvC,QAAI,WAAW;AACf,UAAM,YAAY,KAAK,IAAI;AAE3B,mBAAe,KAAK,KAAa,OAAe;AAC9C,UAAI,KAAK,IAAI,IAAI,YAAY,aAAa,iBAAiB;AACzD,mBAAW;AACX;AAAA,MACF;AACA,UAAI,QAAQ,YAAY,QAAQ,UAAU,WAAY;AACtD,YAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC7D,iBAAW,SAAS,SAAS;AAC3B,YAAI,YAAY,QAAQ,UAAU,WAAY;AAC9C,cAAM,O
AAO,KAAK,KAAK,KAAK,MAAM,IAAI;AACtC,cAAM,MAAM,eAAe,KAAK,IAAI,EAAE,QAAQ,cAAc,EAAE;AAC9D,YAAI,iBAAiB,KAAK,CAAC,OAAO,IAAI,MAAM,KAAK,GAAG,EAAE,SAAS,EAAE,CAAC,EAAG;AACrE,YAAI,SAAS,CAAC,MAAM,KAAK,MAAM,IAAI,EAAG;AACtC,gBAAQ,KAAK;AAAA,UACX,MAAM,MAAM;AAAA,UACZ,MAAM,eAAe,KAAK,QAAQ,EAAE,GAAG,IAAI;AAAA;AAAA,UAC3C,MAAM,MAAM,YAAY,IAAI,QAAQ;AAAA,UACpC;AAAA,QACF,CAAC;AACD,YAAI,MAAM,YAAY,GAAG;AACvB,gBAAM,KAAK,MAAM,QAAQ,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,KAAK,CAAC;AACjB,WAAO;AAAA,EACT;AACF;","names":[]}
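The LocalRipgrepProvider changes above route every path through resolveUnderRepo and return structured [PATH ERROR]/[READ ERROR] results instead of throwing, add one line of ripgrep context plus an optional glob filter, and cap grep, read, and directory-listing output through AGENT_CONFIG limits. A rough sketch of the argv that grep() now assembles, with placeholder excludes standing in for DEFAULT_EXCLUDES and a simplified repo-relative target:

// Illustrative argv assembly mirroring LocalRipgrepProvider.grep() in the diff above.
// The exclude list and target resolution are simplified placeholders, not SDK exports.
const excludes = ["node_modules", ".git", "dist"]; // assumed stand-in for DEFAULT_EXCLUDES
const params = { pattern: "TODO", path: "src", glob: "*.ts" as string | undefined };

const args = [
  "--no-config",
  "--no-heading",
  "--with-filename",
  "--line-number",
  "--color=never",
  "--trim",
  "--max-columns=400",
  "-C", "1",                                       // new in 0.2.59: one line of surrounding context
  ...(params.glob ? ["--glob", params.glob] : []), // new: optional glob narrowing
  ...excludes.flatMap((e) => ["-g", `!${e}`]),
  params.pattern,
  params.path || ".",                              // the real provider passes a repo-relative target here
];
console.log("rg " + args.join(" "));

If the resulting match list exceeds AGENT_CONFIG.MAX_OUTPUT_LINES, the provider returns an error telling the agent the query was not specific enough rather than flooding the context.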
package/dist/{client-CFoR--IU.d.ts → client-CextMMm9.d.ts}
@@ -2,11 +2,12 @@ import { RetryConfig } from './tools/utils/resilience.js';
 import { FastApplyClient } from './tools/fastapply/core.js';
 import { CodebaseSearchClient } from './tools/codebase_search/core.js';
 import { BrowserClient } from './tools/browser/core.js';
-import {
+import { WarpGrepClient } from './tools/warp_grep/client.js';
 import { MorphGit } from './git/client.js';
 import { OpenAIRouter, AnthropicRouter, GeminiRouter, RawRouter } from './modelrouter/core.js';
 import { EditFileConfig, EditFileInput, EditFileResult, EditChanges } from './tools/fastapply/types.js';
 import { CodebaseSearchConfig, CodebaseSearchInput, CodebaseSearchResult } from './tools/codebase_search/types.js';
+import { d as WarpGrepToolConfig, b as WarpGrepResult, c as WarpGrepContext } from './types-a_hxdPI6.js';
 import * as openai_resources_index_mjs from 'openai/resources/index.mjs';
 import * as _anthropic_ai_sdk_resources_messages_mjs from '@anthropic-ai/sdk/resources/messages.mjs';
 import * as ai from 'ai';
@@ -36,10 +37,10 @@ declare class OpenAIToolFactory {
     /**
      * Create an OpenAI-compatible warp grep tool
      *
-     * @param toolConfig - Tool configuration (
+     * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
      * @returns OpenAI ChatCompletionTool with execute and formatResult methods
      */
-    createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, '
+    createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, 'morphApiKey'>): openai_resources_index_mjs.ChatCompletionTool & {
         execute: (input: unknown) => Promise<WarpGrepResult>;
         formatResult: (result: WarpGrepResult) => string;
         getSystemPrompt: () => string;
@@ -93,10 +94,10 @@ declare class AnthropicToolFactory {
     /**
      * Create an Anthropic-compatible warp grep tool
      *
-     * @param toolConfig - Tool configuration (
+     * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
      * @returns Anthropic Tool with execute and formatResult methods
      */
-    createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, '
+    createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, 'morphApiKey'>): _anthropic_ai_sdk_resources_messages_mjs.Tool & {
         execute: (input: unknown) => Promise<WarpGrepResult>;
         formatResult: (result: WarpGrepResult) => string;
         getSystemPrompt: () => string;
@@ -150,21 +151,15 @@ declare class VercelToolFactory {
     /**
      * Create a Vercel AI SDK-compatible warp grep tool
      *
-     * @param toolConfig - Tool configuration (
+     * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
      * @returns Vercel AI SDK tool
      */
-    createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, '
+    createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, 'morphApiKey'>): ai.Tool<{
         query: string;
     }, {
         success: boolean;
-
-
-        summary?: undefined;
-    } | {
-        success: boolean;
-        contexts: WarpGrepContext[];
-        summary: string;
-        error?: undefined;
+        contexts: WarpGrepContext[] | undefined;
+        summary: string | undefined;
     }>;
     /**
      * Create a Vercel AI SDK-compatible codebase search tool
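The client typings above change createWarpGrepTool to take Omit<WarpGrepToolConfig, 'morphApiKey'> (the key now comes from the owning MorphClient) and collapse the Vercel tool's result union into a single { success, contexts, summary } shape. A hedged usage sketch against only the structural surface visible in these hunks; how a factory or tool instance is obtained is outside this diff, and WarpGrepResult here is a local stand-in for the type imported from types-a_hxdPI6.js:

// Structural sketch: only the members shown in the .d.ts hunks above are assumed.
type WarpGrepResult = { success: boolean; contexts?: unknown[]; summary?: string };

interface WarpGrepToolLike {
  execute: (input: unknown) => Promise<WarpGrepResult>;
  formatResult: (result: WarpGrepResult) => string;
  getSystemPrompt: () => string;
}

// The Vercel variant types its input as { query: string }; the OpenAI and Anthropic
// variants expose the same execute/formatResult/getSystemPrompt helpers on the tool.
async function runWarpGrep(tool: WarpGrepToolLike, query: string): Promise<string> {
  const result = await tool.execute({ query });
  return tool.formatResult(result);
}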
|