@librechat/agents 3.1.77 → 3.1.78
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/common/enum.cjs +54 -0
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +155 -4
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/hooks/createWorkspacePolicyHook.cjs +291 -0
- package/dist/cjs/hooks/createWorkspacePolicyHook.cjs.map +1 -0
- package/dist/cjs/main.cjs +90 -0
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/messages/anthropicToolCache.cjs +102 -0
- package/dist/cjs/messages/anthropicToolCache.cjs.map +1 -0
- package/dist/cjs/messages/prune.cjs +27 -0
- package/dist/cjs/messages/prune.cjs.map +1 -1
- package/dist/cjs/messages/recency.cjs +99 -0
- package/dist/cjs/messages/recency.cjs.map +1 -0
- package/dist/cjs/run.cjs +30 -0
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/summarization/node.cjs +100 -6
- package/dist/cjs/summarization/node.cjs.map +1 -1
- package/dist/cjs/tools/ToolNode.cjs +635 -23
- package/dist/cjs/tools/ToolNode.cjs.map +1 -1
- package/dist/cjs/tools/local/CompileCheckTool.cjs +227 -0
- package/dist/cjs/tools/local/CompileCheckTool.cjs.map +1 -0
- package/dist/cjs/tools/local/FileCheckpointer.cjs +90 -0
- package/dist/cjs/tools/local/FileCheckpointer.cjs.map +1 -0
- package/dist/cjs/tools/local/LocalCodingTools.cjs +1098 -0
- package/dist/cjs/tools/local/LocalCodingTools.cjs.map +1 -0
- package/dist/cjs/tools/local/LocalExecutionEngine.cjs +1042 -0
- package/dist/cjs/tools/local/LocalExecutionEngine.cjs.map +1 -0
- package/dist/cjs/tools/local/LocalExecutionTools.cjs +122 -0
- package/dist/cjs/tools/local/LocalExecutionTools.cjs.map +1 -0
- package/dist/cjs/tools/local/LocalProgrammaticToolCalling.cjs +453 -0
- package/dist/cjs/tools/local/LocalProgrammaticToolCalling.cjs.map +1 -0
- package/dist/cjs/tools/local/attachments.cjs +183 -0
- package/dist/cjs/tools/local/attachments.cjs.map +1 -0
- package/dist/cjs/tools/local/bashAst.cjs +129 -0
- package/dist/cjs/tools/local/bashAst.cjs.map +1 -0
- package/dist/cjs/tools/local/editStrategies.cjs +188 -0
- package/dist/cjs/tools/local/editStrategies.cjs.map +1 -0
- package/dist/cjs/tools/local/resolveLocalExecutionTools.cjs +141 -0
- package/dist/cjs/tools/local/resolveLocalExecutionTools.cjs.map +1 -0
- package/dist/cjs/tools/local/syntaxCheck.cjs +182 -0
- package/dist/cjs/tools/local/syntaxCheck.cjs.map +1 -0
- package/dist/cjs/tools/local/textEncoding.cjs +30 -0
- package/dist/cjs/tools/local/textEncoding.cjs.map +1 -0
- package/dist/cjs/tools/local/workspaceFS.cjs +51 -0
- package/dist/cjs/tools/local/workspaceFS.cjs.map +1 -0
- package/dist/cjs/tools/subagent/SubagentExecutor.cjs +31 -0
- package/dist/cjs/tools/subagent/SubagentExecutor.cjs.map +1 -1
- package/dist/esm/common/enum.mjs +53 -1
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +156 -5
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/hooks/createWorkspacePolicyHook.mjs +289 -0
- package/dist/esm/hooks/createWorkspacePolicyHook.mjs.map +1 -0
- package/dist/esm/main.mjs +17 -2
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/messages/anthropicToolCache.mjs +99 -0
- package/dist/esm/messages/anthropicToolCache.mjs.map +1 -0
- package/dist/esm/messages/prune.mjs +26 -1
- package/dist/esm/messages/prune.mjs.map +1 -1
- package/dist/esm/messages/recency.mjs +97 -0
- package/dist/esm/messages/recency.mjs.map +1 -0
- package/dist/esm/run.mjs +30 -0
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/summarization/node.mjs +100 -6
- package/dist/esm/summarization/node.mjs.map +1 -1
- package/dist/esm/tools/ToolNode.mjs +635 -23
- package/dist/esm/tools/ToolNode.mjs.map +1 -1
- package/dist/esm/tools/local/CompileCheckTool.mjs +223 -0
- package/dist/esm/tools/local/CompileCheckTool.mjs.map +1 -0
- package/dist/esm/tools/local/FileCheckpointer.mjs +87 -0
- package/dist/esm/tools/local/FileCheckpointer.mjs.map +1 -0
- package/dist/esm/tools/local/LocalCodingTools.mjs +1075 -0
- package/dist/esm/tools/local/LocalCodingTools.mjs.map +1 -0
- package/dist/esm/tools/local/LocalExecutionEngine.mjs +1022 -0
- package/dist/esm/tools/local/LocalExecutionEngine.mjs.map +1 -0
- package/dist/esm/tools/local/LocalExecutionTools.mjs +117 -0
- package/dist/esm/tools/local/LocalExecutionTools.mjs.map +1 -0
- package/dist/esm/tools/local/LocalProgrammaticToolCalling.mjs +448 -0
- package/dist/esm/tools/local/LocalProgrammaticToolCalling.mjs.map +1 -0
- package/dist/esm/tools/local/attachments.mjs +180 -0
- package/dist/esm/tools/local/attachments.mjs.map +1 -0
- package/dist/esm/tools/local/bashAst.mjs +126 -0
- package/dist/esm/tools/local/bashAst.mjs.map +1 -0
- package/dist/esm/tools/local/editStrategies.mjs +185 -0
- package/dist/esm/tools/local/editStrategies.mjs.map +1 -0
- package/dist/esm/tools/local/resolveLocalExecutionTools.mjs +137 -0
- package/dist/esm/tools/local/resolveLocalExecutionTools.mjs.map +1 -0
- package/dist/esm/tools/local/syntaxCheck.mjs +179 -0
- package/dist/esm/tools/local/syntaxCheck.mjs.map +1 -0
- package/dist/esm/tools/local/textEncoding.mjs +27 -0
- package/dist/esm/tools/local/textEncoding.mjs.map +1 -0
- package/dist/esm/tools/local/workspaceFS.mjs +49 -0
- package/dist/esm/tools/local/workspaceFS.mjs.map +1 -0
- package/dist/esm/tools/subagent/SubagentExecutor.mjs +31 -0
- package/dist/esm/tools/subagent/SubagentExecutor.mjs.map +1 -1
- package/dist/types/common/enum.d.ts +39 -1
- package/dist/types/graphs/Graph.d.ts +34 -0
- package/dist/types/hooks/createWorkspacePolicyHook.d.ts +95 -0
- package/dist/types/hooks/index.d.ts +2 -0
- package/dist/types/index.d.ts +1 -0
- package/dist/types/messages/anthropicToolCache.d.ts +51 -0
- package/dist/types/messages/index.d.ts +2 -0
- package/dist/types/messages/prune.d.ts +11 -0
- package/dist/types/messages/recency.d.ts +64 -0
- package/dist/types/run.d.ts +21 -0
- package/dist/types/tools/ToolNode.d.ts +145 -2
- package/dist/types/tools/local/CompileCheckTool.d.ts +31 -0
- package/dist/types/tools/local/FileCheckpointer.d.ts +39 -0
- package/dist/types/tools/local/LocalCodingTools.d.ts +57 -0
- package/dist/types/tools/local/LocalExecutionEngine.d.ts +149 -0
- package/dist/types/tools/local/LocalExecutionTools.d.ts +9 -0
- package/dist/types/tools/local/LocalProgrammaticToolCalling.d.ts +21 -0
- package/dist/types/tools/local/attachments.d.ts +84 -0
- package/dist/types/tools/local/bashAst.d.ts +11 -0
- package/dist/types/tools/local/editStrategies.d.ts +28 -0
- package/dist/types/tools/local/index.d.ts +12 -0
- package/dist/types/tools/local/resolveLocalExecutionTools.d.ts +38 -0
- package/dist/types/tools/local/syntaxCheck.d.ts +42 -0
- package/dist/types/tools/local/textEncoding.d.ts +21 -0
- package/dist/types/tools/local/workspaceFS.d.ts +49 -0
- package/dist/types/tools/subagent/SubagentExecutor.d.ts +29 -0
- package/dist/types/types/hitl.d.ts +56 -27
- package/dist/types/types/run.d.ts +8 -1
- package/dist/types/types/summarize.d.ts +30 -0
- package/dist/types/types/tools.d.ts +341 -6
- package/package.json +21 -2
- package/src/common/enum.ts +54 -0
- package/src/graphs/Graph.ts +173 -6
- package/src/hooks/__tests__/compactHooks.test.ts +38 -2
- package/src/hooks/__tests__/createWorkspacePolicyHook.test.ts +393 -0
- package/src/hooks/createWorkspacePolicyHook.ts +355 -0
- package/src/hooks/index.ts +6 -0
- package/src/index.ts +1 -0
- package/src/messages/__tests__/anthropicToolCache.test.ts +125 -0
- package/src/messages/__tests__/recency.test.ts +267 -0
- package/src/messages/anthropicToolCache.ts +116 -0
- package/src/messages/index.ts +2 -0
- package/src/messages/prune.ts +27 -1
- package/src/messages/recency.ts +155 -0
- package/src/run.ts +31 -0
- package/src/scripts/compare_pi_vs_ours.ts +840 -0
- package/src/scripts/local_engine.ts +166 -0
- package/src/scripts/local_engine_checkpointer.ts +205 -0
- package/src/scripts/local_engine_compile.ts +263 -0
- package/src/scripts/local_engine_hooks.ts +226 -0
- package/src/scripts/local_engine_image.ts +201 -0
- package/src/scripts/local_engine_ptc.ts +151 -0
- package/src/scripts/local_engine_workspace.ts +258 -0
- package/src/scripts/subagent-configurable-inheritance.ts +252 -0
- package/src/scripts/summarization-recency.ts +462 -0
- package/src/specs/prune.test.ts +39 -0
- package/src/summarization/__tests__/node.test.ts +499 -3
- package/src/summarization/node.ts +124 -7
- package/src/tools/ToolNode.ts +769 -20
- package/src/tools/__tests__/LocalExecutionTools.test.ts +2647 -0
- package/src/tools/__tests__/ProgrammaticToolCalling.test.ts +175 -0
- package/src/tools/__tests__/SubagentExecutor.test.ts +148 -0
- package/src/tools/__tests__/ToolNode.outputReferences.test.ts +114 -0
- package/src/tools/__tests__/ToolNode.session.test.ts +84 -0
- package/src/tools/__tests__/directToolHITLResumeScope.test.ts +467 -0
- package/src/tools/__tests__/directToolHooks.test.ts +411 -0
- package/src/tools/__tests__/localToolNames.test.ts +73 -0
- package/src/tools/__tests__/workspaceSeam.test.ts +134 -0
- package/src/tools/local/CompileCheckTool.ts +278 -0
- package/src/tools/local/FileCheckpointer.ts +93 -0
- package/src/tools/local/LocalCodingTools.ts +1342 -0
- package/src/tools/local/LocalExecutionEngine.ts +1329 -0
- package/src/tools/local/LocalExecutionTools.ts +167 -0
- package/src/tools/local/LocalProgrammaticToolCalling.ts +594 -0
- package/src/tools/local/__tests__/FileCheckpointer.test.ts +120 -0
- package/src/tools/local/__tests__/editStrategies.test.ts +134 -0
- package/src/tools/local/attachments.ts +251 -0
- package/src/tools/local/bashAst.ts +151 -0
- package/src/tools/local/editStrategies.ts +188 -0
- package/src/tools/local/index.ts +12 -0
- package/src/tools/local/resolveLocalExecutionTools.ts +208 -0
- package/src/tools/local/syntaxCheck.ts +243 -0
- package/src/tools/local/textEncoding.ts +37 -0
- package/src/tools/local/workspaceFS.ts +89 -0
- package/src/tools/subagent/SubagentExecutor.ts +60 -0
- package/src/types/hitl.ts +56 -27
- package/src/types/run.ts +12 -1
- package/src/types/summarize.ts +31 -0
- package/src/types/tools.ts +359 -7
|
@@ -0,0 +1,1098 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var path = require('path');
|
|
4
|
+
var tools = require('@langchain/core/tools');
|
|
5
|
+
var diff = require('diff');
|
|
6
|
+
var LocalExecutionTools = require('./LocalExecutionTools.cjs');
|
|
7
|
+
var LocalProgrammaticToolCalling = require('./LocalProgrammaticToolCalling.cjs');
|
|
8
|
+
var LocalExecutionEngine = require('./LocalExecutionEngine.cjs');
|
|
9
|
+
var FileCheckpointer = require('./FileCheckpointer.cjs');
|
|
10
|
+
var editStrategies = require('./editStrategies.cjs');
|
|
11
|
+
var textEncoding = require('./textEncoding.cjs');
|
|
12
|
+
var attachments = require('./attachments.cjs');
|
|
13
|
+
var syntaxCheck = require('./syntaxCheck.cjs');
|
|
14
|
+
var CompileCheckTool = require('./CompileCheckTool.cjs');
|
|
15
|
+
var _enum = require('../../common/enum.cjs');
|
|
16
|
+
|
|
17
|
+
// Cap on characters returned by read_file (applied after line numbering).
const MAX_READ_CHARS = 256000;
// Default result cap for the search tools.
// NOTE(review): not referenced in this visible chunk — presumably consumed
// by the grep/glob tool factories defined later in the file; confirm.
const DEFAULT_MAX_RESULTS = 200;
// Default byte ceiling for read_file before it refuses and suggests a slice.
const DEFAULT_MAX_READ_BYTES = 10 * 1024 * 1024;
// How many leading bytes are sampled when sniffing for binary content.
const BINARY_DETECTION_BYTES = 8000;
/**
 * Tool name aliases retained for back-compat with consumers that imported
 * the per-file `Local*ToolName` constants. The canonical names live on
 * `Constants.*` (see `src/common/enum.ts`); these aliases just point at
 * them so a typo upstream gets caught at the type level.
 */
const LocalWriteFileToolName = _enum.Constants.WRITE_FILE;
const LocalEditFileToolName = _enum.Constants.EDIT_FILE;
const LocalGrepSearchToolName = _enum.Constants.GREP_SEARCH;
const LocalGlobSearchToolName = _enum.Constants.GLOB_SEARCH;
const LocalListDirectoryToolName = _enum.Constants.LIST_DIRECTORY;
// JSON-schema parameter definitions for each local tool. These are passed
// to the LangChain `tool()` factory as the `schema` option.
const LocalReadFileToolSchema = {
    type: 'object',
    properties: {
        file_path: {
            type: 'string',
            description: 'Path to a local file, relative to the configured cwd unless absolute paths are allowed.',
        },
        offset: {
            type: 'integer',
            description: 'Optional 1-indexed line offset for large files.',
        },
        limit: {
            type: 'integer',
            description: 'Optional maximum number of lines to return.',
        },
    },
    required: ['file_path'],
};
const LocalWriteFileToolSchema = {
    type: 'object',
    properties: {
        file_path: {
            type: 'string',
            description: 'Path to write, relative to the configured cwd unless absolute paths are allowed.',
        },
        content: {
            type: 'string',
            description: 'Complete file contents to write.',
        },
    },
    required: ['file_path', 'content'],
};
// edit_file accepts either a single old_text/new_text pair, a batch via
// `edits`, or both (see normalizeEdits below, which merges them).
const LocalEditFileToolSchema = {
    type: 'object',
    properties: {
        file_path: {
            type: 'string',
            description: 'Path to edit, relative to the configured cwd unless absolute paths are allowed.',
        },
        old_text: {
            type: 'string',
            description: 'Exact text to replace. Must appear exactly once.',
        },
        new_text: {
            type: 'string',
            description: 'Replacement text.',
        },
        edits: {
            type: 'array',
            description: 'Optional batch of exact replacements. Each old_text must appear exactly once in the original file.',
            items: {
                type: 'object',
                properties: {
                    old_text: { type: 'string' },
                    new_text: { type: 'string' },
                },
                required: ['old_text', 'new_text'],
            },
        },
    },
    required: ['file_path'],
};
const LocalGrepSearchToolSchema = {
    type: 'object',
    properties: {
        pattern: {
            type: 'string',
            description: 'Regex pattern to search for.',
        },
        path: {
            type: 'string',
            description: 'Directory or file to search. Defaults to cwd.',
        },
        glob: {
            type: 'string',
            description: 'Optional file glob passed to rg -g.',
        },
        max_results: {
            type: 'integer',
            description: 'Maximum matching lines to return.',
        },
    },
    required: ['pattern'],
};
const LocalGlobSearchToolSchema = {
    type: 'object',
    properties: {
        pattern: {
            type: 'string',
            description: 'File glob pattern, for example "src/**/*.ts".',
        },
        path: {
            type: 'string',
            description: 'Directory to search. Defaults to cwd.',
        },
        max_results: {
            type: 'integer',
            description: 'Maximum file paths to return.',
        },
    },
    required: ['pattern'],
};
// list_directory takes no required parameters; `path` defaults to cwd.
const LocalListDirectoryToolSchema = {
    type: 'object',
    properties: {
        path: {
            type: 'string',
            description: 'Directory to list. Defaults to cwd.',
        },
    },
};
|
|
143
|
+
/**
 * Extract a numbered window of lines from `content`.
 *
 * @param {string} content - Full text of the file.
 * @param {number|undefined} offset - Optional 1-indexed first line.
 * @param {number|undefined} limit - Optional maximum number of lines.
 * @returns {{ text: string, truncated: boolean }} Numbered lines
 *   (6-char right-aligned line number, tab, line text) capped at
 *   MAX_READ_CHARS, plus a flag for whether anything was cut.
 */
function lineWindow(content, offset, limit) {
    const start = Math.max((offset ?? 1) - 1, 0);
    // Shared formatter: number each line relative to `start` (1-indexed).
    const numberLines = (lines) => lines
        .map((line, i) => `${String(start + i + 1).padStart(6, ' ')}\t${line}`)
        .join('\n');
    // Without a positive `limit` we need every remaining line anyway, so
    // a plain split costs the same as any cleverer walk.
    if (limit == null || limit <= 0) {
        const numbered = numberLines(content.split('\n').slice(start));
        return {
            text: LocalExecutionEngine.truncateLocalOutput(numbered, MAX_READ_CHARS),
            truncated: numbered.length > MAX_READ_CHARS,
        };
    }
    // With a small window over a huge file, splitting the whole file would
    // allocate millions of throwaway strings. Instead, hop newline indices:
    // O(start + limit) work rather than O(file).
    let cursor = 0;
    for (let skipped = 0; skipped < start; skipped += 1) {
        const nl = content.indexOf('\n', cursor);
        if (nl === -1) {
            // `offset` points past the end of the file: empty window.
            return { text: '', truncated: false };
        }
        cursor = nl + 1;
    }
    // Gather at most `limit` lines starting at `cursor`.
    const collected = [];
    let exhausted = true;
    let pos = cursor;
    for (let taken = 0; taken < limit; taken += 1) {
        const nl = content.indexOf('\n', pos);
        if (nl === -1) {
            // Final (unterminated) line of the file.
            collected.push(content.slice(pos));
            break;
        }
        collected.push(content.slice(pos, nl));
        pos = nl + 1;
        // If we just took the last allowed line and text remains, the
        // window did not reach the end of the file.
        if (taken === limit - 1 && pos < content.length) {
            exhausted = false;
        }
    }
    const numbered = numberLines(collected);
    return {
        text: LocalExecutionEngine.truncateLocalOutput(numbered, MAX_READ_CHARS),
        truncated: !exhausted || numbered.length > MAX_READ_CHARS,
    };
}
|
|
196
|
+
// Cap on unified-diff characters embedded in tool summaries (see summariseDiff).
const MAX_DIFF_CHARS = 4000;
/**
 * Run the configured post-edit syntax check, if any.
 *
 * @param {string} path - Absolute path of the file that was just mutated.
 * @param {object} config - Local tool config; reads `postEditSyntaxCheck`
 *   ('off' | 'warn' | 'strict'; treated as 'off' when unset).
 * @returns {Promise<{ mode: string, outcome: object } | undefined>}
 *   The mode plus checker outcome, or undefined when checking is off or
 *   the checker produced no outcome (e.g. no checker for the file type).
 */
async function maybeRunSyntaxCheck(path, config) {
    const mode = config.postEditSyntaxCheck ?? 'off';
    if (mode !== 'off') {
        const outcome = await syntaxCheck.runPostEditSyntaxCheck(path, config);
        if (outcome != null) {
            return { mode, outcome };
        }
    }
    return undefined;
}
|
|
206
|
+
/**
 * Append a syntax-check banner to a tool summary when the check failed.
 *
 * @param {string} base - The summary text built so far.
 * @param {{ mode: string, outcome: { ok: boolean, checker: string, output: string } } | null | undefined} run
 *   Result of maybeRunSyntaxCheck, or null/undefined when no check ran.
 * @returns {string} `base` unchanged on success or no-check; otherwise
 *   `base` plus a FAILED (strict) or warning banner and the checker output.
 */
function appendSyntaxCheckSummary(base, run) {
    if (run == null || run.outcome.ok) {
        return base;
    }
    // Strict mode failures are loud; every other mode is just a warning.
    const label = run.mode === 'strict' ? 'FAILED' : 'warning';
    const banner = `\n\n[syntax-check ${label} via ${run.outcome.checker}]\n`;
    return base + banner + run.outcome.output;
}
|
|
216
|
+
/**
 * Roll back a write_file/edit_file mutation after a failed syntax check in
 * `postEditSyntaxCheck: 'strict'` mode. Strict mode advertises a safety
 * gate; throwing while leaving the corrupted file on disk would be a
 * half-broken contract — the model "reacts" to the error yet the next
 * call sees broken on-disk state (Codex P2 [49]).
 *
 * Best-effort by design: if the revert itself throws, the workspace stays
 * in the bad post-write state, but the caller still receives the original
 * syntax-check error.
 *
 * @param {object} fs - WorkspaceFS backing the tools.
 * @param {string} path - Absolute path of the mutated file.
 * @param {boolean} existed - Whether the file existed before the write.
 * @param {string} before - Pre-write decoded text (ignored when !existed).
 * @param {object} encoding - Pre-write encoding info ({ hasBom, newline }).
 */
async function revertStrictWrite(fs, path, existed, before, encoding) {
    try {
        if (!existed) {
            // Brand-new file: the revert is simply deleting it.
            await fs.unlink(path);
            return;
        }
        // encodeFile uses encoding.{hasBom,newline} to restore the on-disk
        // shape; the `text` field is shadowed by the explicit `before` arg.
        const restored = textEncoding.encodeFile(before, { ...encoding, text: before });
        await fs.writeFile(path, restored, 'utf8');
    }
    catch {
        /* best-effort: caller still sees the original syntax error */
    }
}
|
|
246
|
+
/**
 * Build a size-capped unified diff between two versions of a file.
 *
 * @param {string} filePath - Path used (basename only) as the diff header.
 * @param {string} before - Original text.
 * @param {string} after - New text.
 * @returns {string} '(no textual changes)' when identical, otherwise a
 *   unified diff with 3 context lines, truncated to MAX_DIFF_CHARS with
 *   a trailing note about how much was dropped.
 */
function summariseDiff(filePath, before, after) {
    if (before === after) {
        return '(no textual changes)';
    }
    const header = path.basename(filePath);
    const patch = diff.createTwoFilesPatch(header, header, before, after, '', '', {
        context: 3,
    });
    if (patch.length > MAX_DIFF_CHARS) {
        const dropped = patch.length - MAX_DIFF_CHARS;
        return `${patch.slice(0, MAX_DIFF_CHARS)}\n[... diff truncated, ${dropped} more chars ...]`;
    }
    return patch;
}
|
|
260
|
+
/**
 * Merge edit_file's two input shapes into one ordered edit list.
 *
 * Batch `edits[]` entries come first, then the single top-level
 * old_text/new_text pair when either field is present (missing halves
 * default to the empty string).
 *
 * @param {{ edits?: Array<{old_text?: string, new_text?: string}>, old_text?: string, new_text?: string }} input
 * @returns {Array<{ oldText: string, newText: string }>}
 */
function normalizeEdits(input) {
    const collected = [];
    if (Array.isArray(input.edits)) {
        for (const edit of input.edits) {
            collected.push({
                oldText: edit.old_text ?? '',
                newText: edit.new_text ?? '',
            });
        }
    }
    if (input.old_text != null || input.new_text != null) {
        collected.push({
            oldText: input.old_text ?? '',
            newText: input.new_text ?? '',
        });
    }
    return collected;
}
|
|
275
|
+
/**
 * Assemble the shared tool-definition envelope used by the local tools.
 *
 * Every local tool is callable both directly and from code execution, and
 * returns content-and-artifact pairs as a builtin.
 *
 * @param {string} name - Tool name.
 * @param {string} description - Human/model-facing description.
 * @param {object} parameters - JSON-schema parameter definition.
 * @returns {object} The completed definition object.
 */
function toolDefinition(name, description, parameters) {
    const definition = {
        name,
        description,
        parameters,
        allowed_callers: ['direct', 'code_execution'],
        responseFormat: _enum.Constants.CONTENT_AND_ARTIFACT,
        toolType: 'builtin',
    };
    return definition;
}
|
|
285
|
+
/**
 * Heuristically decide whether a file is binary by sampling its first
 * BINARY_DETECTION_BYTES bytes and looking for a NUL byte.
 *
 * @param {string} path - Absolute path to the file.
 * @param {object} fs - WorkspaceFS providing promise-based open().
 * @returns {Promise<boolean>} true when a NUL byte appears in the sample.
 */
async function looksBinary(path, fs) {
    let handle;
    try {
        handle = await fs.open(path, 'r');
        const sample = Buffer.alloc(BINARY_DETECTION_BYTES);
        const { bytesRead } = await handle.read(sample, 0, BINARY_DETECTION_BYTES, 0);
        // Buffer#includes with a number searches for that byte value; only
        // inspect the bytes actually read, not the zero-filled remainder.
        return sample.subarray(0, bytesRead).includes(0);
    }
    finally {
        // Close even on read errors; handle may be undefined if open threw.
        await handle?.close();
    }
}
|
|
302
|
+
// Default byte ceiling for embedding image/PDF attachments from read_file.
const DEFAULT_MAX_ATTACHMENT_BYTES = 5 * 1024 * 1024;
/**
 * Build the local `read_file` tool.
 *
 * Resolves paths through the workspace sandbox, enforces a byte cap,
 * refuses binary files (or converts them to image/PDF attachments when
 * `attachReadAttachments` is enabled), and returns a line-numbered text
 * window otherwise. Always returns a [content, artifact] pair.
 *
 * @param {object} [config] - Local execution config; reads maxReadBytes,
 *   attachReadAttachments, maxAttachmentBytes, plus whatever
 *   getWorkspaceFS/resolveWorkspacePathSafe consume.
 * @returns {object} A LangChain tool instance.
 */
function createLocalReadFileTool(config = {}) {
    const fs = LocalExecutionEngine.getWorkspaceFS(config);
    return tools.tool(async (rawInput) => {
        const input = rawInput;
        // Sandbox the requested path ('read' intent) before touching disk.
        const path = await LocalExecutionEngine.resolveWorkspacePathSafe(input.file_path, config, 'read');
        const fileStat = await fs.stat(path);
        if (!fileStat.isFile()) {
            throw new Error(`Path is not a file: ${input.file_path}`);
        }
        // Oversized files return a stub instead of content; the artifact
        // marks the result as truncated.
        const maxBytes = Math.max(config.maxReadBytes ?? DEFAULT_MAX_READ_BYTES, 1);
        if (fileStat.size > maxBytes) {
            const stub = `File is ${fileStat.size} bytes, exceeds the ${maxBytes}-byte read cap. Read a slice via bash (e.g. head/sed) or raise local.maxReadBytes.`;
            return [stub, { path, bytes: fileStat.size, truncated: true }];
        }
        if (await looksBinary(path, fs)) {
            const attachmentMode = config.attachReadAttachments ?? 'off';
            if (attachmentMode !== 'off') {
                // Attachment mode: classify the binary and, where allowed,
                // embed it as model-visible content instead of refusing.
                const attachment = await attachments.classifyAttachment({
                    path,
                    bytes: fileStat.size,
                    mode: attachmentMode,
                    maxBytes: config.maxAttachmentBytes ?? DEFAULT_MAX_ATTACHMENT_BYTES,
                    // Route through the configured WorkspaceFS so a custom
                    // engine sees the same path semantics as `read_file`
                    // itself (manual review finding F).
                    fs,
                });
                if (attachment.kind === 'image') {
                    // Image: return an image_url content block so
                    // vision-capable models can see the file.
                    return [
                        attachments.imageAttachmentContent(path, attachment),
                        {
                            path,
                            bytes: fileStat.size,
                            mime: attachment.mime,
                            attachment: 'image',
                        },
                    ];
                }
                if (attachment.kind === 'pdf') {
                    // PDF: text note plus the base64 data URL; models that
                    // accept PDF via image_url will render it.
                    return [
                        [
                            {
                                type: 'text',
                                text: `Read ${path} (application/pdf, ${fileStat.size} bytes). PDF attached as base64 data URL; vision-capable models that accept PDF will render it.`,
                            },
                            {
                                type: 'image_url',
                                image_url: { url: attachment.dataUrl },
                            },
                        ],
                        {
                            path,
                            bytes: fileStat.size,
                            mime: attachment.mime,
                            attachment: 'pdf',
                        },
                    ];
                }
                if (attachment.kind === 'oversize') {
                    // Embeddable type but over the attachment byte cap.
                    return [
                        `Refusing to embed ${attachment.mime} attachment (${attachment.bytes} bytes exceeds ${attachment.maxBytes}-byte cap).`,
                        {
                            path,
                            bytes: fileStat.size,
                            mime: attachment.mime,
                            attachment: 'oversize',
                        },
                    ];
                }
                if (attachment.kind === 'binary') {
                    // Non-embeddable binary: refuse, but report the mime.
                    return [
                        `Refusing to read binary file (${fileStat.size} bytes, ${attachment.mime}): ${path}`,
                        {
                            path,
                            bytes: fileStat.size,
                            mime: attachment.mime,
                            binary: true,
                        },
                    ];
                }
                // text-or-unknown falls through to the text-read path below.
            }
            else {
                // Attachments disabled: plain refusal for binary content.
                return [
                    `Refusing to read binary file (${fileStat.size} bytes): ${path}`,
                    { path, bytes: fileStat.size, binary: true },
                ];
            }
        }
        // Text path: read, window by offset/limit, and number the lines.
        const content = await fs.readFile(path, 'utf8');
        const result = lineWindow(content, input.offset, input.limit);
        return [
            result.truncated ? `${result.text}\n[truncated]` : result.text,
            { path, bytes: fileStat.size },
        ];
    }, {
        name: _enum.Constants.READ_FILE,
        description: 'Read a local text file from the configured working directory with line numbers. ' +
            'When `attachReadAttachments` is enabled (e.g. images-only), reading an image returns an ' +
            '`image_url` content block so vision-capable models can see the file directly.',
        schema: LocalReadFileToolSchema,
        responseFormat: _enum.Constants.CONTENT_AND_ARTIFACT,
    });
}
|
|
407
|
+
/**
 * Build the local `write_file` tool.
 *
 * Creates or overwrites a text file inside the sandboxed workspace,
 * preserving the existing BOM/newline style on overwrite, optionally
 * checkpointing the previous contents, and optionally running a
 * post-write syntax check. In strict syntax-check mode a failed check
 * reverts the write and throws. Returns a [summary, artifact] pair.
 *
 * @param {object} [config] - Local execution config; reads readOnly and
 *   postEditSyntaxCheck among others.
 * @param {object} [checkpointer] - Optional FileCheckpointer; its
 *   captureBeforeWrite() is called before the file is mutated.
 * @returns {object} A LangChain tool instance.
 */
function createLocalWriteFileTool(config = {}, checkpointer) {
    const fs = LocalExecutionEngine.getWorkspaceFS(config);
    return tools.tool(async (rawInput) => {
        const input = rawInput;
        if (config.readOnly === true) {
            throw new Error('write_file is blocked in read-only local mode.');
        }
        // `path$1` avoids shadowing the module-level `path` (node:path).
        const path$1 = await LocalExecutionEngine.resolveWorkspacePathSafe(input.file_path, config, 'write');
        if (checkpointer != null) {
            await checkpointer.captureBeforeWrite(path$1);
        }
        // Capture the pre-write text and encoding so overwrites keep the
        // original BOM/newline style and strict mode can revert.
        let before = '';
        let encoding = { text: '', hasBom: false, newline: '\n' };
        let existed = false;
        try {
            const raw = await fs.readFile(path$1, 'utf8');
            const decoded = textEncoding.decodeFile(raw);
            before = decoded.text;
            encoding = decoded;
            existed = true;
        }
        catch {
            // Read failure is treated as "new file"; defaults above give
            // LF without BOM.
            existed = false;
        }
        await fs.mkdir(path.dirname(path$1), { recursive: true });
        const finalText = textEncoding.encodeFile(input.content, encoding);
        await fs.writeFile(path$1, finalText, 'utf8');
        const syntax = await maybeRunSyntaxCheck(path$1, config);
        const diff = existed
            ? summariseDiff(path$1, before, input.content)
            : `(new file, ${input.content.length} chars)`;
        const baseSummary = existed
            ? `Overwrote ${path$1} (${input.content.length} chars). Diff:\n${diff}`
            : `Created ${path$1} (${input.content.length} chars).`;
        const summary = appendSyntaxCheckSummary(baseSummary, syntax);
        if (syntax?.outcome.ok === false && syntax.mode === 'strict') {
            // Roll back the write so strict mode is an actual gate, not
            // "fail the call AND leave the corrupted file on disk".
            // Codex P2 [49].
            await revertStrictWrite(fs, path$1, existed, before, encoding);
            throw new Error(`write_file syntax check failed (${syntax.outcome.checker}); reverted to pre-write state.\n${syntax.outcome.output}`);
        }
        return [
            summary,
            {
                path: path$1,
                bytes: finalText.length,
                new_file: !existed,
                newline: encoding.newline === '\r\n' ? 'CRLF' : 'LF',
                had_bom: encoding.hasBom,
                // Non-strict syntax failures surface in the artifact too.
                ...(syntax != null && syntax.outcome.ok === false
                    ? { syntax_error: syntax.outcome.checker }
                    : {}),
            },
        ];
    }, {
        name: LocalWriteFileToolName,
        description: 'Create or overwrite a local text file in the configured working directory. ' +
            'Preserves the existing BOM and line endings when overwriting; defaults to LF without BOM for new files. ' +
            'Returns a unified diff of the changes when overwriting.',
        schema: LocalWriteFileToolSchema,
        responseFormat: _enum.Constants.CONTENT_AND_ARTIFACT,
    });
}
|
|
471
|
+
/**
 * Build the local `edit_file` tool.
 *
 * Applies one or more exact-text replacements to an existing file. Each
 * edit is located with progressively fuzzier strategies (exact,
 * line-trimmed, whitespace-normalized, indentation-flexible) via
 * editStrategies.locateEdit; all edits are resolved in memory before the
 * file is written, so a failed match leaves the file untouched. Preserves
 * BOM/newline style, optionally checkpoints, and in strict syntax-check
 * mode reverts and throws on failure. Returns a [summary, artifact] pair.
 *
 * @param {object} [config] - Local execution config; reads readOnly and
 *   postEditSyntaxCheck among others.
 * @param {object} [checkpointer] - Optional FileCheckpointer; its
 *   captureBeforeWrite() runs after matching succeeds, before the write.
 * @returns {object} A LangChain tool instance.
 */
function createLocalEditFileTool(config = {}, checkpointer) {
    const fs = LocalExecutionEngine.getWorkspaceFS(config);
    return tools.tool(async (rawInput) => {
        const input = rawInput;
        if (config.readOnly === true) {
            throw new Error('edit_file is blocked in read-only local mode.');
        }
        // Merge single old_text/new_text and batch edits[] into one list.
        const edits = normalizeEdits(input);
        if (edits.length === 0) {
            throw new Error('edit_file requires old_text/new_text or edits[].');
        }
        const path = await LocalExecutionEngine.resolveWorkspacePathSafe(input.file_path, config, 'write');
        const raw = await fs.readFile(path, 'utf8');
        const encoding = textEncoding.decodeFile(raw);
        const original = encoding.text;
        let next = original;
        const strategiesUsed = [];
        // Apply edits sequentially in memory; any unlocatable old_text
        // aborts before anything touches disk.
        for (let i = 0; i < edits.length; i++) {
            const edit = edits[i];
            const match = editStrategies.locateEdit(next, edit.oldText);
            if (match == null) {
                throw new Error(`Edit ${i + 1}/${edits.length}: could not locate old_text in ${input.file_path}. ` +
                    'Tried exact, line-trimmed, whitespace-normalized, and indentation-flexible matching. ' +
                    'Re-read the file and copy the literal lines.');
            }
            strategiesUsed.push(match.strategy);
            next = editStrategies.applyEdit(next, match, edit.newText);
        }
        // Checkpoint only once all edits are known to apply cleanly.
        if (checkpointer != null) {
            await checkpointer.captureBeforeWrite(path);
        }
        const finalText = textEncoding.encodeFile(next, encoding);
        await fs.writeFile(path, finalText, 'utf8');
        const syntax = await maybeRunSyntaxCheck(path, config);
        const diff = summariseDiff(path, original, next);
        // Mention strategies in the summary only when a fuzzy match fired.
        const fuzzy = strategiesUsed.some((s) => s !== 'exact');
        const baseSummary = `Applied ${edits.length} edit(s) to ${path}` +
            (fuzzy ? ` (strategies: ${strategiesUsed.join(', ')})` : '') +
            `. Diff:\n${diff}`;
        const summary = appendSyntaxCheckSummary(baseSummary, syntax);
        if (syntax?.outcome.ok === false && syntax.mode === 'strict') {
            // Restore the pre-edit bytes so strict mode is an actual
            // gate (Codex P2 [49]). edit_file always operates on an
            // existing file, so `existed = true` here.
            await revertStrictWrite(fs, path, true, original, encoding);
            throw new Error(`edit_file syntax check failed (${syntax.outcome.checker}); reverted to pre-edit state.\n${syntax.outcome.output}`);
        }
        return [
            summary,
            {
                path,
                edits: edits.length,
                strategies: strategiesUsed,
                newline: encoding.newline === '\r\n' ? 'CRLF' : 'LF',
                had_bom: encoding.hasBom,
                // Non-strict syntax failures surface in the artifact too.
                ...(syntax != null && syntax.outcome.ok === false
                    ? { syntax_error: syntax.outcome.checker }
                    : {}),
            },
        ];
    }, {
        name: LocalEditFileToolName,
        description: 'Apply exact text replacements to a local file. The matcher tries exact, line-trimmed, whitespace-normalized, and indentation-flexible strategies in order so common LLM whitespace mistakes are recoverable. Each old_text must still match exactly one location. Returns a unified diff of the changes.',
        schema: LocalEditFileToolSchema,
        responseFormat: _enum.Constants.CONTENT_AND_ARTIFACT,
    });
}
|
|
538
|
+
/**
 * Ripgrep availability cache, keyed on the *effective execution
 * backend* — whatever function `getSpawn(config)` returns. Without
 * the backend key, a Run that probes `rg` over Node's
 * `child_process.spawn` would poison subsequent Runs whose
 * `local.exec.spawn` routes to a remote sandbox or container that
 * doesn't have rg installed: the cached `true` would skip the probe,
 * the rg invocation would throw, and the Node fallback wouldn't be
 * reached. Per-backend caching avoids that without paying for a
 * spawn-per-search.
 */
// Per-backend × per-env cache. Codex P1 #34 — keying by spawn
// backend alone misses the case where two Runs share a backend but
// vary `local.env` (especially PATH). A stale cache then claims `rg`
// is available, the rg path runs, and the spawn fails with ENOENT
// instead of falling back to the Node walker. The inner Map is
// keyed by a stable JSON hash of the effective env so each unique
// env gets its own probe. WeakMap outer keying lets dead backends
// (and their env maps) be garbage-collected.
let ripgrepAvailabilityByBackend = new WeakMap();
|
|
557
|
+
/**
 * Produces a stable, order-insensitive JSON key for an env object.
 *
 * PATH is the only env entry that affects command lookup, but hashing
 * the whole env keeps the key correct for hosts that vary anything
 * else relevant. Keys are sorted first so `{A:1,B:2}` and `{B:2,A:1}`
 * serialize identically.
 *
 * @param {Record<string, string> | null | undefined} env
 * @returns {string} Stable JSON string, or '' for a nullish env.
 */
function envCacheKey(env) {
    if (env == null) {
        return '';
    }
    const orderedEntries = Object.keys(env)
        .sort()
        .map((key) => [key, env[key]]);
    return JSON.stringify(Object.fromEntries(orderedEntries));
}
|
|
570
|
+
/**
 * Probes (once per spawn-backend × env combination) whether `rg` is
 * runnable, caching the probe *promise* so concurrent callers share a
 * single spawn instead of racing duplicate probes.
 *
 * @param {object} config - Local execution config; `config.env` feeds
 *   the per-env cache key.
 * @returns {Promise<boolean>} true when `rg --version` exits 0.
 */
async function isRipgrepAvailable(config) {
    const spawnBackend = LocalExecutionEngine.getSpawn(config);
    let perEnvCache = ripgrepAvailabilityByBackend.get(spawnBackend);
    if (perEnvCache == null) {
        perEnvCache = new Map();
        ripgrepAvailabilityByBackend.set(spawnBackend, perEnvCache);
    }
    const cacheKey = envCacheKey(config.env);
    const cached = perEnvCache.get(cacheKey);
    if (cached != null) {
        return cached;
    }
    // Probe with the sandbox disabled and a short timeout; any spawn
    // failure (ENOENT etc.) resolves to `false` rather than throwing.
    const probe = LocalExecutionEngine
        .spawnLocalProcess('rg', ['--version'], { ...config, timeoutMs: 5000, sandbox: { enabled: false } }, { internal: true })
        .then((result) => result != null && result.exitCode === 0)
        .catch(() => false);
    perEnvCache.set(cacheKey, probe);
    return probe;
}
|
|
587
|
+
/**
 * Test-only reset hook. Clears the ripgrep-availability cache so
 * tests can swap in mocked spawn backends and reprobe deterministically.
 *
 * @internal Not part of the public SDK surface; the leading underscore
 * and `@internal` tag together signal that consumers should not call
 * this. Tests import it via the module path directly.
 */
function _resetRipgrepCacheForTests() {
    // Replacing the WeakMap (rather than clearing it — WeakMap has no
    // clear()) drops every cached per-backend env map at once.
    ripgrepAvailabilityByBackend = new WeakMap();
}
|
|
598
|
+
// Skipped by the Node-fallback walker (used when ripgrep is
// unavailable). Covers common build outputs, virtualenvs, and
// caches so a `grep_search`/`glob_search` on a large monorepo or a
// Python project with `.venv/` doesn't read every file under those
// trees. ripgrep itself respects .gitignore so it doesn't need this
// list. Audit follow-up from the comprehensive review (finding #3).
// NOTE(review): matched by exact entry name, so a source directory
// that happens to be named e.g. `build` or `env` is also skipped —
// accepted trade-off for the fallback path.
const SKIP_DIRS = new Set([
    '.git',
    '.svn',
    '.hg',
    'node_modules',
    '.next',
    '.nuxt',
    '.cache',
    '.parcel-cache',
    '.turbo',
    'dist',
    'build',
    'out',
    'target',
    'vendor',
    'coverage',
    '.nyc_output',
    '__pycache__',
    '.venv',
    'venv',
    'env',
    '.tox',
    '.mypy_cache',
    '.pytest_cache',
    '.ruff_cache',
]);
|
|
630
|
+
/**
 * Translates a glob pattern into an anchored RegExp for the Node
 * fallback search path.
 *
 * Supported syntax: `**` matches across path separators (a trailing
 * `/` after `**` is absorbed), `*` matches within one path segment,
 * `?` matches a single non-separator character; everything else is
 * taken literally (regex metacharacters are escaped).
 *
 * @param {string} pattern - Glob pattern.
 * @returns {RegExp} Anchored (^…$) regex equivalent.
 */
function globToRegExp(pattern) {
    const metaChars = '.+^$|(){}[]\\';
    let source = '^';
    let i = 0;
    while (i < pattern.length) {
        const ch = pattern[i];
        if (ch === '*' && pattern[i + 1] === '*') {
            // `**` — match any depth; swallow an immediately
            // following `/` so `**/x` also matches a top-level `x`.
            source += '.*';
            i += 2;
            if (pattern[i] === '/') {
                i += 1;
            }
        }
        else if (ch === '*') {
            source += '[^/]*';
            i += 1;
        }
        else if (ch === '?') {
            source += '[^/]';
            i += 1;
        }
        else {
            source += metaChars.includes(ch) ? '\\' + ch : ch;
            i += 1;
        }
    }
    return new RegExp(source + '$');
}
|
|
659
|
+
/**
 * Depth-first async generator over regular files beneath `root`,
 * using the injected `fs` (must provide `readdir` with
 * `withFileTypes`). Unreadable directories are silently skipped, as
 * are entries in SKIP_DIRS and any name beginning with `.git`
 * (NOTE(review): the prefix test also excludes `.gitignore`,
 * `.github`, etc. — confirm that is intended).
 *
 * @param {string} root - Directory to walk.
 * @param {object} fs - Filesystem with an async `readdir`.
 * @yields {string} Full path of each regular file found.
 */
async function* walkFiles(root, fs) {
    const pending = [root];
    while (pending.length > 0) {
        const current = pending.pop();
        let dirEntries;
        try {
            dirEntries = await fs.readdir(current, { withFileTypes: true });
        }
        catch {
            continue;
        }
        for (const dirEntry of dirEntries) {
            if (SKIP_DIRS.has(dirEntry.name) || dirEntry.name.startsWith('.git')) {
                continue;
            }
            const childPath = `${current}/${dirEntry.name}`;
            if (dirEntry.isDirectory()) {
                pending.push(childPath);
            }
            else if (dirEntry.isFile()) {
                yield childPath;
            }
        }
    }
}
|
|
684
|
+
/**
 * Catastrophic-backtracking guardrails for the fallback grep path.
 *
 * Without ripgrep we run the model-supplied pattern through Node's
 * `RegExp` engine, which uses a backtracking implementation. Patterns
 * with nested unbounded quantifiers (`(a+)+`, `(.*)*`, etc.) can
 * monopolise the event loop for arbitrary wall-clock time on
 * pathological input, and `setTimeout` cannot interrupt a synchronous
 * `RegExp.exec`. Manual review (finding D) flagged this as a real DoS.
 *
 * Mitigations applied here, in order of severity:
 *  1. Cap pattern length so an obviously oversize regex is rejected
 *     before compile.
 *  2. Reject patterns that contain a nested unbounded quantifier of
 *     the form `(...+|*)([+*]|{n,})` — the standard pathological
 *     shape. Still a heuristic (not a full safety proof), but blocks
 *     every common DoS construction we've seen in coding-agent logs.
 *  3. Wall-clock budget for the overall search: each file's regex
 *     pass is checked against a deadline; once exceeded the search
 *     bails with a partial result. Doesn't interrupt a stuck
 *     `exec()` call, but stops a slow pattern from making the whole
 *     Run hang once the first hung file finishes.
 *
 * Hosts that need bulletproof regex safety should install `rg` —
 * ripgrep uses RE2 internally and has no backtracking.
 */
const MAX_FALLBACK_PATTERN_LENGTH = 1024;
const FALLBACK_GREP_BUDGET_MS = 5000;
// Per-file byte cap. Codex P2 #41 — without it, the whole-file
// `readFile` + `split('\n')` for a multi-GB log is an unbounded
// allocation that the wall-clock budget (checked between files)
// can't interrupt. Hosts that need to grep large files should
// install ripgrep.
const FALLBACK_GREP_MAX_FILE_BYTES = 5 * 1024 * 1024;
|
|
718
|
+
/**
 * Heuristic: walks `pattern` to find any `(<contents>)<quant>` where
 * `<contents>` itself has an unbounded quantifier. Catches the
 * classic `(a+)+` form AND the double-nested `((a+)+)` form (which a
 * single-pass regex misses because `[^)]*` stops at the first inner
 * close-paren). Misses sufficiently obfuscated cases — bulletproof
 * ReDoS detection requires a real parser. The 5 s wall-clock budget
 * is the hard backstop for anything that slips past this.
 *
 * Escape handling counts the full run of preceding backslashes, so
 * `\\)` (an escaped backslash followed by a REAL close-paren) is
 * correctly treated as unescaped. The previous single-character
 * look-behind (`pattern[i - 1] === '\\'`) misread that case and let
 * `(a+\\)+`-style patterns bypass the guard entirely; the same parity
 * bug affected the inner `(?<!\\)[+*]` check for `\\+` / `\\*`.
 *
 * @param {string} pattern - Candidate regex source string.
 * @returns {boolean} true when a quantified group with an unbounded
 *   inner quantifier is detected.
 */
function hasNestedUnboundedQuantifier(pattern) {
    // True when the character at `index` is escaped: an odd number of
    // consecutive backslashes immediately precedes it.
    const isEscaped = (text, index) => {
        let backslashes = 0;
        for (let k = index - 1; k >= 0 && text[k] === '\\'; k--) {
            backslashes++;
        }
        return backslashes % 2 === 1;
    };
    for (let i = 1; i < pattern.length - 1; i++) {
        if (pattern[i] !== ')')
            continue;
        if (isEscaped(pattern, i))
            continue;
        const next = pattern[i + 1];
        if (next !== '+' && next !== '*' && next !== '{')
            continue;
        // Walk back to find the matching opening paren (respecting depth
        // and `\(` escapes).
        let depth = 1;
        let j = i - 1;
        while (j >= 0) {
            const c = pattern[j];
            if (!isEscaped(pattern, j)) {
                if (c === ')')
                    depth++;
                else if (c === '(') {
                    depth--;
                    if (depth === 0)
                        break;
                }
            }
            j--;
        }
        if (j < 0)
            continue;
        const inner = pattern.slice(j + 1, i);
        // Any UNESCAPED `+` or `*` inside the quantified group means a
        // nested unbounded quantifier.
        for (let k = 0; k < inner.length; k++) {
            if ((inner[k] === '+' || inner[k] === '*') && !isEscaped(inner, k)) {
                return true;
            }
        }
    }
    return false;
}
|
|
762
|
+
/**
 * Error type for the Node fallback grep path. `kind` is a machine-
 * readable discriminator ('pattern-too-long' | 'unsafe-pattern' |
 * 'invalid-pattern') so callers can report the refusal category in
 * tool artifacts without parsing the message.
 */
class FallbackGrepError extends Error {
    kind;
    constructor(kind, message) {
        super(message);
        this.kind = kind;
    }
}
|
|
769
|
+
/**
 * Compiles a model-supplied pattern for the Node fallback grep,
 * enforcing the DoS guardrails first. Guard order matters: the cheap
 * length cap runs before the nested-quantifier scan, and both run
 * before `new RegExp` so obviously dangerous patterns never compile.
 *
 * @param {string} pattern - Candidate regex source.
 * @returns {RegExp} Compiled regex.
 * @throws {FallbackGrepError} kind 'pattern-too-long',
 *   'unsafe-pattern', or 'invalid-pattern'.
 */
function compileFallbackRegex(pattern) {
    if (pattern.length > MAX_FALLBACK_PATTERN_LENGTH) {
        throw new FallbackGrepError('pattern-too-long', `Pattern exceeds ${MAX_FALLBACK_PATTERN_LENGTH}-char fallback cap (install ripgrep for unbounded patterns).`);
    }
    if (hasNestedUnboundedQuantifier(pattern)) {
        throw new FallbackGrepError('unsafe-pattern', 'Pattern contains a nested unbounded quantifier (e.g. `(a+)+` or `((a+)+)`) which can cause catastrophic backtracking in the Node fallback. Install ripgrep for RE2-safe matching.');
    }
    try {
        return new RegExp(pattern);
    }
    catch (e) {
        throw new FallbackGrepError('invalid-pattern', `Invalid regex: ${e.message}`);
    }
}
|
|
783
|
+
/**
 * Node-based grep used when ripgrep is unavailable. Walks files under
 * `root`, applies `pattern` line-by-line, and enforces the DoS
 * guardrails documented above (pattern checks, wall-clock budget,
 * per-file byte cap).
 *
 * @param {string} root - Directory to search.
 * @param {string} pattern - Regex source (validated by compileFallbackRegex).
 * @param {string | undefined} globFilter - Optional glob restricting files.
 * @param {number} maxResults - Cap on real matches returned.
 * @param {object} fs - Injected filesystem (stat/readFile/readdir).
 * @returns {Promise<{matches: string[], skipped: string[]}>} Real
 *   `file:line:text` matches plus oversize-file skip diagnostics.
 * @throws {FallbackGrepError} When the pattern is refused.
 */
async function fallbackGrep(root, pattern, globFilter, maxResults, fs) {
    const rx = compileFallbackRegex(pattern);
    const deadline = Date.now() + FALLBACK_GREP_BUDGET_MS;
    const globRx = globFilter != null && globFilter !== '' ? globToRegExp(globFilter) : undefined;
    const matches = [];
    // Track skipped (oversize) files separately so they don't consume
    // the maxResults budget. Codex P2 [43]: round 14's fix pushed skip
    // sentinels into `matches`, so a directory of one oversize non-
    // matching file falsely reported `matches: 1`, and enough
    // oversize files could fill the budget before any real match was
    // scanned. Now diagnostics are appended after real matches and
    // independent of the budget.
    const skippedDiagnostics = [];
    for await (const file of walkFiles(root, fs)) {
        if (Date.now() > deadline) {
            // Wall-clock budget exceeded — return partial results rather
            // than letting a slow pattern hang the Run.
            return { matches, skipped: skippedDiagnostics };
        }
        if (globRx != null) {
            // Glob is matched against the root-relative path.
            const rel = file.startsWith(root + '/') ? file.slice(root.length + 1) : file;
            if (!globRx.test(rel)) {
                continue;
            }
        }
        // Skip files larger than the per-file cap and remember them as
        // diagnostics (NOT as matches). Codex P2 [41]: pre-fix
        // `fs.readFile` then `.split('\n')` allocated the whole file +
        // an array of every line, which a single multi-GB log could
        // turn into an OOM even after the regex DoS guards.
        let stat;
        try {
            stat = await fs.stat(file);
        }
        catch {
            continue;
        }
        if (stat.size > FALLBACK_GREP_MAX_FILE_BYTES) {
            skippedDiagnostics.push(`${file}:0:[skipped: file > ${FALLBACK_GREP_MAX_FILE_BYTES} bytes; install ripgrep for unbounded grep]`);
            continue;
        }
        let content;
        try {
            content = await fs.readFile(file, 'utf8');
        }
        catch {
            continue;
        }
        // NUL byte => treat as binary and skip silently.
        if (content.includes('\0')) {
            continue;
        }
        // Re-check the deadline AFTER the read — a slow disk on one
        // file can blow the budget without us noticing.
        if (Date.now() > deadline) {
            return { matches, skipped: skippedDiagnostics };
        }
        const lines = content.split('\n');
        for (let i = 0; i < lines.length; i++) {
            if (rx.test(lines[i])) {
                matches.push(`${file}:${i + 1}:${lines[i]}`);
                if (matches.length >= maxResults) {
                    return { matches, skipped: skippedDiagnostics };
                }
            }
        }
    }
    return { matches, skipped: skippedDiagnostics };
}
|
|
851
|
+
/**
 * Node-based glob matcher used when ripgrep is unavailable. Walks all
 * files under `root` and returns those whose root-relative path
 * matches the glob, stopping at `maxResults`.
 *
 * @param {string} root - Directory to search.
 * @param {string} pattern - Glob pattern (see globToRegExp).
 * @param {number} maxResults - Cap on returned paths.
 * @param {object} fs - Injected filesystem (readdir).
 * @returns {Promise<string[]>} Full paths of matching files.
 */
async function fallbackGlob(root, pattern, maxResults, fs) {
    const matcher = globToRegExp(pattern);
    const prefix = root + '/';
    const found = [];
    for await (const filePath of walkFiles(root, fs)) {
        const relative = filePath.startsWith(prefix) ? filePath.slice(prefix.length) : filePath;
        if (!matcher.test(relative)) {
            continue;
        }
        found.push(filePath);
        if (found.length >= maxResults) {
            break;
        }
    }
    return found;
}
|
|
865
|
+
/**
 * Creates the `grep_search` tool: regex search over local files,
 * preferring ripgrep and falling back to the guarded Node walker.
 *
 * @param {object} [config] - Local execution config (workspace fs,
 *   spawn backend, env, timeouts).
 * @returns A LangChain tool returning `[output, artifact]` where the
 *   artifact records `matches`, `engine`, and any error detail.
 */
function createLocalGrepSearchTool(config = {}) {
    const fs = LocalExecutionEngine.getWorkspaceFS(config);
    return tools.tool(async (rawInput) => {
        const input = rawInput;
        const target = await LocalExecutionEngine.resolveWorkspacePathSafe(input.path ?? '.', config, 'read');
        const maxResults = Math.max(input.max_results ?? DEFAULT_MAX_RESULTS, 1);
        if (await isRipgrepAvailable(config)) {
            // Pass the pattern through `-e` so dash-prefixed patterns
            // like `-foo` are treated as the search regex, not as a
            // (probably-unknown) flag. `rg --help` explicitly requires
            // `-e/--regexp` (or `--`) for that case. Same trick avoids
            // any future flag-conflict if a user query happens to look
            // like an rg long option.
            const args = [
                '--line-number',
                '--column',
                '--hidden',
                '--glob',
                '!.git/**',
                ...(input.glob != null && input.glob !== '' ? ['--glob', input.glob] : []),
                '-e',
                input.pattern,
                target,
            ];
            const result = await LocalExecutionEngine.spawnLocalProcess('rg', args, {
                ...config,
                timeoutMs: config.timeoutMs ?? 30000,
            });
            // ripgrep exit codes:
            //   0 → at least one match
            //   1 → no matches (clean — "No matches found.")
            //   2 → real error (bad regex, unreadable target, etc.)
            // Without this branch (Codex P2 #23 — same fix shape glob_search
            // got from P2 #13), exit-2 errors silently mapped to
            // `matches: 0`, so the agent treated tooling failures as a
            // genuine absence of matches.
            if (result.timedOut || (result.exitCode != null && result.exitCode > 1)) {
                const detail = result.stderr.trim() || `rg exited ${result.exitCode}`;
                return [
                    `grep_search failed: ${detail}`,
                    {
                        matches: 0,
                        engine: 'ripgrep',
                        error: detail,
                        exitCode: result.exitCode,
                    },
                ];
            }
            const lines = result.stdout.split('\n').filter(Boolean).slice(0, maxResults);
            const output = lines.length > 0
                ? lines.join('\n')
                : result.stderr.trim() || 'No matches found.';
            return [output, { matches: lines.length, engine: 'ripgrep' }];
        }
        try {
            const { matches, skipped } = await fallbackGrep(target, input.pattern, input.glob, maxResults, fs);
            // Display: real matches first, skip diagnostics appended.
            // Artifact count: ONLY real matches (Codex P2 [43] —
            // skip sentinels used to inflate the count and the budget).
            const display = matches.length > 0
                ? [...matches, ...skipped].join('\n')
                : skipped.length > 0
                    ? skipped.join('\n')
                    : 'No matches found.';
            return [
                display,
                {
                    matches: matches.length,
                    skipped: skipped.length,
                    engine: 'node-fallback',
                },
            ];
        }
        catch (e) {
            // Pattern refusals surface as a structured tool result so
            // the agent can adjust; anything else is a real failure.
            if (e instanceof FallbackGrepError) {
                return [
                    `grep_search refused the pattern: ${e.message}`,
                    {
                        matches: 0,
                        engine: 'node-fallback',
                        error: e.message,
                        kind: e.kind,
                    },
                ];
            }
            throw e;
        }
    }, {
        name: LocalGrepSearchToolName,
        description: 'Search local files for a regex pattern (ripgrep when available, Node fallback otherwise).',
        schema: LocalGrepSearchToolSchema,
        responseFormat: _enum.Constants.CONTENT_AND_ARTIFACT,
    });
}
|
|
959
|
+
/**
 * Creates the `glob_search` tool: file-name matching over the local
 * workspace, via `rg --files --glob` when ripgrep is available and
 * the Node fallback walker otherwise.
 *
 * @param {object} [config] - Local execution config (workspace fs,
 *   spawn backend, env, timeouts).
 * @returns A LangChain tool returning `[output, artifact]` where the
 *   artifact records `files`, `engine`, and any error detail.
 */
function createLocalGlobSearchTool(config = {}) {
    const fs = LocalExecutionEngine.getWorkspaceFS(config);
    return tools.tool(async (rawInput) => {
        const input = rawInput;
        const target = await LocalExecutionEngine.resolveWorkspacePathSafe(input.path ?? '.', config, 'read');
        const maxResults = Math.max(input.max_results ?? DEFAULT_MAX_RESULTS, 1);
        if (await isRipgrepAvailable(config)) {
            const result = await LocalExecutionEngine.spawnLocalProcess('rg', ['--files', '--hidden', '--glob', '!.git/**', '--glob', input.pattern, target], { ...config, timeoutMs: config.timeoutMs ?? 30000 });
            // rg --files exit codes:
            //   0 → at least one file matched
            //   1 → no files matched (clean — "No files found.")
            //   2 → real error (bad glob, unreadable target, etc.)
            // Without this branch, exit-2 errors used to silently map to
            // "No files found." — the agent then treats a tooling failure
            // as a real absence of matches.
            if (result.timedOut || (result.exitCode != null && result.exitCode > 1)) {
                const detail = result.stderr.trim() || `rg exited ${result.exitCode}`;
                return [
                    `glob_search failed: ${detail}`,
                    {
                        files: [],
                        engine: 'ripgrep',
                        error: detail,
                        exitCode: result.exitCode,
                    },
                ];
            }
            const lines = result.stdout
                .split('\n')
                .filter(Boolean)
                .slice(0, maxResults);
            return [
                lines.length > 0 ? lines.join('\n') : 'No files found.',
                { files: lines, engine: 'ripgrep' },
            ];
        }
        const files = await fallbackGlob(target, input.pattern, maxResults, fs);
        return [
            files.length > 0 ? files.join('\n') : 'No files found.',
            { files, engine: 'node-fallback' },
        ];
    }, {
        name: LocalGlobSearchToolName,
        description: 'Find local files matching a glob pattern (ripgrep when available, Node fallback otherwise).',
        schema: LocalGlobSearchToolSchema,
        responseFormat: _enum.Constants.CONTENT_AND_ARTIFACT,
    });
}
|
|
1007
|
+
/**
 * Creates the `list_directory` tool: a flat listing of one local
 * directory, each entry tagged `dir ` or `file`.
 *
 * @param {object} [config] - Local execution config (workspace fs).
 * @returns A LangChain tool returning `[listing, {path, count}]`.
 */
function createLocalListDirectoryTool(config = {}) {
    const fs = LocalExecutionEngine.getWorkspaceFS(config);
    return tools.tool(async (rawInput) => {
        const input = rawInput;
        const path = await LocalExecutionEngine.resolveWorkspacePathSafe(input.path ?? '.', config, 'read');
        const entries = await fs.readdir(path, { withFileTypes: true });
        const rows = [];
        for (const entry of entries) {
            // 'dir ' is padded to four chars so the tab-separated
            // columns line up with 'file'.
            const tag = entry.isDirectory() ? 'dir ' : 'file';
            rows.push(`${tag}\t${entry.name}`);
        }
        const output = rows.join('\n');
        return [output || 'Directory is empty.', { path, count: entries.length }];
    }, {
        name: LocalListDirectoryToolName,
        description: 'List files and directories in a local directory.',
        schema: LocalListDirectoryToolSchema,
        responseFormat: _enum.Constants.CONTENT_AND_ARTIFACT,
    });
}
|
|
1024
|
+
/**
 * Builds the full local coding tool set (file I/O, search, listing,
 * compile check, execution, and programmatic tool calling).
 *
 * @param {object} [config] - Shared local execution config;
 *   `fileCheckpointing: true` enables an implicit file checkpointer.
 * @param {object} [options] - `options.checkpointer` overrides the
 *   implicit checkpointer (takes precedence via `??`).
 * @returns {Array} Tool instances sharing one config/checkpointer.
 */
function createLocalCodingTools(config = {}, options = {}) {
    // An explicit checkpointer wins; otherwise create one only when
    // the config opts in. Write-capable tools below share it.
    const checkpointer = options.checkpointer ??
        (config.fileCheckpointing === true
            ? FileCheckpointer.createLocalFileCheckpointer({ fs: config.exec?.fs })
            : undefined);
    return [
        createLocalReadFileTool(config),
        createLocalWriteFileTool(config, checkpointer),
        createLocalEditFileTool(config, checkpointer),
        createLocalGrepSearchTool(config),
        createLocalGlobSearchTool(config),
        createLocalListDirectoryTool(config),
        CompileCheckTool.createCompileCheckTool(config),
        LocalExecutionTools.createLocalBashExecutionTool({ config }),
        LocalExecutionTools.createLocalCodeExecutionTool(config),
        LocalProgrammaticToolCalling.createLocalProgrammaticToolCallingTool(config),
        LocalProgrammaticToolCalling.createLocalBashProgrammaticToolCallingTool(config),
    ];
}
|
|
1043
|
+
/**
 * Variant of `createLocalCodingTools` that returns the bundle alongside
 * the file checkpointer so callers can later call
 * `bundle.checkpointer?.rewind()`.
 *
 * @param {object} [config] - Same config as `createLocalCodingTools`.
 * @param {object} [options] - `options.checkpointer` overrides the
 *   implicit checkpointer.
 * @returns {{tools: Array, checkpointer: object | undefined}}
 */
function createLocalCodingToolBundle(config = {}, options = {}) {
    // Resolve the checkpointer here (same precedence as
    // createLocalCodingTools) so the returned handle is the exact
    // instance the tools were wired with.
    const checkpointer = options.checkpointer ??
        (config.fileCheckpointing === true
            ? FileCheckpointer.createLocalFileCheckpointer({ fs: config.exec?.fs })
            : undefined);
    return {
        tools: createLocalCodingTools(config, { checkpointer }),
        checkpointer,
    };
}
|
|
1058
|
+
/**
 * Returns provider-agnostic tool *definitions* (name/description/
 * schema only — no executable handlers) for the local coding tools.
 *
 * NOTE(review): the read-file entry uses `_enum.Constants.READ_FILE`
 * while the others use the Local*ToolName constants — presumably they
 * resolve to the same names; confirm against the enum module.
 *
 * @returns {Array} Tool definition objects.
 */
function createLocalCodingToolDefinitions() {
    return [
        toolDefinition(_enum.Constants.READ_FILE, 'Read a local text file from the configured working directory with line numbers.', LocalReadFileToolSchema),
        toolDefinition(LocalWriteFileToolName, 'Create or overwrite a local text file in the configured working directory.', LocalWriteFileToolSchema),
        toolDefinition(LocalEditFileToolName, 'Apply exact text replacements to a local file.', LocalEditFileToolSchema),
        toolDefinition(LocalGrepSearchToolName, 'Search local files with ripgrep and return matching lines.', LocalGrepSearchToolSchema),
        toolDefinition(LocalGlobSearchToolName, 'Find local files matching a glob pattern.', LocalGlobSearchToolSchema),
        toolDefinition(LocalListDirectoryToolName, 'List files and directories in a local directory.', LocalListDirectoryToolSchema),
        CompileCheckTool.createCompileCheckToolDefinition(),
    ];
}
|
|
1069
|
+
/**
 * Builds a name → definition Map over the local coding tool
 * definitions for O(1) lookup by tool name.
 *
 * @returns {Map<string, object>} Registry keyed by definition name.
 */
function createLocalCodingToolRegistry() {
    const registry = new Map();
    for (const definition of createLocalCodingToolDefinitions()) {
        registry.set(definition.name, definition);
    }
    return registry;
}
|
|
1075
|
+
|
|
1076
|
+
// Public CommonJS surface of LocalCodingTools: tool names, zod
// schemas, factory functions, and the test-only cache reset hook.
exports.LocalEditFileToolName = LocalEditFileToolName;
exports.LocalEditFileToolSchema = LocalEditFileToolSchema;
exports.LocalGlobSearchToolName = LocalGlobSearchToolName;
exports.LocalGlobSearchToolSchema = LocalGlobSearchToolSchema;
exports.LocalGrepSearchToolName = LocalGrepSearchToolName;
exports.LocalGrepSearchToolSchema = LocalGrepSearchToolSchema;
exports.LocalListDirectoryToolName = LocalListDirectoryToolName;
exports.LocalListDirectoryToolSchema = LocalListDirectoryToolSchema;
exports.LocalReadFileToolSchema = LocalReadFileToolSchema;
exports.LocalWriteFileToolName = LocalWriteFileToolName;
exports.LocalWriteFileToolSchema = LocalWriteFileToolSchema;
exports._resetRipgrepCacheForTests = _resetRipgrepCacheForTests;
exports.createLocalCodingToolBundle = createLocalCodingToolBundle;
exports.createLocalCodingToolDefinitions = createLocalCodingToolDefinitions;
exports.createLocalCodingToolRegistry = createLocalCodingToolRegistry;
exports.createLocalCodingTools = createLocalCodingTools;
exports.createLocalEditFileTool = createLocalEditFileTool;
exports.createLocalGlobSearchTool = createLocalGlobSearchTool;
exports.createLocalGrepSearchTool = createLocalGrepSearchTool;
exports.createLocalListDirectoryTool = createLocalListDirectoryTool;
exports.createLocalReadFileTool = createLocalReadFileTool;
exports.createLocalWriteFileTool = createLocalWriteFileTool;
//# sourceMappingURL=LocalCodingTools.cjs.map
|