@docyrus/docyrus 0.0.20 → 0.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agent-loader.js +32 -1
- package/agent-loader.js.map +2 -2
- package/main.js +325 -71
- package/main.js.map +4 -4
- package/package.json +16 -3
- package/resources/chrome-tools/browser-content.js +103 -0
- package/resources/chrome-tools/browser-cookies.js +35 -0
- package/resources/chrome-tools/browser-eval.js +53 -0
- package/resources/chrome-tools/browser-hn-scraper.js +108 -0
- package/resources/chrome-tools/browser-nav.js +44 -0
- package/resources/chrome-tools/browser-pick.js +162 -0
- package/resources/chrome-tools/browser-screenshot.js +34 -0
- package/resources/chrome-tools/browser-start.js +86 -0
- package/resources/pi-agent/extensions/answer.ts +532 -0
- package/resources/pi-agent/extensions/context.ts +578 -0
- package/resources/pi-agent/extensions/control.ts +1779 -0
- package/resources/pi-agent/extensions/diff.ts +218 -0
- package/resources/pi-agent/extensions/files.ts +199 -0
- package/resources/pi-agent/extensions/loop.ts +446 -0
- package/resources/pi-agent/extensions/multi-edit.ts +835 -0
- package/resources/pi-agent/extensions/notify.ts +88 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/LICENSE +21 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/README.md +19 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/index.ts +52 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/package.json +61 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/pty-execute.ts +97 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/pty-kill.ts +25 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/pty-session.ts +143 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/spawn-helper.ts +31 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/terminal-emulator.ts +439 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/truncate.ts +68 -0
- package/resources/pi-agent/extensions/pi-bash-live-view/widget.ts +114 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/CHANGELOG.md +192 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/LICENSE +21 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/README.md +296 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/app-bridge.bundle.js +67 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/cli.js +108 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/commands.ts +211 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/config.ts +227 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/consent-manager.ts +64 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/direct-tools.ts +301 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/errors.ts +219 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/glimpse-ui.ts +80 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/host-html-template.ts +427 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/index.ts +232 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/init.ts +319 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/lifecycle.ts +93 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/logger.ts +169 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/mcp-panel.ts +713 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/metadata-cache.ts +191 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/npx-resolver.ts +419 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/oauth-handler.ts +56 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/package.json +85 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/paths.ts +29 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/proxy-modes.ts +635 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/resource-tools.ts +17 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/server-manager.ts +330 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/state.ts +41 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/tool-metadata.ts +144 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/tool-registrar.ts +46 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/types.ts +367 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/ui-resource-handler.ts +145 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/ui-server.ts +623 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/ui-session.ts +384 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/ui-stream-types.ts +89 -0
- package/resources/pi-agent/extensions/pi-mcp-adapter/utils.ts +75 -0
- package/resources/pi-agent/extensions/prompt-editor.ts +1315 -0
- package/resources/pi-agent/extensions/prompt-url-widget.ts +158 -0
- package/resources/pi-agent/extensions/redraws.ts +24 -0
- package/resources/pi-agent/extensions/review.ts +2160 -0
- package/resources/pi-agent/extensions/todos.ts +2076 -0
- package/resources/pi-agent/extensions/tps.ts +47 -0
- package/resources/pi-agent/extensions/whimsical.ts +474 -0
- package/resources/pi-agent/skills/changelog-generator/SKILL.md +425 -0
- package/resources/pi-agent/skills/docyrus-chrome-devtools-cli/SKILL.md +80 -0
- package/resources/pi-agent/skills/docyrus-platform/references/docyrus-cli-usage.md +51 -0
|
@@ -0,0 +1,835 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Multi-Edit Extension — replaces the built-in `edit` tool.
|
|
3
|
+
*
|
|
4
|
+
* Supports all original parameters (path, oldText, newText) plus:
|
|
5
|
+
* - `multi`: array of {path, oldText, newText} edits applied in sequence
|
|
6
|
+
* - `patch`: Codex-style apply_patch payload
|
|
7
|
+
*
|
|
8
|
+
* When both top-level params and `multi` are provided, the top-level edit
|
|
9
|
+
* is treated as an implicit first item prepended to the multi list.
|
|
10
|
+
*
|
|
11
|
+
* A preflight pass is performed before mutating files:
|
|
12
|
+
* - multi/top-level mode: preflight via virtualized built-in edit tool
|
|
13
|
+
* - patch mode: preflight by applying patch operations on a virtual filesystem
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
|
17
|
+
import { Type } from "@sinclair/typebox";
|
|
18
|
+
import * as Diff from "diff";
|
|
19
|
+
import { constants } from "fs";
|
|
20
|
+
import { access as fsAccess, readFile as fsReadFile, unlink as fsUnlink, writeFile as fsWriteFile } from "fs/promises";
|
|
21
|
+
import { isAbsolute, resolve as resolvePath } from "path";
|
|
22
|
+
|
|
23
|
+
// Schema for a single entry of the `multi` array.
// `path` is optional at the schema level — the description says it inherits
// from the top-level `path`; that resolution happens before edits reach the
// (path-required) EditItem shape. TODO confirm where inheritance is applied.
const editItemSchema = Type.Object({
  path: Type.Optional(Type.String({ description: "Path to the file to edit (relative or absolute). Inherits from top-level path if omitted." })),
  oldText: Type.String({ description: "Exact text to find and replace (must match exactly)" }),
  newText: Type.String({ description: "New text to replace the old text with" }),
});
|
|
28
|
+
|
|
29
|
+
// Tool-parameter schema for the replacement `edit` tool.
// Three mutually-related input modes:
//   - classic: top-level path/oldText/newText (single edit)
//   - multi:   an ordered list of edits (top-level edit, if present, is
//              treated as an implicit first item — see file header)
//   - patch:   a Codex-style apply_patch payload, exclusive with the others
const multiEditSchema = Type.Object({
  path: Type.Optional(Type.String({ description: "Path to the file to edit (relative or absolute)" })),
  oldText: Type.Optional(Type.String({ description: "Exact text to find and replace (must match exactly)" })),
  newText: Type.Optional(Type.String({ description: "New text to replace the old text with" })),
  multi: Type.Optional(
    Type.Array(editItemSchema, {
      description: "Multiple edits to apply in sequence. Each item has path, oldText, and newText.",
    }),
  ),
  patch: Type.Optional(
    Type.String({
      description:
        "Codex-style apply_patch payload (*** Begin Patch ... *** End Patch). Mutually exclusive with path/oldText/newText/multi.",
    }),
  ),
});
|
|
45
|
+
|
|
46
|
+
// Fully-resolved classic edit: unlike the schema, `path` is required here
// (top-level path inheritance is resolved before this shape is used).
interface EditItem {
  path: string;
  oldText: string;
  newText: string;
}

// Outcome of one classic edit. `diff`/`firstChangedLine` are only attached
// to the first edit of each file group when diff collection is enabled
// (see applyClassicEdits).
interface EditResult {
  path: string;
  success: boolean;
  message: string;
  diff?: string;
  firstChangedLine?: number;
}

// One parsed @@-hunk of a Codex update: optional context anchor line,
// the lines expected in the file, their replacements, and whether the
// hunk carried a '*** End of File' marker (anchors the search at the tail).
interface UpdateChunk {
  changeContext?: string;
  oldLines: string[];
  newLines: string[];
  isEndOfFile: boolean;
}

// Discriminated union of the three patch hunk kinds produced by parsePatch.
type PatchOperation =
  | { kind: "add"; path: string; contents: string }
  | { kind: "delete"; path: string }
  | { kind: "update"; path: string; chunks: UpdateChunk[] };

// Outcome of a single patch operation (add/delete/update).
interface PatchOpResult {
  path: string;
  message: string;
  diff?: string;
  firstChangedLine?: number;
}
|
|
78
|
+
|
|
79
|
+
/**
 * Render a line-numbered diff between two text contents.
 *
 * Changed lines are prefixed `+`/`-` with their new/old line number;
 * unchanged regions adjacent to a change show up to `contextLines` lines of
 * context and collapse the middle into a `...` marker. Unchanged regions not
 * adjacent to any change are omitted entirely.
 *
 * Returns the rendered diff text plus the new-file line number of the first
 * change (`undefined` when the contents are identical).
 */
function generateDiffString(
  oldContent: string,
  newContent: string,
  contextLines = 4,
): { diff: string; firstChangedLine: number | undefined } {
  const parts = Diff.diffLines(oldContent, newContent);
  const output: string[] = [];

  // Gutter width is sized to the longer of the two files.
  const oldLines = oldContent.split("\n");
  const newLines = newContent.split("\n");
  const maxLineNum = Math.max(oldLines.length, newLines.length);
  const lineNumWidth = String(maxLineNum).length;

  let oldLineNum = 1;
  let newLineNum = 1;
  let lastWasChange = false;
  let firstChangedLine: number | undefined;

  for (let i = 0; i < parts.length; i++) {
    const part = parts[i];
    const raw = part.value.split("\n");
    // Part values end with "\n", so splitting yields an empty trailing token.
    if (raw[raw.length - 1] === "") {
      raw.pop();
    }

    if (part.added || part.removed) {
      if (firstChangedLine === undefined) {
        firstChangedLine = newLineNum;
      }

      for (const line of raw) {
        if (part.added) {
          const lineNum = String(newLineNum).padStart(lineNumWidth, " ");
          output.push(`+${lineNum} ${line}`);
          newLineNum++;
        } else {
          const lineNum = String(oldLineNum).padStart(lineNumWidth, " ");
          output.push(`-${lineNum} ${line}`);
          oldLineNum++;
        }
      }
      lastWasChange = true;
    } else {
      const nextPartIsChange = i < parts.length - 1 && (parts[i + 1].added || parts[i + 1].removed);

      if (lastWasChange || nextPartIsChange) {
        // Determine how many lines to show at the start and end of this
        // unchanged block. When the block sits between two changes we
        // show context on both sides but collapse the middle.
        const showAtStart = lastWasChange ? contextLines : 0;
        const showAtEnd = nextPartIsChange ? contextLines : 0;

        if (raw.length <= showAtStart + showAtEnd) {
          // Block is small enough — show it entirely.
          for (const line of raw) {
            const lineNum = String(oldLineNum).padStart(lineNumWidth, " ");
            output.push(` ${lineNum} ${line}`);
            oldLineNum++;
            newLineNum++;
          }
        } else {
          // Show head context.
          for (let j = 0; j < showAtStart; j++) {
            const lineNum = String(oldLineNum).padStart(lineNumWidth, " ");
            output.push(` ${lineNum} ${raw[j]}`);
            oldLineNum++;
            newLineNum++;
          }

          // Collapse the middle.
          const skipped = raw.length - showAtStart - showAtEnd;
          if (skipped > 0) {
            output.push(` ${"".padStart(lineNumWidth, " ")} ...`);
            oldLineNum += skipped;
            newLineNum += skipped;
          }

          // Show tail context.
          for (let j = raw.length - showAtEnd; j < raw.length; j++) {
            const lineNum = String(oldLineNum).padStart(lineNumWidth, " ");
            output.push(` ${lineNum} ${raw[j]}`);
            oldLineNum++;
            newLineNum++;
          }
        }
      } else {
        // Unchanged block far from any change: advance counters silently.
        oldLineNum += raw.length;
        newLineNum += raw.length;
      }

      lastWasChange = false;
    }
  }

  return { diff: output.join("\n"), firstChangedLine };
}
|
|
175
|
+
|
|
176
|
+
// Filesystem abstraction shared by the real (disk) pass and the virtual
// (in-memory preflight) pass. All paths passed in are absolute.
interface Workspace {
  // Read the file as UTF-8 text; rejects when the file does not exist.
  readText: (absolutePath: string) => Promise<string>;
  // Write (create or overwrite) the file with UTF-8 text.
  writeText: (absolutePath: string, content: string) => Promise<void>;
  // Delete the file; rejects when the file does not exist.
  deleteFile: (absolutePath: string) => Promise<void>;
  exists: (absolutePath: string) => Promise<boolean>;
  /** Check that the file is writable. Rejects if not. No-op on virtual workspaces. */
  checkWriteAccess: (absolutePath: string) => Promise<void>;
}
|
|
184
|
+
|
|
185
|
+
function normalizeToLF(text: string): string {
|
|
186
|
+
return text.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
function resolvePatchPath(cwd: string, filePath: string): string {
|
|
190
|
+
const trimmed = filePath.trim();
|
|
191
|
+
if (!trimmed) {
|
|
192
|
+
throw new Error("Patch path cannot be empty");
|
|
193
|
+
}
|
|
194
|
+
return isAbsolute(trimmed) ? resolvePath(trimmed) : resolvePath(cwd, trimmed);
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
function ensureTrailingNewline(content: string): string {
|
|
198
|
+
return content.endsWith("\n") ? content : `${content}\n`;
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
function normaliseLineForFuzzyMatch(s: string): string {
|
|
202
|
+
return s
|
|
203
|
+
.trim()
|
|
204
|
+
.replace(/[\u2010\u2011\u2012\u2013\u2014\u2015\u2212]/g, "-")
|
|
205
|
+
.replace(/[\u2018\u2019\u201A\u201B]/g, "'")
|
|
206
|
+
.replace(/[\u201C\u201D\u201E\u201F]/g, '"')
|
|
207
|
+
.replace(/[\u00A0\u2002-\u200A\u202F\u205F\u3000]/g, " ");
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
function seekSequence(lines: string[], pattern: string[], start: number, eof: boolean): number | undefined {
|
|
211
|
+
if (pattern.length === 0) return start;
|
|
212
|
+
if (pattern.length > lines.length) return undefined;
|
|
213
|
+
|
|
214
|
+
const searchStart = eof && lines.length >= pattern.length ? lines.length - pattern.length : start;
|
|
215
|
+
const searchEnd = lines.length - pattern.length;
|
|
216
|
+
|
|
217
|
+
const exactEqual = (a: string, b: string) => a === b;
|
|
218
|
+
const rstripEqual = (a: string, b: string) => a.trimEnd() === b.trimEnd();
|
|
219
|
+
const trimEqual = (a: string, b: string) => a.trim() === b.trim();
|
|
220
|
+
const fuzzyEqual = (a: string, b: string) => normaliseLineForFuzzyMatch(a) === normaliseLineForFuzzyMatch(b);
|
|
221
|
+
|
|
222
|
+
const passes = [exactEqual, rstripEqual, trimEqual, fuzzyEqual];
|
|
223
|
+
|
|
224
|
+
for (const eq of passes) {
|
|
225
|
+
for (let i = searchStart; i <= searchEnd; i++) {
|
|
226
|
+
let ok = true;
|
|
227
|
+
for (let p = 0; p < pattern.length; p++) {
|
|
228
|
+
if (!eq(lines[i + p], pattern[p])) {
|
|
229
|
+
ok = false;
|
|
230
|
+
break;
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
if (ok) return i;
|
|
234
|
+
}
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
return undefined;
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] {
|
|
241
|
+
const next = [...lines];
|
|
242
|
+
|
|
243
|
+
for (const [start, oldLen, newSegment] of [...replacements].sort((a, b) => b[0] - a[0])) {
|
|
244
|
+
next.splice(start, oldLen, ...newSegment);
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
return next;
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
/**
 * Apply the parsed update chunks of a patch to `currentContent` and return
 * the resulting file text (always newline-terminated).
 *
 * Chunks are located with seekSequence (progressively fuzzier matching),
 * scanning forward from the end of the previous match so hunks apply in
 * file order. Throws when a context line or an expected line run cannot
 * be found in the file.
 */
function deriveUpdatedContent(filePath: string, currentContent: string, chunks: UpdateChunk[]): string {
  // Split into lines, dropping the empty token from a trailing newline.
  const originalLines = currentContent.split("\n");
  if (originalLines[originalLines.length - 1] === "") {
    originalLines.pop();
  }

  const replacements: Array<[number, number, string[]]> = [];
  // Forward cursor: each chunk must match at or after the previous one.
  let lineIndex = 0;

  for (const chunk of chunks) {
    if (chunk.changeContext !== undefined) {
      // Anchor the search just past the @@ context line.
      const ctxIndex = seekSequence(originalLines, [chunk.changeContext], lineIndex, false);
      if (ctxIndex === undefined) {
        throw new Error(`Failed to find context '${chunk.changeContext}' in ${filePath}`);
      }
      lineIndex = ctxIndex + 1;
    }

    if (chunk.oldLines.length === 0) {
      // Pure insertion hunk: new lines are appended at end of file.
      replacements.push([originalLines.length, 0, [...chunk.newLines]]);
      continue;
    }

    let pattern = chunk.oldLines;
    let newSlice = chunk.newLines;

    let found = seekSequence(originalLines, pattern, lineIndex, chunk.isEndOfFile);
    if (found === undefined && pattern[pattern.length - 1] === "") {
      // Retry without a trailing blank line (a common patch artifact),
      // dropping the matching trailing blank from the replacement too.
      pattern = pattern.slice(0, -1);
      if (newSlice[newSlice.length - 1] === "") {
        newSlice = newSlice.slice(0, -1);
      }
      found = seekSequence(originalLines, pattern, lineIndex, chunk.isEndOfFile);
    }

    if (found === undefined) {
      throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.oldLines.join("\n")}`);
    }

    replacements.push([found, pattern.length, [...newSlice]]);
    lineIndex = found + pattern.length;
  }

  const newLines = applyReplacements(originalLines, replacements);
  // Re-attach a trailing newline on the final content.
  if (newLines[newLines.length - 1] !== "") {
    newLines.push("");
  }
  return newLines.join("\n");
}
|
|
299
|
+
|
|
300
|
+
/**
 * Parse one @@-hunk of a '*** Update File:' section.
 *
 * Consumes lines starting at `startIndex`: an optional `@@` / `@@ <context>`
 * header, then body lines whose first character is ' ' (kept), '-' (removed),
 * or '+' (added). Stops at '*** End of File' (sets isEndOfFile), at the next
 * `@@`/`*** ` header, or at the first unprefixed line after some content.
 * Only the first chunk of a section may omit the header (`allowMissingContext`).
 *
 * Returns the parsed chunk plus the index of the first unconsumed line.
 * Throws on an empty hunk or an unprefixed first body line.
 */
function parseUpdateChunk(
  lines: string[],
  startIndex: number,
  lastContentLine: number,
  allowMissingContext: boolean,
): { chunk: UpdateChunk; nextIndex: number } {
  let i = startIndex;
  let changeContext: string | undefined;
  const first = lines[i].trimEnd();

  if (first === "@@") {
    i++;
  } else if (first.startsWith("@@ ")) {
    // Context anchor: everything after "@@ ".
    changeContext = first.slice(3);
    i++;
  } else if (!allowMissingContext) {
    throw new Error(`Expected update hunk to start with @@ context marker, got: '${lines[i]}'`);
  }

  const oldLines: string[] = [];
  const newLines: string[] = [];
  let parsed = 0;
  let isEndOfFile = false;

  while (i <= lastContentLine) {
    const raw = lines[i];
    const trimmed = raw.trimEnd();

    if (trimmed === "*** End of File") {
      if (parsed === 0) {
        throw new Error("Update hunk does not contain any lines");
      }
      isEndOfFile = true;
      i++;
      break;
    }

    // A new hunk or section header ends this chunk (once it has content).
    if (parsed > 0 && (trimmed.startsWith("@@") || trimmed.startsWith("*** "))) {
      break;
    }

    // A completely empty source line counts as an unchanged blank line.
    if (raw.length === 0) {
      oldLines.push("");
      newLines.push("");
      parsed++;
      i++;
      continue;
    }

    const marker = raw[0];
    const body = raw.slice(1);
    if (marker === " ") {
      // Unchanged: present in both old and new.
      oldLines.push(body);
      newLines.push(body);
    } else if (marker === "-") {
      oldLines.push(body);
    } else if (marker === "+") {
      newLines.push(body);
    } else if (parsed === 0) {
      throw new Error(
        `Unexpected line found in update hunk: '${raw}'. Every line should start with ' ', '+', or '-'.`,
      );
    } else {
      // Unprefixed line after some content: treat as end of this chunk.
      break;
    }

    parsed++;
    i++;
  }

  if (parsed === 0) {
    throw new Error("Update hunk does not contain any lines");
  }

  return {
    chunk: { changeContext, oldLines, newLines, isEndOfFile },
    nextIndex: i,
  };
}
|
|
379
|
+
|
|
380
|
+
/**
 * Parse a Codex-style apply_patch payload into a list of operations.
 *
 * The payload must be framed by '*** Begin Patch' / '*** End Patch' and may
 * contain '*** Add File:', '*** Delete File:', and '*** Update File:' hunks.
 * '*** Move to:' is explicitly rejected. Throws on any malformed input.
 */
function parsePatch(patchText: string): PatchOperation[] {
  const lines = normalizeToLF(patchText).trim().split("\n");
  if (lines.length < 2) {
    throw new Error("Patch is empty or invalid");
  }
  if (lines[0].trim() !== "*** Begin Patch") {
    throw new Error("The first line of the patch must be '*** Begin Patch'");
  }
  if (lines[lines.length - 1].trim() !== "*** End Patch") {
    throw new Error("The last line of the patch must be '*** End Patch'");
  }

  const operations: PatchOperation[] = [];
  let i = 1;
  // Last index that can hold hunk content (excludes the End Patch line).
  const lastContentLine = lines.length - 2;

  while (i <= lastContentLine) {
    if (lines[i].trim() === "") {
      i++;
      continue;
    }

    const line = lines[i].trim();
    if (line.startsWith("*** Add File: ")) {
      const path = line.slice("*** Add File: ".length);
      i++;
      const contentLines: string[] = [];
      // Every content line of an added file must start with '+'.
      while (i <= lastContentLine) {
        const next = lines[i];
        if (next.trim().startsWith("*** ")) break;
        if (!next.startsWith("+")) {
          throw new Error(`Invalid add-file line '${next}'. Add file lines must start with '+'`);
        }
        contentLines.push(next.slice(1));
        i++;
      }
      // Non-empty adds always carry a trailing newline.
      operations.push({ kind: "add", path, contents: contentLines.length > 0 ? `${contentLines.join("\n")}\n` : "" });
      continue;
    }

    if (line.startsWith("*** Delete File: ")) {
      const path = line.slice("*** Delete File: ".length);
      operations.push({ kind: "delete", path });
      i++;
      continue;
    }

    if (line.startsWith("*** Update File: ")) {
      const path = line.slice("*** Update File: ".length);
      i++;

      if (i <= lastContentLine && lines[i].trim().startsWith("*** Move to: ")) {
        throw new Error("Patch move operations (*** Move to:) are not supported.");
      }

      const chunks: UpdateChunk[] = [];
      while (i <= lastContentLine) {
        if (lines[i].trim() === "") {
          i++;
          continue;
        }
        if (lines[i].trim().startsWith("*** ")) {
          break;
        }

        // Only the first chunk of a section may omit the @@ marker.
        const parsed = parseUpdateChunk(lines, i, lastContentLine, chunks.length === 0);
        chunks.push(parsed.chunk);
        i = parsed.nextIndex;
      }

      if (chunks.length === 0) {
        throw new Error(`Update file hunk for path '${path}' is empty`);
      }

      operations.push({ kind: "update", path, chunks });
      continue;
    }

    throw new Error(
      `'${line}' is not a valid hunk header. Valid headers: '*** Add File:', '*** Delete File:', '*** Update File:'`,
    );
  }

  return operations;
}
|
|
465
|
+
|
|
466
|
+
function createRealWorkspace(): Workspace {
|
|
467
|
+
return {
|
|
468
|
+
readText: (absolutePath: string) => fsReadFile(absolutePath, "utf-8"),
|
|
469
|
+
writeText: (absolutePath: string, content: string) => fsWriteFile(absolutePath, content, "utf-8"),
|
|
470
|
+
deleteFile: (absolutePath: string) => fsUnlink(absolutePath),
|
|
471
|
+
exists: async (absolutePath: string) => {
|
|
472
|
+
try {
|
|
473
|
+
await fsAccess(absolutePath, constants.F_OK);
|
|
474
|
+
return true;
|
|
475
|
+
} catch {
|
|
476
|
+
return false;
|
|
477
|
+
}
|
|
478
|
+
},
|
|
479
|
+
checkWriteAccess: (absolutePath: string) => fsAccess(absolutePath, constants.R_OK | constants.W_OK),
|
|
480
|
+
};
|
|
481
|
+
}
|
|
482
|
+
|
|
483
|
+
function createVirtualWorkspace(cwd: string): Workspace {
|
|
484
|
+
const state = new Map<string, string | null>();
|
|
485
|
+
|
|
486
|
+
async function ensureLoaded(absolutePath: string): Promise<void> {
|
|
487
|
+
if (state.has(absolutePath)) return;
|
|
488
|
+
try {
|
|
489
|
+
const content = await fsReadFile(absolutePath, "utf-8");
|
|
490
|
+
state.set(absolutePath, content);
|
|
491
|
+
} catch {
|
|
492
|
+
state.set(absolutePath, null);
|
|
493
|
+
}
|
|
494
|
+
}
|
|
495
|
+
|
|
496
|
+
return {
|
|
497
|
+
readText: async (absolutePath) => {
|
|
498
|
+
await ensureLoaded(absolutePath);
|
|
499
|
+
const content = state.get(absolutePath);
|
|
500
|
+
if (content === null || content === undefined) {
|
|
501
|
+
throw new Error(`File not found: ${absolutePath.replace(`${cwd}/`, "")}`);
|
|
502
|
+
}
|
|
503
|
+
return content;
|
|
504
|
+
},
|
|
505
|
+
writeText: async (absolutePath, content) => {
|
|
506
|
+
state.set(absolutePath, content);
|
|
507
|
+
},
|
|
508
|
+
deleteFile: async (absolutePath) => {
|
|
509
|
+
await ensureLoaded(absolutePath);
|
|
510
|
+
if (state.get(absolutePath) === null) {
|
|
511
|
+
throw new Error(`File not found: ${absolutePath.replace(`${cwd}/`, "")}`);
|
|
512
|
+
}
|
|
513
|
+
state.set(absolutePath, null);
|
|
514
|
+
},
|
|
515
|
+
exists: async (absolutePath) => {
|
|
516
|
+
await ensureLoaded(absolutePath);
|
|
517
|
+
return state.get(absolutePath) !== null;
|
|
518
|
+
},
|
|
519
|
+
checkWriteAccess: async () => {
|
|
520
|
+
// No-op for virtual workspace — permission checks happen on the real pass.
|
|
521
|
+
},
|
|
522
|
+
};
|
|
523
|
+
}
|
|
524
|
+
|
|
525
|
+
async function applyPatchOperations(
|
|
526
|
+
ops: PatchOperation[],
|
|
527
|
+
workspace: Workspace,
|
|
528
|
+
cwd: string,
|
|
529
|
+
signal?: AbortSignal,
|
|
530
|
+
options?: { collectDiff?: boolean },
|
|
531
|
+
): Promise<PatchOpResult[]> {
|
|
532
|
+
const results: PatchOpResult[] = [];
|
|
533
|
+
const collectDiff = options?.collectDiff ?? false;
|
|
534
|
+
|
|
535
|
+
for (const op of ops) {
|
|
536
|
+
if (signal?.aborted) {
|
|
537
|
+
throw new Error("Operation aborted");
|
|
538
|
+
}
|
|
539
|
+
|
|
540
|
+
if (op.kind === "add") {
|
|
541
|
+
const abs = resolvePatchPath(cwd, op.path);
|
|
542
|
+
let oldText = "";
|
|
543
|
+
if (collectDiff && (await workspace.exists(abs))) {
|
|
544
|
+
oldText = await workspace.readText(abs);
|
|
545
|
+
}
|
|
546
|
+
const newText = ensureTrailingNewline(op.contents);
|
|
547
|
+
await workspace.writeText(abs, newText);
|
|
548
|
+
const result: PatchOpResult = { path: op.path, message: `Added file ${op.path}.` };
|
|
549
|
+
if (collectDiff) {
|
|
550
|
+
const diffResult = generateDiffString(oldText, newText);
|
|
551
|
+
result.diff = diffResult.diff;
|
|
552
|
+
result.firstChangedLine = diffResult.firstChangedLine;
|
|
553
|
+
}
|
|
554
|
+
results.push(result);
|
|
555
|
+
continue;
|
|
556
|
+
}
|
|
557
|
+
|
|
558
|
+
if (op.kind === "delete") {
|
|
559
|
+
const abs = resolvePatchPath(cwd, op.path);
|
|
560
|
+
const exists = await workspace.exists(abs);
|
|
561
|
+
if (!exists) {
|
|
562
|
+
throw new Error(`Failed to delete ${op.path}: file does not exist`);
|
|
563
|
+
}
|
|
564
|
+
let oldText = "";
|
|
565
|
+
if (collectDiff) {
|
|
566
|
+
oldText = await workspace.readText(abs);
|
|
567
|
+
}
|
|
568
|
+
await workspace.deleteFile(abs);
|
|
569
|
+
const result: PatchOpResult = { path: op.path, message: `Deleted file ${op.path}.` };
|
|
570
|
+
if (collectDiff) {
|
|
571
|
+
const diffResult = generateDiffString(oldText, "");
|
|
572
|
+
result.diff = diffResult.diff;
|
|
573
|
+
result.firstChangedLine = diffResult.firstChangedLine;
|
|
574
|
+
}
|
|
575
|
+
results.push(result);
|
|
576
|
+
continue;
|
|
577
|
+
}
|
|
578
|
+
|
|
579
|
+
const sourceAbs = resolvePatchPath(cwd, op.path);
|
|
580
|
+
const sourceText = await workspace.readText(sourceAbs);
|
|
581
|
+
const updated = deriveUpdatedContent(op.path, sourceText, op.chunks);
|
|
582
|
+
|
|
583
|
+
await workspace.writeText(sourceAbs, updated);
|
|
584
|
+
const result: PatchOpResult = { path: op.path, message: `Updated ${op.path}.` };
|
|
585
|
+
if (collectDiff) {
|
|
586
|
+
const diffResult = generateDiffString(sourceText, updated);
|
|
587
|
+
result.diff = diffResult.diff;
|
|
588
|
+
result.firstChangedLine = diffResult.firstChangedLine;
|
|
589
|
+
}
|
|
590
|
+
results.push(result);
|
|
591
|
+
}
|
|
592
|
+
|
|
593
|
+
return results;
|
|
594
|
+
}
|
|
595
|
+
|
|
596
|
+
/**
 * Apply a list of classic edits (path/oldText/newText) sequentially via a Workspace.
 *
 * When multiple edits target the same file, occurrences are matched in file order
 * (advancing a cursor after each match) so that the model can rely on positional
 * ordering instead of needing globally-unique oldText snippets.
 */
async function applyClassicEdits(
  edits: EditItem[],
  workspace: Workspace,
  cwd: string,
  signal?: AbortSignal,
  options?: { collectDiff?: boolean },
): Promise<EditResult[]> {
  const collectDiff = options?.collectDiff ?? false;

  // Group edits by resolved absolute path, preserving order.
  const fileGroups = new Map<string, { index: number; edit: EditItem }[]>();
  const editOrder: string[] = []; // track insertion order of keys

  for (let i = 0; i < edits.length; i++) {
    const abs = isAbsolute(edits[i].path) ? resolvePath(edits[i].path) : resolvePath(cwd, edits[i].path);
    if (!fileGroups.has(abs)) {
      fileGroups.set(abs, []);
      editOrder.push(abs);
    }
    fileGroups.get(abs)!.push({ index: i, edit: edits[i] });
  }

  // Results are indexed by each edit's original position in `edits`.
  const results: EditResult[] = new Array(edits.length);

  // Verify write access to all target files before mutating anything.
  for (const absPath of editOrder) {
    await workspace.checkWriteAccess(absPath);
  }

  for (const absPath of editOrder) {
    const group = fileGroups.get(absPath)!;

    if (signal?.aborted) {
      throw new Error("Operation aborted");
    }

    const originalContent = await workspace.readText(absPath);
    let content = originalContent;
    // Cursor: each subsequent edit in this file must match at or after the
    // end of the previous replacement (positional ordering).
    let searchOffset = 0;

    for (const { index, edit } of group) {
      if (signal?.aborted) {
        throw new Error("Operation aborted");
      }

      // Find oldText starting from the cursor position (positional ordering).
      const pos = content.indexOf(edit.oldText, searchOffset);

      if (pos === -1) {
        results[index] = {
          path: edit.path,
          success: false,
          message: `Could not find the exact text in ${edit.path}. The old text must match exactly including all whitespace and newlines.`,
        };
        // Fill remaining edits in this group as skipped.
        // NOTE(review): despite the comment above, nothing is marked skipped —
        // only results recorded so far survive filter(Boolean). Confirm intent.
        // `formatResults` is defined elsewhere in this file (not visible here).
        const filled = Array.from({ length: edits.length }, (_, i) => results[i]).filter(Boolean);
        throw new Error(formatResults(filled, edits.length));
      }

      content = content.slice(0, pos) + edit.newText + content.slice(pos + edit.oldText.length);
      searchOffset = pos + edit.newText.length;

      results[index] = {
        path: edit.path,
        success: true,
        message: `Edited ${edit.path}.`,
      };
    }

    // Write back the fully-edited file.
    await workspace.writeText(absPath, content);

    // Generate a single diff for all edits to this file; attach to first edit.
    if (collectDiff) {
      const diffResult = generateDiffString(originalContent, content);
      const firstIdx = group[0].index;
      results[firstIdx].diff = diffResult.diff;
      results[firstIdx].firstChangedLine = diffResult.firstChangedLine;
    }
  }

  return results;
}
|
|
686
|
+
|
|
687
|
+
export default function (pi: ExtensionAPI) {
|
|
688
|
+
pi.registerTool({
|
|
689
|
+
name: "edit",
|
|
690
|
+
label: "edit",
|
|
691
|
+
description:
|
|
692
|
+
"Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits. Supports a `multi` parameter for batch edits across one or more files, and a `patch` parameter for Codex-style patches.",
|
|
693
|
+
promptSnippet:
|
|
694
|
+
"Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.",
|
|
695
|
+
promptGuidelines: [
|
|
696
|
+
"Use edit for precise changes (old text must match exactly)",
|
|
697
|
+
"Use the `multi` parameter to apply multiple edits in a single tool call",
|
|
698
|
+
"Use the `patch` parameter for Codex-style multi-file / hunk-based edits",
|
|
699
|
+
],
|
|
700
|
+
parameters: multiEditSchema,
|
|
701
|
+
|
|
702
|
+
async execute(toolCallId, params, signal, onUpdate, ctx) {
|
|
703
|
+
const { path, oldText, newText, multi, patch } = params;
|
|
704
|
+
|
|
705
|
+
const hasAnyClassicParam = path !== undefined || oldText !== undefined || newText !== undefined || multi !== undefined;
|
|
706
|
+
if (patch !== undefined && hasAnyClassicParam) {
|
|
707
|
+
throw new Error("The `patch` parameter is mutually exclusive with path/oldText/newText/multi.");
|
|
708
|
+
}
|
|
709
|
+
|
|
710
|
+
if (patch !== undefined) {
|
|
711
|
+
const ops = parsePatch(patch);
|
|
712
|
+
|
|
713
|
+
// Preflight on virtual filesystem before mutating real files.
|
|
714
|
+
await applyPatchOperations(ops, createVirtualWorkspace(ctx.cwd), ctx.cwd, signal, { collectDiff: false });
|
|
715
|
+
|
|
716
|
+
// Apply for real.
|
|
717
|
+
const applied = await applyPatchOperations(ops, createRealWorkspace(), ctx.cwd, signal, { collectDiff: true });
|
|
718
|
+
const summary = applied.map((r, i) => `${i + 1}. ${r.message}`).join("\n");
|
|
719
|
+
const combinedDiff = applied
|
|
720
|
+
.filter((r) => r.diff)
|
|
721
|
+
.map((r) => `File: ${r.path}\n${r.diff}`)
|
|
722
|
+
.join("\n\n");
|
|
723
|
+
const firstChangedLine = applied.find((r) => r.firstChangedLine !== undefined)?.firstChangedLine;
|
|
724
|
+
return {
|
|
725
|
+
content: [{ type: "text" as const, text: `Applied patch with ${applied.length} operation(s).\n${summary}` }],
|
|
726
|
+
details: {
|
|
727
|
+
diff: combinedDiff,
|
|
728
|
+
firstChangedLine,
|
|
729
|
+
},
|
|
730
|
+
};
|
|
731
|
+
}
|
|
732
|
+
|
|
733
|
+
// Build classic edit list.
|
|
734
|
+
const edits: EditItem[] = [];
|
|
735
|
+
const hasTopLevel = path !== undefined && oldText !== undefined && newText !== undefined;
|
|
736
|
+
|
|
737
|
+
if (hasTopLevel) {
|
|
738
|
+
edits.push({ path: path!, oldText: oldText!, newText: newText! });
|
|
739
|
+
} else if (path !== undefined || oldText !== undefined || newText !== undefined) {
|
|
740
|
+
// When multi is present, only a bare top-level `path` (for inheritance) is allowed.
|
|
741
|
+
// Any other partial combination (e.g. path+oldText, oldText+newText) is an error.
|
|
742
|
+
const hasOnlyPath = path !== undefined && oldText === undefined && newText === undefined;
|
|
743
|
+
if (!hasOnlyPath || multi === undefined) {
|
|
744
|
+
const missing: string[] = [];
|
|
745
|
+
if (path === undefined) missing.push("path");
|
|
746
|
+
if (oldText === undefined) missing.push("oldText");
|
|
747
|
+
if (newText === undefined) missing.push("newText");
|
|
748
|
+
throw new Error(
|
|
749
|
+
`Incomplete top-level edit: missing ${missing.join(", ")}. Provide all three (path, oldText, newText) or use only the multi parameter.`,
|
|
750
|
+
);
|
|
751
|
+
}
|
|
752
|
+
// path-only top-level with multi is fine — path is inherited below.
|
|
753
|
+
}
|
|
754
|
+
|
|
755
|
+
if (multi) {
|
|
756
|
+
for (const item of multi) {
|
|
757
|
+
edits.push({
|
|
758
|
+
path: item.path ?? path ?? "",
|
|
759
|
+
oldText: item.oldText,
|
|
760
|
+
newText: item.newText,
|
|
761
|
+
});
|
|
762
|
+
}
|
|
763
|
+
}
|
|
764
|
+
|
|
765
|
+
if (edits.length === 0) {
|
|
766
|
+
throw new Error("No edits provided. Supply path/oldText/newText, a multi array, or a patch.");
|
|
767
|
+
}
|
|
768
|
+
|
|
769
|
+
// Validate that every edit has a path.
|
|
770
|
+
for (let i = 0; i < edits.length; i++) {
|
|
771
|
+
if (!edits[i].path) {
|
|
772
|
+
throw new Error(
|
|
773
|
+
`Edit ${i + 1} is missing a path. Provide a path on each multi item or set a top-level path to inherit.`,
|
|
774
|
+
);
|
|
775
|
+
}
|
|
776
|
+
}
|
|
777
|
+
|
|
778
|
+
// Preflight pass on virtual workspace before mutating real files.
|
|
779
|
+
// Uses sequential occurrence matching so same-file edits are resolved
|
|
780
|
+
// in file order (positional ordering).
|
|
781
|
+
try {
|
|
782
|
+
await applyClassicEdits(edits, createVirtualWorkspace(ctx.cwd), ctx.cwd, signal, { collectDiff: false });
|
|
783
|
+
} catch (err: any) {
|
|
784
|
+
throw new Error(`Preflight failed before mutating files.\n${err.message ?? String(err)}`);
|
|
785
|
+
}
|
|
786
|
+
|
|
787
|
+
// Apply for real.
|
|
788
|
+
const results = await applyClassicEdits(edits, createRealWorkspace(), ctx.cwd, signal, { collectDiff: true });
|
|
789
|
+
|
|
790
|
+
if (results.length === 1) {
|
|
791
|
+
const r = results[0];
|
|
792
|
+
return {
|
|
793
|
+
content: [{ type: "text" as const, text: r.message }],
|
|
794
|
+
details: {
|
|
795
|
+
diff: r.diff ?? "",
|
|
796
|
+
firstChangedLine: r.firstChangedLine,
|
|
797
|
+
},
|
|
798
|
+
};
|
|
799
|
+
}
|
|
800
|
+
|
|
801
|
+
const combinedDiff = results
|
|
802
|
+
.filter((r) => r.diff)
|
|
803
|
+
.map((r) => r.diff)
|
|
804
|
+
.join("\n");
|
|
805
|
+
|
|
806
|
+
const firstChanged = results.find((r) => r.firstChangedLine !== undefined)?.firstChangedLine;
|
|
807
|
+
const summary = results.map((r, i) => `${i + 1}. ${r.message}`).join("\n");
|
|
808
|
+
|
|
809
|
+
return {
|
|
810
|
+
content: [{ type: "text" as const, text: `Applied ${results.length} edit(s) successfully.\n${summary}` }],
|
|
811
|
+
details: {
|
|
812
|
+
diff: combinedDiff,
|
|
813
|
+
firstChangedLine: firstChanged,
|
|
814
|
+
},
|
|
815
|
+
};
|
|
816
|
+
},
|
|
817
|
+
});
|
|
818
|
+
}
|
|
819
|
+
|
|
820
|
+
function formatResults(results: EditResult[], totalEdits: number): string {
|
|
821
|
+
const lines: string[] = [];
|
|
822
|
+
|
|
823
|
+
for (let i = 0; i < results.length; i++) {
|
|
824
|
+
const r = results[i];
|
|
825
|
+
const status = r.success ? "✓" : "✗";
|
|
826
|
+
lines.push(`${status} Edit ${i + 1}/${totalEdits} (${r.path}): ${r.message}`);
|
|
827
|
+
}
|
|
828
|
+
|
|
829
|
+
const remaining = totalEdits - results.length;
|
|
830
|
+
if (remaining > 0) {
|
|
831
|
+
lines.push(`⊘ ${remaining} remaining edit(s) skipped due to error.`);
|
|
832
|
+
}
|
|
833
|
+
|
|
834
|
+
return lines.join("\n");
|
|
835
|
+
}
|