mitsupi 1.4.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +2 -1
- package/intercepted-commands/poetry +8 -1
- package/package.json +1 -1
- package/pi-extensions/multi-edit.ts +641 -0
- package/pi-extensions/session-breakdown.ts +551 -74
- package/pi-extensions/uv.ts +89 -0
- package/skills/pi-share/SKILL.md +5 -2
- package/skills/pi-share/fetch-session.mjs +46 -15
- package/skills/summarize/SKILL.md +4 -3
- package/skills/summarize/to-markdown.mjs +3 -1
- package/skills/web-browser/scripts/start.js +19 -8
package/CHANGELOG.md
CHANGED
|
@@ -4,6 +4,18 @@ All notable changes to agent-stuff are documented here.
|
|
|
4
4
|
|
|
5
5
|
## Unreleased
|
|
6
6
|
|
|
7
|
+
## 1.5.0
|
|
8
|
+
|
|
9
|
+
* Added a `multi-edit` extension that replaces `edit` with support for batched `multi` edits and Codex-style `patch` payloads.
|
|
10
|
+
* Added preflight validation before mutating files for both `multi` edits and `patch` operations in `multi-edit`.
|
|
11
|
+
* Added `/session-breakdown` views for cwd, day-of-week, and time-of-day breakdowns.
|
|
12
|
+
* Added `pi-share` support for `pi.dev` URLs and `#session_id` inputs.
|
|
13
|
+
* Improved day rendering in `/session-breakdown`.
|
|
14
|
+
* Fixed PDF handling in the `summarize` skill.
|
|
15
|
+
* Hardened `uv` command handling by blocking pip/poetry bypasses.
|
|
16
|
+
* Fixed `web-browser` startup behavior to avoid killing user Chrome instances.
|
|
17
|
+
* Updated README extension docs to include `pi-extensions/multi-edit.ts`.
|
|
18
|
+
|
|
7
19
|
## 1.4.0
|
|
8
20
|
|
|
9
21
|
* Added a prompt editor extension for managing prompt modes (create, rename, delete, and edit), with persistence and detection fixes.
|
package/README.md
CHANGED
|
@@ -20,7 +20,7 @@ All skills live in the [`skills`](skills) folder:
|
|
|
20
20
|
* [`/native-web-search`](skills/native-web-search) - Trigger native web search with concise summaries and source URLs.
|
|
21
21
|
* [`/oebb-scotty`](skills/oebb-scotty) - Plan Austrian rail journeys via ÖBB Scotty API.
|
|
22
22
|
* [`/openscad`](skills/openscad) - Create/render OpenSCAD models and export STL files.
|
|
23
|
-
* [`/pi-share`](skills/pi-share) - Load and parse session transcripts from shittycodingagent.ai/buildwithpi URLs.
|
|
23
|
+
* [`/pi-share`](skills/pi-share) - Load and parse session transcripts from shittycodingagent.ai/buildwithpi/pi.dev URLs.
|
|
24
24
|
* [`/sentry`](skills/sentry) - Fetch and analyze Sentry issues, events, transactions, and logs.
|
|
25
25
|
* [`/summarize`](skills/summarize) - Convert files/URLs to Markdown via `uvx markitdown` and summarize.
|
|
26
26
|
* [`/tmux`](skills/tmux) - Drive tmux sessions via keystrokes and pane output scraping.
|
|
@@ -38,6 +38,7 @@ Custom extensions for Pi Coding Agent are in [`pi-extensions`](pi-extensions):
|
|
|
38
38
|
* [`files.ts`](pi-extensions/files.ts) - Unified file browser with git status + session references and reveal/open/edit/diff actions.
|
|
39
39
|
* [`go-to-bed.ts`](pi-extensions/go-to-bed.ts) - Late-night safety guard with explicit confirmation after midnight.
|
|
40
40
|
* [`loop.ts`](pi-extensions/loop.ts) - Prompt loop for rapid iterative coding with optional auto-continue.
|
|
41
|
+
* [`multi-edit.ts`](pi-extensions/multi-edit.ts) - Replaces the built-in `edit` tool with batch `multi` edits and Codex-style `patch` support, including preflight validation.
|
|
41
42
|
* [`notify.ts`](pi-extensions/notify.ts) - Native desktop notifications when the agent finishes.
|
|
42
43
|
* [`prompt-editor.ts`](pi-extensions/prompt-editor.ts) - In-editor prompt mode selector with persistence, history, config, and shortcuts.
|
|
43
44
|
* [`review.ts`](pi-extensions/review.ts) - Code review command (working tree, PR-style diff, commits, custom instructions, optional fix loop).
|
|
@@ -1,3 +1,10 @@
|
|
|
1
1
|
#!/bin/bash
# Interception shim: refuses any `poetry` invocation and points the caller
# at the equivalent uv commands. Always exits non-zero.

cat >&2 <<'EOF'
Error: poetry is disabled. Use uv instead:

 To initialize a project: uv init
 To add a dependency: uv add PACKAGE
 To sync dependencies: uv sync
 To run commands: uv run COMMAND

EOF
exit 1
|
package/package.json
CHANGED
|
@@ -0,0 +1,641 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Multi-Edit Extension — replaces the built-in `edit` tool.
|
|
3
|
+
*
|
|
4
|
+
* Supports all original parameters (path, oldText, newText) plus:
|
|
5
|
+
* - `multi`: array of {path, oldText, newText} edits applied in sequence
|
|
6
|
+
* - `patch`: Codex-style apply_patch payload
|
|
7
|
+
*
|
|
8
|
+
* When both top-level params and `multi` are provided, the top-level edit
|
|
9
|
+
* is treated as an implicit first item prepended to the multi list.
|
|
10
|
+
*
|
|
11
|
+
* A preflight pass is performed before mutating files:
|
|
12
|
+
* - multi/top-level mode: preflight via virtualized built-in edit tool
|
|
13
|
+
* - patch mode: preflight by applying patch operations on a virtual filesystem
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
|
17
|
+
import { createEditTool, type EditToolDetails } from "@mariozechner/pi-coding-agent";
|
|
18
|
+
import { Type } from "@sinclair/typebox";
|
|
19
|
+
import { constants } from "fs";
|
|
20
|
+
import { access as fsAccess, readFile as fsReadFile, unlink as fsUnlink, writeFile as fsWriteFile } from "fs/promises";
|
|
21
|
+
import { isAbsolute, resolve as resolvePath } from "path";
|
|
22
|
+
|
|
23
|
+
// TypeBox schema for a single find/replace edit (an element of `multi`).
const editItemSchema = Type.Object({
	path: Type.String({ description: "Path to the file to edit (relative or absolute)" }),
	oldText: Type.String({ description: "Exact text to find and replace (must match exactly)" }),
	newText: Type.String({ description: "New text to replace the old text with" }),
});

// Tool parameters: a classic single edit (path/oldText/newText), a `multi`
// batch, or a Codex-style `patch` payload. Every field is optional at the
// schema level; mutual exclusion and completeness are enforced at runtime
// inside execute().
const multiEditSchema = Type.Object({
	path: Type.Optional(Type.String({ description: "Path to the file to edit (relative or absolute)" })),
	oldText: Type.Optional(Type.String({ description: "Exact text to find and replace (must match exactly)" })),
	newText: Type.Optional(Type.String({ description: "New text to replace the old text with" })),
	multi: Type.Optional(
		Type.Array(editItemSchema, {
			description: "Multiple edits to apply in sequence. Each item has path, oldText, and newText.",
		}),
	),
	patch: Type.Optional(
		Type.String({
			description:
				"Codex-style apply_patch payload (*** Begin Patch ... *** End Patch). Mutually exclusive with path/oldText/newText/multi.",
		}),
	),
});

// A single resolved find/replace edit.
interface EditItem {
	path: string;
	oldText: string;
	newText: string;
}

// Outcome of one attempted edit (preflight or real apply).
interface EditResult {
	path: string;
	success: boolean;
	message: string;
	diff?: string; // diff text reported by the built-in edit tool, when available
	firstChangedLine?: number; // first changed line reported by the built-in edit tool
}

// One hunk of a Codex-style "*** Update File:" section.
interface UpdateChunk {
	changeContext?: string; // optional `@@ <context>` line that anchors the hunk
	oldLines: string[]; // lines expected in the current file (context + removals)
	newLines: string[]; // lines the hunk produces (context + additions)
	isEndOfFile: boolean; // hunk was terminated by "*** End of File"
}

// Parsed patch operations: add a new file, delete a file, or update in place.
type PatchOperation =
	| { kind: "add"; path: string; contents: string }
	| { kind: "delete"; path: string }
	| { kind: "update"; path: string; chunks: UpdateChunk[] };

// Human-readable result of one applied patch operation.
interface PatchOpResult {
	path: string;
	message: string;
}

// Minimal filesystem abstraction so patch application can run against either
// the real disk or an in-memory preflight workspace.
interface Workspace {
	readText: (absolutePath: string) => Promise<string>;
	writeText: (absolutePath: string, content: string) => Promise<void>;
	deleteFile: (absolutePath: string) => Promise<void>;
	exists: (absolutePath: string) => Promise<boolean>;
}
|
|
83
|
+
|
|
84
|
+
function normalizeToLF(text: string): string {
|
|
85
|
+
return text.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
function resolvePatchPath(cwd: string, filePath: string): string {
|
|
89
|
+
const trimmed = filePath.trim();
|
|
90
|
+
if (!trimmed) {
|
|
91
|
+
throw new Error("Patch path cannot be empty");
|
|
92
|
+
}
|
|
93
|
+
return isAbsolute(trimmed) ? resolvePath(trimmed) : resolvePath(cwd, trimmed);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
function ensureTrailingNewline(content: string): string {
|
|
97
|
+
return content.endsWith("\n") ? content : `${content}\n`;
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
function normaliseLineForFuzzyMatch(s: string): string {
|
|
101
|
+
return s
|
|
102
|
+
.trim()
|
|
103
|
+
.replace(/[\u2010\u2011\u2012\u2013\u2014\u2015\u2212]/g, "-")
|
|
104
|
+
.replace(/[\u2018\u2019\u201A\u201B]/g, "'")
|
|
105
|
+
.replace(/[\u201C\u201D\u201E\u201F]/g, '"')
|
|
106
|
+
.replace(/[\u00A0\u2002-\u200A\u202F\u205F\u3000]/g, " ");
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
function seekSequence(lines: string[], pattern: string[], start: number, eof: boolean): number | undefined {
|
|
110
|
+
if (pattern.length === 0) return start;
|
|
111
|
+
if (pattern.length > lines.length) return undefined;
|
|
112
|
+
|
|
113
|
+
const searchStart = eof && lines.length >= pattern.length ? lines.length - pattern.length : start;
|
|
114
|
+
const searchEnd = lines.length - pattern.length;
|
|
115
|
+
|
|
116
|
+
const exactEqual = (a: string, b: string) => a === b;
|
|
117
|
+
const rstripEqual = (a: string, b: string) => a.trimEnd() === b.trimEnd();
|
|
118
|
+
const trimEqual = (a: string, b: string) => a.trim() === b.trim();
|
|
119
|
+
const fuzzyEqual = (a: string, b: string) => normaliseLineForFuzzyMatch(a) === normaliseLineForFuzzyMatch(b);
|
|
120
|
+
|
|
121
|
+
const passes = [exactEqual, rstripEqual, trimEqual, fuzzyEqual];
|
|
122
|
+
|
|
123
|
+
for (const eq of passes) {
|
|
124
|
+
for (let i = searchStart; i <= searchEnd; i++) {
|
|
125
|
+
let ok = true;
|
|
126
|
+
for (let p = 0; p < pattern.length; p++) {
|
|
127
|
+
if (!eq(lines[i + p], pattern[p])) {
|
|
128
|
+
ok = false;
|
|
129
|
+
break;
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
if (ok) return i;
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
return undefined;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] {
|
|
140
|
+
const next = [...lines];
|
|
141
|
+
|
|
142
|
+
for (const [start, oldLen, newSegment] of [...replacements].sort((a, b) => b[0] - a[0])) {
|
|
143
|
+
next.splice(start, oldLen, ...newSegment);
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
return next;
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
/**
 * Computes the updated text of a file by locating and applying every hunk of
 * a Codex-style "Update File" operation. Hunks are matched in order, each
 * starting after the previous match; a failed match throws with the file
 * path so the caller can surface a precise error. Output always ends with a
 * trailing newline.
 */
function deriveUpdatedContent(filePath: string, currentContent: string, chunks: UpdateChunk[]): string {
	// Work line-wise; drop the phantom empty element a trailing newline creates.
	const originalLines = currentContent.split("\n");
	if (originalLines[originalLines.length - 1] === "") {
		originalLines.pop();
	}

	const replacements: Array<[number, number, string[]]> = [];
	let lineIndex = 0; // scan cursor: each hunk must match at or after this line

	for (const chunk of chunks) {
		// An `@@ <context>` anchor repositions the cursor just past the context line.
		if (chunk.changeContext !== undefined) {
			const ctxIndex = seekSequence(originalLines, [chunk.changeContext], lineIndex, false);
			if (ctxIndex === undefined) {
				throw new Error(`Failed to find context '${chunk.changeContext}' in ${filePath}`);
			}
			lineIndex = ctxIndex + 1;
		}

		if (chunk.oldLines.length === 0) {
			// Pure-insertion hunk: appended at end of file.
			// NOTE(review): this appends at EOF even when a @@ context matched
			// above — confirm that is the intended insertion point.
			replacements.push([originalLines.length, 0, [...chunk.newLines]]);
			continue;
		}

		let pattern = chunk.oldLines;
		let newSlice = chunk.newLines;

		let found = seekSequence(originalLines, pattern, lineIndex, chunk.isEndOfFile);
		// Retry without a trailing blank line: patches often carry an extra
		// empty context line that the target file does not have.
		if (found === undefined && pattern[pattern.length - 1] === "") {
			pattern = pattern.slice(0, -1);
			if (newSlice[newSlice.length - 1] === "") {
				newSlice = newSlice.slice(0, -1);
			}
			found = seekSequence(originalLines, pattern, lineIndex, chunk.isEndOfFile);
		}

		if (found === undefined) {
			throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.oldLines.join("\n")}`);
		}

		replacements.push([found, pattern.length, [...newSlice]]);
		lineIndex = found + pattern.length; // next hunk must match after this one
	}

	const newLines = applyReplacements(originalLines, replacements);
	// Re-add the trailing newline (as an empty final segment) before joining.
	if (newLines[newLines.length - 1] !== "") {
		newLines.push("");
	}
	return newLines.join("\n");
}
|
|
198
|
+
|
|
199
|
+
/**
 * Parses one hunk of an "Update File" section starting at `startIndex`.
 *
 * A hunk optionally begins with `@@` (no context) or `@@ <context>` and then
 * consists of body lines prefixed with ' ' (context), '-' (removal) or '+'
 * (addition). Parsing stops at "*** End of File", at the next `@@`/`*** `
 * header, or at the first unprefixed line after at least one body line.
 *
 * @param lastContentLine index of the last line that may belong to patch content
 * @param allowMissingContext when true (first hunk of a file section), a hunk
 *   may start directly with body lines instead of an @@ marker
 * @returns the parsed chunk and the index of the first unconsumed line
 * @throws Error on a malformed header, an empty hunk, or an invalid first body line
 */
function parseUpdateChunk(
	lines: string[],
	startIndex: number,
	lastContentLine: number,
	allowMissingContext: boolean,
): { chunk: UpdateChunk; nextIndex: number } {
	let i = startIndex;
	let changeContext: string | undefined;
	const first = lines[i].trimEnd();

	if (first === "@@") {
		i++;
	} else if (first.startsWith("@@ ")) {
		changeContext = first.slice(3);
		i++;
	} else if (!allowMissingContext) {
		throw new Error(`Expected update hunk to start with @@ context marker, got: '${lines[i]}'`);
	}

	const oldLines: string[] = [];
	const newLines: string[] = [];
	let parsed = 0; // number of body lines consumed so far
	let isEndOfFile = false;

	while (i <= lastContentLine) {
		const raw = lines[i];
		const trimmed = raw.trimEnd();

		// Explicit EOF anchor terminates the hunk.
		if (trimmed === "*** End of File") {
			if (parsed === 0) {
				throw new Error("Update hunk does not contain any lines");
			}
			isEndOfFile = true;
			i++;
			break;
		}

		// A new hunk or section header ends this hunk (only once it has content).
		if (parsed > 0 && (trimmed.startsWith("@@") || trimmed.startsWith("*** "))) {
			break;
		}

		// A completely empty line counts as an empty context line on both sides.
		if (raw.length === 0) {
			oldLines.push("");
			newLines.push("");
			parsed++;
			i++;
			continue;
		}

		const marker = raw[0];
		const body = raw.slice(1);
		if (marker === " ") {
			// Context line: present in both old and new content.
			oldLines.push(body);
			newLines.push(body);
		} else if (marker === "-") {
			oldLines.push(body);
		} else if (marker === "+") {
			newLines.push(body);
		} else if (parsed === 0) {
			// An invalid marker on the very first body line is a hard error;
			// later on it is treated as the end of the hunk instead.
			throw new Error(
				`Unexpected line found in update hunk: '${raw}'. Every line should start with ' ', '+', or '-'.`,
			);
		} else {
			break;
		}

		parsed++;
		i++;
	}

	if (parsed === 0) {
		throw new Error("Update hunk does not contain any lines");
	}

	return {
		chunk: { changeContext, oldLines, newLines, isEndOfFile },
		nextIndex: i,
	};
}
|
|
278
|
+
|
|
279
|
+
/**
 * Parses a full Codex-style apply_patch payload into a list of operations.
 *
 * The payload must be wrapped in "*** Begin Patch" / "*** End Patch" lines
 * and contain one or more "*** Add File:", "*** Delete File:" or
 * "*** Update File:" sections. Move operations are rejected explicitly.
 *
 * @throws Error on a malformed envelope, unknown section header, invalid
 *   add-file body line, unsupported move, or an empty update section.
 */
function parsePatch(patchText: string): PatchOperation[] {
	const lines = normalizeToLF(patchText).trim().split("\n");
	if (lines.length < 2) {
		throw new Error("Patch is empty or invalid");
	}
	if (lines[0].trim() !== "*** Begin Patch") {
		throw new Error("The first line of the patch must be '*** Begin Patch'");
	}
	if (lines[lines.length - 1].trim() !== "*** End Patch") {
		throw new Error("The last line of the patch must be '*** End Patch'");
	}

	const operations: PatchOperation[] = [];
	let i = 1; // skip the Begin Patch envelope line
	const lastContentLine = lines.length - 2; // last line before End Patch

	while (i <= lastContentLine) {
		// Blank lines between sections are ignored.
		if (lines[i].trim() === "") {
			i++;
			continue;
		}

		const line = lines[i].trim();
		if (line.startsWith("*** Add File: ")) {
			const path = line.slice("*** Add File: ".length);
			i++;
			// Every body line of an Add File section must be prefixed with '+'.
			const contentLines: string[] = [];
			while (i <= lastContentLine) {
				const next = lines[i];
				if (next.trim().startsWith("*** ")) break;
				if (!next.startsWith("+")) {
					throw new Error(`Invalid add-file line '${next}'. Add file lines must start with '+'`);
				}
				contentLines.push(next.slice(1));
				i++;
			}
			// Non-empty new files always get a trailing newline.
			operations.push({ kind: "add", path, contents: contentLines.length > 0 ? `${contentLines.join("\n")}\n` : "" });
			continue;
		}

		if (line.startsWith("*** Delete File: ")) {
			const path = line.slice("*** Delete File: ".length);
			operations.push({ kind: "delete", path });
			i++;
			continue;
		}

		if (line.startsWith("*** Update File: ")) {
			const path = line.slice("*** Update File: ".length);
			i++;

			// Renames are not supported; fail fast rather than mis-apply.
			if (i <= lastContentLine && lines[i].trim().startsWith("*** Move to: ")) {
				throw new Error("Patch move operations (*** Move to:) are not supported.");
			}

			// Collect hunks until the next section header; only the first hunk
			// of a section may omit its @@ marker.
			const chunks: UpdateChunk[] = [];
			while (i <= lastContentLine) {
				if (lines[i].trim() === "") {
					i++;
					continue;
				}
				if (lines[i].trim().startsWith("*** ")) {
					break;
				}

				const parsed = parseUpdateChunk(lines, i, lastContentLine, chunks.length === 0);
				chunks.push(parsed.chunk);
				i = parsed.nextIndex;
			}

			if (chunks.length === 0) {
				throw new Error(`Update file hunk for path '${path}' is empty`);
			}

			operations.push({ kind: "update", path, chunks });
			continue;
		}

		throw new Error(
			`'${line}' is not a valid hunk header. Valid headers: '*** Add File:', '*** Delete File:', '*** Update File:'`,
		);
	}

	return operations;
}
|
|
364
|
+
|
|
365
|
+
function createVirtualEditOperations(): {
|
|
366
|
+
readFile: (absolutePath: string) => Promise<Buffer>;
|
|
367
|
+
writeFile: (absolutePath: string, content: string) => Promise<void>;
|
|
368
|
+
access: (absolutePath: string) => Promise<void>;
|
|
369
|
+
} {
|
|
370
|
+
const files = new Map<string, string>();
|
|
371
|
+
|
|
372
|
+
async function ensureLoaded(absolutePath: string): Promise<void> {
|
|
373
|
+
if (files.has(absolutePath)) return;
|
|
374
|
+
const content = await fsReadFile(absolutePath, "utf-8");
|
|
375
|
+
files.set(absolutePath, content);
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
return {
|
|
379
|
+
readFile: async (absolutePath) => {
|
|
380
|
+
await ensureLoaded(absolutePath);
|
|
381
|
+
return Buffer.from(files.get(absolutePath) ?? "", "utf-8");
|
|
382
|
+
},
|
|
383
|
+
writeFile: async (absolutePath, content) => {
|
|
384
|
+
files.set(absolutePath, content);
|
|
385
|
+
},
|
|
386
|
+
access: async (absolutePath) => {
|
|
387
|
+
if (files.has(absolutePath)) return;
|
|
388
|
+
await fsAccess(absolutePath, constants.R_OK | constants.W_OK);
|
|
389
|
+
},
|
|
390
|
+
};
|
|
391
|
+
}
|
|
392
|
+
|
|
393
|
+
function createRealWorkspace(): Workspace {
|
|
394
|
+
return {
|
|
395
|
+
readText: (absolutePath: string) => fsReadFile(absolutePath, "utf-8"),
|
|
396
|
+
writeText: (absolutePath: string, content: string) => fsWriteFile(absolutePath, content, "utf-8"),
|
|
397
|
+
deleteFile: (absolutePath: string) => fsUnlink(absolutePath),
|
|
398
|
+
exists: async (absolutePath: string) => {
|
|
399
|
+
try {
|
|
400
|
+
await fsAccess(absolutePath, constants.F_OK);
|
|
401
|
+
return true;
|
|
402
|
+
} catch {
|
|
403
|
+
return false;
|
|
404
|
+
}
|
|
405
|
+
},
|
|
406
|
+
};
|
|
407
|
+
}
|
|
408
|
+
|
|
409
|
+
function createVirtualWorkspace(cwd: string): Workspace {
|
|
410
|
+
const state = new Map<string, string | null>();
|
|
411
|
+
|
|
412
|
+
async function ensureLoaded(absolutePath: string): Promise<void> {
|
|
413
|
+
if (state.has(absolutePath)) return;
|
|
414
|
+
try {
|
|
415
|
+
const content = await fsReadFile(absolutePath, "utf-8");
|
|
416
|
+
state.set(absolutePath, content);
|
|
417
|
+
} catch {
|
|
418
|
+
state.set(absolutePath, null);
|
|
419
|
+
}
|
|
420
|
+
}
|
|
421
|
+
|
|
422
|
+
return {
|
|
423
|
+
readText: async (absolutePath) => {
|
|
424
|
+
await ensureLoaded(absolutePath);
|
|
425
|
+
const content = state.get(absolutePath);
|
|
426
|
+
if (content === null || content === undefined) {
|
|
427
|
+
throw new Error(`File not found: ${absolutePath.replace(`${cwd}/`, "")}`);
|
|
428
|
+
}
|
|
429
|
+
return content;
|
|
430
|
+
},
|
|
431
|
+
writeText: async (absolutePath, content) => {
|
|
432
|
+
state.set(absolutePath, content);
|
|
433
|
+
},
|
|
434
|
+
deleteFile: async (absolutePath) => {
|
|
435
|
+
await ensureLoaded(absolutePath);
|
|
436
|
+
if (state.get(absolutePath) === null) {
|
|
437
|
+
throw new Error(`File not found: ${absolutePath.replace(`${cwd}/`, "")}`);
|
|
438
|
+
}
|
|
439
|
+
state.set(absolutePath, null);
|
|
440
|
+
},
|
|
441
|
+
exists: async (absolutePath) => {
|
|
442
|
+
await ensureLoaded(absolutePath);
|
|
443
|
+
return state.get(absolutePath) !== null;
|
|
444
|
+
},
|
|
445
|
+
};
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
/**
 * Applies parsed patch operations, in order, against a Workspace.
 *
 * The Workspace may be the real filesystem or an in-memory preflight
 * workspace — the same function runs both the dry run and the real apply,
 * guaranteeing identical semantics in both passes.
 *
 * @param signal optional abort signal checked before each operation
 * @returns one human-readable result per operation
 * @throws Error when aborted, when deleting a missing file, or when an
 *   update's hunks fail to match (via deriveUpdatedContent)
 */
async function applyPatchOperations(
	ops: PatchOperation[],
	workspace: Workspace,
	cwd: string,
	signal?: AbortSignal,
): Promise<PatchOpResult[]> {
	const results: PatchOpResult[] = [];

	for (const op of ops) {
		if (signal?.aborted) {
			throw new Error("Operation aborted");
		}

		if (op.kind === "add") {
			const abs = resolvePatchPath(cwd, op.path);
			// Added files always end with a newline, even if the patch body did not.
			await workspace.writeText(abs, ensureTrailingNewline(op.contents));
			results.push({ path: op.path, message: `Added file ${op.path}.` });
			continue;
		}

		if (op.kind === "delete") {
			const abs = resolvePatchPath(cwd, op.path);
			const exists = await workspace.exists(abs);
			if (!exists) {
				throw new Error(`Failed to delete ${op.path}: file does not exist`);
			}
			await workspace.deleteFile(abs);
			results.push({ path: op.path, message: `Deleted file ${op.path}.` });
			continue;
		}

		// kind === "update": compute the new content from the hunks, then write.
		const sourceAbs = resolvePatchPath(cwd, op.path);
		const sourceText = await workspace.readText(sourceAbs);
		const updated = deriveUpdatedContent(op.path, sourceText, op.chunks);

		await workspace.writeText(sourceAbs, updated);
		results.push({ path: op.path, message: `Updated ${op.path}.` });
	}

	return results;
}
|
|
489
|
+
|
|
490
|
+
/**
 * Extension entry point: registers a replacement `edit` tool supporting the
 * classic single edit (path/oldText/newText), a `multi` batch of edits, and
 * a Codex-style `patch` payload. Both classic/multi and patch modes run a
 * preflight pass (against in-memory state) before any real file is mutated.
 */
export default function (pi: ExtensionAPI) {
	pi.registerTool({
		name: "edit",
		label: "edit",
		description:
			"Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits. Supports a `multi` parameter for batch edits across one or more files, and a `patch` parameter for Codex-style patches.",
		promptSnippet:
			"Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.",
		promptGuidelines: [
			"Use edit for precise changes (old text must match exactly)",
			"Use the `multi` parameter to apply multiple edits in a single tool call",
			"Use the `patch` parameter for Codex-style multi-file / hunk-based edits",
		],
		parameters: multiEditSchema,

		async execute(toolCallId, params, signal, onUpdate, ctx) {
			const { path, oldText, newText, multi, patch } = params;

			// `patch` cannot be combined with any classic-mode parameter.
			const hasAnyClassicParam = path !== undefined || oldText !== undefined || newText !== undefined || multi !== undefined;
			if (patch !== undefined && hasAnyClassicParam) {
				throw new Error("The `patch` parameter is mutually exclusive with path/oldText/newText/multi.");
			}

			if (patch !== undefined) {
				const ops = parsePatch(patch);

				// Preflight on virtual filesystem before mutating real files.
				await applyPatchOperations(ops, createVirtualWorkspace(ctx.cwd), ctx.cwd, signal);

				// Apply for real.
				const applied = await applyPatchOperations(ops, createRealWorkspace(), ctx.cwd, signal);
				const summary = applied.map((r, i) => `${i + 1}. ${r.message}`).join("\n");
				return {
					content: [{ type: "text" as const, text: `Applied patch with ${applied.length} operation(s).\n${summary}` }],
				};
			}

			// Build classic edit list. A complete top-level edit becomes the
			// implicit first item ahead of any `multi` entries.
			const edits: EditItem[] = [];
			const hasTopLevel = path !== undefined && oldText !== undefined && newText !== undefined;

			if (hasTopLevel) {
				edits.push({ path: path!, oldText: oldText!, newText: newText! });
			} else if (path !== undefined || oldText !== undefined || newText !== undefined) {
				// Partial top-level edit: report exactly which fields are missing.
				const missing: string[] = [];
				if (path === undefined) missing.push("path");
				if (oldText === undefined) missing.push("oldText");
				if (newText === undefined) missing.push("newText");
				throw new Error(
					`Incomplete top-level edit: missing ${missing.join(", ")}. Provide all three (path, oldText, newText) or use only the multi parameter.`,
				);
			}

			if (multi) {
				edits.push(...multi);
			}

			if (edits.length === 0) {
				throw new Error("No edits provided. Supply path/oldText/newText, a multi array, or a patch.");
			}

			// Preflight pass before mutating files. The built-in edit tool runs
			// against virtual (in-memory) file operations; first failure aborts.
			const preflightTool = createEditTool(ctx.cwd, { operations: createVirtualEditOperations() });
			const preflightResults: EditResult[] = [];
			for (let i = 0; i < edits.length; i++) {
				if (signal?.aborted) {
					throw new Error("Operation aborted");
				}
				const edit = edits[i];
				try {
					await preflightTool.execute(`${toolCallId}_preflight_${i}`, edit, signal);
					preflightResults.push({ path: edit.path, success: true, message: "Preflight passed." });
				} catch (err: any) {
					preflightResults.push({ path: edit.path, success: false, message: err.message ?? String(err) });
					throw new Error(`Preflight failed before mutating files.\n${formatResults(preflightResults, edits.length)}`);
				}
			}

			// Apply for real with built-in edit tool.
			const innerTool = createEditTool(ctx.cwd);
			const results: EditResult[] = [];

			for (let i = 0; i < edits.length; i++) {
				if (signal?.aborted) {
					throw new Error("Operation aborted");
				}

				const edit = edits[i];
				try {
					const result = await innerTool.execute(`${toolCallId}_${i}`, edit, signal);
					const details = result.details as EditToolDetails | undefined;
					const text = result.content?.[0]?.type === "text" ? result.content[0].text : `Edit ${i + 1} applied.`;

					results.push({
						path: edit.path,
						success: true,
						message: text,
						diff: details?.diff,
						firstChangedLine: details?.firstChangedLine,
					});
				} catch (err: any) {
					// A real-apply failure stops the batch; earlier edits remain applied.
					results.push({ path: edit.path, success: false, message: err.message ?? String(err) });
					throw new Error(formatResults(results, edits.length));
				}
			}

			// Single edit: pass the inner tool's diff details through unchanged.
			if (results.length === 1) {
				const r = results[0];
				return {
					content: [{ type: "text" as const, text: r.message }],
					details: {
						diff: r.diff ?? "",
						firstChangedLine: r.firstChangedLine,
					},
				};
			}

			// Multiple edits: concatenate diffs and report the first changed line.
			const combinedDiff = results
				.filter((r) => r.diff)
				.map((r) => r.diff)
				.join("\n");

			const firstChanged = results.find((r) => r.firstChangedLine !== undefined)?.firstChangedLine;
			const summary = results.map((r, i) => `${i + 1}. ${r.message}`).join("\n");

			return {
				content: [{ type: "text" as const, text: `Applied ${results.length} edit(s) successfully.\n${summary}` }],
				details: {
					diff: combinedDiff,
					firstChangedLine: firstChanged,
				},
			};
		},
	});
}
|
|
625
|
+
|
|
626
|
+
function formatResults(results: EditResult[], totalEdits: number): string {
|
|
627
|
+
const lines: string[] = [];
|
|
628
|
+
|
|
629
|
+
for (let i = 0; i < results.length; i++) {
|
|
630
|
+
const r = results[i];
|
|
631
|
+
const status = r.success ? "✓" : "✗";
|
|
632
|
+
lines.push(`${status} Edit ${i + 1}/${totalEdits} (${r.path}): ${r.message}`);
|
|
633
|
+
}
|
|
634
|
+
|
|
635
|
+
const remaining = totalEdits - results.length;
|
|
636
|
+
if (remaining > 0) {
|
|
637
|
+
lines.push(`⊘ ${remaining} remaining edit(s) skipped due to error.`);
|
|
638
|
+
}
|
|
639
|
+
|
|
640
|
+
return lines.join("\n");
|
|
641
|
+
}
|