clanka 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -0
- package/dist/Agent.d.ts +119 -0
- package/dist/Agent.d.ts.map +1 -0
- package/dist/Agent.js +240 -0
- package/dist/Agent.js.map +1 -0
- package/dist/AgentTools.d.ts +246 -0
- package/dist/AgentTools.d.ts.map +1 -0
- package/dist/AgentTools.js +374 -0
- package/dist/AgentTools.js.map +1 -0
- package/dist/AgentTools.test.d.ts +2 -0
- package/dist/AgentTools.test.d.ts.map +1 -0
- package/dist/AgentTools.test.js +147 -0
- package/dist/AgentTools.test.js.map +1 -0
- package/dist/ApplyPatch.d.ts +27 -0
- package/dist/ApplyPatch.d.ts.map +1 -0
- package/dist/ApplyPatch.js +343 -0
- package/dist/ApplyPatch.js.map +1 -0
- package/dist/ApplyPatch.test.d.ts +2 -0
- package/dist/ApplyPatch.test.d.ts.map +1 -0
- package/dist/ApplyPatch.test.js +99 -0
- package/dist/ApplyPatch.test.js.map +1 -0
- package/dist/Codex.d.ts +11 -0
- package/dist/Codex.d.ts.map +1 -0
- package/dist/Codex.js +14 -0
- package/dist/Codex.js.map +1 -0
- package/dist/CodexAuth.d.ts +68 -0
- package/dist/CodexAuth.d.ts.map +1 -0
- package/dist/CodexAuth.js +270 -0
- package/dist/CodexAuth.js.map +1 -0
- package/dist/CodexAuth.test.d.ts +2 -0
- package/dist/CodexAuth.test.d.ts.map +1 -0
- package/dist/CodexAuth.test.js +425 -0
- package/dist/CodexAuth.test.js.map +1 -0
- package/dist/Executor.d.ts +20 -0
- package/dist/Executor.d.ts.map +1 -0
- package/dist/Executor.js +76 -0
- package/dist/Executor.js.map +1 -0
- package/dist/OutputFormatter.d.ts +11 -0
- package/dist/OutputFormatter.d.ts.map +1 -0
- package/dist/OutputFormatter.js +5 -0
- package/dist/OutputFormatter.js.map +1 -0
- package/dist/ToolkitRenderer.d.ts +17 -0
- package/dist/ToolkitRenderer.d.ts.map +1 -0
- package/dist/ToolkitRenderer.js +25 -0
- package/dist/ToolkitRenderer.js.map +1 -0
- package/dist/TypeBuilder.d.ts +11 -0
- package/dist/TypeBuilder.d.ts.map +1 -0
- package/dist/TypeBuilder.js +383 -0
- package/dist/TypeBuilder.js.map +1 -0
- package/dist/TypeBuilder.test.d.ts +2 -0
- package/dist/TypeBuilder.test.d.ts.map +1 -0
- package/dist/TypeBuilder.test.js +243 -0
- package/dist/TypeBuilder.test.js.map +1 -0
- package/dist/index.d.ts +25 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +25 -0
- package/dist/index.js.map +1 -0
- package/package.json +72 -0
- package/src/Agent.ts +398 -0
- package/src/AgentTools.test.ts +215 -0
- package/src/AgentTools.ts +507 -0
- package/src/ApplyPatch.test.ts +154 -0
- package/src/ApplyPatch.ts +473 -0
- package/src/Codex.ts +14 -0
- package/src/CodexAuth.test.ts +729 -0
- package/src/CodexAuth.ts +571 -0
- package/src/Executor.ts +129 -0
- package/src/OutputFormatter.ts +17 -0
- package/src/ToolkitRenderer.ts +39 -0
- package/src/TypeBuilder.test.ts +508 -0
- package/src/TypeBuilder.ts +670 -0
- package/src/index.ts +29 -0
|
@@ -0,0 +1,507 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @since 1.0.0
|
|
3
|
+
*/
|
|
4
|
+
import {
|
|
5
|
+
Array,
|
|
6
|
+
Data,
|
|
7
|
+
Deferred,
|
|
8
|
+
Effect,
|
|
9
|
+
FileSystem,
|
|
10
|
+
Path,
|
|
11
|
+
pipe,
|
|
12
|
+
Schema,
|
|
13
|
+
ServiceMap,
|
|
14
|
+
Stream,
|
|
15
|
+
} from "effect"
|
|
16
|
+
import { Tool, Toolkit } from "effect/unstable/ai"
|
|
17
|
+
import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"
|
|
18
|
+
import * as Glob from "glob"
|
|
19
|
+
import * as Rg from "@vscode/ripgrep"
|
|
20
|
+
import { parsePatch, patchChunks } from "./ApplyPatch.ts"
|
|
21
|
+
|
|
22
|
+
/**
 * Service carrying the agent's working directory as a string path.
 *
 * The file-system tool handlers resolve every relative path against this
 * value (via `pathService.resolve(cwd, ...)`), and spawned processes run
 * with it as their `cwd`.
 *
 * @since 1.0.0
 * @category Context
 */
export class CurrentDirectory extends ServiceMap.Service<
  CurrentDirectory,
  string
>()("clanka/AgentTools/CurrentDirectory") {}
|
|
30
|
+
|
|
31
|
+
/**
 * Service holding a `Deferred<string>` that signals task completion.
 *
 * The `taskComplete` tool handler resolves this deferred with the agent's
 * markdown summary; whoever created the deferred can await it to know the
 * task is done.
 *
 * @since 1.0.0
 * @category Context
 */
export class TaskCompleteDeferred extends ServiceMap.Service<
  TaskCompleteDeferred,
  Deferred.Deferred<string>
>()("clanka/AgentTools/TaskCompleteDeferred") {}
|
|
39
|
+
|
|
40
|
+
/**
 * Service exposing the ability to spawn a nested agent.
 *
 * The `subagent` tool handler calls `spawn({ prompt })` and returns the
 * resulting string (a markdown summary, per the tool's description).
 *
 * @since 1.0.0
 * @category Context
 */
export class SubagentContext extends ServiceMap.Service<
  SubagentContext,
  {
    spawn(options: { readonly prompt: string }): Effect.Effect<string>
  }
>()("clanka/AgentTools/SubagentContext") {}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* @since 1.0.0
|
|
53
|
+
* @category Context
|
|
54
|
+
*/
|
|
55
|
+
export const makeContextNoop = (cwd?: string) =>
|
|
56
|
+
SubagentContext.serviceMap({
|
|
57
|
+
spawn: () => Effect.die("Not implemented"),
|
|
58
|
+
}).pipe(
|
|
59
|
+
ServiceMap.add(CurrentDirectory, cwd ?? "/"),
|
|
60
|
+
ServiceMap.add(TaskCompleteDeferred, Deferred.makeUnsafe()),
|
|
61
|
+
)
|
|
62
|
+
|
|
63
|
+
/**
 * The toolkit exposed to the agent: file I/O, search, shell, GitHub CLI,
 * subagent delegation, and task completion.
 *
 * Each `Tool.make` entry declares the tool's parameter schema, optional
 * success schema, and the context services its handler requires
 * (see `AgentToolHandlers` for the implementations).
 *
 * @since 1.0.0
 * @category Toolkit
 */
export const AgentTools = Toolkit.make(
  // --- File reading / writing ---
  Tool.make("readFile", {
    description:
      "Read a file and optionally filter the lines to return. Returns null if the file doesn't exist.",
    parameters: Schema.Struct({
      path: Schema.String,
      // 1-based line bounds; both optional
      startLine: Schema.optional(Schema.Number),
      endLine: Schema.optional(Schema.Number),
    }),
    // null signals "file not found"
    success: Schema.NullOr(Schema.String),
    dependencies: [CurrentDirectory],
  }),
  Tool.make("createFile", {
    description:
      "Write content to a file, creating parent directories if needed.",
    parameters: Schema.Struct({
      path: Schema.String,
      content: Schema.String,
    }),
    dependencies: [CurrentDirectory],
  }),
  Tool.make("applyPatch", {
    description: "Apply a wrapped patch with Add/Delete/Update sections.",
    parameters: Schema.String.annotate({
      identifier: "patch",
    }),
    success: Schema.String,
    dependencies: [CurrentDirectory],
  }),
  Tool.make("removeFile", {
    description: "Remove a file.",
    parameters: Schema.String.annotate({
      identifier: "path",
    }),
    dependencies: [CurrentDirectory],
  }),
  Tool.make("renameFile", {
    description:
      "Rename or move a file, creating parent directories if needed.",
    parameters: Schema.Struct({
      from: Schema.String,
      to: Schema.String,
    }),
    dependencies: [CurrentDirectory],
  }),
  Tool.make("mkdir", {
    description: "Make a directory, creating parent directories if needed.",
    parameters: Schema.String.annotate({
      identifier: "path",
    }),
    dependencies: [CurrentDirectory],
  }),
  // --- Listing / searching ---
  Tool.make("ls", {
    description: "List the contents of a directory",
    parameters: Schema.String.annotate({
      identifier: "directory",
    }),
    success: Schema.Array(Schema.String),
    dependencies: [CurrentDirectory],
  }),
  Tool.make("rg", {
    description: "Search for a pattern in files using ripgrep.",
    parameters: Schema.Struct({
      pattern: Schema.String,
      glob: Schema.optional(Schema.String).annotate({
        documentation: "--glob",
      }),
      maxLines: Schema.optional(Schema.Finite).annotate({
        documentation:
          "The total maximum number of lines to return across all files (default: 500)",
      }),
    }),
    success: Schema.String,
    dependencies: [CurrentDirectory],
  }),
  Tool.make("glob", {
    description: "Find files matching a glob pattern.",
    parameters: Schema.String.annotate({
      identifier: "pattern",
    }),
    success: Schema.Array(Schema.String),
    dependencies: [CurrentDirectory],
  }),
  // --- Process execution ---
  Tool.make("bash", {
    description: "Run a bash command and return the output",
    parameters: Schema.String.annotate({
      identifier: "command",
    }),
    success: Schema.String,
    dependencies: [CurrentDirectory],
  }),
  Tool.make("gh", {
    description: "Use the GitHub CLI to run a command and return the output",
    parameters: Schema.Array(Schema.String).annotate({
      identifier: "args",
    }),
    success: Schema.String,
    dependencies: [CurrentDirectory],
  }),
  // --- Agent control ---
  Tool.make("subagent", {
    description:
      "Prompt another agent with the same tools to assist with a subtask. The subagent will return a markdown summary of the work it did.",
    parameters: Schema.String.annotate({
      identifier: "prompt",
    }),
    success: Schema.String,
    dependencies: [SubagentContext],
  }),
  Tool.make("sleep", {
    description: "Sleep for a specified number of milliseconds",
    parameters: Schema.Finite.annotate({
      identifier: "ms",
    }),
  }),
  Tool.make("taskComplete", {
    description:
      "Only call this when you have fully completed the user's task. Provide a markdown summary of the work you have done.",
    parameters: Schema.String.annotate({
      identifier: "summary",
    }),
    dependencies: [TaskCompleteDeferred],
  }),
)
|
|
190
|
+
|
|
191
|
+
/**
 * Layer providing the concrete handler implementations for `AgentTools`.
 *
 * Requires `ChildProcessSpawner`, `FileSystem`, and `Path` services; each
 * handler additionally reads its declared context service(s)
 * (`CurrentDirectory`, `SubagentContext`, or `TaskCompleteDeferred`).
 *
 * @since 1.0.0
 * @category Toolkit
 */
export const AgentToolHandlers = AgentTools.toLayer(
  Effect.gen(function* () {
    const spawner = yield* ChildProcessSpawner.ChildProcessSpawner
    const fs = yield* FileSystem.FileSystem
    const pathService = yield* Path.Path

    // Spawn a command, collect its combined output as a single string, and
    // die with an Error (including the output) if the exit code is non-zero.
    // Scoped so the process handle is released when done.
    const execute = Effect.fn(function* (command: ChildProcess.Command) {
      const handle = yield* spawner.spawn(command)
      return yield* handle.all.pipe(
        Stream.decodeText,
        Stream.mkString,
        Effect.flatMap(
          Effect.fnUntraced(function* (output) {
            const exitCode = yield* handle.exitCode
            if (exitCode === 0) return output
            return yield* Effect.die(
              new Error(`Command failed with exit code ${exitCode}: ${output}`),
            )
          }),
        ),
      )
    }, Effect.scoped)

    return AgentTools.of({
      // Stream the file as lines, slicing by the 1-based startLine/endLine
      // bounds; a missing file yields null instead of failing.
      readFile: Effect.fn("AgentTools.readFile")(function* (options) {
        yield* Effect.logInfo(`Calling "readFile"`).pipe(
          Effect.annotateLogs(options),
        )
        const cwd = yield* CurrentDirectory
        let stream = pipe(
          fs.stream(pathService.resolve(cwd, options.path)),
          Stream.decodeText,
          Stream.splitLines,
        )
        // NOTE(review): truthiness check means startLine/endLine of 0 are
        // treated as absent — presumably intentional since lines are 1-based.
        if (options.startLine) {
          stream = Stream.drop(stream, options.startLine - 1)
        }
        if (options.endLine) {
          stream = Stream.take(
            stream,
            options.endLine - (options.startLine ?? 1) + 1,
          )
        }
        return yield* Stream.runCollect(stream).pipe(
          Effect.map(Array.join("\n")),
          // Missing file -> null (per the tool's success schema)
          Effect.catchReason("PlatformError", "NotFound", () =>
            Effect.succeed(null),
          ),
          Effect.orDie,
        )
      }),
      // Refuses to overwrite an existing file; creates parent directories.
      createFile: Effect.fn("AgentTools.createFile")(function* (options) {
        yield* Effect.logInfo(`Calling "createFile"`).pipe(
          Effect.annotateLogs({ path: options.path }),
        )
        const cwd = yield* CurrentDirectory
        const path = pathService.resolve(cwd, options.path)
        if (yield* fs.exists(path)) {
          return yield* Effect.die("File already exists")
        }
        yield* fs.makeDirectory(pathService.dirname(path), {
          recursive: true,
        })
        yield* fs.writeFileString(path, options.content)
      }, Effect.orDie),
      // force: true -> removing a non-existent file is not an error.
      removeFile: Effect.fn("AgentTools.removeFile")(function* (path) {
        yield* Effect.logInfo(`Calling "removeFile"`).pipe(
          Effect.annotateLogs({ path }),
        )
        const cwd = yield* CurrentDirectory
        return yield* fs.remove(pathService.resolve(cwd, path), { force: true })
      }, Effect.orDie),
      // Rename/move, creating the destination's parent directories first.
      renameFile: Effect.fn("AgentTools.renameFile")(function* (options) {
        yield* Effect.logInfo(`Calling "renameFile"`).pipe(
          Effect.annotateLogs(options),
        )
        const cwd = yield* CurrentDirectory
        const from = pathService.resolve(cwd, options.from)
        const to = pathService.resolve(cwd, options.to)
        yield* fs.makeDirectory(pathService.dirname(to), {
          recursive: true,
        })
        return yield* fs.rename(from, to)
      }, Effect.orDie),
      mkdir: Effect.fn("AgentTools.mkdir")(function* (path) {
        yield* Effect.logInfo(`Calling "mkdir"`).pipe(
          Effect.annotateLogs({ path }),
        )
        const cwd = yield* CurrentDirectory
        return yield* fs.makeDirectory(pathService.resolve(cwd, path), {
          recursive: true,
        })
      }, Effect.orDie),
      ls: Effect.fn("AgentTools.ls")(function* (path) {
        yield* Effect.logInfo(`Calling "ls"`).pipe(
          Effect.annotateLogs({ path }),
        )
        const cwd = yield* CurrentDirectory
        return yield* fs
          .readDirectory(pathService.resolve(cwd, path))
          .pipe(Effect.orDie)
      }),
      // Run ripgrep (bundled binary from @vscode/ripgrep), truncating each
      // line to 500 chars and capping total lines at maxLines (default 500).
      rg: Effect.fn("AgentTools.rg")(function* (options) {
        yield* Effect.logInfo(`Calling "rg"`).pipe(Effect.annotateLogs(options))
        const cwd = yield* CurrentDirectory
        const args = ["--max-filesize", "1M", "--line-number"]
        if (options.glob) {
          args.push("--glob", options.glob)
        }
        args.push(options.pattern)
        let stream = pipe(
          spawner.streamLines(
            ChildProcess.make(Rg.rgPath, args, {
              cwd,
              stdin: "ignore",
            }),
          ),
          Stream.map((line) => {
            if (line.length <= 500) return line
            return line.slice(0, 500) + "...[truncated]"
          }),
        )
        stream = Stream.take(stream, options.maxLines ?? 500)
        return yield* Stream.runCollect(stream).pipe(
          Effect.map(Array.join("\n")),
          Effect.orDie,
        )
      }),
      glob: Effect.fn("AgentTools.glob")(function* (pattern) {
        yield* Effect.logInfo(`Calling "glob"`).pipe(
          Effect.annotateLogs({ pattern }),
        )
        const cwd = yield* CurrentDirectory
        // Effect.promise: a rejected glob promise becomes a defect.
        return yield* Effect.promise(() => Glob.glob(pattern, { cwd }))
      }),
      bash: Effect.fn("AgentTools.bash")(function* (command) {
        yield* Effect.logInfo(`Calling "bash"`).pipe(
          Effect.annotateLogs({ command }),
        )
        const cwd = yield* CurrentDirectory
        const cmd = ChildProcess.make("bash", ["-c", command], {
          cwd,
          stdin: "ignore",
        })
        return yield* execute(cmd)
      }, Effect.orDie),
      gh: Effect.fn("AgentTools.gh")(function* (args) {
        yield* Effect.logInfo(`Calling "gh"`).pipe(
          Effect.annotateLogs({ args }),
        )
        const cwd = yield* CurrentDirectory
        const cmd = ChildProcess.make("gh", args, {
          cwd,
          stdin: "ignore",
        })
        return yield* execute(cmd)
      }, Effect.orDie),
      sleep: Effect.fn("AgentTools.sleep")(function* (ms) {
        yield* Effect.logInfo(`Calling "sleep" for ${ms}ms`)
        return yield* Effect.sleep(ms)
      }),
      // Two-phase patch application: (1) parse the patch and verify every
      // section against an in-memory view of the files, recording the
      // resulting write/move/delete steps; (2) only if all sections
      // verified, perform the filesystem mutations.
      applyPatch: Effect.fn("AgentTools.applyPatch")(function* (patchText) {
        yield* Effect.logInfo(`Calling "applyPatch"`)
        const cwd = yield* CurrentDirectory
        const fail = (path: string, reason: "delete" | "update") =>
          Effect.fail(
            new ApplyPatchError({
              message: `verification failed: Failed to read file to ${reason}: ${path}`,
            }),
          )
        // In-memory file contents keyed by absolute path; null marks a file
        // deleted (or moved away) by an earlier section of the same patch.
        const state = new Map<string, string | null>()
        const steps = [] as Array<
          | {
              readonly type: "add" | "update"
              readonly path: string
              readonly next: string
            }
          | {
              readonly type: "move"
              readonly path: string
              readonly movePath: string
              readonly next: string
            }
          | {
              readonly type: "delete"
              readonly path: string
            }
        >
        // Human-readable summary lines ("A path" / "M path" / "D path").
        const out = [] as string[]
        const rel = (path: string) =>
          pathService.relative(cwd, path).replaceAll("\\", "/")
        // Read a file's current content, preferring the in-memory state so
        // later sections see earlier sections' edits; fails if the patch
        // already deleted the file.
        const load = Effect.fn("AgentTools.applyPatch.load")(function* (
          path: string,
          reason: "delete" | "update",
        ) {
          if (state.has(path)) {
            const input = state.get(path)
            if (input === null) {
              return yield* fail(path, reason)
            }
            return input!
          }

          const input = yield* fs.readFileString(path)
          state.set(path, input)
          return input
        })

        // Phase 1: verify and plan.
        for (const patch of parsePatch(patchText)) {
          const path = pathService.resolve(cwd, patch.path)
          switch (patch.type) {
            case "add": {
              // Ensure added files end with a trailing newline.
              const next =
                patch.content.length === 0 || patch.content.endsWith("\n")
                  ? patch.content
                  : `${patch.content}\n`
              state.set(path, next)
              steps.push({
                type: "add",
                path,
                next,
              })
              out.push(`A ${rel(path)}`)
              break
            }
            case "delete": {
              // load() only to verify the file exists/is readable.
              yield* load(path, "delete")
              state.set(path, null)
              steps.push({
                type: "delete",
                path,
              })
              out.push(`D ${rel(path)}`)
              break
            }
            case "update": {
              const input = yield* load(path, "update")
              const next = patchChunks(path, input, patch.chunks)
              const movePath =
                patch.movePath === undefined
                  ? undefined
                  : pathService.resolve(cwd, patch.movePath)

              // Plain update (no move, or a move onto itself).
              if (movePath === undefined || movePath === path) {
                state.set(path, next)
                steps.push({
                  type: "update",
                  path,
                  next,
                })
                out.push(`M ${rel(path)}`)
                break
              }

              // Update + move: old path becomes deleted, new path gets the
              // patched content.
              state.set(path, null)
              state.set(movePath, next)
              steps.push({
                type: "move",
                path,
                movePath,
                next,
              })
              out.push(`M ${rel(movePath)}`)
              break
            }
          }
        }

        // Phase 2: apply the planned steps to the filesystem.
        for (const step of steps) {
          switch (step.type) {
            case "add":
            case "update": {
              yield* fs.makeDirectory(pathService.dirname(step.path), {
                recursive: true,
              })
              yield* fs.writeFileString(step.path, step.next)
              break
            }
            case "move": {
              yield* fs.makeDirectory(pathService.dirname(step.movePath), {
                recursive: true,
              })
              yield* fs.writeFileString(step.movePath, step.next)
              yield* fs.remove(step.path)
              break
            }
            case "delete": {
              yield* fs.remove(step.path)
              break
            }
          }
        }

        return `Success. Updated the following files:\n${out.join("\n")}`
      }, Effect.orDie),
      subagent: Effect.fn("AgentTools.subagent")(function* (prompt) {
        yield* Effect.logInfo(`Calling "subagent"`).pipe(
          Effect.annotateLogs({ prompt }),
        )
        const context = yield* SubagentContext
        return yield* context.spawn({ prompt })
      }, Effect.orDie),
      // Resolve the completion deferred with the agent's summary.
      taskComplete: Effect.fn("AgentTools.taskComplete")(function* (message) {
        const deferred = yield* TaskCompleteDeferred
        yield* Deferred.succeed(deferred, message)
      }),
    })
  }),
)
|
|
504
|
+
|
|
505
|
+
// Tagged error raised during applyPatch verification (phase 1) when a file
// referenced by a Delete/Update section cannot be read or was already
// removed by an earlier section of the same patch.
class ApplyPatchError extends Data.TaggedClass("ApplyPatchError")<{
  readonly message: string
}> {}
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
import { describe, expect, it } from "vitest"
|
|
2
|
+
import { parsePatch, patchContent } from "./ApplyPatch.ts"
|
|
3
|
+
|
|
4
|
+
// Tests for ApplyPatch's patchContent (single-file patching) and parsePatch
// (multi-file wrapped-patch parsing). Fixture strings are exact — they cover
// raw hunks, "*** Begin Patch" wrapped patches, heredoc wrapping, whitespace
// and Unicode-punctuation tolerant matching, EOF anchoring, and CRLF files.
describe("patchContent", () => {
  it("applies raw hunks", () => {
    expect(
      patchContent("sample.txt", "line1\nline2\n", "@@\n-line2\n+changed"),
    ).toBe("line1\nchanged\n")
  })

  // Marker text appearing in *file content* must not trigger wrapped-patch
  // parsing.
  it("does not treat raw marker text as a wrapped patch", () => {
    expect(
      patchContent(
        "sample.txt",
        "*** Begin Patch\nfinish\n",
        "@@\n-*** Begin Patch\n+*** End Patch",
      ),
    ).toBe("*** End Patch\nfinish\n")
  })

  // In single-file mode the "Update File:" header's path is ignored.
  it("parses wrapped single-file patches", () => {
    expect(
      patchContent(
        "sample.txt",
        "alpha\nomega\n",
        "*** Begin Patch\n*** Update File: ignored.txt\n@@\n alpha\n+beta\n omega\n*** End Patch",
      ),
    ).toBe("alpha\nbeta\nomega\n")
  })

  // parsePatch returns one structured entry per Add/Update/Delete section.
  it("parses multi-file wrapped patches", () => {
    expect(
      parsePatch(
        [
          "*** Begin Patch",
          "*** Add File: hello.txt",
          "+Hello world",
          "*** Update File: src/app.ts",
          "*** Move to: src/main.ts",
          "@@ keep",
          " keep",
          "-old",
          "+new",
          "*** Delete File: obsolete.txt",
          "*** End Patch",
        ].join("\n"),
      ),
    ).toEqual([
      {
        type: "add",
        path: "hello.txt",
        content: "Hello world",
      },
      {
        type: "update",
        path: "src/app.ts",
        movePath: "src/main.ts",
        chunks: [
          {
            ctx: "keep",
            old: ["keep", "old"],
            next: ["keep", "new"],
          },
        ],
      },
      {
        type: "delete",
        path: "obsolete.txt",
      },
    ])
  })

  // A context line beginning with a space keeps marker-looking text inside
  // the hunk rather than terminating the patch.
  it("parses wrapped patches when hunks contain marker text", () => {
    expect(
      parsePatch(
        [
          "*** Begin Patch",
          "*** Update File: src/app.ts",
          "@@",
          " *** End Patch",
          "-old",
          "+new",
          "*** Delete File: obsolete.txt",
          "*** End Patch",
        ].join("\n"),
      ),
    ).toEqual([
      {
        type: "update",
        path: "src/app.ts",
        chunks: [
          {
            old: ["*** End Patch", "old"],
            next: ["*** End Patch", "new"],
          },
        ],
      },
      {
        type: "delete",
        path: "obsolete.txt",
      },
    ])
  })

  // Shell heredoc wrappers (<<'EOF' ... EOF) around the hunk are stripped.
  it("parses heredoc-wrapped hunks", () => {
    expect(
      patchContent("sample.txt", "old\n", "<<'EOF'\n@@\n-old\n+new\nEOF"),
    ).toBe("new\n")
  })

  it("matches lines after trimming trailing whitespace", () => {
    expect(patchContent("sample.txt", "old \n", "@@\n-old\n+new")).toBe(
      "new\n",
    )
  })

  it("matches lines after trimming surrounding whitespace", () => {
    expect(patchContent("sample.txt", " old\n", "@@\n-old\n+new")).toBe(
      "new\n",
    )
  })

  // Curly quotes and ellipsis in the file match their ASCII equivalents in
  // the hunk.
  it("matches lines after normalizing Unicode punctuation", () => {
    expect(
      patchContent("sample.txt", "Don’t wait…\n", "@@\n-Don't wait...\n+Done"),
    ).toBe("Done\n")
  })

  // "*** End of File" anchors the hunk to the LAST occurrence, not the first.
  it("matches EOF hunks from the end of the file", () => {
    expect(
      patchContent(
        "tail.txt",
        "start\nmarker\nend\nmiddle\nmarker\nend\n",
        "@@\n-marker\n-end\n+marker-changed\n+end\n*** End of File",
      ),
    ).toBe("start\nmarker\nend\nmiddle\nmarker-changed\nend\n")
  })

  it("preserves CRLF files", () => {
    expect(patchContent("crlf.txt", "old\r\n", "@@\n-old\n+new")).toBe(
      "new\r\n",
    )
  })

  // patchContent targets one file, so a wrapped patch with two Update
  // sections must throw.
  it("rejects multi-file wrapped patches", () => {
    expect(() =>
      patchContent(
        "sample.txt",
        "line1\nline2\n",
        "*** Begin Patch\n*** Update File: a.txt\n@@\n-line2\n+changed\n*** Update File: b.txt\n@@\n-old\n+new\n*** End Patch",
      ),
    ).toThrow("only one update file section is supported")
  })
})
|