@link-assistant/agent 0.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/EXAMPLES.md +383 -0
- package/LICENSE +24 -0
- package/MODELS.md +95 -0
- package/README.md +388 -0
- package/TOOLS.md +134 -0
- package/package.json +89 -0
- package/src/agent/agent.ts +150 -0
- package/src/agent/generate.txt +75 -0
- package/src/auth/index.ts +64 -0
- package/src/bun/index.ts +96 -0
- package/src/bus/global.ts +10 -0
- package/src/bus/index.ts +119 -0
- package/src/cli/bootstrap.js +41 -0
- package/src/cli/bootstrap.ts +17 -0
- package/src/cli/cmd/agent.ts +165 -0
- package/src/cli/cmd/cmd.ts +5 -0
- package/src/cli/cmd/export.ts +88 -0
- package/src/cli/cmd/mcp.ts +80 -0
- package/src/cli/cmd/models.ts +58 -0
- package/src/cli/cmd/run.ts +359 -0
- package/src/cli/cmd/stats.ts +276 -0
- package/src/cli/error.ts +27 -0
- package/src/command/index.ts +73 -0
- package/src/command/template/initialize.txt +10 -0
- package/src/config/config.ts +705 -0
- package/src/config/markdown.ts +41 -0
- package/src/file/ripgrep.ts +391 -0
- package/src/file/time.ts +38 -0
- package/src/file/watcher.ts +75 -0
- package/src/file.ts +6 -0
- package/src/flag/flag.ts +19 -0
- package/src/format/formatter.ts +248 -0
- package/src/format/index.ts +137 -0
- package/src/global/index.ts +52 -0
- package/src/id/id.ts +72 -0
- package/src/index.js +371 -0
- package/src/mcp/index.ts +289 -0
- package/src/patch/index.ts +622 -0
- package/src/project/bootstrap.ts +22 -0
- package/src/project/instance.ts +67 -0
- package/src/project/project.ts +105 -0
- package/src/project/state.ts +65 -0
- package/src/provider/models-macro.ts +11 -0
- package/src/provider/models.ts +98 -0
- package/src/provider/opencode.js +47 -0
- package/src/provider/provider.ts +636 -0
- package/src/provider/transform.ts +241 -0
- package/src/server/project.ts +48 -0
- package/src/server/server.ts +249 -0
- package/src/session/agent.js +204 -0
- package/src/session/compaction.ts +249 -0
- package/src/session/index.ts +380 -0
- package/src/session/message-v2.ts +758 -0
- package/src/session/message.ts +189 -0
- package/src/session/processor.ts +356 -0
- package/src/session/prompt/anthropic-20250930.txt +166 -0
- package/src/session/prompt/anthropic.txt +105 -0
- package/src/session/prompt/anthropic_spoof.txt +1 -0
- package/src/session/prompt/beast.txt +147 -0
- package/src/session/prompt/build-switch.txt +5 -0
- package/src/session/prompt/codex.txt +318 -0
- package/src/session/prompt/copilot-gpt-5.txt +143 -0
- package/src/session/prompt/gemini.txt +155 -0
- package/src/session/prompt/grok-code.txt +1 -0
- package/src/session/prompt/plan.txt +8 -0
- package/src/session/prompt/polaris.txt +107 -0
- package/src/session/prompt/qwen.txt +109 -0
- package/src/session/prompt/summarize-turn.txt +5 -0
- package/src/session/prompt/summarize.txt +10 -0
- package/src/session/prompt/title.txt +25 -0
- package/src/session/prompt.ts +1390 -0
- package/src/session/retry.ts +53 -0
- package/src/session/revert.ts +108 -0
- package/src/session/status.ts +75 -0
- package/src/session/summary.ts +179 -0
- package/src/session/system.ts +138 -0
- package/src/session/todo.ts +36 -0
- package/src/snapshot/index.ts +197 -0
- package/src/storage/storage.ts +226 -0
- package/src/tool/bash.ts +193 -0
- package/src/tool/bash.txt +121 -0
- package/src/tool/batch.ts +173 -0
- package/src/tool/batch.txt +28 -0
- package/src/tool/codesearch.ts +123 -0
- package/src/tool/codesearch.txt +12 -0
- package/src/tool/edit.ts +604 -0
- package/src/tool/edit.txt +10 -0
- package/src/tool/glob.ts +65 -0
- package/src/tool/glob.txt +6 -0
- package/src/tool/grep.ts +116 -0
- package/src/tool/grep.txt +8 -0
- package/src/tool/invalid.ts +17 -0
- package/src/tool/ls.ts +110 -0
- package/src/tool/ls.txt +1 -0
- package/src/tool/multiedit.ts +46 -0
- package/src/tool/multiedit.txt +41 -0
- package/src/tool/patch.ts +188 -0
- package/src/tool/patch.txt +1 -0
- package/src/tool/read.ts +201 -0
- package/src/tool/read.txt +12 -0
- package/src/tool/registry.ts +87 -0
- package/src/tool/task.ts +126 -0
- package/src/tool/task.txt +60 -0
- package/src/tool/todo.ts +39 -0
- package/src/tool/todoread.txt +14 -0
- package/src/tool/todowrite.txt +167 -0
- package/src/tool/tool.ts +66 -0
- package/src/tool/webfetch.ts +171 -0
- package/src/tool/webfetch.txt +14 -0
- package/src/tool/websearch.ts +133 -0
- package/src/tool/websearch.txt +11 -0
- package/src/tool/write.ts +33 -0
- package/src/tool/write.txt +8 -0
- package/src/util/binary.ts +41 -0
- package/src/util/context.ts +25 -0
- package/src/util/defer.ts +12 -0
- package/src/util/error.ts +54 -0
- package/src/util/eventloop.ts +20 -0
- package/src/util/filesystem.ts +69 -0
- package/src/util/fn.ts +11 -0
- package/src/util/iife.ts +3 -0
- package/src/util/keybind.ts +79 -0
- package/src/util/lazy.ts +11 -0
- package/src/util/locale.ts +39 -0
- package/src/util/lock.ts +98 -0
- package/src/util/log.ts +177 -0
- package/src/util/queue.ts +19 -0
- package/src/util/rpc.ts +42 -0
- package/src/util/scrap.ts +10 -0
- package/src/util/signal.ts +12 -0
- package/src/util/timeout.ts +14 -0
- package/src/util/token.ts +7 -0
- package/src/util/wildcard.ts +54 -0
|
@@ -0,0 +1,622 @@
|
|
|
1
|
+
import { readFileSync } from "fs"
import * as fs from "fs/promises"
import * as path from "path"

import z from "zod"

import { Log } from "../util/log"
|
|
5
|
+
|
|
6
|
+
// Implements the "apply_patch" patch format: a "*** Begin Patch" / "*** End Patch"
// envelope containing Add File / Delete File / Update File sections.
export namespace Patch {
  const log = Log.create({ service: "patch" })

  // Schema definitions
  // Tool-input schema: the whole patch travels as a single string.
  export const PatchSchema = z.object({
    patchText: z.string().describe("The full patch text that describes all changes to be made"),
  })

  export type PatchParams = z.infer<typeof PatchSchema>

  // Core types matching the Rust implementation
  // A parsed patch plus the raw text it came from; `workdir`, when present,
  // rebases relative paths (see maybeParseApplyPatchVerified).
  export interface ApplyPatchArgs {
    patch: string
    hunks: Hunk[]
    workdir?: string
  }

  // One file-level operation parsed from a patch section.
  export type Hunk =
    | { type: "add"; path: string; contents: string }
    | { type: "delete"; path: string }
    | { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] }

  // One "@@"-delimited change inside an Update File section. old_lines must
  // match the file verbatim and are replaced by new_lines. change_context is
  // the text after "@@" used to locate the chunk; is_end_of_file marks a
  // chunk terminated by the "*** End of File" marker.
  export interface UpdateFileChunk {
    old_lines: string[]
    new_lines: string[]
    change_context?: string
    is_end_of_file?: boolean
  }

  // A fully verified patch: resolved absolute path -> concrete file change.
  export interface ApplyPatchAction {
    changes: Map<string, ApplyPatchFileChange>
    patch: string
    cwd: string
  }

  // Concrete per-file change; delete/update carry content so the action can
  // be reported or reverted by callers.
  export type ApplyPatchFileChange =
    | { type: "add"; content: string }
    | { type: "delete"; content: string }
    | { type: "update"; unified_diff: string; move_path?: string; new_content: string }

  // Paths touched by an applied patch, grouped by kind of change.
  export interface AffectedPaths {
    added: string[]
    modified: string[]
    deleted: string[]
  }

  // Error categories surfaced while applying a patch.
  export enum ApplyPatchError {
    ParseError = "ParseError",
    IoError = "IoError",
    ComputeReplacements = "ComputeReplacements",
    // Raw patch text passed directly as a command, without apply_patch.
    ImplicitInvocation = "ImplicitInvocation",
  }

  // Result discriminants for maybeParseApplyPatch.
  export enum MaybeApplyPatch {
    Body = "Body",
    ShellParseError = "ShellParseError",
    PatchParseError = "PatchParseError",
    NotApplyPatch = "NotApplyPatch",
  }

  // Result discriminants for maybeParseApplyPatchVerified.
  export enum MaybeApplyPatchVerified {
    Body = "Body",
    ShellParseError = "ShellParseError",
    CorrectnessError = "CorrectnessError",
    NotApplyPatch = "NotApplyPatch",
  }
|
|
72
|
+
|
|
73
|
+
// Parser implementation
|
|
74
|
+
function parsePatchHeader(
|
|
75
|
+
lines: string[],
|
|
76
|
+
startIdx: number,
|
|
77
|
+
): { filePath: string; movePath?: string; nextIdx: number } | null {
|
|
78
|
+
const line = lines[startIdx]
|
|
79
|
+
|
|
80
|
+
if (line.startsWith("*** Add File:")) {
|
|
81
|
+
const filePath = line.split(":", 2)[1]?.trim()
|
|
82
|
+
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
if (line.startsWith("*** Delete File:")) {
|
|
86
|
+
const filePath = line.split(":", 2)[1]?.trim()
|
|
87
|
+
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
if (line.startsWith("*** Update File:")) {
|
|
91
|
+
const filePath = line.split(":", 2)[1]?.trim()
|
|
92
|
+
let movePath: string | undefined
|
|
93
|
+
let nextIdx = startIdx + 1
|
|
94
|
+
|
|
95
|
+
// Check for move directive
|
|
96
|
+
if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) {
|
|
97
|
+
movePath = lines[nextIdx].split(":", 2)[1]?.trim()
|
|
98
|
+
nextIdx++
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
return filePath ? { filePath, movePath, nextIdx } : null
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
return null
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } {
|
|
108
|
+
const chunks: UpdateFileChunk[] = []
|
|
109
|
+
let i = startIdx
|
|
110
|
+
|
|
111
|
+
while (i < lines.length && !lines[i].startsWith("***")) {
|
|
112
|
+
if (lines[i].startsWith("@@")) {
|
|
113
|
+
// Parse context line
|
|
114
|
+
const contextLine = lines[i].substring(2).trim()
|
|
115
|
+
i++
|
|
116
|
+
|
|
117
|
+
const oldLines: string[] = []
|
|
118
|
+
const newLines: string[] = []
|
|
119
|
+
let isEndOfFile = false
|
|
120
|
+
|
|
121
|
+
// Parse change lines
|
|
122
|
+
while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
|
|
123
|
+
const changeLine = lines[i]
|
|
124
|
+
|
|
125
|
+
if (changeLine === "*** End of File") {
|
|
126
|
+
isEndOfFile = true
|
|
127
|
+
i++
|
|
128
|
+
break
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
if (changeLine.startsWith(" ")) {
|
|
132
|
+
// Keep line - appears in both old and new
|
|
133
|
+
const content = changeLine.substring(1)
|
|
134
|
+
oldLines.push(content)
|
|
135
|
+
newLines.push(content)
|
|
136
|
+
} else if (changeLine.startsWith("-")) {
|
|
137
|
+
// Remove line - only in old
|
|
138
|
+
oldLines.push(changeLine.substring(1))
|
|
139
|
+
} else if (changeLine.startsWith("+")) {
|
|
140
|
+
// Add line - only in new
|
|
141
|
+
newLines.push(changeLine.substring(1))
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
i++
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
chunks.push({
|
|
148
|
+
old_lines: oldLines,
|
|
149
|
+
new_lines: newLines,
|
|
150
|
+
change_context: contextLine || undefined,
|
|
151
|
+
is_end_of_file: isEndOfFile || undefined,
|
|
152
|
+
})
|
|
153
|
+
} else {
|
|
154
|
+
i++
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
return { chunks, nextIdx: i }
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } {
|
|
162
|
+
let content = ""
|
|
163
|
+
let i = startIdx
|
|
164
|
+
|
|
165
|
+
while (i < lines.length && !lines[i].startsWith("***")) {
|
|
166
|
+
if (lines[i].startsWith("+")) {
|
|
167
|
+
content += lines[i].substring(1) + "\n"
|
|
168
|
+
}
|
|
169
|
+
i++
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
// Remove trailing newline
|
|
173
|
+
if (content.endsWith("\n")) {
|
|
174
|
+
content = content.slice(0, -1)
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
return { content, nextIdx: i }
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
// Parses full patch text delimited by "*** Begin Patch" / "*** End Patch"
// into file-level hunks. Throws when the envelope is missing or misordered.
// Unrecognized lines between sections are skipped rather than rejected.
export function parsePatch(patchText: string): { hunks: Hunk[] } {
  const lines = patchText.split("\n")
  const hunks: Hunk[] = []
  let i = 0

  // Look for Begin/End patch markers
  const beginMarker = "*** Begin Patch"
  const endMarker = "*** End Patch"

  // findIndex takes the FIRST occurrence of each marker; a body line equal
  // to the end marker would terminate the patch early (accepted limitation
  // of this format).
  const beginIdx = lines.findIndex((line) => line.trim() === beginMarker)
  const endIdx = lines.findIndex((line) => line.trim() === endMarker)

  if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) {
    throw new Error("Invalid patch format: missing Begin/End markers")
  }

  // Parse content between markers
  i = beginIdx + 1

  while (i < endIdx) {
    const header = parsePatchHeader(lines, i)
    if (!header) {
      i++
      continue
    }

    // Dispatch on the header kind; each branch advances i past the section
    // it consumed so the loop resumes at the next header.
    if (lines[i].startsWith("*** Add File:")) {
      const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx)
      hunks.push({
        type: "add",
        path: header.filePath,
        contents: content,
      })
      i = nextIdx
    } else if (lines[i].startsWith("*** Delete File:")) {
      hunks.push({
        type: "delete",
        path: header.filePath,
      })
      i = header.nextIdx
    } else if (lines[i].startsWith("*** Update File:")) {
      const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx)
      hunks.push({
        type: "update",
        path: header.filePath,
        move_path: header.movePath,
        chunks,
      })
      i = nextIdx
    } else {
      i++
    }
  }

  return { hunks }
}
|
|
236
|
+
|
|
237
|
+
// Apply patch functionality
|
|
238
|
+
export function maybeParseApplyPatch(
|
|
239
|
+
argv: string[],
|
|
240
|
+
):
|
|
241
|
+
| { type: MaybeApplyPatch.Body; args: ApplyPatchArgs }
|
|
242
|
+
| { type: MaybeApplyPatch.PatchParseError; error: Error }
|
|
243
|
+
| { type: MaybeApplyPatch.NotApplyPatch } {
|
|
244
|
+
const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"]
|
|
245
|
+
|
|
246
|
+
// Direct invocation: apply_patch <patch>
|
|
247
|
+
if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) {
|
|
248
|
+
try {
|
|
249
|
+
const { hunks } = parsePatch(argv[1])
|
|
250
|
+
return {
|
|
251
|
+
type: MaybeApplyPatch.Body,
|
|
252
|
+
args: {
|
|
253
|
+
patch: argv[1],
|
|
254
|
+
hunks,
|
|
255
|
+
},
|
|
256
|
+
}
|
|
257
|
+
} catch (error) {
|
|
258
|
+
return {
|
|
259
|
+
type: MaybeApplyPatch.PatchParseError,
|
|
260
|
+
error: error as Error,
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
// Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...'
|
|
266
|
+
if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") {
|
|
267
|
+
// Simple extraction - in real implementation would need proper bash parsing
|
|
268
|
+
const script = argv[2]
|
|
269
|
+
const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/)
|
|
270
|
+
|
|
271
|
+
if (heredocMatch) {
|
|
272
|
+
const patchContent = heredocMatch[2]
|
|
273
|
+
try {
|
|
274
|
+
const { hunks } = parsePatch(patchContent)
|
|
275
|
+
return {
|
|
276
|
+
type: MaybeApplyPatch.Body,
|
|
277
|
+
args: {
|
|
278
|
+
patch: patchContent,
|
|
279
|
+
hunks,
|
|
280
|
+
},
|
|
281
|
+
}
|
|
282
|
+
} catch (error) {
|
|
283
|
+
return {
|
|
284
|
+
type: MaybeApplyPatch.PatchParseError,
|
|
285
|
+
error: error as Error,
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
return { type: MaybeApplyPatch.NotApplyPatch }
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
// File content manipulation
// Result of recomputing a file's contents from update chunks: the new full
// content plus a (simplified) unified diff against the original.
interface ApplyPatchFileUpdate {
  unified_diff: string
  content: string
}
|
|
299
|
+
|
|
300
|
+
export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate {
|
|
301
|
+
// Read original file content
|
|
302
|
+
let originalContent: string
|
|
303
|
+
try {
|
|
304
|
+
originalContent = require("fs").readFileSync(filePath, "utf-8")
|
|
305
|
+
} catch (error) {
|
|
306
|
+
throw new Error(`Failed to read file ${filePath}: ${error}`)
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
let originalLines = originalContent.split("\n")
|
|
310
|
+
|
|
311
|
+
// Drop trailing empty element for consistent line counting
|
|
312
|
+
if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") {
|
|
313
|
+
originalLines.pop()
|
|
314
|
+
}
|
|
315
|
+
|
|
316
|
+
const replacements = computeReplacements(originalLines, filePath, chunks)
|
|
317
|
+
let newLines = applyReplacements(originalLines, replacements)
|
|
318
|
+
|
|
319
|
+
// Ensure trailing newline
|
|
320
|
+
if (newLines.length === 0 || newLines[newLines.length - 1] !== "") {
|
|
321
|
+
newLines.push("")
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
const newContent = newLines.join("\n")
|
|
325
|
+
|
|
326
|
+
// Generate unified diff
|
|
327
|
+
const unifiedDiff = generateUnifiedDiff(originalContent, newContent)
|
|
328
|
+
|
|
329
|
+
return {
|
|
330
|
+
unified_diff: unifiedDiff,
|
|
331
|
+
content: newContent,
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
|
|
335
|
+
// Translates update chunks into concrete replacement triples
// [startIndex, deleteCount, newLines] against originalLines. Matching is
// sequential: lineIndex only moves forward, so chunks must appear in file
// order. Throws when a context line or a chunk's old lines cannot be found.
function computeReplacements(
  originalLines: string[],
  filePath: string,
  chunks: UpdateFileChunk[],
): Array<[number, number, string[]]> {
  const replacements: Array<[number, number, string[]]> = []
  let lineIndex = 0

  for (const chunk of chunks) {
    // Handle context-based seeking
    if (chunk.change_context) {
      const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex)
      if (contextIdx === -1) {
        throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`)
      }
      // Resume matching on the line after the located context line.
      lineIndex = contextIdx + 1
    }

    // Handle pure addition (no old lines)
    if (chunk.old_lines.length === 0) {
      // Insert before a trailing blank line (if any), so appended lines land
      // before the file's final newline.
      const insertionIdx =
        originalLines.length > 0 && originalLines[originalLines.length - 1] === ""
          ? originalLines.length - 1
          : originalLines.length
      replacements.push([insertionIdx, 0, chunk.new_lines])
      continue
    }

    // Try to match old lines in the file
    let pattern = chunk.old_lines
    let newSlice = chunk.new_lines
    let found = seekSequence(originalLines, pattern, lineIndex)

    // Retry without trailing empty line if not found
    if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") {
      pattern = pattern.slice(0, -1)
      if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
        newSlice = newSlice.slice(0, -1)
      }
      found = seekSequence(originalLines, pattern, lineIndex)
    }

    if (found !== -1) {
      replacements.push([found, pattern.length, newSlice])
      lineIndex = found + pattern.length
    } else {
      throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`)
    }
  }

  // Sort replacements by index to apply in order
  // (applyReplacements walks the list back-to-front, so ascending order keeps
  // earlier indices valid while later spans are spliced.)
  replacements.sort((a, b) => a[0] - b[0])

  return replacements
}
|
|
390
|
+
|
|
391
|
+
function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] {
|
|
392
|
+
// Apply replacements in reverse order to avoid index shifting
|
|
393
|
+
const result = [...lines]
|
|
394
|
+
|
|
395
|
+
for (let i = replacements.length - 1; i >= 0; i--) {
|
|
396
|
+
const [startIdx, oldLen, newSegment] = replacements[i]
|
|
397
|
+
|
|
398
|
+
// Remove old lines
|
|
399
|
+
result.splice(startIdx, oldLen)
|
|
400
|
+
|
|
401
|
+
// Insert new lines
|
|
402
|
+
for (let j = 0; j < newSegment.length; j++) {
|
|
403
|
+
result.splice(startIdx + j, 0, newSegment[j])
|
|
404
|
+
}
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
return result
|
|
408
|
+
}
|
|
409
|
+
|
|
410
|
+
function seekSequence(lines: string[], pattern: string[], startIndex: number): number {
|
|
411
|
+
if (pattern.length === 0) return -1
|
|
412
|
+
|
|
413
|
+
// Simple substring search implementation
|
|
414
|
+
for (let i = startIndex; i <= lines.length - pattern.length; i++) {
|
|
415
|
+
let matches = true
|
|
416
|
+
|
|
417
|
+
for (let j = 0; j < pattern.length; j++) {
|
|
418
|
+
if (lines[i + j] !== pattern[j]) {
|
|
419
|
+
matches = false
|
|
420
|
+
break
|
|
421
|
+
}
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
if (matches) {
|
|
425
|
+
return i
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
return -1
|
|
430
|
+
}
|
|
431
|
+
|
|
432
|
+
function generateUnifiedDiff(oldContent: string, newContent: string): string {
|
|
433
|
+
const oldLines = oldContent.split("\n")
|
|
434
|
+
const newLines = newContent.split("\n")
|
|
435
|
+
|
|
436
|
+
// Simple diff generation - in a real implementation you'd use a proper diff algorithm
|
|
437
|
+
let diff = "@@ -1 +1 @@\n"
|
|
438
|
+
|
|
439
|
+
// Find changes (simplified approach)
|
|
440
|
+
const maxLen = Math.max(oldLines.length, newLines.length)
|
|
441
|
+
let hasChanges = false
|
|
442
|
+
|
|
443
|
+
for (let i = 0; i < maxLen; i++) {
|
|
444
|
+
const oldLine = oldLines[i] || ""
|
|
445
|
+
const newLine = newLines[i] || ""
|
|
446
|
+
|
|
447
|
+
if (oldLine !== newLine) {
|
|
448
|
+
if (oldLine) diff += `-${oldLine}\n`
|
|
449
|
+
if (newLine) diff += `+${newLine}\n`
|
|
450
|
+
hasChanges = true
|
|
451
|
+
} else if (oldLine) {
|
|
452
|
+
diff += ` ${oldLine}\n`
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
|
|
456
|
+
return hasChanges ? diff : ""
|
|
457
|
+
}
|
|
458
|
+
|
|
459
|
+
// Apply hunks to filesystem
|
|
460
|
+
export async function applyHunksToFiles(hunks: Hunk[]): Promise<AffectedPaths> {
|
|
461
|
+
if (hunks.length === 0) {
|
|
462
|
+
throw new Error("No files were modified.")
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
const added: string[] = []
|
|
466
|
+
const modified: string[] = []
|
|
467
|
+
const deleted: string[] = []
|
|
468
|
+
|
|
469
|
+
for (const hunk of hunks) {
|
|
470
|
+
switch (hunk.type) {
|
|
471
|
+
case "add":
|
|
472
|
+
// Create parent directories
|
|
473
|
+
const addDir = path.dirname(hunk.path)
|
|
474
|
+
if (addDir !== "." && addDir !== "/") {
|
|
475
|
+
await fs.mkdir(addDir, { recursive: true })
|
|
476
|
+
}
|
|
477
|
+
|
|
478
|
+
await fs.writeFile(hunk.path, hunk.contents, "utf-8")
|
|
479
|
+
added.push(hunk.path)
|
|
480
|
+
log.info(`Added file: ${hunk.path}`)
|
|
481
|
+
break
|
|
482
|
+
|
|
483
|
+
case "delete":
|
|
484
|
+
await fs.unlink(hunk.path)
|
|
485
|
+
deleted.push(hunk.path)
|
|
486
|
+
log.info(`Deleted file: ${hunk.path}`)
|
|
487
|
+
break
|
|
488
|
+
|
|
489
|
+
case "update":
|
|
490
|
+
const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks)
|
|
491
|
+
|
|
492
|
+
if (hunk.move_path) {
|
|
493
|
+
// Handle file move
|
|
494
|
+
const moveDir = path.dirname(hunk.move_path)
|
|
495
|
+
if (moveDir !== "." && moveDir !== "/") {
|
|
496
|
+
await fs.mkdir(moveDir, { recursive: true })
|
|
497
|
+
}
|
|
498
|
+
|
|
499
|
+
await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8")
|
|
500
|
+
await fs.unlink(hunk.path)
|
|
501
|
+
modified.push(hunk.move_path)
|
|
502
|
+
log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`)
|
|
503
|
+
} else {
|
|
504
|
+
// Regular update
|
|
505
|
+
await fs.writeFile(hunk.path, fileUpdate.content, "utf-8")
|
|
506
|
+
modified.push(hunk.path)
|
|
507
|
+
log.info(`Updated file: ${hunk.path}`)
|
|
508
|
+
}
|
|
509
|
+
break
|
|
510
|
+
}
|
|
511
|
+
}
|
|
512
|
+
|
|
513
|
+
return { added, modified, deleted }
|
|
514
|
+
}
|
|
515
|
+
|
|
516
|
+
// Main patch application function
// Convenience wrapper: parse the patch text and immediately write every hunk
// to the filesystem. Throws on parse failure or when the patch is empty.
export async function applyPatch(patchText: string): Promise<AffectedPaths> {
  const { hunks } = parsePatch(patchText)
  return applyHunksToFiles(hunks)
}
|
|
521
|
+
|
|
522
|
+
// Async version of maybeParseApplyPatchVerified
// Like maybeParseApplyPatch, but additionally verifies the patch against the
// filesystem: reads files slated for deletion, recomputes updated contents,
// and resolves every path against cwd (plus the optional workdir). Returns a
// ready-to-execute ApplyPatchAction, or CorrectnessError when verification
// fails, or NotApplyPatch when argv is not an apply_patch command.
export async function maybeParseApplyPatchVerified(
  argv: string[],
  cwd: string,
): Promise<
  | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction }
  | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error }
  | { type: MaybeApplyPatchVerified.NotApplyPatch }
> {
  // Detect implicit patch invocation (raw patch without apply_patch command)
  if (argv.length === 1) {
    try {
      // If the lone argument parses as a patch, the caller passed raw patch
      // text as a command — report that instead of executing it.
      parsePatch(argv[0])
      return {
        type: MaybeApplyPatchVerified.CorrectnessError,
        error: new Error(ApplyPatchError.ImplicitInvocation),
      }
    } catch {
      // Not a patch, continue
    }
  }

  const result = maybeParseApplyPatch(argv)

  switch (result.type) {
    case MaybeApplyPatch.Body:
      const { args } = result
      const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd
      const changes = new Map<string, ApplyPatchFileChange>()

      for (const hunk of args.hunks) {
        // Keyed by the FINAL location: for updates with a move directive,
        // that is the destination path.
        const resolvedPath = path.resolve(
          effectiveCwd,
          hunk.type === "update" && hunk.move_path ? hunk.move_path : hunk.path,
        )

        switch (hunk.type) {
          case "add":
            changes.set(resolvedPath, {
              type: "add",
              content: hunk.contents,
            })
            break

          case "delete":
            // For delete, we need to read the current content
            const deletePath = path.resolve(effectiveCwd, hunk.path)
            try {
              const content = await fs.readFile(deletePath, "utf-8")
              changes.set(resolvedPath, {
                type: "delete",
                content,
              })
            } catch (error) {
              return {
                type: MaybeApplyPatchVerified.CorrectnessError,
                error: new Error(`Failed to read file for deletion: ${deletePath}`),
              }
            }
            break

          case "update":
            const updatePath = path.resolve(effectiveCwd, hunk.path)
            try {
              // Recompute the new contents now so correctness errors surface
              // before anything is written to disk.
              const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks)
              changes.set(resolvedPath, {
                type: "update",
                unified_diff: fileUpdate.unified_diff,
                move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined,
                new_content: fileUpdate.content,
              })
            } catch (error) {
              return {
                type: MaybeApplyPatchVerified.CorrectnessError,
                error: error as Error,
              }
            }
            break
        }
      }

      return {
        type: MaybeApplyPatchVerified.Body,
        action: {
          changes,
          patch: args.patch,
          cwd: effectiveCwd,
        },
      }

    case MaybeApplyPatch.PatchParseError:
      return {
        type: MaybeApplyPatchVerified.CorrectnessError,
        error: result.error,
      }

    case MaybeApplyPatch.NotApplyPatch:
      return { type: MaybeApplyPatchVerified.NotApplyPatch }
  }
}
|
|
622
|
+
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { Format } from "../format"
|
|
2
|
+
import { FileWatcher } from "../file/watcher"
|
|
3
|
+
import { File } from "../file"
|
|
4
|
+
import { Flag } from "../flag/flag"
|
|
5
|
+
import { Project } from "./project"
|
|
6
|
+
import { Bus } from "../bus"
|
|
7
|
+
import { Command } from "../command"
|
|
8
|
+
import { Instance } from "./instance"
|
|
9
|
+
import { Log } from "../util/log"
|
|
10
|
+
|
|
11
|
+
// Initializes per-instance subsystems (formatter, file watcher, file state)
// and arranges for the project to be marked initialized once the default
// INIT command has executed.
// NOTE(review): the init order (Format, FileWatcher, File) is preserved
// as-is — assumed significant; confirm before reordering.
export async function InstanceBootstrap() {
  Log.Default.info("bootstrapping", { directory: Instance.directory })
  Format.init()
  FileWatcher.init()
  File.init()

  // Flag the project as initialized after the default INIT command runs.
  Bus.subscribe(Command.Event.Executed, async (payload) => {
    if (payload.properties.name === Command.Default.INIT) {
      await Project.setInitialized(Instance.project.id)
    }
  })
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { Log } from "../util/log"
|
|
2
|
+
import { Context } from "../util/context"
|
|
3
|
+
import { Project } from "./project"
|
|
4
|
+
import { State } from "./state"
|
|
5
|
+
import { iife } from "../util/iife"
|
|
6
|
+
|
|
7
|
+
// Per-instance ambient state: the directory the instance was created for,
// the enclosing worktree root, and the resolved project record.
// NOTE(review): this local `interface Context` shares its name with the
// `Context` helper imported from ../util/context — the value below comes
// from the import, the type from this interface; confirm the compiler
// accepts this combination under the project's configuration.
interface Context {
  directory: string
  worktree: string
  project: Project.Info
}
// Async-local storage slot carrying the current instance's Context.
const context = Context.create<Context>("instance")
// One in-flight/completed Context promise per directory; dedupes creation.
const cache = new Map<string, Promise<Context>>()
|
|
14
|
+
|
|
15
|
+
export const Instance = {
  // Runs `fn` with the instance context for `directory` installed. The first
  // call per directory creates the context (resolving the project and running
  // the optional `init` hook inside the freshly created context); concurrent
  // and later calls reuse the cached promise.
  async provide<R>(input: { directory: string; init?: () => Promise<any>; fn: () => R }): Promise<R> {
    let existing = cache.get(input.directory)
    if (!existing) {
      Log.Default.info("creating instance", { directory: input.directory })
      existing = iife(async () => {
        const project = await Project.fromDirectory(input.directory)
        const ctx = {
          directory: input.directory,
          worktree: project.worktree,
          project,
        }
        // Run the init hook with the context already reachable via use().
        await context.provide(ctx, async () => {
          await input.init?.()
        })
        return ctx
      })
      // Cache the promise (not the value) so concurrent callers share one
      // creation. NOTE(review): a rejected promise stays cached, so a failing
      // directory is not retried until disposeAll() — confirm intended.
      cache.set(input.directory, existing)
    }
    const ctx = await existing
    return context.provide(ctx, async () => {
      return input.fn()
    })
  },
  // The accessors below read the ambient context and therefore only work
  // inside a provide() call.
  get directory() {
    return context.use().directory
  },
  get worktree() {
    return context.use().worktree
  },
  get project() {
    return context.use().project
  },
  // Registers lazily-created per-directory state with an optional async
  // disposer; keyed by the current instance directory.
  state<S>(init: () => S, dispose?: (state: Awaited<S>) => Promise<void>): () => S {
    return State.create(() => Instance.directory, init, dispose)
  },
  // Disposes the current instance's registered state. Must run inside
  // provide(). Note: does not remove the directory's cache entry.
  async dispose() {
    Log.Default.info("disposing instance", { directory: Instance.directory })
    await State.dispose(Instance.directory)
  },
  // Disposes every cached instance (silently skipping ones whose creation
  // failed), then clears the cache so directories can be created anew.
  async disposeAll() {
    Log.Default.info("disposing all instances")
    for (const [_key, value] of cache) {
      const awaited = await value.catch(() => {})
      if (awaited) {
        await context.provide(await value, async () => {
          await Instance.dispose()
        })
      }
    }
    cache.clear()
  },
}
|