error-trace-debugger 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,190 @@
1
+ import { create_initial_state, is_budget_exceeded } from "./state.js";
2
+
3
export class Orchestrator {
  #tools;
  #repo_path;
  #max_iterations;
  #budget_ms;
  #run_tests;
  #test_command;

  /**
   * @param {object} options
   * @param {string} options.repo_path - Repository root the tools operate on.
   * @param {object} options.tools - Tool instances: log_analyzer, code_search, diff_generator, test_runner.
   * @param {number} [options.max_iterations=5] - Upper bound on investigation rounds.
   * @param {number} [options.budget_ms] - Optional wall-clock budget (see is_budget_exceeded).
   * @param {boolean} [options.run_tests=false] - Whether to invoke the test runner each round.
   * @param {string} [options.test_command] - Explicit command forwarded to the test runner.
   */
  constructor({
    repo_path,
    tools,
    max_iterations = 5,
    budget_ms,
    run_tests = false,
    test_command
  }) {
    this.#repo_path = repo_path;
    this.#tools = tools;
    this.#max_iterations = max_iterations;
    this.#budget_ms = budget_ms;
    this.#run_tests = run_tests;
    this.#test_command = test_command;
  }

  /**
   * Drive the log -> hypothesis -> search -> patch (-> test) loop and return
   * a final report object.
   * @param {{ raw_logs: string }} input - raw log text to analyze
   */
  async run({ raw_logs }) {
    const state = create_initial_state({
      repo_path: this.#repo_path,
      raw_logs,
      max_iterations: this.#max_iterations,
      budget_ms: this.#budget_ms,
      run_tests: this.#run_tests,
      test_command: this.#test_command
    });

    // Logs are parsed exactly once; every subsequent step keys off this artifact.
    state.log_artifact = await this.#tools.log_analyzer.run({
      raw_logs: state.raw_logs,
      repo_path: state.repo_path
    });
    state.investigation_steps.push({
      tool_name: "log_analyzer",
      summary: "Parsed logs into frames and signals.",
      output: summarize_log_artifact(state.log_artifact)
    });

    while (state.iteration_index < state.max_iterations) {
      if (is_budget_exceeded(state)) break;

      const current_hypothesis = build_hypothesis_from_artifact(state.log_artifact, {
        iteration_index: state.iteration_index
      });
      state.hypotheses.push(current_hypothesis);

      const search_queries = build_search_queries(state.log_artifact, current_hypothesis);
      const search_result = await this.#tools.code_search.run({
        repo_path: state.repo_path,
        queries: search_queries
      });
      state.investigation_steps.push({
        tool_name: "code_search",
        summary: "Searched the codebase for likely relevant symbols/strings.",
        output: {
          queries: search_queries,
          match_count: search_result.matches.length,
          top_matches: search_result.matches.slice(0, 10)
        }
      });

      const patch_plan = await this.#tools.diff_generator.run({
        repo_path: state.repo_path,
        log_artifact: state.log_artifact,
        hypothesis: current_hypothesis,
        search_matches: search_result.matches
      });
      state.fix_plan = patch_plan.fix_plan;
      state.patch_text = patch_plan.patch_text || "";
      state.investigation_steps.push({
        tool_name: "diff_generator",
        summary: "Proposed a fix plan and generated a patch (if possible).",
        output: {
          has_patch: Boolean(state.patch_text && state.patch_text.trim().length > 0),
          edit_count: state.fix_plan?.edits?.length ?? 0
        }
      });

      if (state.run_tests) {
        state.validation_result = await this.#tools.test_runner.run({
          repo_path: state.repo_path,
          test_command: state.test_command
        });
        state.investigation_steps.push({
          tool_name: "test_runner",
          summary: "Executed tests and summarized results.",
          output: {
            ok: state.validation_result.ok,
            exit_code: state.validation_result.exit_code,
            detected_command: state.validation_result.detected_command
          }
        });

        if (state.validation_result.ok) break;
      }

      // NOTE(review): the loop also stops as soon as any patch text exists,
      // even when tests just failed above — presumably "one patch per run"
      // is intentional; confirm.
      if (state.patch_text && state.patch_text.trim().length > 0) break;
      state.iteration_index += 1;
    }

    // has_error reflects only the last test run; it stays false when tests
    // were never executed.
    if (state.run_tests && state.validation_result) {
      state.has_error = !state.validation_result.ok;
    }

    return {
      repo_path: state.repo_path,
      raw_logs: state.raw_logs,
      log_artifact: state.log_artifact,
      hypotheses: state.hypotheses,
      investigation_steps: state.investigation_steps,
      fix_plan: state.fix_plan,
      patch_text: state.patch_text,
      validation_result: state.validation_result,
      has_error: state.has_error
    };
  }
}
128
+
129
/**
 * Condense a log artifact into a small, report-friendly summary.
 * Only the first five frames are kept to bound output size.
 */
function summarize_log_artifact(log_artifact) {
  const { error_name, error_message, frames } = log_artifact;
  return {
    error_name,
    error_message,
    frame_count: frames.length,
    top_frames: frames.slice(0, 5)
  };
}
137
+
138
/**
 * Form one hypothesis about the failure location from the parsed logs.
 * Prefers the first stack frame that belongs to the repository; confidence is
 * a fixed heuristic (0.6 with a repo frame, 0.3 without).
 */
function build_hypothesis_from_artifact(log_artifact, { iteration_index }) {
  const repo_frame = log_artifact.frames.find((frame) => frame.is_repo_frame) ?? null;

  let claim;
  let predicted_locations;
  if (repo_frame) {
    claim = `Likely failing near ${repo_frame.file_path}:${repo_frame.line_number}.`;
    predicted_locations = [
      { file_path: repo_frame.file_path, line_number: repo_frame.line_number }
    ];
  } else {
    claim = "Likely failing in application code referenced by the stack trace.";
    predicted_locations = [];
  }

  return {
    iteration_index,
    claim,
    confidence: repo_frame ? 0.6 : 0.3,
    predicted_locations,
    evidence: {
      error_name: log_artifact.error_name,
      error_message: log_artifact.error_message,
      top_frame: repo_frame
    }
  };
}
158
+
159
/**
 * Derive up to 25 fixed-string search queries (each >= 3 chars after trim)
 * from the error text, hypothesis locations, and the leading stack frames.
 * Set insertion order is preserved, so higher-signal queries come first.
 */
function build_search_queries(log_artifact, hypothesis) {
  const query_set = new Set();
  const add_basename = (file_path) => {
    const basename = file_path.split("/").pop();
    if (basename) query_set.add(basename);
  };

  if (log_artifact.error_name) query_set.add(log_artifact.error_name);
  if (log_artifact.error_message) {
    for (const token of tokenize(log_artifact.error_message)) query_set.add(token);
  }

  for (const location of hypothesis.predicted_locations) {
    add_basename(location.file_path);
  }

  // Only the first 10 frames contribute; deeper frames are usually noise.
  for (const frame of log_artifact.frames.slice(0, 10)) {
    if (frame.function_name) query_set.add(frame.function_name);
    if (frame.file_path) add_basename(frame.file_path);
  }

  return [...query_set]
    .filter((query) => query && query.trim().length >= 3)
    .slice(0, 25);
}
182
+
183
/**
 * Split free-form error text into at most 20 identifier-ish tokens.
 * Any run of characters outside [a-zA-Z0-9_./-] acts as a separator.
 */
function tokenize(text) {
  const tokens = [];
  for (const piece of text.split(/[^a-zA-Z0-9_./-]+/g)) {
    const trimmed = piece.trim();
    if (trimmed) tokens.push(trimmed);
    if (tokens.length === 20) break;
  }
  return tokens;
}
190
+
@@ -0,0 +1,161 @@
1
/**
 * Serialize a final report in one of three formats: "json", "text", or
 * markdown (the default for any other `format` value).
 * @returns {string} newline-terminated report body
 */
export function write_report(final_report, { format = "md" } = {}) {
  switch (format) {
    case "json":
      return `${JSON.stringify(final_report, null, 2)}\n`;
    case "text":
      return write_text_report(final_report);
    default:
      return write_markdown_report(final_report);
  }
}
12
+
13
/**
 * Render the final report as plain key/value text: error summary, top frame,
 * leading hypothesis, tool timeline, suggested fix, and the raw patch.
 */
function write_text_report(final_report) {
  const lines = [];
  const artifact = final_report.log_artifact;

  lines.push(`repo: ${final_report.repo_path}`);
  if (artifact?.error_name) lines.push(`error: ${artifact.error_name}`);
  if (artifact?.error_message) lines.push(`message: ${artifact.error_message}`);
  if (artifact?.frames?.length) {
    // Prefer the first in-repo frame; fall back to the very top of the stack.
    const top_frame = artifact.frames.find((f) => f.is_repo_frame) ?? artifact.frames[0];
    if (top_frame) {
      lines.push(`top_frame: ${top_frame.file_path}:${top_frame.line_number ?? "?"}`);
    }
  }
  lines.push("");

  const leading_hypothesis = final_report.hypotheses?.[0];
  if (leading_hypothesis) {
    lines.push(`hypothesis: ${leading_hypothesis.claim}`);
    lines.push(`confidence: ${leading_hypothesis.confidence}`);
    lines.push("");
  }

  if (final_report.investigation_steps?.length) {
    lines.push("investigation_steps:");
    for (const step of final_report.investigation_steps) {
      lines.push(`- ${step.tool_name}: ${step.summary}`);
    }
    lines.push("");
  }

  if (final_report.fix_plan?.summary) {
    lines.push("suggested fix:", final_report.fix_plan.summary, "");
  }

  if (final_report.patch_text && final_report.patch_text.trim().length > 0) {
    lines.push("patch:", final_report.patch_text);
  }

  return `${lines.join("\n")}\n`;
}
59
+
60
/**
 * Render the final report as markdown with sections for the error summary,
 * stack, hypothesis, investigation timeline, suggested fix, test results,
 * and the generated patch.
 */
function write_markdown_report(final_report) {
  const lines = [];
  lines.push("## Deep Agent report", "");
  lines.push(`- **repo**: \`${final_report.repo_path}\``);

  const artifact = final_report.log_artifact;
  if (artifact?.error_name) lines.push(`- **error**: \`${artifact.error_name}\``);
  if (artifact?.error_message) lines.push(`- **message**: ${escape_md(artifact.error_message)}`);
  lines.push("");

  if (artifact?.frames?.length) {
    lines.push("## Stack (top frames)", "", "```");
    // Show at most 8 frames; missing line/column render as "?".
    for (const frame of artifact.frames.slice(0, 8)) {
      const name_prefix = frame.function_name ? `${frame.function_name} ` : "";
      const location = `${frame.file_path}:${frame.line_number ?? "?"}:${frame.column_number ?? "?"}`;
      const repo_tag = frame.is_repo_frame ? " [repo]" : "";
      lines.push(`${name_prefix}${location}${repo_tag}`);
    }
    lines.push("```", "");
  }

  if (final_report.hypotheses?.length) {
    const hypothesis = final_report.hypotheses[0];
    lines.push("## Hypothesis", "");
    lines.push(`- **claim**: ${escape_md(hypothesis.claim)}`);
    lines.push(`- **confidence**: ${hypothesis.confidence}`);
    if (hypothesis.predicted_locations?.length) {
      lines.push("", "Likely locations:");
      for (const location of hypothesis.predicted_locations) {
        lines.push(`- \`${location.file_path}:${location.line_number ?? "?"}\``);
      }
    }
    lines.push("");
  }

  if (final_report.investigation_steps?.length) {
    lines.push("## Investigation timeline", "");
    for (const step of final_report.investigation_steps) {
      lines.push(`- **${step.tool_name}**: ${escape_md(step.summary)}`);
    }
    lines.push("");
  }

  if (final_report.fix_plan) {
    lines.push("## Suggested fix", "");
    if (final_report.fix_plan.summary) lines.push(escape_md(final_report.fix_plan.summary));
    if (final_report.fix_plan.edits?.length) {
      lines.push("", "Edits:");
      for (const edit of final_report.fix_plan.edits) {
        lines.push(`- \`${edit.file_path}\` (${edit.reason || "reason omitted"})`);
      }
    }
    lines.push("");
  }

  if (final_report.validation_result) {
    const validation = final_report.validation_result;
    lines.push("## Tests", "");
    lines.push(`- **ok**: ${validation.ok}`);
    if (validation.detected_command) {
      lines.push(`- **command**: \`${validation.detected_command}\``);
    }
    if (validation.summary) {
      lines.push("", "Output summary:", "", "```");
      lines.push(validation.summary);
      lines.push("```");
    }
    lines.push("");
  }

  if (final_report.patch_text && final_report.patch_text.trim().length > 0) {
    lines.push("## Patch", "", "```diff");
    lines.push(final_report.patch_text.trimEnd());
    lines.push("```", "");
  }

  return `${lines.join("\n")}\n`;
}
157
+
158
/**
 * Sanitize text for inline markdown output.
 * NOTE(review): despite the name, this only strips carriage returns; markdown
 * metacharacters pass through unescaped — confirm that is acceptable.
 */
function escape_md(text) {
  return String(text).split("\r").join("");
}
161
+
@@ -0,0 +1,33 @@
1
/**
 * Build the mutable state object threaded through one orchestrator run.
 * Records the start time so is_budget_exceeded can measure elapsed wall time.
 */
export function create_initial_state({
  repo_path,
  raw_logs,
  max_iterations,
  budget_ms,
  run_tests,
  test_command
}) {
  const configuration = {
    repo_path,
    raw_logs,
    max_iterations,
    budget_ms,
    run_tests,
    test_command
  };
  return {
    ...configuration,
    started_at_ms: Date.now(), // baseline for the time budget
    iteration_index: 0,
    investigation_steps: [],
    hypotheses: [],
    log_artifact: null,
    fix_plan: null,
    patch_text: "",
    validation_result: null,
    has_error: false
  };
}
27
+
28
/**
 * True once elapsed wall time reaches the configured budget.
 * A missing or non-finite budget_ms means "no budget": always false.
 */
export function is_budget_exceeded(state) {
  if (!Number.isFinite(state.budget_ms)) return false;
  return Date.now() - state.started_at_ms >= state.budget_ms;
}
33
+
@@ -0,0 +1,8 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+
4
/**
 * Write `contents` to `target_path` as UTF-8, creating any missing parent
 * directories first.
 */
export async function write_text_file(target_path, contents) {
  const parent_dir = path.dirname(target_path);
  await fs.mkdir(parent_dir, { recursive: true });
  await fs.writeFile(target_path, contents, "utf8");
}
8
+
@@ -0,0 +1,170 @@
1
+ import path from "node:path";
2
+ import fs from "node:fs/promises";
3
+ import { execa } from "execa";
4
+ import fg from "fast-glob";
5
+ import ignore from "ignore";
6
+
7
// Hard caps that keep code search bounded on large repositories.
const DEFAULT_MAX_MATCHES = 300; // total matches returned per run
const DEFAULT_MAX_LINES_PER_QUERY = 200; // ripgrep output lines consumed per query
const DEFAULT_MAX_FILE_BYTES = 512 * 1024; // skip individual files over 512 KiB
const DEFAULT_MAX_TOTAL_BYTES = 5 * 1024 * 1024; // stop the fallback scan after 5 MiB read
11
+
12
export class CodeSearchTool {
  #repo_path;
  #has_ripgrep_promise;

  /** @param {{ repo_path: string }} options - default repository root */
  constructor({ repo_path }) {
    this.#repo_path = repo_path;
    this.#has_ripgrep_promise = null; // lazily populated by #has_ripgrep()
  }

  /**
   * Search the repository for each query string. Uses ripgrep when it is on
   * PATH, otherwise falls back to a bounded in-process scan.
   * @returns {Promise<{ matches: Array<object> }>}
   */
  async run({ repo_path, queries }) {
    const target_repo = repo_path || this.#repo_path;
    const clean_queries = (queries || []).map((q) => String(q)).filter(Boolean);

    if (await this.#has_ripgrep()) {
      return await this.#run_with_ripgrep(target_repo, clean_queries);
    }
    return await this.#run_with_fallback_scan(target_repo, clean_queries);
  }

  // Probe for ripgrep once and cache the promise so concurrent callers share it.
  async #has_ripgrep() {
    this.#has_ripgrep_promise ??= (async () => {
      try {
        await execa("rg", ["--version"], { reject: true });
        return true;
      } catch {
        return false;
      }
    })();
    return this.#has_ripgrep_promise;
  }

  async #run_with_ripgrep(repo_path, queries) {
    const collected = [];
    const unique_queries = [...new Set(queries)].slice(0, 25);

    for (const query of unique_queries) {
      const rg_args = [
        "--no-heading",
        "--line-number",
        "--color",
        "never",
        "--fixed-strings",
        "--smart-case",
        "--glob",
        "!**/node_modules/**",
        "--glob",
        "!**/dist/**",
        "--glob",
        "!**/build/**",
        query
      ];
      const result = await execa("rg", rg_args, { cwd: repo_path, reject: false });

      // rg exits 0 on matches and 1 on "no matches"; anything else is an error.
      if (result.exitCode !== 0 && result.exitCode !== 1) continue;
      const output_lines = String(result.stdout || "").split(/\r?\n/g).filter(Boolean);
      for (const output_line of output_lines.slice(0, DEFAULT_MAX_LINES_PER_QUERY)) {
        const parsed = parse_ripgrep_line(output_line);
        if (parsed) collected.push({ query, ...parsed });
      }
    }

    return { matches: collected.slice(0, DEFAULT_MAX_MATCHES) };
  }

  async #run_with_fallback_scan(repo_path, queries) {
    const ig = await load_ignore(repo_path);
    const candidate_paths = await fg(["**/*.*"], {
      cwd: repo_path,
      dot: false,
      onlyFiles: true,
      followSymbolicLinks: false
    });

    const matches = [];
    const unique_queries = [...new Set(queries)].slice(0, 25);
    let total_bytes_read = 0;

    for (const rel_path of candidate_paths) {
      if (!should_scan_file(rel_path)) continue;
      if (ig.ignores(rel_path)) continue;

      const absolute_path = path.join(repo_path, rel_path);
      let contents = "";
      try {
        const stat = await fs.stat(absolute_path);
        if (!stat.isFile()) continue;
        if (stat.size > DEFAULT_MAX_FILE_BYTES) continue; // per-file cap
        if (total_bytes_read + stat.size > DEFAULT_MAX_TOTAL_BYTES) break; // global cap

        contents = await fs.readFile(absolute_path, "utf8");
        total_bytes_read += Buffer.byteLength(contents, "utf8");
      } catch {
        continue; // unreadable entries are skipped, best effort
      }

      // Only the first occurrence per (file, query) pair is recorded.
      for (const query of unique_queries) {
        const hit_index = contents.indexOf(query);
        if (hit_index < 0) continue;
        matches.push({
          query,
          file_path: rel_path,
          line_number: guess_line_number(contents, hit_index),
          snippet: extract_snippet(contents, hit_index, query.length)
        });
      }

      if (matches.length >= DEFAULT_MAX_MATCHES) break;
    }

    return { matches };
  }
}
130
+
131
/**
 * Split one `rg --no-heading --line-number` output line into its parts.
 * Returns null when the line does not look like `file:line:text`.
 */
function parse_ripgrep_line(line) {
  const parts = /^(?<file_path>.*?):(?<line_number>\d+):(?<snippet>.*)$/.exec(line);
  if (!parts?.groups) return null;
  const { file_path, line_number, snippet } = parts.groups;
  return { file_path, line_number: Number(line_number), snippet };
}
140
+
141
/**
 * Filter for the fallback scanner: skip dependency/output directories and git
 * metadata by relative path.
 * @param {string} rel_path - path relative to the repo root, "/"-separated
 * @returns {boolean} true when the file is worth scanning
 */
function should_scan_file(rel_path) {
  if (rel_path.startsWith("node_modules/")) return false;
  if (rel_path.startsWith("dist/")) return false;
  if (rel_path.startsWith("build/")) return false;
  // Bug fix: the old check (`includes("/.git/")`) only caught nested ".git"
  // directories, so a top-level ".git/..." path slipped through.
  if (rel_path.startsWith(".git/") || rel_path.includes("/.git/")) return false;
  return true;
}
148
+
149
/**
 * Build an ignore matcher seeded with the standard junk directories plus the
 * repository's own .gitignore, if readable.
 */
async function load_ignore(repo_path) {
  const matcher = ignore();
  matcher.add(["node_modules/", "dist/", "build/", ".git/"]);
  try {
    const gitignore_text = await fs.readFile(path.join(repo_path, ".gitignore"), "utf8");
    matcher.add(gitignore_text.split(/\r?\n/g));
  } catch {
    // A missing or unreadable .gitignore is fine; keep the defaults.
  }
  return matcher;
}
160
+
161
/**
 * 1-based line number of the character at `index`, computed by counting the
 * "\n" characters that precede it.
 */
function guess_line_number(contents, index) {
  let line_number = 1;
  for (let i = 0; i < index; i += 1) {
    if (contents[i] === "\n") line_number += 1;
  }
  return line_number;
}
164
+
165
/**
 * Grab up to 60 characters of context on either side of a match and flatten
 * newlines to literal "\n" so the snippet stays on one line.
 */
function extract_snippet(contents, index, length) {
  const start_index = Math.max(0, index - 60);
  const end_index = Math.min(contents.length, index + length + 60);
  return contents.slice(start_index, end_index).split("\n").join("\\n");
}
170
+
@@ -0,0 +1,115 @@
1
+ import path from "node:path";
2
+ import fs from "node:fs/promises";
3
+ import { createTwoFilesPatch } from "diff";
4
+
5
export class GitDiffTool {
  #repo_path;

  /** @param {{ repo_path: string }} options - default repository root */
  constructor({ repo_path }) {
    this.#repo_path = repo_path;
  }

  /**
   * Produce a fix plan (heuristic advice text plus optional concrete edits)
   * and a unified diff for whatever edits survive the merge with overrides.
   * @returns {Promise<{ fix_plan: object, patch_text: string }>}
   */
  async run({ repo_path, log_artifact, hypothesis, search_matches, fix_plan_overrides }) {
    const target_repo = repo_path || this.#repo_path;
    const top_repo_frame = log_artifact.frames.find((f) => f.is_repo_frame) || null;

    const base_plan = build_minimal_fix_plan({
      hypothesis,
      top_repo_frame,
      search_matches
    });
    const fix_plan = merge_fix_plans(base_plan, fix_plan_overrides);
    const patch_text = await generate_patch_from_edits(target_repo, fix_plan.edits);

    return { fix_plan, patch_text };
  }
}
34
+
35
/**
 * Compose a generic, human-readable fix plan. Produces advice text only —
 * the `edits` list starts empty and is filled in via overrides, if any.
 * (`hypothesis` is accepted for interface symmetry but not read here.)
 */
function build_minimal_fix_plan({ hypothesis, top_repo_frame, search_matches }) {
  const location_hint = top_repo_frame
    ? `${top_repo_frame.file_path}:${top_repo_frame.line_number}`
    : "unknown location";

  const summary_lines = [
    `Start from ${location_hint} and verify the failing assumptions.`,
    "If this is a null/undefined access, add validation or adjust the upstream contract.",
    "If this is a missing import/export, ensure the symbol is defined and exported correctly."
  ];

  const leading_matches = (search_matches || []).slice(0, 3);
  if (leading_matches.length > 0) {
    summary_lines.push("", "Relevant search hits:");
    for (const match of leading_matches) {
      summary_lines.push(`- ${match.file_path}:${match.line_number} (${match.query})`);
    }
  }

  return { summary: summary_lines.join("\n"), edits: [] };
}
59
+
60
/**
 * Overlay caller-supplied overrides onto the generated base plan. A field is
 * taken from `overrides` only when it is present and non-empty; otherwise the
 * base plan's value wins. With no overrides, the base plan is returned as-is.
 */
function merge_fix_plans(base_plan, overrides) {
  if (!overrides) return base_plan;

  const summary_override_usable =
    typeof overrides.summary === "string" && overrides.summary.trim().length > 0;
  const edits_override_usable =
    Array.isArray(overrides.edits) && overrides.edits.length > 0;

  return {
    summary: summary_override_usable ? overrides.summary : base_plan.summary,
    edits: edits_override_usable ? overrides.edits : base_plan.edits
  };
}
76
+
77
/**
 * Read a file as UTF-8, treating any failure (missing file, permissions, …)
 * as an empty string so diff generation can treat it as a brand-new file.
 */
async function safe_read_text(absolute_path) {
  try {
    return await fs.readFile(absolute_path, "utf8");
  } catch {
    return "";
  }
}
84
+
85
/**
 * Turn concrete edits into one unified-diff string. Edits without a target
 * path or replacement text, and no-op edits, are skipped. Returns "" when
 * nothing would change.
 */
async function generate_patch_from_edits(repo_path, edits) {
  const edit_list = Array.isArray(edits) ? edits : [];
  if (edit_list.length === 0) return "";

  const patch_chunks = [];
  for (const edit of edit_list) {
    const rel_path = edit.file_path;
    if (!rel_path || typeof edit.new_text !== "string") continue;

    const before_text = await safe_read_text(path.join(repo_path, rel_path));
    if (before_text === edit.new_text) continue; // nothing to patch

    patch_chunks.push(
      createTwoFilesPatch(
        `a/${rel_path}`,
        `b/${rel_path}`,
        before_text,
        edit.new_text,
        "",
        "",
        { context: 3 }
      )
    );
  }

  return patch_chunks.join("\n").trimEnd();
}
115
+