error-trace-debugger 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +96 -0
- package/bin/error-trace-debugger +4 -0
- package/package.json +33 -0
- package/src/adapters/llm_client_langchain.js +38 -0
- package/src/agents/agent_graph.js +228 -0
- package/src/agents/deep_agent_v2.js +74 -0
- package/src/cli/deep_agent.js +67 -0
- package/src/cli/run_analyze_command.js +85 -0
- package/src/cli/stdin_logs.js +44 -0
- package/src/core/Orchestrator.js +190 -0
- package/src/core/report_writer.js +161 -0
- package/src/core/state.js +33 -0
- package/src/core/write_text_file.js +8 -0
- package/src/tools/CodeSearchTool.js +170 -0
- package/src/tools/GitDiffTool.js +115 -0
- package/src/tools/LogAnalyzerTool.js +148 -0
- package/src/tools/PatchProposerTool.js +110 -0
- package/src/tools/ReadFileTool.js +98 -0
- package/src/tools/TestRunnerTool.js +98 -0
- package/src/tools/create_default_tools.js +14 -0
- package/src/util/redact_secrets.js +16 -0
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
|
|
3
|
+
/**
 * Tool that turns raw log text into a structured error artifact:
 * error name/message, parsed stack frames, and derived signals.
 */
export class LogAnalyzerTool {
  #repo_path;

  /**
   * @param {{ repo_path: string }} options - Repo root used to normalize frame paths.
   */
  constructor({ repo_path }) {
    this.#repo_path = repo_path;
  }

  /**
   * Parse raw logs into a structured artifact.
   * @param {{ raw_logs: string }} input
   * @returns {Promise<{ raw_logs: string, error_name: string, error_message: string, frames: object[], signals: object }>}
   */
  async run({ raw_logs }) {
    const { error_name, error_message, frames, signals } = parse_stack_trace(raw_logs, {
      repo_path: this.#repo_path
    });
    return { raw_logs, error_name, error_message, frames, signals };
  }
}
|
|
21
|
+
|
|
22
|
+
/**
 * Parse raw log text into { error_name, error_message, frames, signals }.
 *
 * The error header is taken from the FIRST line that parses to a concrete
 * error name — real logs often contain unrelated output (server banners,
 * test-runner chatter) before the actual error line. When no line looks
 * like an error header, the first non-empty line is used as a best-effort
 * message, matching the previous behavior.
 *
 * @param {string} raw_logs
 * @param {{ repo_path: string }} options
 */
function parse_stack_trace(raw_logs, { repo_path }) {
  const lines = String(raw_logs).split(/\r?\n/g);
  const non_empty = lines.filter((l) => l.trim().length > 0);

  // Prefer the first line that yields a concrete error name.
  let header = null;
  for (const line of non_empty) {
    const candidate = parse_error_header(line);
    if (candidate.error_name) {
      header = candidate;
      break;
    }
  }
  // Fallback: behave like the old implementation (first non-empty line).
  if (!header) header = parse_error_header(non_empty[0] || "");

  const frames = [];
  for (const line of lines) {
    const frame = parse_node_frame(line);
    if (!frame) continue;
    frames.push(normalize_frame(frame, { repo_path }));
  }

  const signals = {};
  if (header.error_name) signals.error_name = header.error_name;
  if (header.error_message) signals.error_message = header.error_message;
  // Case-sensitive on purpose: "Jest" (banner) and "jest" (CLI) both count.
  signals.is_jest = lines.some((l) => l.includes("Jest") || l.includes("jest"));
  signals.has_stack_frames = frames.length > 0;

  return {
    error_name: header.error_name,
    error_message: header.error_message,
    frames,
    signals
  };
}
|
|
46
|
+
|
|
47
|
+
/**
 * Parse the leading "Name: message" error header from a log line.
 * Recognizes identifiers ending in "Error" or "Exception" (plus a bare
 * "Error"); anything else falls back to a heuristic that flags lines
 * mentioning Error/Exception with the generic name "Error".
 *
 * @param {string} first_line
 * @returns {{ error_name: string, error_message: string }}
 */
function parse_error_header(first_line) {
  const trimmed = String(first_line || "").trim();
  if (!trimmed) return { error_name: "", error_message: "" };

  // e.g. "TypeError: x is not a function", "TimeoutException: timed out".
  // Generalized from the original, which only matched names ending in "Error".
  const match = trimmed.match(
    /^([A-Za-z_$][A-Za-z0-9_$]*(?:Error|Exception)|Error)\s*:?\s*(.*)$/
  );
  if (match) {
    return {
      error_name: match[1] || "",
      error_message: (match[2] || "").trim()
    };
  }

  // Heuristic fallback: keep the whole line as the message.
  const looks_like_error = trimmed.includes("Error") || trimmed.includes("Exception");
  return { error_name: looks_like_error ? "Error" : "", error_message: trimmed };
}
|
|
64
|
+
|
|
65
|
+
// Frame patterns, tried in order from most to least specific, mirroring the
// V8 stack formats:
//   at fn (path:line:col) | at path:line:col | at fn (path:line) | at path:line
const FRAME_PATTERNS = [
  /^at\s+(?<function_name>.+?)\s+\((?<file_path>.*?):(?<line_number>\d+):(?<column_number>\d+)\)$/,
  /^at\s+(?<file_path>.*?):(?<line_number>\d+):(?<column_number>\d+)$/,
  /^at\s+(?<function_name>.+?)\s+\((?<file_path>.*?):(?<line_number>\d+)\)$/,
  /^at\s+(?<file_path>.*?):(?<line_number>\d+)$/
];

/**
 * Parse a single "at ..." stack line into a raw frame object, or return null
 * when the line is not a recognizable Node.js stack frame.
 *
 * @param {string} line
 * @returns {{ function_name: string, file_path: string, line_number: number, column_number: number|null, raw_line: string }|null}
 */
function parse_node_frame(line) {
  const trimmed = line.trim();
  if (!trimmed.startsWith("at ")) return null;

  for (const pattern of FRAME_PATTERNS) {
    const groups = trimmed.match(pattern)?.groups;
    if (!groups) continue;
    return {
      function_name: groups.function_name ? groups.function_name.trim() : "",
      file_path: groups.file_path,
      line_number: Number(groups.line_number),
      column_number: groups.column_number === undefined ? null : Number(groups.column_number),
      raw_line: trimmed
    };
  }

  return null;
}
|
|
123
|
+
|
|
124
|
+
/**
 * Normalize a raw frame's file path against the repo root:
 * - converts file:// URLs to plain paths,
 * - converts backslashes to forward slashes,
 * - resolves relative paths against repo_path,
 * - marks frames inside the repo and rewrites their path repo-relative.
 *
 * @param {{ function_name: string, file_path: string, line_number: number, column_number: number|null, raw_line?: string }} frame
 * @param {{ repo_path: string }} options
 */
function normalize_frame(frame, { repo_path }) {
  const is_file_url = frame.file_path.startsWith("file://");
  const raw_path = is_file_url ? file_url_to_path(frame.file_path) : frame.file_path;
  const normalized_separators = raw_path.replaceAll("\\", "/");

  const normalized_path = path.isAbsolute(normalized_separators)
    ? normalized_separators
    : path.resolve(repo_path, normalized_separators);

  // Containment via path.relative: a raw prefix check ("startsWith") would
  // wrongly treat a sibling directory such as "/repo-other" as inside "/repo".
  const relative_to_repo = path.relative(repo_path, normalized_path);
  const is_repo_frame =
    relative_to_repo !== "" &&
    !relative_to_repo.startsWith("..") &&
    !path.isAbsolute(relative_to_repo);

  return {
    function_name: frame.function_name,
    file_path: is_repo_frame ? relative_to_repo : raw_path,
    line_number: frame.line_number,
    column_number: frame.column_number,
    is_repo_frame,
    raw_line: frame.raw_line || ""
  };
}

/**
 * Convert a file:// URL into a filesystem path (percent-decoded).
 * NOTE(review): naive — does not special-case Windows drive letters
 * ("file:///C:/x" yields "/C:/x"); confirm whether Windows input occurs.
 */
function file_url_to_path(file_url) {
  return decodeURIComponent(file_url.replace(/^file:\/\//, ""));
}
|
|
148
|
+
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import { redactSecrets } from "../util/redact_secrets.js";
|
|
2
|
+
|
|
3
|
+
const MAX_RETRIES = 2;

/**
 * Tool that asks an LLM for a structured fix plan (summary, edits, citations)
 * built from logs, code-search hits, and file excerpts.
 */
export class PatchProposerTool {
  #llm;

  /**
   * @param {{ llm: { generateJson(prompt: string): Promise<string> } }} options
   */
  constructor({ llm }) {
    this.#llm = llm;
  }

  /**
   * Generate and validate a fix plan. Retries up to MAX_RETRIES extra times
   * when the model output cannot be obtained or parsed; on repeated failure
   * returns a structured error object instead of throwing.
   */
  async run({ log_artifact, search_matches, file_contexts }) {
    const safe_logs = redactSecrets(log_artifact.raw_logs || "");
    const prompt = buildPrompt({ log_artifact, search_matches, file_contexts, safe_logs });

    let last_error = null;
    for (let attempt_index = 0; attempt_index <= MAX_RETRIES; attempt_index += 1) {
      try {
        // The LLM call is INSIDE the try so transport errors are retried and
        // ultimately reported via the fallback object, instead of rejecting
        // run() on the first failed attempt (the original awaited outside).
        const raw = await this.#llm.generateJson(prompt);
        const parsed = JSON.parse(extract_json_text(raw));
        return validateFixPlan(parsed);
      } catch (error) {
        last_error = error;
      }
    }

    return {
      summary: "Failed to generate a structured fix plan from the LLM.",
      edits: [],
      citations: [],
      error: String(last_error || "Unknown error from LLM.")
    };
  }
}

/**
 * Best-effort extraction of the JSON payload from an LLM reply: strips a
 * surrounding markdown code fence (``` or ```json) that models often add
 * despite the "JSON only" instruction; otherwise returns the trimmed text.
 */
function extract_json_text(raw) {
  const text = String(raw || "").trim();
  const fenced = text.match(/^```(?:json)?\s*([\s\S]*?)\s*```$/i);
  return fenced ? fenced[1] : text;
}
|
|
36
|
+
|
|
37
|
+
/**
 * Assemble the LLM prompt: role instructions, redacted logs, top code-search
 * matches, file excerpts, and the required JSON output schema.
 * Caps: 4000 chars of logs, 10 matches, 8 excerpts of 2000 chars each.
 *
 * @returns {string} newline-joined prompt text
 */
function buildPrompt({ search_matches, file_contexts, safe_logs }) {
  const match_lines = search_matches
    .slice(0, 10)
    .map((m) => `- ${m.file_path}:${m.line_number} :: ${m.query} :: ${m.snippet || ""}`);

  const excerpt_lines = file_contexts
    .slice(0, 8)
    .flatMap((c) => [
      `--- ${c.file_path} [${c.start_line}-${c.end_line}] ---`,
      c.excerpt.slice(0, 2000),
      ""
    ]);

  return [
    "You are a senior JavaScript engineer.",
    "You receive application logs, code search hits, and file excerpts from a Node.js repo.",
    "Your task is to propose a minimal, safe patch that fixes the bug described in the logs.",
    "",
    "Logs (redacted):",
    "```",
    safe_logs.slice(0, 4000),
    "```",
    "",
    "Search matches (top):",
    ...match_lines,
    "",
    "File excerpts:",
    ...excerpt_lines,
    "",
    "Return ONLY a JSON object with this shape, no markdown or explanation:",
    `{"summary": string, "edits": [{"file_path": string, "new_text": string, "reason": string}], "citations": [{"file_path": string, "start_line": number, "end_line": number}]}`
  ].join("\n");
}
|
|
78
|
+
|
|
79
|
+
/**
 * Coerce an untrusted LLM response into the fix-plan shape
 * { summary, edits, citations }, dropping malformed entries:
 * - summary is stringified and truncated to 2000 chars,
 * - edits must carry string file_path and new_text (reason defaults to ""),
 * - citations must carry a string file_path; non-numeric line bounds -> null.
 */
function validateFixPlan(candidate) {
  const source = candidate || {};

  const summary = String(source.summary || "").slice(0, 2000);

  const edits = (Array.isArray(source.edits) ? source.edits : [])
    .filter(
      (edit) =>
        edit &&
        typeof edit.file_path === "string" &&
        typeof edit.new_text === "string"
    )
    .map((edit) => ({
      file_path: edit.file_path,
      new_text: edit.new_text,
      reason: typeof edit.reason === "string" ? edit.reason : ""
    }));

  const citations = (Array.isArray(source.citations) ? source.citations : [])
    .filter((citation) => citation && typeof citation.file_path === "string")
    .map((citation) => ({
      file_path: citation.file_path,
      start_line: Number.isFinite(citation.start_line) ? citation.start_line : null,
      end_line: Number.isFinite(citation.end_line) ? citation.end_line : null
    }));

  return { summary, edits, citations };
}
|
|
110
|
+
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import ignore from "ignore";
|
|
4
|
+
|
|
5
|
+
const DEFAULT_MAX_BYTES_PER_FILE = 128 * 1024;
const DEFAULT_MAX_TOTAL_BYTES = 2 * 1024 * 1024;

/**
 * Reads excerpts of repository files while enforcing:
 *  - ignore rules (built-in defaults plus the repo's .gitignore),
 *  - a per-call byte cap and a cumulative byte budget across all calls,
 *  - containment inside the repository root (no absolute / ".." paths).
 */
export class ReadFileTool {
  #repo_path;
  #max_bytes_per_file;  // per-call cap in bytes
  #max_total_bytes;     // cumulative budget over the tool's lifetime
  #ig_promise;          // cached Promise resolving to the ignore matcher
  #bytes_read;          // bytes consumed so far against #max_total_bytes

  constructor({
    repo_path,
    max_bytes_per_file = DEFAULT_MAX_BYTES_PER_FILE,
    max_total_bytes = DEFAULT_MAX_TOTAL_BYTES
  }) {
    this.#repo_path = repo_path;
    this.#max_bytes_per_file = max_bytes_per_file;
    this.#max_total_bytes = max_total_bytes;
    this.#ig_promise = null;
    this.#bytes_read = 0;
  }

  /**
   * Read up to ~200 lines (or the [start_line, end_line] window) from a
   * repo-relative file. Never throws; returns either
   * { ok: true, file_path, start_line, end_line, total_lines, excerpt }
   * or { ok: false, error }.
   */
  async run({ file_path, start_line, end_line, max_bytes }) {
    // Containment check: reject absolute paths and ".." traversal so callers
    // cannot read outside the repo (which would also defeat the .env ignore
    // rules); the ignore matcher additionally requires a relative path and
    // throws when handed an absolute one.
    const absolute_path = path.resolve(this.#repo_path, file_path);
    const rel_path = path.relative(this.#repo_path, absolute_path);
    if (!rel_path || rel_path.startsWith("..") || path.isAbsolute(rel_path)) {
      return { ok: false, error: `Path escapes the repository root: ${file_path}` };
    }

    const ig = await this.#load_ignore();
    if (ig.ignores(rel_path)) {
      return { ok: false, error: `File is ignored by ignore rules: ${rel_path}` };
    }

    // Check the budget before doing any I/O.
    const remaining_budget = this.#max_total_bytes - this.#bytes_read;
    if (remaining_budget <= 0) {
      return { ok: false, error: "ReadFileTool budget exhausted." };
    }

    let contents = "";
    try {
      contents = await fs.readFile(absolute_path, "utf8");
    } catch (error) {
      return { ok: false, error: String(error) };
    }

    // "||" intentionally treats max_bytes = 0 as "use the default" (matches
    // the original behavior); the per-file cap is a hard upper bound.
    const per_call_cap = Math.min(max_bytes || this.#max_bytes_per_file, this.#max_bytes_per_file);
    const slice_bytes = Math.min(
      Buffer.byteLength(contents, "utf8"),
      per_call_cap,
      remaining_budget
    );

    // Slice by BYTES rather than UTF-16 code units so multi-byte text cannot
    // exceed the caps (the original sliced code units by a byte count); strip
    // a trailing replacement char left by a UTF-8 sequence split at the edge.
    const sliced_text = Buffer.from(contents, "utf8")
      .subarray(0, slice_bytes)
      .toString("utf8")
      .replace(/\uFFFD+$/, "");
    this.#bytes_read += Buffer.byteLength(sliced_text, "utf8");

    const lines = sliced_text.split(/\r?\n/g);
    const start_index = Number.isFinite(start_line) && start_line > 0 ? start_line - 1 : 0;
    const end_index =
      Number.isFinite(end_line) && end_line > 0
        ? Math.min(end_line, lines.length)
        : Math.min(start_index + 200, lines.length);

    const excerpt = lines.slice(start_index, end_index).join("\n");

    return {
      ok: true,
      file_path: rel_path,
      start_line: start_index + 1,
      end_line: end_index,
      total_lines: lines.length,
      excerpt
    };
  }

  // Build the ignore matcher once: baked-in defaults plus the repo's
  // .gitignore. Cached as a promise so concurrent callers share one build.
  async #load_ignore() {
    if (this.#ig_promise) return this.#ig_promise;
    this.#ig_promise = (async () => {
      const ig = ignore();
      ig.add(["node_modules/", "dist/", "build/", ".git/", ".env", ".env.*"]);
      try {
        const gitignore_path = path.join(this.#repo_path, ".gitignore");
        const gitignore = await fs.readFile(gitignore_path, "utf8");
        ig.add(gitignore.split(/\r?\n/g));
      } catch {
        // Missing or unreadable .gitignore: fall back to the defaults only.
      }
      return ig;
    })();
    return this.#ig_promise;
  }
}
|
|
98
|
+
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { execaCommand } from "execa";
|
|
4
|
+
|
|
5
|
+
const DEFAULT_TEST_TIMEOUT_MS = 10 * 60 * 1000;

/**
 * Runs the repository's test suite and condenses its output.
 */
export class TestRunnerTool {
  #repo_path;

  /** @param {{ repo_path: string }} options - Default repo to run tests in. */
  constructor({ repo_path }) {
    this.#repo_path = repo_path;
  }

  /**
   * Execute the test command (auto-detected from lockfiles when not given)
   * and report { ok, exit_code, detected_command, summary }.
   */
  async run({ repo_path, test_command }) {
    const cwd = repo_path || this.#repo_path;
    const detected_command = test_command || (await detect_test_command(cwd));
    const { exit_code, stdout, stderr } = await execa_command(detected_command, { cwd });

    return {
      ok: exit_code === 0,
      exit_code,
      detected_command,
      summary: summarize_test_output(stdout, stderr)
    };
  }
}
|
|
27
|
+
|
|
28
|
+
/**
 * Infer the package-manager test command from the repo's lockfile:
 * pnpm-lock.yaml -> "pnpm test", yarn.lock -> "yarn test", else "npm test".
 * @param {string} repo_path
 * @returns {Promise<string>}
 */
async function detect_test_command(repo_path) {
  const lockfile_commands = [
    ["pnpm-lock.yaml", "pnpm test"],
    ["yarn.lock", "yarn test"]
  ];
  for (const [lockfile, command] of lockfile_commands) {
    if (await file_exists(path.join(repo_path, lockfile))) return command;
  }
  return "npm test";
}

/** True when the path exists and is accessible; false otherwise (never throws). */
async function file_exists(absolute_path) {
  return fs.access(absolute_path).then(() => true, () => false);
}
|
|
46
|
+
|
|
47
|
+
/**
 * Run a shell command via execa without throwing on non-zero exit.
 * @param {string} command - Shell command line to execute.
 * @param {{ cwd: string }} options - Working directory for the child process.
 * @returns {Promise<{ exit_code: number, stdout: string, stderr: string }>}
 *   exit_code defaults to 1 when execa reports none (e.g. killed/timeout).
 */
async function execa_command(command, { cwd }) {
  const options = {
    cwd,
    reject: false,          // report failures via exitCode, do not throw
    timeout: DEFAULT_TEST_TIMEOUT_MS,
    shell: true,
    stdout: "pipe",
    stderr: "pipe",
    env: process.env
  };
  const { exitCode, stdout, stderr } = await execaCommand(String(command), options);

  return {
    exit_code: exitCode ?? 1,
    stdout: String(stdout || ""),
    stderr: String(stderr || "")
  };
}
|
|
64
|
+
|
|
65
|
+
// Markers of the first interesting failure line in test-runner output.
const FAILURE_LINE_PATTERNS = [
  /(^|\s)FAIL(\s|$)/,
  /^Error:\s+/,
  /TypeError:/,
  /ReferenceError:/,
  /AssertionError:/,
  /UnhandledPromiseRejection/
];

/**
 * Reduce noisy test output to the interesting part: a window from 10 lines
 * before through 50 lines after the first failure marker, or the final 80
 * lines when no marker is found. Returns "" for empty output.
 */
function summarize_test_output(stdout, stderr) {
  const combined = `${stdout}\n${stderr}`.trim();
  if (!combined) return "";

  const lines = combined.split(/\r?\n/g);
  const failure_index = find_first_failure_line_index(lines);
  const window =
    failure_index >= 0
      ? lines.slice(Math.max(0, failure_index - 10), Math.min(lines.length, failure_index + 50))
      : lines.slice(-80);

  return window.join("\n").trim();
}

/** Index of the first non-empty line matching any failure pattern, or -1. */
function find_first_failure_line_index(lines) {
  return lines.findIndex(
    (line) => line && FAILURE_LINE_PATTERNS.some((pattern) => pattern.test(line))
  );
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { LogAnalyzerTool } from "./LogAnalyzerTool.js";
|
|
2
|
+
import { CodeSearchTool } from "./CodeSearchTool.js";
|
|
3
|
+
import { GitDiffTool } from "./GitDiffTool.js";
|
|
4
|
+
import { TestRunnerTool } from "./TestRunnerTool.js";
|
|
5
|
+
|
|
6
|
+
/**
 * Build the default tool set for a repository: log analysis, code search,
 * diff generation, and test running, all rooted at the same repo path.
 * @param {{ repo_path: string }} options
 * @returns {{ log_analyzer: LogAnalyzerTool, code_search: CodeSearchTool, diff_generator: GitDiffTool, test_runner: TestRunnerTool }}
 */
export function create_default_tools({ repo_path }) {
  const log_analyzer = new LogAnalyzerTool({ repo_path });
  const code_search = new CodeSearchTool({ repo_path });
  const diff_generator = new GitDiffTool({ repo_path });
  const test_runner = new TestRunnerTool({ repo_path });

  return { log_analyzer, code_search, diff_generator, test_runner };
}
|
|
14
|
+
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
// Secret-bearing token shapes to scrub from text before it is shared.
// The last two patterns previously used "\\s" inside regex LITERALS, which
// matches a literal backslash + "s" instead of whitespace, so the JSON
// access_token and Bearer-token rules never fired; fixed to "\s".
const TOKEN_PATTERNS = [
  /(ghp_[0-9A-Za-z]{20,})/g,                         // GitHub classic PAT
  /(github_pat_[0-9A-Za-z]{20,})/g,                  // GitHub fine-grained PAT
  /(sk-[0-9A-Za-z]{20,})/g,                          // OpenAI-style secret key
  /("access_token"\s*:\s*"[0-9A-Za-z\-_.]{16,}")/g,  // JSON access_token field
  /Bearer\s+[0-9A-Za-z\-_.]{16,}/g                   // HTTP Authorization bearer
];

/**
 * Replace known secret token patterns in text with "[REDACTED_SECRET]".
 * @param {string} text - Arbitrary text (nullish values become "").
 * @returns {string} the text with every matched secret replaced
 */
export function redactSecrets(text) {
  let redacted_text = String(text || "");
  for (const pattern of TOKEN_PATTERNS) {
    redacted_text = redacted_text.replace(pattern, "[REDACTED_SECRET]");
  }
  return redacted_text;
}
|
|
16
|
+
|