coderoast 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Agents.md +391 -0
- package/LICENSE +21 -0
- package/README.md +153 -0
- package/dist/agents/cli-agent.js +93 -0
- package/dist/agents/code-analysis-agent.js +438 -0
- package/dist/agents/evidence-guard-agent.js +71 -0
- package/dist/agents/fix-apply-agent.js +134 -0
- package/dist/agents/fix-it-agent.js +584 -0
- package/dist/agents/gemini-client.js +44 -0
- package/dist/agents/insight-aggregator-agent.js +95 -0
- package/dist/agents/output-formatter-agent.js +158 -0
- package/dist/agents/repo-scanner-agent.js +407 -0
- package/dist/agents/roast-narrator-agent.js +256 -0
- package/dist/index.js +14 -0
- package/dist/pipeline.js +28 -0
- package/dist/types.js +2 -0
- package/package.json +38 -0
|
@@ -0,0 +1,438 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.DUPLICATE_MIN_OCCURRENCES = exports.DUPLICATE_MAX_LINES = exports.DUPLICATE_MIN_LINES = exports.LONG_FUNCTION_LOC = void 0;
|
|
7
|
+
exports.runCodeAnalysisAgent = runCodeAnalysisAgent;
|
|
8
|
+
const promises_1 = __importDefault(require("node:fs/promises"));
|
|
9
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
10
|
+
const typescript_1 = __importDefault(require("typescript"));
|
|
11
|
+
// Threshold (in lines, inclusive) above which a function is flagged as "long".
exports.LONG_FUNCTION_LOC = 50;
// Seed window size: minimum run of normalized lines for a duplicate candidate.
exports.DUPLICATE_MIN_LINES = 10;
// Maximum length (in normalized lines) a duplicate block is grown to.
exports.DUPLICATE_MAX_LINES = 50;
// A block must appear at least this many times to be reported as a duplicate.
exports.DUPLICATE_MIN_OCCURRENCES = 2;
// File extensions treated as analyzable JS/TS source.
const JS_TS_EXTENSIONS = new Set([".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"]);
// Directory names whose contents are treated as test code.
const TEST_DIR_NAMES = new Set(["__tests__", "test", "tests"]);
|
|
17
|
+
/**
 * Map a file extension to the TypeScript ScriptKind used for parsing.
 * All plain-JS extensions share ScriptKind.JS; anything unrecognized
 * (including ".ts") falls back to ScriptKind.TS.
 */
function getScriptKind(extension) {
    const ts = typescript_1.default;
    if (extension === ".tsx") {
        return ts.ScriptKind.TSX;
    }
    if (extension === ".jsx") {
        return ts.ScriptKind.JSX;
    }
    if (extension === ".js" || extension === ".mjs" || extension === ".cjs") {
        return ts.ScriptKind.JS;
    }
    return ts.ScriptKind.TS;
}
|
|
33
|
+
/**
 * Compute the 1-based start/end line numbers covered by an AST node.
 */
function getLineRange(node, sourceFile) {
    const startPos = node.getStart(sourceFile);
    const endPos = node.getEnd();
    const startLine = sourceFile.getLineAndCharacterOfPosition(startPos).line + 1;
    const endLine = sourceFile.getLineAndCharacterOfPosition(endPos).line + 1;
    return { startLine, endLine };
}
|
|
38
|
+
/**
 * Strip comments and blank lines from source text.
 * Returns the surviving lines (runs of whitespace collapsed to single spaces)
 * together with the 1-based line number each one had in the original content.
 *
 * NOTE(review): comment stripping is regex-based and does not understand
 * string literals, so a "//" inside a string (e.g. a URL) is also removed —
 * confirm this is acceptable for the duplicate detector.
 */
function normalizeContent(content) {
    // Blank out block comments character-by-character so newlines (and thus
    // line numbers) survive.
    const noBlockComments = content.replace(/\/\*[\s\S]*?\*\//g, (comment) => comment.replace(/[^\n]/g, ""));
    const noComments = noBlockComments.replace(/\/\/.*$/gm, "");
    const normalizedLines = [];
    const lineNumbers = [];
    noComments.split(/\r?\n/).forEach((rawLine, index) => {
        const line = rawLine.replace(/\s+/g, " ").trim();
        if (line.length > 0) {
            normalizedLines.push(line);
            lineNumbers.push(index + 1);
        }
    });
    return { normalizedLines, lineNumbers };
}
|
|
56
|
+
/**
 * DJB2-style (XOR variant) 32-bit string hash, rendered as lowercase hex.
 * Iterates UTF-16 code units, mixing each into the accumulator; the final
 * value is forced unsigned with >>> 0 before formatting.
 */
function hashString(value) {
    let hash = 5381;
    let index = 0;
    while (index < value.length) {
        hash = (hash * 33) ^ value.charCodeAt(index);
        index += 1;
    }
    return (hash >>> 0).toString(16);
}
|
|
63
|
+
/**
 * Heuristic test-file detector: true when any path segment is a known test
 * directory name, or the basename matches *.spec.* / *.test.* with a JS/TS
 * extension.
 */
function isTestPath(relativePath) {
    for (const segment of relativePath.split("/")) {
        if (TEST_DIR_NAMES.has(segment)) {
            return true;
        }
    }
    const baseName = node_path_1.default.posix.basename(relativePath);
    return /\.(spec|test)\.[jt]sx?$/.test(baseName);
}
|
|
71
|
+
/**
 * Convert a POSIX-style relative path (forward-slash separated) into an
 * absolute path under rootPath, using the host platform's separators.
 */
function toAbsolutePath(rootPath, relativePath) {
    const platformRelative = node_path_1.default.join(...relativePath.split("/"));
    return node_path_1.default.resolve(rootPath, platformRelative);
}
|
|
75
|
+
/**
 * Walk the AST and record every module specifier the file references:
 * static `import`, re-`export ... from`, `import x = require(...)`,
 * CommonJS `require("...")` calls, and dynamic `import("...")`.
 * Each entry carries the specifier and the 1-based line range of the node.
 */
function collectImports(sourceFile) {
    const ts = typescript_1.default;
    const found = [];
    const record = (specifier, node) => {
        const range = getLineRange(node, sourceFile);
        found.push({ specifier, startLine: range.startLine, endLine: range.endLine });
    };
    const walk = (node) => {
        if (ts.isImportDeclaration(node) && node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
            record(node.moduleSpecifier.text, node);
        }
        else if (ts.isExportDeclaration(node) && node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
            record(node.moduleSpecifier.text, node);
        }
        else if (ts.isImportEqualsDeclaration(node)) {
            const reference = node.moduleReference;
            if (ts.isExternalModuleReference(reference) && reference.expression && ts.isStringLiteral(reference.expression)) {
                record(reference.expression.text, node);
            }
        }
        else if (ts.isCallExpression(node)) {
            const isRequireCall = ts.isIdentifier(node.expression) &&
                node.expression.text === "require" &&
                node.arguments.length === 1 &&
                ts.isStringLiteral(node.arguments[0]);
            if (isRequireCall) {
                record(node.arguments[0].text, node);
            }
            else if (node.expression.kind === ts.SyntaxKind.ImportKeyword && node.arguments.length === 1) {
                const firstArg = node.arguments[0];
                if (ts.isStringLiteral(firstArg)) {
                    record(firstArg.text, node);
                }
            }
        }
        ts.forEachChild(node, walk);
    };
    walk(sourceFile);
    return found;
}
|
|
115
|
+
/**
 * Best-effort name for a function-like node: a declared function's name,
 * a method/accessor's name, "constructor", the variable or property the
 * function is assigned to, or "<anonymous>" when nothing applies.
 */
function getFunctionName(node, sourceFile) {
    const ts = typescript_1.default;
    if (ts.isFunctionDeclaration(node) && node.name) {
        return node.name.text;
    }
    const isNamedMember = ts.isMethodDeclaration(node) ||
        ts.isMethodSignature(node) ||
        ts.isGetAccessorDeclaration(node) ||
        ts.isSetAccessorDeclaration(node);
    if (isNamedMember && node.name) {
        return node.name.getText(sourceFile);
    }
    if (ts.isConstructorDeclaration(node)) {
        return "constructor";
    }
    const parent = node.parent;
    if (parent) {
        // `const fn = () => {}` — take the variable name.
        if (ts.isVariableDeclaration(parent) && ts.isIdentifier(parent.name)) {
            return parent.name.text;
        }
        // `{ fn: () => {} }` or class property initializer — take the key name.
        if (ts.isPropertyAssignment(parent) || ts.isPropertyDeclaration(parent)) {
            return ts.isIdentifier(parent.name) ? parent.name.text : parent.name.getText(sourceFile);
        }
    }
    return "<anonymous>";
}
|
|
143
|
+
/**
 * Collect every function-like node that has a body, recording its name and
 * inclusive 1-based line span.
 *
 * Consistency fix: the original re-implemented the start/end line math inline;
 * this version calls the shared getLineRange helper (identical arithmetic) so
 * line computation cannot drift from the import collector.
 *
 * @param sourceFile ts.SourceFile to scan.
 * @param filePath   Repo-relative path recorded on each entry.
 * @returns Array of { file, name, length, startLine, endLine }.
 */
function collectFunctionLengths(sourceFile, filePath) {
    const functions = [];
    const visit = (node) => {
        // Only count implementations: signatures/overloads without a body are skipped.
        if (typescript_1.default.isFunctionLike(node) && "body" in node && node.body) {
            const { startLine, endLine } = getLineRange(node, sourceFile);
            functions.push({
                file: filePath,
                name: getFunctionName(node, sourceFile),
                length: endLine - startLine + 1,
                startLine,
                endLine,
            });
        }
        typescript_1.default.forEachChild(node, visit);
    };
    visit(sourceFile);
    return functions;
}
|
|
165
|
+
/**
 * Resolve a relative import specifier to a known repo file path.
 * Non-relative (bare package) specifiers return null.
 *
 * Candidates are tried in order:
 *   1. the exact path when the specifier already has an extension, then —
 *      generalization — TypeScript counterparts for NodeNext/ESM-style ".js"
 *      or ".jsx" specifiers that actually point at ".ts"/".tsx" sources
 *      (backward-compatible: the exact path always wins when it exists);
 *   2. otherwise each known JS/TS extension, then "index.<ext>" inside the
 *      directory.
 *
 * @param importerPath POSIX path of the importing file.
 * @param specifier    The raw import specifier.
 * @param filePaths    Set of all known repo-relative POSIX paths.
 * @returns The matching path, or null when nothing resolves.
 */
function resolveImportPath(importerPath, specifier, filePaths) {
    if (!specifier.startsWith(".")) {
        return null;
    }
    const posix = node_path_1.default.posix;
    const baseDir = posix.dirname(importerPath);
    const combined = posix.normalize(posix.join(baseDir, specifier));
    const extension = posix.extname(combined);
    const candidates = [];
    if (extension !== "") {
        candidates.push(combined);
        // NodeNext TypeScript projects import "./x.js" for source file "./x.ts".
        const tsCounterparts = extension === ".js" ? [".ts", ".tsx"]
            : extension === ".jsx" ? [".tsx"]
                : null;
        if (tsCounterparts) {
            const withoutExtension = combined.slice(0, -extension.length);
            for (const tsExtension of tsCounterparts) {
                candidates.push(`${withoutExtension}${tsExtension}`);
            }
        }
    }
    else {
        for (const ext of JS_TS_EXTENSIONS) {
            candidates.push(`${combined}${ext}`);
            candidates.push(posix.join(combined, `index${ext}`));
        }
    }
    for (const candidate of candidates) {
        if (filePaths.has(candidate)) {
            return candidate;
        }
    }
    return null;
}
|
|
189
|
+
// Find blocks of at least DUPLICATE_MIN_LINES identical normalized lines that
// occur in at least DUPLICATE_MIN_OCCURRENCES places (within or across files).
// Strategy: seed fixed-size windows over every file, then greedily extend each
// matching group line-by-line (up to DUPLICATE_MAX_LINES), dedupe overlapping
// occurrences, and report the surviving blocks with original line spans.
function collectDuplicateBlocks(files) {
    // window text -> every place (file path + window start index) it occurs
    const candidates = new Map();
    const fileIndex = new Map();
    for (const file of files) {
        fileIndex.set(file.path, file);
        const totalLines = file.normalizedLines.length;
        if (totalLines < exports.DUPLICATE_MIN_LINES) {
            continue;
        }
        // Slide a DUPLICATE_MIN_LINES-sized window over the file's normalized lines.
        for (let start = 0; start <= totalLines - exports.DUPLICATE_MIN_LINES; start += 1) {
            const blockLines = file.normalizedLines.slice(start, start + exports.DUPLICATE_MIN_LINES);
            const blockKey = blockLines.join("\n");
            const group = candidates.get(blockKey);
            if (group) {
                group.push({ file: file.path, startIndex: start });
            }
            else {
                candidates.set(blockKey, [{ file: file.path, startIndex: start }]);
            }
        }
    }
    // extended block text -> accumulated result entry (with occurrence dedupe keys)
    const duplicatesMap = new Map();
    for (const [, occurrences] of candidates) {
        if (occurrences.length < exports.DUPLICATE_MIN_OCCURRENCES) {
            continue;
        }
        // The first occurrence is the reference copy used for extension.
        const baseline = occurrences[0];
        const baselineFile = fileIndex.get(baseline.file);
        if (!baselineFile) {
            continue;
        }
        // Grow the window one line at a time while every occurrence still
        // matches the baseline, capped at DUPLICATE_MAX_LINES total lines.
        let length = exports.DUPLICATE_MIN_LINES;
        for (let offset = exports.DUPLICATE_MIN_LINES; offset < exports.DUPLICATE_MAX_LINES; offset += 1) {
            const baselineIndex = baseline.startIndex + offset;
            if (baselineIndex >= baselineFile.normalizedLines.length) {
                break;
            }
            const expected = baselineFile.normalizedLines[baselineIndex];
            let allMatch = true;
            for (const occurrence of occurrences) {
                const file = fileIndex.get(occurrence.file);
                if (!file || occurrence.startIndex + offset >= file.normalizedLines.length) {
                    allMatch = false;
                    break;
                }
                if (file.normalizedLines[occurrence.startIndex + offset] !== expected) {
                    allMatch = false;
                    break;
                }
            }
            if (!allMatch) {
                break;
            }
            length += 1;
        }
        // Key by the extended text so seed windows that grow into the same
        // larger block merge into one entry.
        const extendedBlockLines = baselineFile.normalizedLines.slice(baseline.startIndex, baseline.startIndex + length);
        const extendedKey = extendedBlockLines.join("\n");
        const hash = hashString(extendedKey);
        let entry = duplicatesMap.get(extendedKey);
        if (!entry) {
            entry = {
                block: {
                    hash,
                    length,
                    occurrences: [],
                },
                occurrenceKeys: new Set(),
            };
            duplicatesMap.set(extendedKey, entry);
        }
        for (const occurrence of occurrences) {
            const file = fileIndex.get(occurrence.file);
            if (!file) {
                continue;
            }
            // Map normalized-line indices back to original source line numbers.
            const startLine = file.lineNumbers[occurrence.startIndex];
            const endIndex = occurrence.startIndex + length - 1;
            if (startLine === undefined || endIndex >= file.lineNumbers.length) {
                continue;
            }
            const endLine = file.lineNumbers[endIndex];
            // Dedupe identical spans reported via multiple seed windows.
            const occurrenceKey = `${occurrence.file}:${startLine}:${endLine}`;
            if (entry.occurrenceKeys.has(occurrenceKey)) {
                continue;
            }
            entry.occurrenceKeys.add(occurrenceKey);
            entry.block.occurrences.push({
                file: occurrence.file,
                startLine,
                endLine,
            });
        }
    }
    // Keep only blocks that still have enough distinct occurrences after dedupe.
    const blocks = [];
    for (const entry of duplicatesMap.values()) {
        if (entry.block.occurrences.length >= exports.DUPLICATE_MIN_OCCURRENCES) {
            blocks.push(entry.block);
        }
    }
    return blocks;
}
|
|
290
|
+
/**
 * Detect mutual (two-file) import cycles: A imports B while B imports A.
 * Longer cycles (A -> B -> C -> A) are not detected — only the direct
 * reverse edge is checked. Each pair is reported once, with the line range
 * of the first import in each direction.
 */
function collectCircularDependencies(files, filePaths) {
    // importer path -> (resolved target path -> line ranges of those imports)
    const graph = new Map();
    for (const file of files) {
        const edges = new Map();
        for (const ref of file.imports) {
            const resolved = resolveImportPath(file.path, ref.specifier, filePaths);
            if (!resolved) {
                continue;
            }
            const ranges = edges.get(resolved) ?? [];
            ranges.push({ startLine: ref.startLine, endLine: ref.endLine });
            edges.set(resolved, ranges);
        }
        graph.set(file.path, edges);
    }
    const cycles = [];
    const reportedPairs = new Set();
    for (const [importer, targets] of graph) {
        for (const [target, importerRanges] of targets) {
            const reverseRanges = graph.get(target)?.get(importer);
            if (!reverseRanges || reverseRanges.length === 0) {
                continue;
            }
            // Report each unordered pair only once.
            const pairKey = [importer, target].sort().join("::");
            if (reportedPairs.has(pairKey)) {
                continue;
            }
            reportedPairs.add(pairKey);
            const forward = importerRanges[0];
            const backward = reverseRanges[0];
            cycles.push({
                from: importer,
                to: target,
                fromStartLine: forward.startLine,
                fromEndLine: forward.endLine,
                toStartLine: backward.startLine,
                toEndLine: backward.endLine,
            });
        }
    }
    return cycles;
}
|
|
332
|
+
/**
 * Summarize the internal import graph: node/edge counts, the five files with
 * the most outgoing and the most incoming internal imports, the cycle count,
 * and one sample cycle when any exist.
 */
function buildDependencySummary(files, filePaths, cycles) {
    const outgoing = new Map();
    const incoming = new Map();
    const addEdge = (map, key, value) => {
        const set = map.get(key) ?? new Set();
        set.add(value);
        map.set(key, set);
    };
    for (const file of files) {
        for (const ref of file.imports) {
            const resolved = resolveImportPath(file.path, ref.specifier, filePaths);
            if (!resolved) {
                continue;
            }
            addEdge(outgoing, file.path, resolved);
            addEdge(incoming, resolved, file.path);
        }
    }
    const topImporters = [...outgoing.entries()]
        .map(([file, targets]) => ({ file, imports: targets.size }))
        .sort((a, b) => b.imports - a.imports)
        .slice(0, 5);
    const topImported = [...incoming.entries()]
        .map(([file, sources]) => ({ file, importedBy: sources.size }))
        .sort((a, b) => b.importedBy - a.importedBy)
        .slice(0, 5);
    // Total distinct resolved edges across all importers.
    let edges = 0;
    for (const targets of outgoing.values()) {
        edges += targets.size;
    }
    const sampleCycle = cycles.length > 0 ? { from: cycles[0].from, to: cycles[0].to } : undefined;
    return {
        nodes: filePaths.size,
        edges,
        topImporters,
        topImported,
        cycles: cycles.length,
        sampleCycle,
    };
}
|
|
371
|
+
// Entry point: parse every JS/TS file from the scan, measure function lengths,
// detect duplicate blocks and mutual import cycles, and summarize the internal
// dependency graph. `overrides` maps relative paths to in-memory content that
// is used instead of reading from disk.
async function runCodeAnalysisAgent(config, scan, overrides) {
    const rootPath = node_path_1.default.resolve(config.path);
    // Only extensions in JS_TS_EXTENSIONS are analyzed.
    const files = scan.files.filter((file) => JS_TS_EXTENSIONS.has(file.extension));
    const normalizedFiles = [];
    const allFunctions = [];
    const testFiles = [];
    for (const file of files) {
        const relativePath = file.path;
        if (isTestPath(relativePath)) {
            testFiles.push(relativePath);
        }
        const absolutePath = toAbsolutePath(rootPath, relativePath);
        let content;
        // Prefer override content (e.g. proposed fixes) over the on-disk file.
        const overrideContent = overrides?.[relativePath];
        if (overrideContent !== undefined) {
            content = overrideContent;
        }
        else {
            try {
                content = await promises_1.default.readFile(absolutePath, "utf8");
            }
            catch {
                // Unreadable files are skipped rather than failing the whole run.
                continue;
            }
        }
        const scriptKind = getScriptKind(file.extension);
        const sourceFile = typescript_1.default.createSourceFile(relativePath, content, typescript_1.default.ScriptTarget.Latest, true, scriptKind);
        allFunctions.push(...collectFunctionLengths(sourceFile, relativePath));
        const { normalizedLines, lineNumbers } = normalizeContent(content);
        normalizedFiles.push({
            path: relativePath,
            extension: file.extension,
            normalizedLines,
            lineNumbers,
            imports: collectImports(sourceFile),
        });
    }
    const totalFunctions = allFunctions.length;
    const maxFunctionLength = totalFunctions
        ? Math.max(...allFunctions.map((fn) => fn.length))
        : 0;
    // Mean function length, rounded to two decimal places; 0 when no functions.
    const avgFunctionLength = totalFunctions
        ? Math.round((allFunctions.reduce((sum, fn) => sum + fn.length, 0) / totalFunctions) * 100) / 100
        : 0;
    const longFunctions = allFunctions.filter((fn) => fn.length >= exports.LONG_FUNCTION_LOC);
    const duplicateBlocks = collectDuplicateBlocks(normalizedFiles);
    const filePathSet = new Set(normalizedFiles.map((file) => file.path));
    const circularDependencies = collectCircularDependencies(normalizedFiles, filePathSet);
    const dependencySummary = buildDependencySummary(normalizedFiles, filePathSet, circularDependencies);
    return {
        metrics: {
            maxFunctionLength,
            avgFunctionLength,
            duplicateBlocks: duplicateBlocks.length,
            totalFunctions,
        },
        signals: {
            longFunctions,
            duplicateBlocks,
            circularDependencies,
            testPresence: {
                hasTests: testFiles.length > 0,
                testFiles: testFiles.sort(),
            },
        },
        dependencySummary,
    };
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.runEvidenceGuardAgent = runEvidenceGuardAgent;
|
|
4
|
+
/** True when value is a string with at least one non-whitespace character. */
function isNonEmptyString(value) {
    if (typeof value !== "string") {
        return false;
    }
    return value.trim().length > 0;
}
|
|
7
|
+
/** True when value is a positive integer (a usable 1-based line number). */
function isValidLine(value) {
    // Number.isInteger already rejects non-numbers, NaN, and fractions.
    return Number.isInteger(value) && value > 0;
}
|
|
10
|
+
/**
 * A metric value is valid when it is a non-negative finite number or a
 * non-blank string; every other type is rejected.
 */
function isValidMetricValue(value) {
    switch (typeof value) {
        case "number":
            return Number.isFinite(value) && value >= 0;
        case "string":
            return value.trim().length > 0;
        default:
            return false;
    }
}
|
|
19
|
+
/**
 * Check an issue's evidence items: each needs a non-empty file path, a sane
 * 1-based line range, and at least one metric with a type and a valid value.
 * Returns a copy of the issue with evidenceComplete set, plus a
 * missingEvidenceReason describing the first failure found.
 */
function validateEvidence(issue) {
    // Shared shape for every rejection path.
    const incomplete = (reason) => ({
        ...issue,
        evidenceComplete: false,
        missingEvidenceReason: reason,
    });
    if (!issue.evidence || issue.evidence.length === 0) {
        return incomplete("No evidence items provided.");
    }
    for (const item of issue.evidence) {
        if (!isNonEmptyString(item.file)) {
            return incomplete("Evidence item missing file path.");
        }
        if (!isValidLine(item.startLine) || !isValidLine(item.endLine)) {
            return incomplete("Evidence item missing line range.");
        }
        if (item.endLine < item.startLine) {
            return incomplete("Evidence item line range is invalid.");
        }
        if (!item.metrics || item.metrics.length === 0) {
            return incomplete("Evidence item missing metrics.");
        }
        for (const metric of item.metrics) {
            if (!metric.type || !isValidMetricValue(metric.value)) {
                return incomplete("Evidence item has invalid metric.");
            }
        }
    }
    return { ...issue, evidenceComplete: true };
}
|
|
68
|
+
/**
 * Validate evidence on every issue in the insights and return the issues
 * with their evidenceComplete flags (and failure reasons) attached.
 */
function runEvidenceGuardAgent(insights) {
    const issues = insights.issues.map((issue) => {
        return validateEvidence({ ...issue, evidenceComplete: true });
    });
    return { issues };
}
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.runFixApplyAgent = runFixApplyAgent;
|
|
7
|
+
const promises_1 = __importDefault(require("node:fs/promises"));
|
|
8
|
+
const node_os_1 = __importDefault(require("node:os"));
|
|
9
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
10
|
+
const node_child_process_1 = require("node:child_process");
|
|
11
|
+
/**
 * Run a git subcommand in `cwd` and return its trimmed stdout.
 * stdin is ignored; stdout/stderr are captured. Throws when git exits
 * non-zero (execSync's behavior).
 */
function runGit(command, cwd) {
    const output = (0, node_child_process_1.execSync)(`git ${command}`, { cwd, stdio: ["ignore", "pipe", "pipe"] });
    return output.toString().trim();
}
|
|
16
|
+
/** Absolute path of the git repository containing rootPath (throws outside a repo). */
function getRepoRoot(rootPath) {
    const toplevel = runGit("rev-parse --show-toplevel", rootPath);
    return toplevel;
}
|
|
19
|
+
/** True when `git status --porcelain` reports no pending changes. */
function isWorkingTreeClean(rootPath) {
    return runGit("status --porcelain", rootPath).length === 0;
}
|
|
23
|
+
/** Short name of the ref currently checked out in the repository. */
function getCurrentBranch(rootPath) {
    const refName = runGit("rev-parse --abbrev-ref HEAD", rootPath);
    return refName;
}
|
|
26
|
+
/** True when a local branch with the given name exists (`git branch --list`). */
function branchExists(rootPath, name) {
    const listing = runGit(`branch --list ${name}`, rootPath);
    return listing.length > 0;
}
|
|
30
|
+
/**
 * Pick a branch name for the fix: the requested name when given, otherwise a
 * timestamped "coderoast-fix-<YYYYMMDDhhmmss>" default; a "-N" suffix is
 * appended until the name does not collide with an existing branch.
 *
 * Security fix: the requested name comes from user config and is later
 * interpolated UNQUOTED into shell commands (`git branch --list ${name}`,
 * `git checkout -b ${branch}`). It is therefore sanitized to
 * [A-Za-z0-9._/-] here — otherwise spaces or shell metacharacters would
 * break those commands or allow command injection.
 *
 * @param rootPath  Repository path used for existence checks.
 * @param requested Optional user-requested branch name.
 * @returns A branch name that is safe to interpolate and not already taken.
 */
function buildBranchName(rootPath, requested) {
    // Replace anything outside the safe charset; an empty/blank request still
    // falls through to the timestamped default.
    const sanitized = requested?.trim().replace(/[^A-Za-z0-9._\/-]/g, "-");
    const base = sanitized ||
        `coderoast-fix-${new Date().toISOString().replace(/[-:T]/g, "").slice(0, 14)}`;
    if (!branchExists(rootPath, base)) {
        return base;
    }
    let counter = 1;
    while (branchExists(rootPath, `${base}-${counter}`)) {
        counter += 1;
    }
    return `${base}-${counter}`;
}
|
|
42
|
+
/**
 * Write patch contents into a file inside a fresh private temp directory
 * and return the file's absolute path.
 */
async function writeTempPatch(contents) {
    const dirPrefix = node_path_1.default.join(node_os_1.default.tmpdir(), "coderoast-patch-");
    const tempDir = await promises_1.default.mkdtemp(dirPrefix);
    const patchPath = node_path_1.default.join(tempDir, "fix.patch");
    await promises_1.default.writeFile(patchPath, contents);
    return patchPath;
}
|
|
48
|
+
// Apply verified fix patches on a fresh git branch and run the test command.
// Flow: collect verified patches -> locate repo and require a clean tree ->
// create a new branch -> `git apply` the combined patch -> run tests.
// On patch failure the new branch is rolled back and deleted; on test failure
// the branch is kept (NOTE(review): presumably so the failure can be
// inspected — confirm this is intentional).
async function runFixApplyAgent(config, fixResult) {
    const rootPath = node_path_1.default.resolve(config.path);
    // Only suggestions that are verified AND carry a non-blank patch are used.
    const patches = fixResult.suggestions
        .filter((suggestion) => suggestion.verified && suggestion.patch)
        .map((suggestion) => suggestion.patch.trim())
        .filter((patch) => patch.length > 0);
    if (patches.length === 0) {
        return {
            status: "skipped",
            message: "Apply skipped: no verified patches to apply.",
        };
    }
    let repoRoot = "";
    try {
        repoRoot = getRepoRoot(rootPath);
    }
    catch {
        return {
            status: "failed",
            message: "Apply failed: not inside a git repository.",
        };
    }
    // Refuse to touch a dirty working tree.
    if (!isWorkingTreeClean(repoRoot)) {
        return {
            status: "failed",
            message: "Apply failed: working tree is not clean. Commit or stash changes first.",
        };
    }
    const branch = buildBranchName(repoRoot, config.fixBranch);
    // Remember the starting branch so a failed apply can be rolled back.
    const currentBranch = getCurrentBranch(repoRoot);
    try {
        runGit(`checkout -b ${branch}`, repoRoot);
    }
    catch {
        return {
            status: "failed",
            message: `Apply failed: could not create branch ${branch}.`,
            branch,
        };
    }
    let patchPath = "";
    try {
        // All patches are concatenated into one temp file and applied together.
        patchPath = await writeTempPatch(patches.join("\n\n") + "\n");
        (0, node_child_process_1.execSync)(`git apply --whitespace=nowarn "${patchPath}"`, {
            cwd: repoRoot,
            stdio: "inherit",
        });
    }
    catch {
        // Roll back: return to the original branch and delete the new one.
        try {
            runGit(`checkout ${currentBranch}`, repoRoot);
            runGit(`branch -D ${branch}`, repoRoot);
        }
        catch {
            // ignore rollback errors
        }
        return {
            status: "failed",
            message: "Apply failed: patch could not be applied cleanly.",
            branch,
        };
    }
    // Default test command when none is configured.
    const testCommand = config.fixTestCmd ?? "npm test";
    let testsPassed = true;
    try {
        (0, node_child_process_1.execSync)(testCommand, { cwd: repoRoot, stdio: "inherit" });
    }
    catch {
        testsPassed = false;
    }
    if (!testsPassed) {
        return {
            status: "failed",
            message: "Apply failed: tests did not pass.",
            branch,
            testCommand,
            testsPassed,
        };
    }
    return {
        status: "applied",
        message: "Apply succeeded: patch applied on a new branch with passing tests.",
        branch,
        testCommand,
        testsPassed,
    };
}
|