@eduardbar/drift 0.9.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/publish-vscode.yml +76 -0
- package/AGENTS.md +30 -12
- package/CHANGELOG.md +9 -0
- package/README.md +273 -168
- package/ROADMAP.md +130 -98
- package/dist/analyzer.d.ts +4 -38
- package/dist/analyzer.js +85 -1510
- package/dist/cli.js +47 -4
- package/dist/config.js +1 -1
- package/dist/fix.d.ts +13 -0
- package/dist/fix.js +120 -0
- package/dist/git/blame.d.ts +22 -0
- package/dist/git/blame.js +227 -0
- package/dist/git/helpers.d.ts +36 -0
- package/dist/git/helpers.js +152 -0
- package/dist/git/trend.d.ts +21 -0
- package/dist/git/trend.js +80 -0
- package/dist/git.d.ts +0 -4
- package/dist/git.js +2 -2
- package/dist/report.js +620 -293
- package/dist/rules/phase0-basic.d.ts +11 -0
- package/dist/rules/phase0-basic.js +176 -0
- package/dist/rules/phase1-complexity.d.ts +31 -0
- package/dist/rules/phase1-complexity.js +277 -0
- package/dist/rules/phase2-crossfile.d.ts +27 -0
- package/dist/rules/phase2-crossfile.js +122 -0
- package/dist/rules/phase3-arch.d.ts +31 -0
- package/dist/rules/phase3-arch.js +148 -0
- package/dist/rules/phase5-ai.d.ts +8 -0
- package/dist/rules/phase5-ai.js +262 -0
- package/dist/rules/phase8-semantic.d.ts +22 -0
- package/dist/rules/phase8-semantic.js +109 -0
- package/dist/rules/shared.d.ts +7 -0
- package/dist/rules/shared.js +27 -0
- package/package.json +8 -3
- package/packages/vscode-drift/.vscodeignore +9 -0
- package/packages/vscode-drift/LICENSE +21 -0
- package/packages/vscode-drift/README.md +64 -0
- package/packages/vscode-drift/images/icon.png +0 -0
- package/packages/vscode-drift/images/icon.svg +30 -0
- package/packages/vscode-drift/package-lock.json +485 -0
- package/packages/vscode-drift/package.json +119 -0
- package/packages/vscode-drift/src/analyzer.ts +38 -0
- package/packages/vscode-drift/src/diagnostics.ts +55 -0
- package/packages/vscode-drift/src/extension.ts +111 -0
- package/packages/vscode-drift/src/statusbar.ts +47 -0
- package/packages/vscode-drift/src/treeview.ts +108 -0
- package/packages/vscode-drift/tsconfig.json +18 -0
- package/packages/vscode-drift/vscode-drift-0.1.0.vsix +0 -0
- package/packages/vscode-drift/vscode-drift-0.1.1.vsix +0 -0
- package/src/analyzer.ts +124 -1726
- package/src/cli.ts +53 -4
- package/src/config.ts +1 -1
- package/src/fix.ts +154 -0
- package/src/git/blame.ts +279 -0
- package/src/git/helpers.ts +198 -0
- package/src/git/trend.ts +116 -0
- package/src/git.ts +2 -2
- package/src/report.ts +631 -296
- package/src/rules/phase0-basic.ts +187 -0
- package/src/rules/phase1-complexity.ts +302 -0
- package/src/rules/phase2-crossfile.ts +149 -0
- package/src/rules/phase3-arch.ts +179 -0
- package/src/rules/phase5-ai.ts +292 -0
- package/src/rules/phase8-semantic.ts +132 -0
- package/src/rules/shared.ts +39 -0
- package/tests/helpers.ts +45 -0
- package/tests/rules.test.ts +1269 -0
- package/vitest.config.ts +15 -0
package/dist/cli.js
CHANGED
|
@@ -1,11 +1,12 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
+
// drift-ignore-file
|
|
2
3
|
import { Command } from 'commander';
|
|
3
4
|
import { writeFileSync } from 'node:fs';
|
|
4
5
|
import { resolve } from 'node:path';
|
|
5
6
|
import { createRequire } from 'node:module';
|
|
6
7
|
const require = createRequire(import.meta.url);
|
|
7
8
|
const { version: VERSION } = require('../package.json');
|
|
8
|
-
import { analyzeProject } from './analyzer.js';
|
|
9
|
+
import { analyzeProject, analyzeFile, TrendAnalyzer, BlameAnalyzer } from './analyzer.js';
|
|
9
10
|
import { buildReport, formatMarkdown, formatAIOutput } from './reporter.js';
|
|
10
11
|
import { printConsole, printDiff } from './printer.js';
|
|
11
12
|
import { loadConfig } from './config.js';
|
|
@@ -14,7 +15,7 @@ import { computeDiff } from './diff.js';
|
|
|
14
15
|
import { generateHtmlReport } from './report.js';
|
|
15
16
|
import { generateBadge } from './badge.js';
|
|
16
17
|
import { emitCIAnnotations, printCISummary } from './ci.js';
|
|
17
|
-
import {
|
|
18
|
+
import { applyFixes } from './fix.js';
|
|
18
19
|
const program = new Command();
|
|
19
20
|
program
|
|
20
21
|
.name('drift')
|
|
@@ -159,7 +160,7 @@ program
|
|
|
159
160
|
const resolvedPath = resolve('.');
|
|
160
161
|
process.stderr.write(`\nAnalyzing trend in ${resolvedPath}...\n`);
|
|
161
162
|
const config = await loadConfig(resolvedPath);
|
|
162
|
-
const analyzer = new TrendAnalyzer(resolvedPath, config);
|
|
163
|
+
const analyzer = new TrendAnalyzer(resolvedPath, analyzeProject, config);
|
|
163
164
|
const trendData = await analyzer.analyzeTrend({
|
|
164
165
|
period: period,
|
|
165
166
|
since: options.since,
|
|
@@ -176,7 +177,7 @@ program
|
|
|
176
177
|
const resolvedPath = resolve('.');
|
|
177
178
|
process.stderr.write(`\nAnalyzing blame in ${resolvedPath}...\n`);
|
|
178
179
|
const config = await loadConfig(resolvedPath);
|
|
179
|
-
const analyzer = new BlameAnalyzer(resolvedPath, config);
|
|
180
|
+
const analyzer = new BlameAnalyzer(resolvedPath, analyzeProject, analyzeFile, config);
|
|
180
181
|
const blameData = await analyzer.analyzeBlame({
|
|
181
182
|
target: target,
|
|
182
183
|
top: Number(options.top)
|
|
@@ -184,5 +185,47 @@ program
|
|
|
184
185
|
process.stderr.write(`\nBlame analysis complete:\n`);
|
|
185
186
|
process.stdout.write(JSON.stringify(blameData, null, 2) + '\n');
|
|
186
187
|
});
|
|
188
|
+
// `drift fix` — run the safe auto-fixers and print a per-file summary.
program
    .command('fix [path]')
    .description('Auto-fix safe issues (debug-leftover console.*, catch-swallow)')
    .option('--rule <rule>', 'Fix only a specific rule')
    .option('--dry-run', 'Show what would change without writing files')
    .action(async (targetPath, options) => {
    const resolvedPath = resolve(targetPath ?? '.');
    const config = await loadConfig(resolvedPath);
    const results = await applyFixes(resolvedPath, config, {
        rule: options.rule,
        dryRun: options.dryRun,
    });
    if (results.length === 0) {
        console.log('No fixable issues found.');
        return;
    }
    const applied = results.filter(r => r.applied);
    const header = options.dryRun
        ? `\ndrift fix --dry-run: ${results.length} fixable issues found\n`
        : `\ndrift fix: ${applied.length} fixes applied\n`;
    console.log(header);
    // Group by file for clean output
    const byFile = new Map();
    for (const r of results) {
        const bucket = byFile.get(r.file);
        if (bucket)
            bucket.push(r);
        else
            byFile.set(r.file, [r]);
    }
    for (const [file, fileResults] of byFile) {
        // Strip the project root prefix (POSIX or Windows separator).
        const relPath = file.replace(resolvedPath + '/', '').replace(resolvedPath + '\\', '');
        console.log(`  ${relPath}`);
        for (const r of fileResults) {
            const status = r.applied ? (options.dryRun ? 'would fix' : 'fixed') : 'skipped';
            console.log(`    [${r.rule}] line ${r.line}: ${r.description} — ${status}`);
        }
    }
    if (!options.dryRun && applied.length > 0) {
        console.log(`\n${applied.length} issue(s) fixed. Re-run drift scan to verify.`);
    }
});
|
|
187
230
|
program.parse();
|
|
188
231
|
//# sourceMappingURL=cli.js.map
|
package/dist/config.js
CHANGED
package/dist/fix.d.ts
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { DriftConfig } from './types.js';
/** Outcome of one attempted auto-fix. */
export interface FixResult {
    /** Absolute path of the file the issue was found in. */
    file: string;
    /** Rule id, e.g. 'debug-leftover' or 'catch-swallow'. */
    rule: string;
    /** 1-based line number of the issue. */
    line: number;
    /** Human-readable summary of the fix that was applied or skipped. */
    description: string;
    /** True when the fix was applied (or would be applied under --dry-run). */
    applied: boolean;
}
/**
 * Apply safe automatic fixes under targetPath (a file or a directory).
 * With `options.dryRun` nothing is written back; with `options.rule`
 * only issues of that rule are fixed.
 */
export declare function applyFixes(targetPath: string, config?: DriftConfig, options?: {
    rule?: string;
    dryRun?: boolean;
}): Promise<FixResult[]>;
//# sourceMappingURL=fix.d.ts.map
|
package/dist/fix.js
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import { readFileSync, writeFileSync, statSync } from 'node:fs';
|
|
2
|
+
import { resolve } from 'node:path';
|
|
3
|
+
import { analyzeProject, analyzeFile } from './analyzer.js';
|
|
4
|
+
import { Project } from 'ts-morph';
|
|
5
|
+
// Rules that `drift fix` knows how to repair automatically.
const FIXABLE_RULES = new Set(['debug-leftover', 'catch-swallow']);
/**
 * True when a debug-leftover issue refers to a console.* call.
 * Unresolved TODO/FIXME markers share the same rule but their messages start
 * with "Unresolved marker" and must never be auto-removed.
 */
function isConsoleDebug(issue) {
    if (issue.rule !== 'debug-leftover')
        return false;
    return !issue.message.startsWith('Unresolved marker');
}
/** True when this issue can be handled by one of the auto-fixers. */
function isFixable(issue) {
    return issue.rule === 'debug-leftover'
        ? isConsoleDebug(issue)
        : FIXABLE_RULES.has(issue.rule);
}
|
|
16
|
+
/**
 * Fix for debug-leftover: delete the offending console.* line outright.
 *
 * @param lines file content split on '\n' (0-based array)
 * @param line  1-based line number reported by the analyzer
 * @returns a new array without that line; the input is not mutated
 */
function fixDebugLeftover(lines, line) {
    const target = line - 1; // analyzer line numbers are 1-based
    return lines.filter((_, idx) => idx !== target);
}
|
|
20
|
+
/**
 * Fix for catch-swallow: insert a `// TODO: handle error` marker inside the
 * empty catch block so the swallow is at least visible and tracked.
 *
 * Bug fixed: for a single-line empty catch (`} catch {}`) the old code
 * appended the comment on the line AFTER the closing brace, i.e. OUTSIDE the
 * block (and outside the enclosing scope). We now detect that case and split
 * the braces so the TODO lands between them.
 *
 * @param lines file content split on '\n'
 * @param line  1-based line number of the `catch (...)` reported by the analyzer
 * @returns a new lines array; the input is not mutated
 */
function fixCatchSwallow(lines, line) {
    let openBraceLine = line - 1; // convert to 0-based index
    // Find the opening { of the catch block (same line or next few lines)
    for (let i = openBraceLine; i < Math.min(openBraceLine + 3, lines.length); i++) { // drift-ignore
        if (lines[i].includes('{')) {
            openBraceLine = i;
            break;
        }
    }
    const braceLine = lines[openBraceLine];
    const indentMatch = braceLine.match(/^(\s*)/);
    const baseIndent = indentMatch ? indentMatch[1] : '';
    const indent = baseIndent + '  ';
    // Single-line empty catch: the closing brace sits after the opening one on
    // the same line, so split them apart and place the TODO in between.
    const lastOpen = braceLine.lastIndexOf('{');
    const closeAfter = braceLine.indexOf('}', lastOpen);
    if (closeAfter !== -1) {
        return [
            ...lines.slice(0, openBraceLine),
            braceLine.slice(0, lastOpen + 1),
            `${indent}// TODO: handle error`, // drift-ignore
            baseIndent + braceLine.slice(closeAfter),
            ...lines.slice(openBraceLine + 1),
        ];
    }
    return [
        ...lines.slice(0, openBraceLine + 1),
        `${indent}// TODO: handle error`, // drift-ignore
        ...lines.slice(openBraceLine + 1),
    ];
}
|
|
38
|
+
/**
 * Dispatch one issue to its line-level fixer.
 *
 * @returns { newLines, description } when a fixer applies, otherwise null.
 */
function applyFixToLines(lines, issue) {
    switch (issue.rule) {
        case 'debug-leftover': {
            if (!isConsoleDebug(issue))
                return null; // marker-style debug-leftover has no safe fix
            return {
                newLines: fixDebugLeftover(lines, issue.line),
                description: `remove ${issue.message.split(' ')[0]} statement`,
            };
        }
        case 'catch-swallow': {
            return {
                newLines: fixCatchSwallow(lines, issue.line),
                description: 'add TODO comment to empty catch block',
            };
        }
        default:
            return null;
    }
}
|
|
53
|
+
/**
 * Apply all safe automatic fixes under `targetPath` (a single file or a
 * directory tree).
 *
 * @param targetPath file or directory to fix
 * @param config     optional drift configuration, used for directory scans
 * @param options    `rule` restricts fixing to one rule id; `dryRun` computes
 *                   the results without writing any file back to disk
 * @returns one FixResult per fixable issue found (applied or skipped)
 */
export async function applyFixes(targetPath, config, options) {
    const resolvedPath = resolve(targetPath);
    const dryRun = options?.dryRun ?? false;
    // Determine if target is a file or directory
    let fileReports;
    const stat = statSync(resolvedPath);
    if (stat.isFile()) {
        // Single file: build an ad-hoc ts-morph project around just this file.
        const project = new Project({
            skipAddingFilesFromTsConfig: true,
            compilerOptions: { allowJs: true, jsx: 1 },
        });
        const sourceFile = project.addSourceFileAtPath(resolvedPath);
        fileReports = [analyzeFile(sourceFile)];
    }
    else {
        fileReports = analyzeProject(resolvedPath, config);
    }
    // Collect fixable issues, optionally filtered by rule
    const fixableByFile = new Map();
    for (const report of fileReports) {
        const fixableIssues = report.issues.filter(issue => {
            if (!isFixable(issue))
                return false;
            if (options?.rule && issue.rule !== options.rule)
                return false;
            return true;
        });
        if (fixableIssues.length > 0) {
            fixableByFile.set(report.path, fixableIssues);
        }
    }
    const results = [];
    for (const [filePath, issues] of fixableByFile) {
        const content = readFileSync(filePath, 'utf8');
        let lines = content.split('\n');
        // Sort issues by line descending to avoid line number drift after fixes
        const sortedIssues = [...issues].sort((a, b) => b.line - a.line);
        // Track line offset caused by deletions (debug-leftover removes lines)
        // We process top-to-bottom after sorting descending, so no offset needed per issue
        for (const issue of sortedIssues) {
            const fixResult = applyFixToLines(lines, issue);
            if (fixResult) {
                results.push({
                    file: filePath,
                    rule: issue.rule,
                    line: issue.line,
                    description: fixResult.description,
                    applied: true,
                });
                // Carry the updated buffer forward so later fixes stack.
                lines = fixResult.newLines;
            }
            else {
                results.push({
                    file: filePath,
                    rule: issue.rule,
                    line: issue.line,
                    description: 'no fix available',
                    applied: false,
                });
            }
        }
        if (!dryRun) {
            writeFileSync(filePath, lines.join('\n'), 'utf8');
        }
    }
    return results;
}
|
|
120
|
+
//# sourceMappingURL=fix.js.map
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import type { SourceFile } from 'ts-morph';
import type { FileReport, DriftConfig, BlameAttribution, DriftBlameReport } from '../types.js';
/**
 * Attributes drift issues to git authors using `git blame --porcelain`.
 * The analyzer functions are injected through the constructor / static
 * parameters to avoid a circular dependency on analyzer.js.
 */
export declare class BlameAnalyzer {
    private readonly projectPath;
    private readonly config;
    private readonly analyzeProjectFn;
    private readonly analyzeFileFn;
    constructor(projectPath: string, analyzeProjectFn: (targetPath: string, config?: DriftConfig) => FileReport[], analyzeFileFn: (sf: SourceFile) => FileReport, config?: DriftConfig);
    /** Blame a single file: returns per-author attribution. */
    static analyzeFileBlame(filePath: string, analyzeFileFn: (sf: SourceFile) => FileReport): Promise<BlameAttribution[]>;
    /** Blame for a specific rule across all files in targetPath. */
    static analyzeRuleBlame(rule: string, targetPath: string, analyzeFileFn: (sf: SourceFile) => FileReport): Promise<BlameAttribution[]>;
    /** Overall blame across all files and rules. */
    static analyzeOverallBlame(targetPath: string, analyzeFileFn: (sf: SourceFile) => FileReport): Promise<BlameAttribution[]>;
    /**
     * Run the blame mode selected by `target` ('file' | 'rule' | 'overall'),
     * trim to the `top` N authors, and attach the current project report.
     */
    analyzeBlame(options: {
        target?: 'file' | 'rule' | 'overall';
        top?: number;
        filePath?: string;
        rule?: string;
    }): Promise<DriftBlameReport>;
}
//# sourceMappingURL=blame.d.ts.map
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
// drift-ignore-file
|
|
2
|
+
import * as fs from 'node:fs';
|
|
3
|
+
import * as path from 'node:path';
|
|
4
|
+
import { assertGitRepo, execGit, analyzeFilePath } from './helpers.js';
|
|
5
|
+
import { buildReport } from '../reporter.js';
|
|
6
|
+
/**
 * Parse `git blame --porcelain` output into one entry per source line.
 *
 * Porcelain detail (and the bug fixed here): git emits the author/author-mail
 * headers only the FIRST time a commit appears in the output; subsequent lines
 * from the same commit carry just the hash header. The old parser reset
 * author/email per line, so every repeated line was attributed to an empty
 * author. We now cache commit metadata by hash and reuse it.
 *
 * @param blameOutput raw stdout of `git blame --porcelain <file>`
 * @returns entries of { hash, author, email, line } in file order
 */
function parseGitBlame(blameOutput) {
    const entries = [];
    const metaByHash = new Map(); // commit hash -> { author, email }
    const lines = blameOutput.split('\n');
    let i = 0;
    while (i < lines.length) {
        const headerLine = lines[i];
        if (!headerLine || headerLine.trim() === '') {
            i++;
            continue;
        }
        // Porcelain blame format: first line is "<hash> <orig-line> <final-line> [<num-lines>]"
        const headerMatch = headerLine.match(/^([0-9a-f]{40})\s/);
        if (!headerMatch) {
            i++;
            continue;
        }
        const hash = headerMatch[1];
        // Start from any metadata we already saw for this commit.
        const cached = metaByHash.get(hash);
        let author = cached?.author ?? '';
        let email = cached?.email ?? '';
        let codeLine = '';
        i++;
        while (i < lines.length && !lines[i].match(/^[0-9a-f]{40}\s/)) {
            const l = lines[i];
            if (l.startsWith('author '))
                author = l.slice(7).trim();
            else if (l.startsWith('author-mail '))
                email = l.slice(12).replace(/[<>]/g, '').trim();
            else if (l.startsWith('\t'))
                codeLine = l.slice(1); // the actual source line, tab-prefixed
            i++;
        }
        metaByHash.set(hash, { author, email });
        entries.push({ hash, author, email, line: codeLine });
    }
    return entries;
}
|
|
41
|
+
/**
 * Attributes drift issues to git authors via `git blame --porcelain`.
 * Analyzer functions are injected to avoid a circular dependency on analyzer.js.
 *
 * Fixes in this revision:
 * - analyzeRuleBlame previously IGNORED its `rule` parameter and returned the
 *   same result as analyzeOverallBlame; it now counts only issues of that rule.
 * - the ~70 duplicated lines shared by analyzeRuleBlame/analyzeOverallBlame
 *   are factored into a private static helper.
 */
export class BlameAnalyzer {
    projectPath;
    config;
    analyzeProjectFn;
    analyzeFileFn;
    constructor(projectPath, analyzeProjectFn, analyzeFileFn, config) {
        this.projectPath = projectPath;
        this.analyzeProjectFn = analyzeProjectFn;
        this.analyzeFileFn = analyzeFileFn;
        this.config = config;
    }
    /** Enumerate analyzable sources under targetPath (skips node_modules and .d.ts). */
    static #listSourceFiles(targetPath) {
        return fs
            .readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
            .filter((f) => (f.endsWith('.ts') || f.endsWith('.tsx') || f.endsWith('.js') || f.endsWith('.jsx')) && !f.includes('node_modules') && !f.endsWith('.d.ts'))
            .map(f => path.join(targetPath, f));
    }
    /**
     * Shared aggregation used by the rule and overall modes: blame every file,
     * attribute each issue to the author of the offending line. When
     * `ruleFilter` is given, only issues of that rule are counted.
     */
    static #aggregateBlame(targetPath, analyzeFileFn, ruleFilter) {
        const combined = new Map();
        const commitsByAuthor = new Map();
        for (const file of BlameAnalyzer.#listSourceFiles(targetPath)) {
            let blameEntries = [];
            try {
                const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath);
                blameEntries = parseGitBlame(blameOutput);
            }
            catch {
                continue; // untracked/binary files have no blame — skip them
            }
            const report = analyzeFilePath(file, analyzeFileFn);
            const issuesByLine = new Map();
            for (const issue of report.issues) {
                if (ruleFilter && issue.rule !== ruleFilter)
                    continue; // only count the requested rule
                issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
            }
            blameEntries.forEach((entry, idx) => {
                const key = entry.email || entry.author;
                if (!combined.has(key)) {
                    combined.set(key, {
                        author: entry.author,
                        email: entry.email,
                        commits: 0,
                        linesChanged: 0,
                        issuesIntroduced: 0,
                        avgScoreImpact: 0,
                    });
                    commitsByAuthor.set(key, new Set());
                }
                const attr = combined.get(key);
                attr.linesChanged++;
                commitsByAuthor.get(key).add(entry.hash);
                const lineNum = idx + 1; // blame entries are in file order, 1-based
                if (issuesByLine.has(lineNum)) {
                    attr.issuesIntroduced += issuesByLine.get(lineNum);
                    attr.avgScoreImpact += report.score * (1 / (blameEntries.length || 1));
                }
            });
        }
        for (const [key, attr] of combined) {
            attr.commits = commitsByAuthor.get(key)?.size ?? 0;
        }
        return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
    }
    /** Blame a single file: returns per-author attribution. */
    static async analyzeFileBlame(filePath, analyzeFileFn) {
        const dir = path.dirname(filePath);
        assertGitRepo(dir);
        const blameOutput = execGit(`git blame --porcelain "${filePath}"`, dir);
        const entries = parseGitBlame(blameOutput);
        // Analyse issues in the file
        const report = analyzeFilePath(filePath, analyzeFileFn);
        // Map line numbers of issues to authors
        const issuesByLine = new Map();
        for (const issue of report.issues) {
            issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
        }
        // Aggregate by author; track unique commit hashes in the same pass.
        const byAuthor = new Map();
        const commitsByAuthor = new Map();
        entries.forEach((entry, idx) => {
            const key = entry.email || entry.author;
            if (!byAuthor.has(key)) {
                byAuthor.set(key, {
                    author: entry.author,
                    email: entry.email,
                    commits: 0,
                    linesChanged: 0,
                    issuesIntroduced: 0,
                    avgScoreImpact: 0,
                });
                commitsByAuthor.set(key, new Set());
            }
            const attr = byAuthor.get(key);
            attr.linesChanged++;
            commitsByAuthor.get(key).add(entry.hash);
            const lineNum = idx + 1;
            if (issuesByLine.has(lineNum)) {
                attr.issuesIntroduced += issuesByLine.get(lineNum);
            }
        });
        const total = entries.length || 1;
        const results = [];
        for (const [key, attr] of byAuthor) {
            attr.commits = commitsByAuthor.get(key)?.size ?? 0;
            // Score impact proportional to the author's share of the file.
            attr.avgScoreImpact = (attr.linesChanged / total) * report.score;
            results.push(attr);
        }
        return results.sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
    }
    /** Blame for a specific rule across all files in targetPath. */
    static async analyzeRuleBlame(rule, targetPath, analyzeFileFn) {
        assertGitRepo(targetPath);
        // BUGFIX: `rule` was previously ignored, making this identical to
        // analyzeOverallBlame. The filter is now actually applied.
        return BlameAnalyzer.#aggregateBlame(targetPath, analyzeFileFn, rule);
    }
    /** Overall blame across all files and rules. */
    static async analyzeOverallBlame(targetPath, analyzeFileFn) {
        assertGitRepo(targetPath);
        return BlameAnalyzer.#aggregateBlame(targetPath, analyzeFileFn);
    }
    // --- Instance method -------------------------------------------------------
    /** Run the blame mode selected by options.target and attach the current report. */
    async analyzeBlame(options) {
        assertGitRepo(this.projectPath);
        let blame = [];
        const mode = options.target ?? 'overall';
        if (mode === 'file' && options.filePath) {
            blame = await BlameAnalyzer.analyzeFileBlame(options.filePath, this.analyzeFileFn);
        }
        else if (mode === 'rule' && options.rule) {
            blame = await BlameAnalyzer.analyzeRuleBlame(options.rule, this.projectPath, this.analyzeFileFn);
        }
        else {
            blame = await BlameAnalyzer.analyzeOverallBlame(this.projectPath, this.analyzeFileFn);
        }
        if (options.top) {
            blame = blame.slice(0, options.top);
        }
        const currentFiles = this.analyzeProjectFn(this.projectPath, this.config);
        const baseReport = buildReport(this.projectPath, currentFiles);
        return { ...baseReport, blame };
    }
}
|
|
227
|
+
//# sourceMappingURL=blame.js.map
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type { SourceFile } from 'ts-morph';
import type { FileReport, DriftConfig, HistoricalAnalysis } from '../types.js';
/**
 * Analyse a file given its absolute path string.
 * Accepts analyzeFile as a parameter to avoid circular dependency.
 */
export declare function analyzeFilePath(filePath: string, analyzeFile: (sf: SourceFile) => FileReport): FileReport;
/**
 * Execute a git command synchronously and return stdout.
 * Throws a descriptive error if the command fails or git is not available.
 */
export declare function execGit(cmd: string, cwd: string): string;
/**
 * Verify the given directory is a git repository.
 * Throws if git is not available or the directory is not a repo.
 */
export declare function assertGitRepo(cwd: string): void;
/**
 * Analyse a single file as it existed at a given commit hash.
 * Writes the blob to a temp file, runs analyzeFile, then cleans up.
 */
export declare function analyzeFileAtCommit(// drift-ignore
filePath: string, commitHash: string, projectRoot: string, analyzeFile: (sf: SourceFile) => FileReport): Promise<FileReport>;
/**
 * Analyse ALL TypeScript files in the project snapshot at a given commit.
 * Uses `git ls-tree` to enumerate every file in the tree, writes them to a
 * temp directory, then runs `analyzeProject` on that full snapshot.
 */
export declare function analyzeSingleCommit(// drift-ignore
commitHash: string, targetPath: string, analyzeProject: (targetPath: string, config?: DriftConfig) => FileReport[], config?: DriftConfig): Promise<HistoricalAnalysis>;
/**
 * Run historical analysis over all commits since a given date.
 * Returns results ordered chronologically (oldest first).
 * At most `maxSamples` commits are analysed, sampled evenly across the range.
 */
export declare function analyzeHistoricalCommits(sinceDate: Date, targetPath: string, maxCommits: number, analyzeProject: (targetPath: string, config?: DriftConfig) => FileReport[], config?: DriftConfig, maxSamples?: number): Promise<HistoricalAnalysis[]>;
//# sourceMappingURL=helpers.d.ts.map
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
// drift-ignore-file
|
|
2
|
+
import * as fs from 'node:fs';
|
|
3
|
+
import * as path from 'node:path';
|
|
4
|
+
import * as os from 'node:os';
|
|
5
|
+
import * as crypto from 'node:crypto';
|
|
6
|
+
import { execSync } from 'node:child_process';
|
|
7
|
+
import { Project } from 'ts-morph';
|
|
8
|
+
/**
 * Analyse a file given its absolute path string.
 * Accepts analyzeFile as a parameter to avoid circular dependency.
 */
export function analyzeFilePath(filePath, analyzeFile) {
    // Standalone ts-morph project so the file compiles without a tsconfig.
    const project = new Project({
        skipAddingFilesFromTsConfig: true,
        compilerOptions: { allowJs: true },
    });
    return analyzeFile(project.addSourceFileAtPath(filePath));
}
|
|
20
|
+
/**
 * Execute a git command synchronously and return trimmed stdout.
 * Throws a descriptive error if the command fails or git is not available.
 * The original error is attached as `cause` so callers can still inspect the
 * exit status / stderr instead of losing them in the wrapper message.
 */
export function execGit(cmd, cwd) {
    try {
        return execSync(cmd, { cwd, encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] }).trim();
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        throw new Error(`Git command failed: ${cmd}\n${msg}`, { cause: err });
    }
}
|
|
33
|
+
/**
 * Verify the given directory is a git repository.
 * Throws if git is not available or the directory is not a repo.
 */
export function assertGitRepo(cwd) {
    let insideRepo = true;
    try {
        execGit('git rev-parse --is-inside-work-tree', cwd);
    }
    catch {
        insideRepo = false;
    }
    if (!insideRepo) {
        throw new Error(`Directory is not a git repository: ${cwd}`);
    }
}
|
|
45
|
+
/**
 * Analyse a single file as it existed at a given commit hash.
 * Writes the blob to a temp file, runs analyzeFile, then cleans up.
 */
export async function analyzeFileAtCommit(// drift-ignore
filePath, commitHash, projectRoot, analyzeFile) {
    // git wants a repo-relative, forward-slash path after the colon.
    const relPath = path.relative(projectRoot, filePath).replace(/\\/g, '/');
    const blob = execGit(`git show ${commitHash}:${relPath}`, projectRoot);
    const tmpFile = path.join(os.tmpdir(), `drift-${crypto.randomBytes(8).toString('hex')}.ts`);
    try {
        fs.writeFileSync(tmpFile, blob, 'utf8');
        const report = analyzeFilePath(tmpFile, analyzeFile);
        // Replace temp path with original for readable output
        return { ...report, path: filePath };
    }
    finally {
        try {
            fs.unlinkSync(tmpFile);
        }
        catch { /* ignore cleanup errors */ } // drift-ignore
    }
}
|
|
67
|
+
/**
 * Analyse ALL TypeScript files in the project snapshot at a given commit.
 * Uses `git ls-tree` to enumerate every file in the tree, writes them to a
 * temp directory, then runs `analyzeProject` on that full snapshot.
 *
 * Bug fixed: the temp directory used to be the fixed name
 * `drift-<hash8>`, so two concurrent analyses of the same commit (possible —
 * analyzeHistoricalCommits runs commits through Promise.all) raced on one
 * directory, and stale files from a crashed earlier run leaked into the
 * snapshot. mkdtempSync guarantees a fresh, unique directory per call.
 */
export async function analyzeSingleCommit(// drift-ignore
commitHash, targetPath, analyzeProject, config) {
    // 1. Commit metadata
    const meta = execGit(`git show --no-patch --format="%H|%aI|%an|%s" ${commitHash}`, targetPath);
    const [hash, dateStr, author, ...msgParts] = meta.split('|');
    const message = msgParts.join('|').trim(); // subject may itself contain '|'
    const commitDate = new Date(dateStr ?? '');
    // 2. All .ts/.tsx files tracked at this commit (no diffs, full tree)
    const allFiles = execGit(`git ls-tree -r ${commitHash} --name-only`, targetPath)
        .split('\n')
        .filter(f => (f.endsWith('.ts') || f.endsWith('.tsx') || f.endsWith('.js') || f.endsWith('.jsx')) &&
        !f.endsWith('.d.ts') &&
        !f.includes('node_modules') &&
        !f.startsWith('dist/'));
    if (allFiles.length === 0) {
        return {
            commitHash: hash ?? commitHash,
            commitDate,
            author: author ?? '',
            message,
            files: [],
            totalScore: 0,
            averageScore: 0,
        };
    }
    // 3. Write snapshot to a UNIQUE temp directory (see bugfix note above)
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), `drift-${(hash ?? commitHash).slice(0, 8)}-`));
    for (const relPath of allFiles) {
        try {
            const content = execGit(`git show ${commitHash}:${relPath}`, targetPath);
            const destPath = path.join(tmpDir, relPath);
            fs.mkdirSync(path.dirname(destPath), { recursive: true });
            fs.writeFileSync(destPath, content, 'utf-8');
        }
        catch { // drift-ignore
            // skip files that can't be read (binary, deleted in partial clone, etc.)
        }
    }
    // 4. Analyse the full project snapshot
    const fileReports = analyzeProject(tmpDir, config);
    const totalScore = fileReports.reduce((sum, r) => sum + r.score, 0);
    const averageScore = fileReports.length > 0 ? totalScore / fileReports.length : 0;
    // 5. Cleanup
    try {
        fs.rmSync(tmpDir, { recursive: true, force: true });
    }
    catch { // drift-ignore
        // non-fatal — temp dirs are cleaned by the OS eventually
    }
    return {
        commitHash: hash ?? commitHash,
        commitDate,
        author: author ?? '',
        message,
        files: fileReports,
        totalScore,
        averageScore,
    };
}
|
|
132
|
+
/**
 * Run historical analysis over all commits since a given date.
 * Returns results ordered chronologically (oldest first).
 *
 * Bugs fixed:
 * - maxSamples === 1 divided by zero in the sampling formula, producing a NaN
 *   index and passing `undefined` as a commit hash to analyzeSingleCommit.
 * - even sampling could select the same index twice when the commit count is
 *   just above maxSamples, analysing one commit twice; indices are now deduped.
 */
export async function analyzeHistoricalCommits(sinceDate, targetPath, maxCommits, analyzeProject, config, maxSamples = 10) {
    assertGitRepo(targetPath);
    const isoDate = sinceDate.toISOString();
    const raw = execGit(`git log --since="${isoDate}" --format="%H" --max-count=${maxCommits}`, targetPath);
    if (!raw)
        return [];
    const hashes = raw.split('\n').filter(Boolean);
    // Sample: distribute evenly across the range
    let sampled;
    if (hashes.length <= maxSamples) {
        sampled = hashes;
    }
    else if (maxSamples <= 1) {
        sampled = [hashes[0]]; // a single sample: take the most recent commit
    }
    else {
        const picked = Array.from({ length: maxSamples }, (_, i) => hashes[Math.floor(i * (hashes.length - 1) / (maxSamples - 1))]);
        sampled = [...new Set(picked)]; // drop duplicate picks
    }
    // Failures on individual commits are skipped rather than aborting the run.
    const analyses = await Promise.all(sampled.map(h => analyzeSingleCommit(h, targetPath, analyzeProject, config).catch(() => null)));
    return analyses
        .filter((a) => a !== null)
        .sort((a, b) => a.commitDate.getTime() - b.commitDate.getTime());
}
|
|
152
|
+
//# sourceMappingURL=helpers.js.map
|