@eduardbar/drift 0.7.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -2
- package/assets/og.svg +105 -105
- package/dist/analyzer.d.ts +36 -1
- package/dist/analyzer.js +547 -0
- package/dist/cli.js +39 -1
- package/dist/report.js +3 -1
- package/dist/types.d.ts +39 -0
- package/package.json +1 -1
- package/src/analyzer.ts +661 -1
- package/src/cli.ts +47 -1
- package/src/printer.ts +60 -60
- package/src/report.ts +3 -1
- package/src/types.ts +59 -15
- package/src/utils.ts +35 -35
package/dist/analyzer.js
CHANGED
|
@@ -1,6 +1,10 @@
|
|
|
1
1
|
import * as fs from 'node:fs';
|
|
2
|
+
import * as crypto from 'node:crypto';
|
|
2
3
|
import * as path from 'node:path';
|
|
4
|
+
import * as os from 'node:os';
|
|
5
|
+
import { execSync } from 'node:child_process';
|
|
3
6
|
import { Project, SyntaxKind, } from 'ts-morph';
|
|
7
|
+
import { buildReport } from './reporter.js';
|
|
4
8
|
// Rules and their drift score weight
|
|
5
9
|
export const RULE_WEIGHTS = {
|
|
6
10
|
'large-file': { severity: 'error', weight: 20 },
|
|
@@ -34,6 +38,8 @@ export const RULE_WEIGHTS = {
|
|
|
34
38
|
'inconsistent-error-handling': { severity: 'warning', weight: 8 },
|
|
35
39
|
'unnecessary-abstraction': { severity: 'warning', weight: 7 },
|
|
36
40
|
'naming-inconsistency': { severity: 'warning', weight: 6 },
|
|
41
|
+
// Phase 8: semantic duplication
|
|
42
|
+
'semantic-duplication': { severity: 'warning', weight: 12 },
|
|
37
43
|
};
|
|
38
44
|
function hasIgnoreComment(file, line) {
|
|
39
45
|
const lines = file.getFullText().split('\n');
|
|
@@ -786,6 +792,105 @@ function calculateScore(issues) {
|
|
|
786
792
|
}
|
|
787
793
|
return Math.min(100, raw);
|
|
788
794
|
}
|
|
795
|
+
/** Normalize a function body to a canonical string (Type-2 clone detection).
 * Variable names, parameter names, and numeric/string literals are replaced
 * with canonical tokens so that two functions with identical logic but
 * different identifiers produce the same fingerprint.
 */
function normalizeFunctionBody(fn) {
    // localName → canonical token ("P<i>" for parameters, "V<i>" for locals).
    const canonical = new Map();
    // Parameters are numbered by position; "_" placeholders are left alone.
    fn.getParameters().forEach((param, idx) => {
        const paramName = param.getName();
        if (paramName && paramName !== '_') {
            canonical.set(paramName, `P${idx}`);
        }
    });
    // Locally declared variables are numbered in document order.
    // NOTE(review): destructuring patterns (BindingPattern name nodes) are
    // intentionally skipped — only plain Identifier names are mapped.
    let nextVar = 0;
    fn.forEachDescendant(descendant => {
        if (descendant.getKind() !== SyntaxKind.VariableDeclaration) return;
        const nameNode = descendant.getNameNode();
        if (nameNode.getKind() !== SyntaxKind.Identifier) return;
        const varName = nameNode.getText();
        if (!canonical.has(varName)) {
            canonical.set(varName, `V${nextVar++}`);
        }
    });
    // Recursively serialize the AST, substituting canonical tokens for known
    // identifiers and collapsing literals to fixed markers.
    const serialize = (node) => {
        switch (node.getKind()) {
            case SyntaxKind.Identifier:
                // External refs (Math, console, …) are kept verbatim.
                return canonical.get(node.getText()) ?? node.getText();
            case SyntaxKind.NumericLiteral:
                return 'NL';
            case SyntaxKind.StringLiteral:
            case SyntaxKind.NoSubstitutionTemplateLiteral:
                return 'SL';
            case SyntaxKind.TrueKeyword:
                return 'TRUE';
            case SyntaxKind.FalseKeyword:
                return 'FALSE';
            case SyntaxKind.NullKeyword:
                return 'NULL';
        }
        const children = node.getChildren();
        return children.length === 0
            ? node.getKindName()
            : `${node.getKindName()}(${children.map(serialize).join('|')})`;
    };
    const body = fn.getBody();
    return body ? serialize(body) : '';
}
|
|
852
|
+
/** Return a SHA-256 fingerprint for a function body (normalized). */
function fingerprintFunction(fn) {
    const hasher = crypto.createHash('sha256');
    hasher.update(normalizeFunctionBody(fn));
    return hasher.digest('hex');
}
|
|
857
|
+
/** Return all function-like nodes from a SourceFile that are worth comparing:
 * - At least MIN_LINES lines in their body
 * - Not test helpers (describe/it/test/beforeEach/afterEach)
 */
const MIN_LINES = 8;
function collectFunctions(sf) {
    // Names used by common test frameworks; their callbacks are noise here.
    const testHelpers = new Set([
        'describe', 'it', 'test', 'beforeEach', 'afterEach', 'beforeAll', 'afterAll',
    ]);
    const functionKinds = [
        SyntaxKind.FunctionDeclaration,
        SyntaxKind.FunctionExpression,
        SyntaxKind.ArrowFunction,
        SyntaxKind.MethodDeclaration,
    ];
    const found = [];
    for (const kind of functionKinds) {
        for (const fn of sf.getDescendantsOfKind(kind)) {
            const body = fn.getBody();
            if (!body)
                continue;
            // Only bodies spanning MIN_LINES or more are worth fingerprinting.
            const lineSpan = body.getEndLineNumber() - body.getStartLineNumber() + 1;
            if (lineSpan < MIN_LINES)
                continue;
            // Resolve a display name; expressions and arrows are anonymous.
            let name = '<anonymous>';
            if (kind === SyntaxKind.FunctionDeclaration) {
                name = fn.getName() ?? '<anonymous>';
            }
            else if (kind === SyntaxKind.MethodDeclaration) {
                name = fn.getName();
            }
            if (testHelpers.has(name))
                continue;
            const { line, column } = sf.getLineAndColumnAtPos(fn.getStart());
            found.push({ fn, name, line, col: column });
        }
    }
    return found;
}
|
|
789
894
|
// ---------------------------------------------------------------------------
|
|
790
895
|
// Public API
|
|
791
896
|
// ---------------------------------------------------------------------------
|
|
@@ -1162,6 +1267,448 @@ export function analyzeProject(targetPath, config) {
|
|
|
1162
1267
|
}
|
|
1163
1268
|
}
|
|
1164
1269
|
}
|
|
1270
|
+
// ── Phase 8: semantic-duplication ────────────────────────────────────────
|
|
1271
|
+
// Build a fingerprint → [{filePath, fnName, line, col}] map across all files
|
|
1272
|
+
const fingerprintMap = new Map();
|
|
1273
|
+
for (const sf of sourceFiles) {
|
|
1274
|
+
const sfPath = sf.getFilePath();
|
|
1275
|
+
for (const { fn, name, line, col } of collectFunctions(sf)) {
|
|
1276
|
+
const fp = fingerprintFunction(fn);
|
|
1277
|
+
if (!fingerprintMap.has(fp))
|
|
1278
|
+
fingerprintMap.set(fp, []);
|
|
1279
|
+
fingerprintMap.get(fp).push({ filePath: sfPath, name, line, col });
|
|
1280
|
+
}
|
|
1281
|
+
}
|
|
1282
|
+
// For each fingerprint with 2+ functions: report each as a duplicate of the others
|
|
1283
|
+
for (const [, entries] of fingerprintMap) {
|
|
1284
|
+
if (entries.length < 2)
|
|
1285
|
+
continue;
|
|
1286
|
+
for (const entry of entries) {
|
|
1287
|
+
const report = reportByPath.get(entry.filePath);
|
|
1288
|
+
if (!report)
|
|
1289
|
+
continue;
|
|
1290
|
+
// Build the "duplicated in" list (all other locations)
|
|
1291
|
+
const others = entries
|
|
1292
|
+
.filter(e => e !== entry)
|
|
1293
|
+
.map(e => {
|
|
1294
|
+
const rel = path.relative(targetPath, e.filePath).replace(/\\/g, '/');
|
|
1295
|
+
return `${rel}:${e.line} (${e.name})`;
|
|
1296
|
+
})
|
|
1297
|
+
.join(', ');
|
|
1298
|
+
const weight = RULE_WEIGHTS['semantic-duplication']?.weight ?? 12;
|
|
1299
|
+
report.issues.push({
|
|
1300
|
+
rule: 'semantic-duplication',
|
|
1301
|
+
severity: 'warning',
|
|
1302
|
+
message: `Function '${entry.name}' is semantically identical to: ${others}`,
|
|
1303
|
+
line: entry.line,
|
|
1304
|
+
column: entry.col,
|
|
1305
|
+
snippet: `function ${entry.name} — duplicated in ${entries.length - 1} other location${entries.length > 2 ? 's' : ''}`,
|
|
1306
|
+
});
|
|
1307
|
+
report.score = Math.min(100, report.score + weight);
|
|
1308
|
+
}
|
|
1309
|
+
}
|
|
1165
1310
|
return reports;
|
|
1166
1311
|
}
|
|
1312
|
+
// ---------------------------------------------------------------------------
|
|
1313
|
+
// Git helpers
|
|
1314
|
+
// ---------------------------------------------------------------------------
|
|
1315
|
+
/** Analyse a file given its absolute path string (wraps analyzeFile). */
function analyzeFilePath(filePath) {
    const project = new Project({
        skipAddingFilesFromTsConfig: true,
        compilerOptions: { allowJs: true },
    });
    const sourceFile = project.addSourceFileAtPath(filePath);
    return analyzeFile(sourceFile);
}
|
|
1324
|
+
/**
 * Execute a git command synchronously and return stdout.
 * Throws a descriptive error if the command fails or git is not available.
 */
function execGit(cmd, cwd) {
    // Pipe all stdio so git's stderr never leaks to the user's terminal.
    const opts = { cwd, encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] };
    try {
        const stdout = execSync(cmd, opts);
        return stdout.trim();
    }
    catch (err) {
        const detail = err instanceof Error ? err.message : String(err);
        throw new Error(`Git command failed: ${cmd}\n${detail}`);
    }
}
|
|
1337
|
+
/**
 * Verify the given directory is a git repository.
 * Throws if git is not available or the directory is not a repo.
 */
function assertGitRepo(cwd) {
    let insideWorkTree = true;
    try {
        execGit('git rev-parse --is-inside-work-tree', cwd);
    }
    catch {
        insideWorkTree = false;
    }
    if (!insideWorkTree) {
        throw new Error(`Directory is not a git repository: ${cwd}`);
    }
}
|
|
1349
|
+
// ---------------------------------------------------------------------------
|
|
1350
|
+
// Historical analysis helpers
|
|
1351
|
+
// ---------------------------------------------------------------------------
|
|
1352
|
+
/**
 * Analyse a single file as it existed at a given commit hash.
 * Writes the blob to a temp file, runs analyzeFile, then cleans up.
 *
 * @param filePath    Absolute path of the file in the working tree.
 * @param commitHash  Commit to read the blob from.
 * @param projectRoot Repository root (used as cwd for git).
 * @returns File report with `path` rewritten back to the original file path.
 * @throws If `git show` fails (e.g. the file does not exist at that commit).
 */
async function analyzeFileAtCommit(filePath, commitHash, projectRoot) {
    const relPath = path.relative(projectRoot, filePath).replace(/\\/g, '/');
    // Quote the spec so paths containing spaces survive shell parsing
    // (matches the quoting used by the blame helpers in this file).
    const blob = execGit(`git show "${commitHash}:${relPath}"`, projectRoot);
    // Preserve the original extension (.tsx vs .ts) so JSX parses correctly.
    const ext = path.extname(filePath) || '.ts';
    const tmpFile = path.join(os.tmpdir(), `drift-${crypto.randomBytes(8).toString('hex')}${ext}`);
    try {
        fs.writeFileSync(tmpFile, blob, 'utf8');
        const report = analyzeFilePath(tmpFile);
        // Replace temp path with original for readable output
        return { ...report, path: filePath };
    }
    finally {
        try {
            fs.unlinkSync(tmpFile);
        }
        catch { /* ignore cleanup errors */ }
    }
}
|
|
1373
|
+
/**
 * Analyse all TypeScript files changed in a single commit.
 *
 * @param commitHash  Commit to inspect.
 * @param targetPath  Repository root.
 * @returns Object with commit metadata, per-file reports, and score totals.
 */
async function analyzeSingleCommit(commitHash, targetPath) {
    // --name-only lists changed files; format gives metadata
    const raw = execGit(`git show --name-only --format="%H|%ai|%an|%s" ${commitHash}`, targetPath);
    const lines = raw.split('\n');
    // First non-empty line is the metadata line
    const metaLine = lines[0] ?? '';
    const [hash, dateStr, author, ...msgParts] = metaLine.split('|');
    // Re-join so commit messages containing '|' are preserved intact.
    const message = msgParts.join('|').trim();
    const commitDate = new Date(dateStr ?? '');
    // Collect changed .ts/.tsx files (lines after the empty separator)
    const changedFiles = [];
    let pastSeparator = false;
    for (const line of lines.slice(1)) {
        // Trim once and test/push the same value: the old code tested the
        // untrimmed line, so trailing whitespace from git output hid matches.
        const name = line.trim();
        if (!pastSeparator && name === '') {
            pastSeparator = true;
            continue;
        }
        if (pastSeparator && (name.endsWith('.ts') || name.endsWith('.tsx'))) {
            changedFiles.push(path.join(targetPath, name));
        }
    }
    // Files deleted at this commit make `git show` fail; drop them silently.
    const fileReports = await Promise.all(changedFiles.map(f => analyzeFileAtCommit(f, hash ?? commitHash, targetPath).catch(() => null)));
    const validReports = fileReports.filter((r) => r !== null);
    const totalScore = validReports.reduce((s, r) => s + r.score, 0);
    const averageScore = validReports.length > 0 ? totalScore / validReports.length : 0;
    return {
        commitHash: hash ?? commitHash,
        commitDate,
        author: author ?? '',
        message,
        files: validReports,
        totalScore,
        averageScore,
    };
}
|
|
1411
|
+
/**
 * Run historical analysis over all commits since a given date.
 * Returns results ordered chronologically (oldest first).
 */
async function analyzeHistoricalCommits(sinceDate, targetPath, maxCommits) {
    assertGitRepo(targetPath);
    const since = sinceDate.toISOString();
    const logOutput = execGit(`git log --since="${since}" --format="%H" --max-count=${maxCommits}`, targetPath);
    if (!logOutput) {
        return [];
    }
    const hashes = logOutput.split('\n').filter(Boolean);
    // Commits that cannot be analysed resolve to null and are dropped.
    const settled = await Promise.all(hashes.map(h => analyzeSingleCommit(h, targetPath).catch(() => null)));
    const valid = settled.filter((a) => a !== null);
    valid.sort((a, b) => a.commitDate.getTime() - b.commitDate.getTime());
    return valid;
}
|
|
1427
|
+
// ---------------------------------------------------------------------------
|
|
1428
|
+
// TrendAnalyzer
|
|
1429
|
+
// ---------------------------------------------------------------------------
|
|
1430
|
+
export class TrendAnalyzer {
    projectPath;
    config;
    constructor(projectPath, config) {
        this.projectPath = projectPath;
        this.config = config;
    }
    // --- Static utility methods -----------------------------------------------
    /** Trailing moving average of `score` over a window of `windowSize` points. */
    static calculateMovingAverage(data, windowSize) {
        const averages = [];
        for (let i = 0; i < data.length; i++) {
            const from = Math.max(0, i - windowSize + 1);
            let sum = 0;
            for (let j = from; j <= i; j++) {
                sum += data[j].score;
            }
            averages.push(sum / (i - from + 1));
        }
        return averages;
    }
    /** Least-squares fit of score against index; returns { slope, intercept, r2 }. */
    static linearRegression(data) {
        const n = data.length;
        if (n < 2) {
            return { slope: 0, intercept: data[0]?.score ?? 0, r2: 0 };
        }
        const ys = data.map(p => p.score);
        // x values are simply the indices 0..n-1.
        let xSum = 0;
        let ySum = 0;
        for (let i = 0; i < n; i++) {
            xSum += i;
            ySum += ys[i];
        }
        const xMean = xSum / n;
        const yMean = ySum / n;
        let ssXX = 0;
        let ssXY = 0;
        let ssYY = 0;
        for (let i = 0; i < n; i++) {
            const dx = i - xMean;
            const dy = ys[i] - yMean;
            ssXX += dx * dx;
            ssXY += dx * dy;
            ssYY += dy * dy;
        }
        const slope = ssXX === 0 ? 0 : ssXY / ssXX;
        const intercept = yMean - slope * xMean;
        const r2 = ssYY === 0 ? 1 : (ssXY * ssXY) / (ssXX * ssYY);
        return { slope, intercept, r2 };
    }
    /** Generate a simple horizontal ASCII bar chart (one bar per data point). */
    static generateTrendChart(data) {
        if (data.length === 0) {
            return '(no data)';
        }
        const chartWidth = 40;
        // Floor at 1 so an all-zero series does not divide by zero.
        const maxScore = Math.max(...data.map(p => p.score), 1);
        const rows = [];
        for (const point of data) {
            const filled = '█'.repeat(Math.round((point.score / maxScore) * chartWidth));
            const day = point.date.toISOString().slice(0, 10);
            rows.push(`${day} │${filled.padEnd(chartWidth)} ${point.score.toFixed(1)}`);
        }
        return rows.join('\n');
    }
    // --- Instance method -------------------------------------------------------
    /** Analyse drift-score trend since the requested date/period and fit a regression. */
    async analyzeTrend(options) {
        assertGitRepo(this.projectPath);
        const periodDays = { week: 7, month: 30, quarter: 90, year: 365 };
        const days = periodDays[options.period ?? 'month'] ?? 30;
        const sinceDate = options.since
            ? new Date(options.since)
            : new Date(Date.now() - days * 24 * 60 * 60 * 1000);
        const history = await analyzeHistoricalCommits(sinceDate, this.projectPath, 100);
        const trendPoints = history.map(h => {
            const fileCount = h.files.length;
            const issueTotal = h.files.reduce((s, f) => s + f.issues.length, 0);
            return {
                date: h.commitDate,
                score: h.averageScore,
                fileCount,
                avgIssuesPerFile: fileCount > 0 ? issueTotal / fileCount : 0,
            };
        });
        const regression = TrendAnalyzer.linearRegression(trendPoints);
        // Current state report
        const currentFiles = analyzeProject(this.projectPath, this.config);
        const baseReport = buildReport(this.projectPath, currentFiles);
        return { ...baseReport, trend: trendPoints, regression };
    }
}
|
|
1505
|
+
/**
 * Parse `git blame --porcelain` output into one entry per source line.
 *
 * Porcelain format emits full commit metadata (author, author-mail, …) only
 * the FIRST time a commit hash appears; subsequent lines from the same commit
 * carry a bare "<hash> <orig-line> <final-line>" header. The previous version
 * left author/email empty for those repeated-hash lines, corrupting the
 * `email || author` aggregation key used by the blame analyzers. We now cache
 * metadata per hash so every entry is fully attributed.
 *
 * @param blameOutput Raw stdout of `git blame --porcelain <file>`.
 * @returns Array of { hash, author, email, line } in file order.
 */
function parseGitBlame(blameOutput) {
    const entries = [];
    // hash → { author, email } already seen earlier in the stream
    const metaByHash = new Map();
    const lines = blameOutput.split('\n');
    let i = 0;
    while (i < lines.length) {
        const headerLine = lines[i];
        if (!headerLine || headerLine.trim() === '') {
            i++;
            continue;
        }
        // Porcelain blame format: header line is "<hash> <orig-line> <final-line> [<num-lines>]"
        const headerMatch = headerLine.match(/^([0-9a-f]{40})\s/);
        if (!headerMatch) {
            i++;
            continue;
        }
        const hash = headerMatch[1];
        // Seed from the cache; a full header below will overwrite these.
        const cached = metaByHash.get(hash);
        let author = cached?.author ?? '';
        let email = cached?.email ?? '';
        let codeLine = '';
        i++;
        while (i < lines.length && !lines[i].match(/^[0-9a-f]{40}\s/)) {
            const l = lines[i];
            if (l.startsWith('author '))
                author = l.slice(7).trim();
            else if (l.startsWith('author-mail '))
                email = l.slice(12).replace(/[<>]/g, '').trim();
            else if (l.startsWith('\t'))
                codeLine = l.slice(1); // the actual source line, tab-prefixed
            i++;
        }
        metaByHash.set(hash, { author, email });
        entries.push({ hash, author, email, line: codeLine });
    }
    return entries;
}
|
|
1540
|
+
export class BlameAnalyzer {
    projectPath;
    config;
    constructor(projectPath, config) {
        this.projectPath = projectPath;
        this.config = config;
    }
    /** Fresh, zeroed attribution record for an author (was triplicated inline). */
    static #newAttribution(entry) {
        return {
            author: entry.author,
            email: entry.email,
            commits: 0,
            linesChanged: 0,
            issuesIntroduced: 0,
            avgScoreImpact: 0,
        };
    }
    /** List all .ts/.tsx files under targetPath, excluding node_modules. */
    static #listTsFiles(targetPath) {
        return fs
            .readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
            .filter((f) => (f.endsWith('.ts') || f.endsWith('.tsx')) && !f.includes('node_modules'))
            .map(f => path.join(targetPath, f));
    }
    /** Blame a single file: returns per-author attribution. */
    static async analyzeFileBlame(filePath) {
        const dir = path.dirname(filePath);
        assertGitRepo(dir);
        const blameOutput = execGit(`git blame --porcelain "${filePath}"`, dir);
        const entries = parseGitBlame(blameOutput);
        // Analyse issues in the file
        const report = analyzeFilePath(filePath);
        // Map line numbers of issues to authors
        const issuesByLine = new Map();
        for (const issue of report.issues) {
            issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
        }
        // Aggregate by author (keyed by email, falling back to name)
        const byAuthor = new Map();
        entries.forEach((entry, idx) => {
            const key = entry.email || entry.author;
            if (!byAuthor.has(key)) {
                byAuthor.set(key, BlameAnalyzer.#newAttribution(entry));
            }
            const attr = byAuthor.get(key);
            attr.linesChanged++;
            const lineNum = idx + 1; // blame entries are in file order, 1-based
            if (issuesByLine.has(lineNum)) {
                attr.issuesIntroduced += issuesByLine.get(lineNum);
            }
        });
        // Count unique commits per author
        const commitsByAuthor = new Map();
        for (const entry of entries) {
            const key = entry.email || entry.author;
            if (!commitsByAuthor.has(key))
                commitsByAuthor.set(key, new Set());
            commitsByAuthor.get(key).add(entry.hash);
        }
        const total = entries.length || 1;
        const results = [];
        for (const [key, attr] of byAuthor) {
            attr.commits = commitsByAuthor.get(key)?.size ?? 0;
            // Rough impact: author's share of lines × file drift score.
            attr.avgScoreImpact = (attr.linesChanged / total) * report.score;
            results.push(attr);
        }
        return results.sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
    }
    /** Blame for a specific rule across all files in targetPath. */
    static async analyzeRuleBlame(rule, targetPath) {
        assertGitRepo(targetPath);
        const combined = new Map();
        for (const file of BlameAnalyzer.#listTsFiles(targetPath)) {
            const report = analyzeFilePath(file);
            const ruleIssues = report.issues.filter(i => i.rule === rule);
            if (ruleIssues.length === 0)
                continue;
            let blameEntries = [];
            try {
                const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath);
                blameEntries = parseGitBlame(blameOutput);
            }
            catch {
                continue; // untracked file or blame failure — skip it
            }
            for (const issue of ruleIssues) {
                const entry = blameEntries[issue.line - 1];
                if (!entry)
                    continue;
                const key = entry.email || entry.author;
                if (!combined.has(key)) {
                    combined.set(key, BlameAnalyzer.#newAttribution(entry));
                }
                const attr = combined.get(key);
                attr.issuesIntroduced++;
                attr.avgScoreImpact += RULE_WEIGHTS[rule]?.weight ?? 5;
            }
        }
        return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
    }
    /** Overall blame across all files and rules. */
    static async analyzeOverallBlame(targetPath) {
        assertGitRepo(targetPath);
        const combined = new Map();
        const commitsByAuthor = new Map();
        for (const file of BlameAnalyzer.#listTsFiles(targetPath)) {
            let blameEntries = [];
            try {
                const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath);
                blameEntries = parseGitBlame(blameOutput);
            }
            catch {
                continue;
            }
            const report = analyzeFilePath(file);
            const issuesByLine = new Map();
            for (const issue of report.issues) {
                issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
            }
            blameEntries.forEach((entry, idx) => {
                const key = entry.email || entry.author;
                if (!combined.has(key)) {
                    combined.set(key, BlameAnalyzer.#newAttribution(entry));
                    commitsByAuthor.set(key, new Set());
                }
                const attr = combined.get(key);
                attr.linesChanged++;
                commitsByAuthor.get(key).add(entry.hash);
                const lineNum = idx + 1;
                if (issuesByLine.has(lineNum)) {
                    attr.issuesIntroduced += issuesByLine.get(lineNum);
                    // Spread the file score evenly across its blamed lines.
                    attr.avgScoreImpact += report.score * (1 / (blameEntries.length || 1));
                }
            });
        }
        for (const [key, attr] of combined) {
            attr.commits = commitsByAuthor.get(key)?.size ?? 0;
        }
        return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
    }
    // --- Instance method -------------------------------------------------------
    /** Dispatch blame analysis by target mode ('file' | 'rule' | overall). */
    async analyzeBlame(options) {
        assertGitRepo(this.projectPath);
        let blame = [];
        const mode = options.target ?? 'overall';
        if (mode === 'file' && options.filePath) {
            blame = await BlameAnalyzer.analyzeFileBlame(options.filePath);
        }
        else if (mode === 'rule' && options.rule) {
            blame = await BlameAnalyzer.analyzeRuleBlame(options.rule, this.projectPath);
        }
        else {
            blame = await BlameAnalyzer.analyzeOverallBlame(this.projectPath);
        }
        if (options.top) {
            blame = blame.slice(0, options.top);
        }
        const currentFiles = analyzeProject(this.projectPath, this.config);
        const baseReport = buildReport(this.projectPath, currentFiles);
        return { ...baseReport, blame };
    }
}
|
|
1167
1714
|
//# sourceMappingURL=analyzer.js.map
|
package/dist/cli.js
CHANGED
|
@@ -2,6 +2,9 @@
|
|
|
2
2
|
import { Command } from 'commander';
|
|
3
3
|
import { writeFileSync } from 'node:fs';
|
|
4
4
|
import { resolve } from 'node:path';
|
|
5
|
+
import { createRequire } from 'node:module';
|
|
6
|
+
const require = createRequire(import.meta.url);
|
|
7
|
+
const { version: VERSION } = require('../package.json');
|
|
5
8
|
import { analyzeProject } from './analyzer.js';
|
|
6
9
|
import { buildReport, formatMarkdown, formatAIOutput } from './reporter.js';
|
|
7
10
|
import { printConsole, printDiff } from './printer.js';
|
|
@@ -11,11 +14,12 @@ import { computeDiff } from './diff.js';
|
|
|
11
14
|
import { generateHtmlReport } from './report.js';
|
|
12
15
|
import { generateBadge } from './badge.js';
|
|
13
16
|
import { emitCIAnnotations, printCISummary } from './ci.js';
|
|
17
|
+
import { TrendAnalyzer, BlameAnalyzer } from './analyzer.js';
|
|
14
18
|
const program = new Command();
|
|
15
19
|
program
|
|
16
20
|
.name('drift')
|
|
17
21
|
.description('Detect silent technical debt left by AI-generated code')
|
|
18
|
-
.version(
|
|
22
|
+
.version(VERSION);
|
|
19
23
|
program
|
|
20
24
|
.command('scan [path]', { isDefault: true })
|
|
21
25
|
.description('Scan a directory for vibe coding drift')
|
|
@@ -146,5 +150,39 @@ program
|
|
|
146
150
|
process.exit(1);
|
|
147
151
|
}
|
|
148
152
|
});
|
|
153
|
+
program
    .command('trend [period]')
    .description('Analyze trend of technical debt over time')
    .option('--since <date>', 'Start date for trend analysis (ISO format)')
    .option('--until <date>', 'End date for trend analysis (ISO format)')
    .action(async (period, options) => {
        const resolvedPath = resolve('.');
        process.stderr.write(`\nAnalyzing trend in ${resolvedPath}...\n`);
        const config = await loadConfig(resolvedPath);
        // Trend report: JSON to stdout, progress chatter to stderr.
        const trendData = await new TrendAnalyzer(resolvedPath, config).analyzeTrend({
            period,
            since: options.since,
            until: options.until,
        });
        process.stderr.write(`\nTrend analysis complete:\n`);
        process.stdout.write(JSON.stringify(trendData, null, 2) + '\n');
    });
program
    .command('blame [target]')
    .description('Analyze which files/rules contribute most to technical debt')
    .option('--top <n>', 'Number of top contributors to show (default: 10)', '10')
    .action(async (target, options) => {
        const resolvedPath = resolve('.');
        process.stderr.write(`\nAnalyzing blame in ${resolvedPath}...\n`);
        const config = await loadConfig(resolvedPath);
        // Blame report: JSON to stdout, progress chatter to stderr.
        const blameData = await new BlameAnalyzer(resolvedPath, config).analyzeBlame({
            target,
            top: Number(options.top),
        });
        process.stderr.write(`\nBlame analysis complete:\n`);
        process.stdout.write(JSON.stringify(blameData, null, 2) + '\n');
    });
|
|
149
187
|
program.parse();
|
|
150
188
|
//# sourceMappingURL=cli.js.map
|
package/dist/report.js
CHANGED
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
import { basename } from 'node:path';
|
|
2
|
-
|
|
2
|
+
import { createRequire } from 'node:module';
|
|
3
|
+
const require = createRequire(import.meta.url);
|
|
4
|
+
const { version: VERSION } = require('../package.json');
|
|
3
5
|
function severityColor(severity) {
|
|
4
6
|
switch (severity) {
|
|
5
7
|
case 'error': return '#ef4444';
|
package/dist/types.d.ts
CHANGED
|
@@ -95,4 +95,43 @@ export interface DriftDiff {
|
|
|
95
95
|
newIssuesCount: number;
|
|
96
96
|
resolvedIssuesCount: number;
|
|
97
97
|
}
|
|
98
|
+
/** Historical analysis data for a single commit */
|
|
99
|
+
export interface HistoricalAnalysis {
|
|
100
|
+
commitHash: string;
|
|
101
|
+
commitDate: Date;
|
|
102
|
+
author: string;
|
|
103
|
+
message: string;
|
|
104
|
+
files: FileReport[];
|
|
105
|
+
totalScore: number;
|
|
106
|
+
averageScore: number;
|
|
107
|
+
}
|
|
108
|
+
/** Trend data point for score evolution */
|
|
109
|
+
export interface TrendDataPoint {
|
|
110
|
+
date: Date;
|
|
111
|
+
score: number;
|
|
112
|
+
fileCount: number;
|
|
113
|
+
avgIssuesPerFile: number;
|
|
114
|
+
}
|
|
115
|
+
/** Blame attribution data */
|
|
116
|
+
export interface BlameAttribution {
|
|
117
|
+
author: string;
|
|
118
|
+
email: string;
|
|
119
|
+
commits: number;
|
|
120
|
+
linesChanged: number;
|
|
121
|
+
issuesIntroduced: number;
|
|
122
|
+
avgScoreImpact: number;
|
|
123
|
+
}
|
|
124
|
+
/** Extended DriftReport with historical context */
|
|
125
|
+
export interface DriftTrendReport extends DriftReport {
|
|
126
|
+
trend: TrendDataPoint[];
|
|
127
|
+
regression: {
|
|
128
|
+
slope: number;
|
|
129
|
+
intercept: number;
|
|
130
|
+
r2: number;
|
|
131
|
+
};
|
|
132
|
+
}
|
|
133
|
+
/** Extended DriftReport with blame data */
|
|
134
|
+
export interface DriftBlameReport extends DriftReport {
|
|
135
|
+
blame: BlameAttribution[];
|
|
136
|
+
}
|
|
98
137
|
//# sourceMappingURL=types.d.ts.map
|