@eduardbar/drift 0.8.0 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/analyzer.js CHANGED
@@ -1,7 +1,10 @@
1
1
  import * as fs from 'node:fs';
2
2
  import * as crypto from 'node:crypto';
3
3
  import * as path from 'node:path';
4
+ import * as os from 'node:os';
5
+ import { execSync } from 'node:child_process';
4
6
  import { Project, SyntaxKind, } from 'ts-morph';
7
+ import { buildReport } from './reporter.js';
5
8
  // Rules and their drift score weight
6
9
  export const RULE_WEIGHTS = {
7
10
  'large-file': { severity: 'error', weight: 20 },
@@ -1306,4 +1309,439 @@ export function analyzeProject(targetPath, config) {
1306
1309
  }
1307
1310
  return reports;
1308
1311
  }
1312
+ // ---------------------------------------------------------------------------
1313
+ // Git helpers
1314
+ // ---------------------------------------------------------------------------
1315
+ /** Analyse a file given its absolute path string (wraps analyzeFile). */
1316
+ function analyzeFilePath(filePath) {
1317
+ const proj = new Project({
1318
+ skipAddingFilesFromTsConfig: true,
1319
+ compilerOptions: { allowJs: true },
1320
+ });
1321
+ const sf = proj.addSourceFileAtPath(filePath);
1322
+ return analyzeFile(sf);
1323
+ }
1324
+ /**
1325
+ * Execute a git command synchronously and return stdout.
1326
+ * Throws a descriptive error if the command fails or git is not available.
1327
+ */
1328
+ function execGit(cmd, cwd) {
1329
+ try {
1330
+ return execSync(cmd, { cwd, encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] }).trim();
1331
+ }
1332
+ catch (err) {
1333
+ const msg = err instanceof Error ? err.message : String(err);
1334
+ throw new Error(`Git command failed: ${cmd}\n${msg}`);
1335
+ }
1336
+ }
1337
+ /**
1338
+ * Verify the given directory is a git repository.
1339
+ * Throws if git is not available or the directory is not a repo.
1340
+ */
1341
+ function assertGitRepo(cwd) {
1342
+ try {
1343
+ execGit('git rev-parse --is-inside-work-tree', cwd);
1344
+ }
1345
+ catch {
1346
+ throw new Error(`Directory is not a git repository: ${cwd}`);
1347
+ }
1348
+ }
1349
+ // ---------------------------------------------------------------------------
1350
+ // Historical analysis helpers
1351
+ // ---------------------------------------------------------------------------
1352
+ /**
1353
+ * Analyse a single file as it existed at a given commit hash.
1354
+ * Writes the blob to a temp file, runs analyzeFile, then cleans up.
1355
+ */
1356
+ async function analyzeFileAtCommit(filePath, commitHash, projectRoot) {
1357
+ const relPath = path.relative(projectRoot, filePath).replace(/\\/g, '/');
1358
+ const blob = execGit(`git show ${commitHash}:${relPath}`, projectRoot);
1359
+ const tmpFile = path.join(os.tmpdir(), `drift-${crypto.randomBytes(8).toString('hex')}.ts`);
1360
+ try {
1361
+ fs.writeFileSync(tmpFile, blob, 'utf8');
1362
+ const report = analyzeFilePath(tmpFile);
1363
+ // Replace temp path with original for readable output
1364
+ return { ...report, path: filePath };
1365
+ }
1366
+ finally {
1367
+ try {
1368
+ fs.unlinkSync(tmpFile);
1369
+ }
1370
+ catch { /* ignore cleanup errors */ }
1371
+ }
1372
+ }
1373
+ /**
1374
+ * Analyse ALL TypeScript files in the project snapshot at a given commit.
1375
+ * Uses `git ls-tree` to enumerate every file in the tree, writes them to a
1376
+ * temp directory, then runs `analyzeProject` on that full snapshot so that
1377
+ * the resulting `averageScore` reflects the complete project health rather
1378
+ * than only the files touched in that diff.
1379
+ */
1380
+ async function analyzeSingleCommit(commitHash, targetPath, config) {
1381
+ // 1. Commit metadata
1382
+ const meta = execGit(`git show --no-patch --format="%H|%aI|%an|%s" ${commitHash}`, targetPath);
1383
+ const [hash, dateStr, author, ...msgParts] = meta.split('|');
1384
+ const message = msgParts.join('|').trim();
1385
+ const commitDate = new Date(dateStr ?? '');
1386
+ // 2. All .ts/.tsx files tracked at this commit (no diffs, full tree)
1387
+ const allFiles = execGit(`git ls-tree -r ${commitHash} --name-only`, targetPath)
1388
+ .split('\n')
1389
+ .filter(f => (f.endsWith('.ts') || f.endsWith('.tsx')) &&
1390
+ !f.endsWith('.d.ts') &&
1391
+ !f.includes('node_modules') &&
1392
+ !f.startsWith('dist/'));
1393
+ if (allFiles.length === 0) {
1394
+ return {
1395
+ commitHash: hash ?? commitHash,
1396
+ commitDate,
1397
+ author: author ?? '',
1398
+ message,
1399
+ files: [],
1400
+ totalScore: 0,
1401
+ averageScore: 0,
1402
+ };
1403
+ }
1404
+ // 3. Write snapshot to temp directory
1405
+ const tmpDir = path.join(os.tmpdir(), `drift-${(hash ?? commitHash).slice(0, 8)}`);
1406
+ fs.mkdirSync(tmpDir, { recursive: true });
1407
+ for (const relPath of allFiles) {
1408
+ try {
1409
+ const content = execGit(`git show ${commitHash}:${relPath}`, targetPath);
1410
+ const destPath = path.join(tmpDir, relPath);
1411
+ fs.mkdirSync(path.dirname(destPath), { recursive: true });
1412
+ fs.writeFileSync(destPath, content, 'utf-8');
1413
+ }
1414
+ catch {
1415
+ // skip files that can't be read (binary, deleted in partial clone, etc.)
1416
+ }
1417
+ }
1418
+ // 4. Analyse the full project snapshot
1419
+ const fileReports = analyzeProject(tmpDir, config);
1420
+ const totalScore = fileReports.reduce((sum, r) => sum + r.score, 0);
1421
+ const averageScore = fileReports.length > 0 ? totalScore / fileReports.length : 0;
1422
+ // 5. Cleanup
1423
+ try {
1424
+ fs.rmSync(tmpDir, { recursive: true, force: true });
1425
+ }
1426
+ catch {
1427
+ // non-fatal — temp dirs are cleaned by the OS eventually
1428
+ }
1429
+ return {
1430
+ commitHash: hash ?? commitHash,
1431
+ commitDate,
1432
+ author: author ?? '',
1433
+ message,
1434
+ files: fileReports,
1435
+ totalScore,
1436
+ averageScore,
1437
+ };
1438
+ }
1439
+ /**
1440
+ * Run historical analysis over all commits since a given date.
1441
+ * Returns results ordered chronologically (oldest first).
1442
+ */
1443
+ async function analyzeHistoricalCommits(sinceDate, targetPath, maxCommits, config, maxSamples = 10) {
1444
+ assertGitRepo(targetPath);
1445
+ const isoDate = sinceDate.toISOString();
1446
+ const raw = execGit(`git log --since="${isoDate}" --format="%H" --max-count=${maxCommits}`, targetPath);
1447
+ if (!raw)
1448
+ return [];
1449
+ const hashes = raw.split('\n').filter(Boolean);
1450
+ // Sample: distribute evenly across the range
1451
+ // E.g. 122 commits, maxSamples=10 → floor(i·121/9) picks index 0, 13, 26, 40, 53, 67, 80, 94, 107, 121
1452
+ const sampled = hashes.length <= maxSamples
1453
+ ? hashes
1454
+ : Array.from({ length: maxSamples }, (_, i) => hashes[Math.floor(i * (hashes.length - 1) / (maxSamples - 1))]);
1455
+ const analyses = await Promise.all(sampled.map(h => analyzeSingleCommit(h, targetPath, config).catch(() => null)));
1456
+ return analyses
1457
+ .filter((a) => a !== null)
1458
+ .sort((a, b) => a.commitDate.getTime() - b.commitDate.getTime());
1459
+ }
1460
+ // ---------------------------------------------------------------------------
1461
+ // TrendAnalyzer
1462
+ // ---------------------------------------------------------------------------
1463
+ export class TrendAnalyzer {
1464
+ projectPath;
1465
+ config;
1466
+ constructor(projectPath, config) {
1467
+ this.projectPath = projectPath;
1468
+ this.config = config;
1469
+ }
1470
+ // --- Static utility methods -----------------------------------------------
1471
+ static calculateMovingAverage(data, windowSize) {
1472
+ return data.map((_, i) => {
1473
+ const start = Math.max(0, i - windowSize + 1);
1474
+ const window = data.slice(start, i + 1);
1475
+ return window.reduce((s, p) => s + p.score, 0) / window.length;
1476
+ });
1477
+ }
1478
+ static linearRegression(data) {
1479
+ const n = data.length;
1480
+ if (n < 2)
1481
+ return { slope: 0, intercept: data[0]?.score ?? 0, r2: 0 };
1482
+ const xs = data.map((_, i) => i);
1483
+ const ys = data.map(p => p.score);
1484
+ const xMean = xs.reduce((s, x) => s + x, 0) / n;
1485
+ const yMean = ys.reduce((s, y) => s + y, 0) / n;
1486
+ const ssXX = xs.reduce((s, x) => s + (x - xMean) ** 2, 0);
1487
+ const ssXY = xs.reduce((s, x, i) => s + (x - xMean) * (ys[i] - yMean), 0);
1488
+ const ssYY = ys.reduce((s, y) => s + (y - yMean) ** 2, 0);
1489
+ const slope = ssXX === 0 ? 0 : ssXY / ssXX;
1490
+ const intercept = yMean - slope * xMean;
1491
+ const r2 = ssYY === 0 ? 1 : (ssXY ** 2) / (ssXX * ssYY);
1492
+ return { slope, intercept, r2 };
1493
+ }
1494
+ /** Generate a simple horizontal ASCII bar chart (one bar per data point). */
1495
+ static generateTrendChart(data) {
1496
+ if (data.length === 0)
1497
+ return '(no data)';
1498
+ const maxScore = Math.max(...data.map(p => p.score), 1);
1499
+ const chartWidth = 40;
1500
+ const lines = data.map(p => {
1501
+ const barLen = Math.round((p.score / maxScore) * chartWidth);
1502
+ const bar = '█'.repeat(barLen);
1503
+ const dateStr = p.date.toISOString().slice(0, 10);
1504
+ return `${dateStr} │${bar.padEnd(chartWidth)} ${p.score.toFixed(1)}`;
1505
+ });
1506
+ return lines.join('\n');
1507
+ }
1508
+ // --- Instance method -------------------------------------------------------
1509
+ async analyzeTrend(options) {
1510
+ assertGitRepo(this.projectPath);
1511
+ const periodDays = {
1512
+ week: 7, month: 30, quarter: 90, year: 365,
1513
+ };
1514
+ const days = periodDays[options.period ?? 'month'] ?? 30;
1515
+ const sinceDate = options.since
1516
+ ? new Date(options.since)
1517
+ : new Date(Date.now() - days * 24 * 60 * 60 * 1000);
1518
+ const historicalAnalyses = await analyzeHistoricalCommits(sinceDate, this.projectPath, 100, this.config, 10);
1519
+ const trendPoints = historicalAnalyses.map(h => ({
1520
+ date: h.commitDate,
1521
+ score: h.averageScore,
1522
+ fileCount: h.files.length,
1523
+ avgIssuesPerFile: h.files.length > 0
1524
+ ? h.files.reduce((s, f) => s + f.issues.length, 0) / h.files.length
1525
+ : 0,
1526
+ }));
1527
+ const regression = TrendAnalyzer.linearRegression(trendPoints);
1528
+ // Current state report
1529
+ const currentFiles = analyzeProject(this.projectPath, this.config);
1530
+ const baseReport = buildReport(this.projectPath, currentFiles);
1531
+ return {
1532
+ ...baseReport,
1533
+ trend: trendPoints,
1534
+ regression,
1535
+ };
1536
+ }
1537
+ }
1538
+ function parseGitBlame(blameOutput) {
1539
+ const entries = [];
1540
+ const lines = blameOutput.split('\n');
1541
+ let i = 0;
1542
+ while (i < lines.length) {
1543
+ const headerLine = lines[i];
1544
+ if (!headerLine || headerLine.trim() === '') {
1545
+ i++;
1546
+ continue;
1547
+ }
1548
+ // Porcelain blame format: first line is "<hash> <orig-line> <final-line> [<num-lines>]"
1549
+ const headerMatch = headerLine.match(/^([0-9a-f]{40})\s/);
1550
+ if (!headerMatch) {
1551
+ i++;
1552
+ continue;
1553
+ }
1554
+ const hash = headerMatch[1];
1555
+ let author = '';
1556
+ let email = '';
1557
+ let codeLine = '';
1558
+ i++;
1559
+ while (i < lines.length && !lines[i].match(/^[0-9a-f]{40}\s/)) {
1560
+ const l = lines[i];
1561
+ if (l.startsWith('author '))
1562
+ author = l.slice(7).trim();
1563
+ else if (l.startsWith('author-mail '))
1564
+ email = l.slice(12).replace(/[<>]/g, '').trim();
1565
+ else if (l.startsWith('\t'))
1566
+ codeLine = l.slice(1);
1567
+ i++;
1568
+ }
1569
+ entries.push({ hash, author, email, line: codeLine });
1570
+ }
1571
+ return entries;
1572
+ }
1573
+ export class BlameAnalyzer {
1574
+ projectPath;
1575
+ config;
1576
+ constructor(projectPath, config) {
1577
+ this.projectPath = projectPath;
1578
+ this.config = config;
1579
+ }
1580
+ /** Blame a single file: returns per-author attribution. */
1581
+ static async analyzeFileBlame(filePath) {
1582
+ const dir = path.dirname(filePath);
1583
+ assertGitRepo(dir);
1584
+ const blameOutput = execGit(`git blame --porcelain "${filePath}"`, dir);
1585
+ const entries = parseGitBlame(blameOutput);
1586
+ // Analyse issues in the file
1587
+ const report = analyzeFilePath(filePath);
1588
+ // Map line numbers of issues to authors
1589
+ const issuesByLine = new Map();
1590
+ for (const issue of report.issues) {
1591
+ issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
1592
+ }
1593
+ // Aggregate by author
1594
+ const byAuthor = new Map();
1595
+ entries.forEach((entry, idx) => {
1596
+ const key = entry.email || entry.author;
1597
+ if (!byAuthor.has(key)) {
1598
+ byAuthor.set(key, {
1599
+ author: entry.author,
1600
+ email: entry.email,
1601
+ commits: 0,
1602
+ linesChanged: 0,
1603
+ issuesIntroduced: 0,
1604
+ avgScoreImpact: 0,
1605
+ });
1606
+ }
1607
+ const attr = byAuthor.get(key);
1608
+ attr.linesChanged++;
1609
+ const lineNum = idx + 1;
1610
+ if (issuesByLine.has(lineNum)) {
1611
+ attr.issuesIntroduced += issuesByLine.get(lineNum);
1612
+ }
1613
+ });
1614
+ // Count unique commits per author
1615
+ const commitsByAuthor = new Map();
1616
+ for (const entry of entries) {
1617
+ const key = entry.email || entry.author;
1618
+ if (!commitsByAuthor.has(key))
1619
+ commitsByAuthor.set(key, new Set());
1620
+ commitsByAuthor.get(key).add(entry.hash);
1621
+ }
1622
+ const total = entries.length || 1;
1623
+ const results = [];
1624
+ for (const [key, attr] of byAuthor) {
1625
+ attr.commits = commitsByAuthor.get(key)?.size ?? 0;
1626
+ attr.avgScoreImpact = (attr.linesChanged / total) * report.score;
1627
+ results.push(attr);
1628
+ }
1629
+ return results.sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
1630
+ }
1631
+ /** Blame for a specific rule across all files in targetPath. */
1632
+ static async analyzeRuleBlame(rule, targetPath) {
1633
+ assertGitRepo(targetPath);
1634
+ const tsFiles = fs
1635
+ .readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
1636
+ .filter((f) => (f.endsWith('.ts') || f.endsWith('.tsx')) && !f.includes('node_modules'))
1637
+ .map(f => path.join(targetPath, f));
1638
+ const combined = new Map();
1639
+ for (const file of tsFiles) {
1640
+ const report = analyzeFilePath(file);
1641
+ const ruleIssues = report.issues.filter(i => i.rule === rule);
1642
+ if (ruleIssues.length === 0)
1643
+ continue;
1644
+ let blameEntries = [];
1645
+ try {
1646
+ const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath);
1647
+ blameEntries = parseGitBlame(blameOutput);
1648
+ }
1649
+ catch {
1650
+ continue;
1651
+ }
1652
+ for (const issue of ruleIssues) {
1653
+ const entry = blameEntries[issue.line - 1];
1654
+ if (!entry)
1655
+ continue;
1656
+ const key = entry.email || entry.author;
1657
+ if (!combined.has(key)) {
1658
+ combined.set(key, {
1659
+ author: entry.author,
1660
+ email: entry.email,
1661
+ commits: 0,
1662
+ linesChanged: 0,
1663
+ issuesIntroduced: 0,
1664
+ avgScoreImpact: 0,
1665
+ });
1666
+ }
1667
+ const attr = combined.get(key);
1668
+ attr.issuesIntroduced++;
1669
+ attr.avgScoreImpact += RULE_WEIGHTS[rule]?.weight ?? 5;
1670
+ }
1671
+ }
1672
+ return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
1673
+ }
1674
+ /** Overall blame across all files and rules. */
1675
+ static async analyzeOverallBlame(targetPath) {
1676
+ assertGitRepo(targetPath);
1677
+ const tsFiles = fs
1678
+ .readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
1679
+ .filter((f) => (f.endsWith('.ts') || f.endsWith('.tsx')) && !f.includes('node_modules'))
1680
+ .map(f => path.join(targetPath, f));
1681
+ const combined = new Map();
1682
+ const commitsByAuthor = new Map();
1683
+ for (const file of tsFiles) {
1684
+ let blameEntries = [];
1685
+ try {
1686
+ const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath);
1687
+ blameEntries = parseGitBlame(blameOutput);
1688
+ }
1689
+ catch {
1690
+ continue;
1691
+ }
1692
+ const report = analyzeFilePath(file);
1693
+ const issuesByLine = new Map();
1694
+ for (const issue of report.issues) {
1695
+ issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
1696
+ }
1697
+ blameEntries.forEach((entry, idx) => {
1698
+ const key = entry.email || entry.author;
1699
+ if (!combined.has(key)) {
1700
+ combined.set(key, {
1701
+ author: entry.author,
1702
+ email: entry.email,
1703
+ commits: 0,
1704
+ linesChanged: 0,
1705
+ issuesIntroduced: 0,
1706
+ avgScoreImpact: 0,
1707
+ });
1708
+ commitsByAuthor.set(key, new Set());
1709
+ }
1710
+ const attr = combined.get(key);
1711
+ attr.linesChanged++;
1712
+ commitsByAuthor.get(key).add(entry.hash);
1713
+ const lineNum = idx + 1;
1714
+ if (issuesByLine.has(lineNum)) {
1715
+ attr.issuesIntroduced += issuesByLine.get(lineNum);
1716
+ attr.avgScoreImpact += report.score * (1 / (blameEntries.length || 1));
1717
+ }
1718
+ });
1719
+ }
1720
+ for (const [key, attr] of combined) {
1721
+ attr.commits = commitsByAuthor.get(key)?.size ?? 0;
1722
+ }
1723
+ return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
1724
+ }
1725
+ // --- Instance method -------------------------------------------------------
1726
+ async analyzeBlame(options) {
1727
+ assertGitRepo(this.projectPath);
1728
+ let blame = [];
1729
+ const mode = options.target ?? 'overall';
1730
+ if (mode === 'file' && options.filePath) {
1731
+ blame = await BlameAnalyzer.analyzeFileBlame(options.filePath);
1732
+ }
1733
+ else if (mode === 'rule' && options.rule) {
1734
+ blame = await BlameAnalyzer.analyzeRuleBlame(options.rule, this.projectPath);
1735
+ }
1736
+ else {
1737
+ blame = await BlameAnalyzer.analyzeOverallBlame(this.projectPath);
1738
+ }
1739
+ if (options.top) {
1740
+ blame = blame.slice(0, options.top);
1741
+ }
1742
+ const currentFiles = analyzeProject(this.projectPath, this.config);
1743
+ const baseReport = buildReport(this.projectPath, currentFiles);
1744
+ return { ...baseReport, blame };
1745
+ }
1746
+ }
1309
1747
  //# sourceMappingURL=analyzer.js.map
package/dist/cli.js CHANGED
@@ -2,6 +2,9 @@
2
2
  import { Command } from 'commander';
3
3
  import { writeFileSync } from 'node:fs';
4
4
  import { resolve } from 'node:path';
5
+ import { createRequire } from 'node:module';
6
+ const require = createRequire(import.meta.url);
7
+ const { version: VERSION } = require('../package.json');
5
8
  import { analyzeProject } from './analyzer.js';
6
9
  import { buildReport, formatMarkdown, formatAIOutput } from './reporter.js';
7
10
  import { printConsole, printDiff } from './printer.js';
@@ -11,11 +14,12 @@ import { computeDiff } from './diff.js';
11
14
  import { generateHtmlReport } from './report.js';
12
15
  import { generateBadge } from './badge.js';
13
16
  import { emitCIAnnotations, printCISummary } from './ci.js';
17
+ import { TrendAnalyzer, BlameAnalyzer } from './analyzer.js';
14
18
  const program = new Command();
15
19
  program
16
20
  .name('drift')
17
21
  .description('Detect silent technical debt left by AI-generated code')
18
- .version('0.6.0');
22
+ .version(VERSION);
19
23
  program
20
24
  .command('scan [path]', { isDefault: true })
21
25
  .description('Scan a directory for vibe coding drift')
@@ -146,5 +150,39 @@ program
146
150
  process.exit(1);
147
151
  }
148
152
  });
153
+ program
154
+ .command('trend [period]')
155
+ .description('Analyze trend of technical debt over time')
156
+ .option('--since <date>', 'Start date for trend analysis (ISO format)')
157
+ .option('--until <date>', 'End date for trend analysis (ISO format)')
158
+ .action(async (period, options) => {
159
+ const resolvedPath = resolve('.');
160
+ process.stderr.write(`\nAnalyzing trend in ${resolvedPath}...\n`);
161
+ const config = await loadConfig(resolvedPath);
162
+ const analyzer = new TrendAnalyzer(resolvedPath, config);
163
+ const trendData = await analyzer.analyzeTrend({
164
+ period: period,
165
+ since: options.since,
166
+ until: options.until
167
+ });
168
+ process.stderr.write(`\nTrend analysis complete:\n`);
169
+ process.stdout.write(JSON.stringify(trendData, null, 2) + '\n');
170
+ });
171
+ program
172
+ .command('blame [target]')
173
+ .description('Analyze which files/rules contribute most to technical debt')
174
+ .option('--top <n>', 'Number of top contributors to show (default: 10)', '10')
175
+ .action(async (target, options) => {
176
+ const resolvedPath = resolve('.');
177
+ process.stderr.write(`\nAnalyzing blame in ${resolvedPath}...\n`);
178
+ const config = await loadConfig(resolvedPath);
179
+ const analyzer = new BlameAnalyzer(resolvedPath, config);
180
+ const blameData = await analyzer.analyzeBlame({
181
+ target: target,
182
+ top: Number(options.top)
183
+ });
184
+ process.stderr.write(`\nBlame analysis complete:\n`);
185
+ process.stdout.write(JSON.stringify(blameData, null, 2) + '\n');
186
+ });
149
187
  program.parse();
150
188
  //# sourceMappingURL=cli.js.map
package/dist/report.js CHANGED
@@ -1,5 +1,7 @@
1
1
  import { basename } from 'node:path';
2
- const VERSION = '0.6.0';
2
+ import { createRequire } from 'node:module';
3
+ const require = createRequire(import.meta.url);
4
+ const { version: VERSION } = require('../package.json');
3
5
  function severityColor(severity) {
4
6
  switch (severity) {
5
7
  case 'error': return '#ef4444';
package/dist/types.d.ts CHANGED
@@ -95,4 +95,43 @@ export interface DriftDiff {
95
95
  newIssuesCount: number;
96
96
  resolvedIssuesCount: number;
97
97
  }
98
+ /** Historical analysis data for a single commit */
99
+ export interface HistoricalAnalysis {
100
+ commitHash: string;
101
+ commitDate: Date;
102
+ author: string;
103
+ message: string;
104
+ files: FileReport[];
105
+ totalScore: number;
106
+ averageScore: number;
107
+ }
108
+ /** Trend data point for score evolution */
109
+ export interface TrendDataPoint {
110
+ date: Date;
111
+ score: number;
112
+ fileCount: number;
113
+ avgIssuesPerFile: number;
114
+ }
115
+ /** Blame attribution data */
116
+ export interface BlameAttribution {
117
+ author: string;
118
+ email: string;
119
+ commits: number;
120
+ linesChanged: number;
121
+ issuesIntroduced: number;
122
+ avgScoreImpact: number;
123
+ }
124
+ /** Extended DriftReport with historical context */
125
+ export interface DriftTrendReport extends DriftReport {
126
+ trend: TrendDataPoint[];
127
+ regression: {
128
+ slope: number;
129
+ intercept: number;
130
+ r2: number;
131
+ };
132
+ }
133
+ /** Extended DriftReport with blame data */
134
+ export interface DriftBlameReport extends DriftReport {
135
+ blame: BlameAttribution[];
136
+ }
98
137
  //# sourceMappingURL=types.d.ts.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@eduardbar/drift",
3
- "version": "0.8.0",
3
+ "version": "0.9.1",
4
4
  "description": "Detect silent technical debt left by AI-generated code",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",