@eduardbar/drift 0.9.1 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. package/.github/actions/drift-scan/README.md +61 -0
  2. package/.github/actions/drift-scan/action.yml +65 -0
  3. package/.github/workflows/publish-vscode.yml +78 -0
  4. package/AGENTS.md +83 -23
  5. package/README.md +69 -2
  6. package/ROADMAP.md +130 -98
  7. package/dist/analyzer.d.ts +8 -38
  8. package/dist/analyzer.js +181 -1526
  9. package/dist/badge.js +40 -22
  10. package/dist/ci.js +32 -18
  11. package/dist/cli.js +125 -4
  12. package/dist/config.js +1 -1
  13. package/dist/diff.d.ts +0 -7
  14. package/dist/diff.js +26 -25
  15. package/dist/fix.d.ts +17 -0
  16. package/dist/fix.js +132 -0
  17. package/dist/git/blame.d.ts +22 -0
  18. package/dist/git/blame.js +227 -0
  19. package/dist/git/helpers.d.ts +36 -0
  20. package/dist/git/helpers.js +152 -0
  21. package/dist/git/trend.d.ts +21 -0
  22. package/dist/git/trend.js +81 -0
  23. package/dist/git.d.ts +0 -13
  24. package/dist/git.js +27 -21
  25. package/dist/index.d.ts +5 -1
  26. package/dist/index.js +3 -0
  27. package/dist/map.d.ts +3 -0
  28. package/dist/map.js +103 -0
  29. package/dist/metrics.d.ts +4 -0
  30. package/dist/metrics.js +176 -0
  31. package/dist/plugins.d.ts +6 -0
  32. package/dist/plugins.js +74 -0
  33. package/dist/printer.js +20 -0
  34. package/dist/report.js +654 -293
  35. package/dist/reporter.js +85 -2
  36. package/dist/review.d.ts +15 -0
  37. package/dist/review.js +80 -0
  38. package/dist/rules/comments.d.ts +4 -0
  39. package/dist/rules/comments.js +45 -0
  40. package/dist/rules/complexity.d.ts +4 -0
  41. package/dist/rules/complexity.js +51 -0
  42. package/dist/rules/coupling.d.ts +4 -0
  43. package/dist/rules/coupling.js +19 -0
  44. package/dist/rules/magic.d.ts +4 -0
  45. package/dist/rules/magic.js +33 -0
  46. package/dist/rules/nesting.d.ts +5 -0
  47. package/dist/rules/nesting.js +82 -0
  48. package/dist/rules/phase0-basic.d.ts +11 -0
  49. package/dist/rules/phase0-basic.js +183 -0
  50. package/dist/rules/phase1-complexity.d.ts +7 -0
  51. package/dist/rules/phase1-complexity.js +8 -0
  52. package/dist/rules/phase2-crossfile.d.ts +23 -0
  53. package/dist/rules/phase2-crossfile.js +135 -0
  54. package/dist/rules/phase3-arch.d.ts +23 -0
  55. package/dist/rules/phase3-arch.js +151 -0
  56. package/dist/rules/phase3-configurable.d.ts +6 -0
  57. package/dist/rules/phase3-configurable.js +97 -0
  58. package/dist/rules/phase5-ai.d.ts +8 -0
  59. package/dist/rules/phase5-ai.js +262 -0
  60. package/dist/rules/phase8-semantic.d.ts +17 -0
  61. package/dist/rules/phase8-semantic.js +110 -0
  62. package/dist/rules/promise.d.ts +4 -0
  63. package/dist/rules/promise.js +24 -0
  64. package/dist/rules/shared.d.ts +7 -0
  65. package/dist/rules/shared.js +27 -0
  66. package/dist/snapshot.d.ts +19 -0
  67. package/dist/snapshot.js +119 -0
  68. package/dist/types.d.ts +69 -0
  69. package/dist/utils.d.ts +2 -1
  70. package/dist/utils.js +1 -0
  71. package/docs/AGENTS.md +146 -0
  72. package/docs/PRD.md +208 -0
  73. package/package.json +8 -3
  74. package/packages/eslint-plugin-drift/src/index.ts +1 -1
  75. package/packages/vscode-drift/.vscodeignore +9 -0
  76. package/packages/vscode-drift/LICENSE +21 -0
  77. package/packages/vscode-drift/README.md +64 -0
  78. package/packages/vscode-drift/images/icon.png +0 -0
  79. package/packages/vscode-drift/images/icon.svg +30 -0
  80. package/packages/vscode-drift/package-lock.json +485 -0
  81. package/packages/vscode-drift/package.json +119 -0
  82. package/packages/vscode-drift/src/analyzer.ts +40 -0
  83. package/packages/vscode-drift/src/diagnostics.ts +55 -0
  84. package/packages/vscode-drift/src/extension.ts +135 -0
  85. package/packages/vscode-drift/src/statusbar.ts +55 -0
  86. package/packages/vscode-drift/src/treeview.ts +110 -0
  87. package/packages/vscode-drift/tsconfig.json +18 -0
  88. package/packages/vscode-drift/vscode-drift-0.1.0.vsix +0 -0
  89. package/packages/vscode-drift/vscode-drift-0.1.1.vsix +0 -0
  90. package/src/analyzer.ts +248 -1765
  91. package/src/badge.ts +38 -16
  92. package/src/ci.ts +38 -17
  93. package/src/cli.ts +143 -4
  94. package/src/config.ts +1 -1
  95. package/src/diff.ts +36 -30
  96. package/src/fix.ts +178 -0
  97. package/src/git/blame.ts +279 -0
  98. package/src/git/helpers.ts +198 -0
  99. package/src/git/trend.ts +117 -0
  100. package/src/git.ts +33 -24
  101. package/src/index.ts +16 -1
  102. package/src/map.ts +117 -0
  103. package/src/metrics.ts +200 -0
  104. package/src/plugins.ts +76 -0
  105. package/src/printer.ts +20 -0
  106. package/src/report.ts +666 -296
  107. package/src/reporter.ts +95 -2
  108. package/src/review.ts +98 -0
  109. package/src/rules/comments.ts +56 -0
  110. package/src/rules/complexity.ts +57 -0
  111. package/src/rules/coupling.ts +23 -0
  112. package/src/rules/magic.ts +38 -0
  113. package/src/rules/nesting.ts +88 -0
  114. package/src/rules/phase0-basic.ts +194 -0
  115. package/src/rules/phase1-complexity.ts +8 -0
  116. package/src/rules/phase2-crossfile.ts +177 -0
  117. package/src/rules/phase3-arch.ts +183 -0
  118. package/src/rules/phase3-configurable.ts +132 -0
  119. package/src/rules/phase5-ai.ts +292 -0
  120. package/src/rules/phase8-semantic.ts +136 -0
  121. package/src/rules/promise.ts +29 -0
  122. package/src/rules/shared.ts +39 -0
  123. package/src/snapshot.ts +175 -0
  124. package/src/types.ts +75 -1
  125. package/src/utils.ts +3 -1
  126. package/tests/helpers.ts +45 -0
  127. package/tests/new-features.test.ts +153 -0
  128. package/tests/rules.test.ts +1269 -0
  129. package/vitest.config.ts +15 -0
package/dist/analyzer.js CHANGED
@@ -1,11 +1,27 @@
1
- import * as fs from 'node:fs';
2
- import * as crypto from 'node:crypto';
1
+ // drift-ignore-file
3
2
  import * as path from 'node:path';
4
- import * as os from 'node:os';
5
- import { execSync } from 'node:child_process';
6
- import { Project, SyntaxKind, } from 'ts-morph';
7
- import { buildReport } from './reporter.js';
8
- // Rules and their drift score weight
3
+ import { Project } from 'ts-morph';
4
+ // Rules
5
+ import { isFileIgnored } from './rules/shared.js';
6
+ import { detectLargeFile, detectLargeFunctions, detectDebugLeftovers, detectDeadCode, detectDuplicateFunctionNames, detectAnyAbuse, detectCatchSwallow, detectMissingReturnTypes, } from './rules/phase0-basic.js';
7
+ import { detectHighComplexity } from './rules/complexity.js';
8
+ import { detectDeepNesting, detectTooManyParams } from './rules/nesting.js';
9
+ import { detectHighCoupling } from './rules/coupling.js';
10
+ import { detectPromiseStyleMix } from './rules/promise.js';
11
+ import { detectMagicNumbers } from './rules/magic.js';
12
+ import { detectCommentContradiction } from './rules/comments.js';
13
+ import { detectDeadFiles, detectUnusedExports, detectUnusedDependencies, } from './rules/phase2-crossfile.js';
14
+ import { detectCircularDependencies, detectLayerViolations, detectCrossBoundaryImports, } from './rules/phase3-arch.js';
15
+ import { detectControllerNoDb, detectServiceNoHttp, detectMaxFunctionLines, } from './rules/phase3-configurable.js';
16
+ import { detectOverCommented, detectHardcodedConfig, detectInconsistentErrorHandling, detectUnnecessaryAbstraction, detectNamingInconsistency, } from './rules/phase5-ai.js';
17
+ import { collectFunctions, fingerprintFunction, calculateScore, } from './rules/phase8-semantic.js';
18
+ import { loadPlugins } from './plugins.js';
19
+ // Git analyzers (re-exported as part of the public API)
20
+ export { TrendAnalyzer } from './git/trend.js';
21
+ export { BlameAnalyzer } from './git/blame.js';
22
+ // ---------------------------------------------------------------------------
23
+ // Rule weights — single source of truth for severities and drift score weights
24
+ // ---------------------------------------------------------------------------
9
25
  export const RULE_WEIGHTS = {
10
26
  'large-file': { severity: 'error', weight: 20 },
11
27
  'large-function': { severity: 'error', weight: 15 },
@@ -32,869 +48,95 @@ export const RULE_WEIGHTS = {
32
48
  // Phase 3b/c: layer and module boundary enforcement (require drift.config.ts)
33
49
  'layer-violation': { severity: 'error', weight: 16 },
34
50
  'cross-boundary-import': { severity: 'warning', weight: 10 },
51
+ 'controller-no-db': { severity: 'warning', weight: 11 },
52
+ 'service-no-http': { severity: 'warning', weight: 11 },
53
+ 'max-function-lines': { severity: 'warning', weight: 9 },
35
54
  // Phase 5: AI authorship heuristics
36
55
  'over-commented': { severity: 'info', weight: 4 },
37
56
  'hardcoded-config': { severity: 'warning', weight: 10 },
38
57
  'inconsistent-error-handling': { severity: 'warning', weight: 8 },
39
58
  'unnecessary-abstraction': { severity: 'warning', weight: 7 },
40
59
  'naming-inconsistency': { severity: 'warning', weight: 6 },
60
+ 'ai-code-smell': { severity: 'warning', weight: 12 },
41
61
  // Phase 8: semantic duplication
42
62
  'semantic-duplication': { severity: 'warning', weight: 12 },
63
+ 'plugin-error': { severity: 'warning', weight: 4 },
43
64
  };
44
- function hasIgnoreComment(file, line) {
45
- const lines = file.getFullText().split('\n');
46
- const currentLine = lines[line - 1] ?? '';
47
- const prevLine = lines[line - 2] ?? '';
48
- if (/\/\/\s*drift-ignore\b/.test(currentLine))
49
- return true;
50
- if (/\/\/\s*drift-ignore\b/.test(prevLine))
51
- return true;
52
- return false;
53
- }
54
- function isFileIgnored(file) {
55
- const firstLines = file.getFullText().split('\n').slice(0, 10).join('\n');
56
- return /\/\/\s*drift-ignore-file\b/.test(firstLines);
57
- }
58
- function getSnippet(node, file) {
59
- const startLine = node.getStartLineNumber();
60
- const lines = file.getFullText().split('\n');
61
- return lines
62
- .slice(Math.max(0, startLine - 1), startLine + 1)
63
- .join('\n')
64
- .trim()
65
- .slice(0, 120);
66
- }
67
- function getFunctionLikeLines(node) {
68
- return node.getEndLineNumber() - node.getStartLineNumber();
69
- }
70
- // ---------------------------------------------------------------------------
71
- // Existing rules
72
- // ---------------------------------------------------------------------------
73
- function detectLargeFile(file) {
74
- const lineCount = file.getEndLineNumber();
75
- if (lineCount > 300) {
76
- return [
77
- {
78
- rule: 'large-file',
79
- severity: 'error',
80
- message: `File has ${lineCount} lines (threshold: 300). Large files are the #1 sign of AI-generated structural drift.`,
81
- line: 1,
82
- column: 1,
83
- snippet: `// ${lineCount} lines total`,
84
- },
85
- ];
86
- }
87
- return [];
88
- }
89
- function detectLargeFunctions(file) {
90
- const issues = [];
91
- const fns = [
92
- ...file.getFunctions(),
93
- ...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
94
- ...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
95
- ...file.getClasses().flatMap((c) => c.getMethods()),
96
- ];
97
- for (const fn of fns) {
98
- const lines = getFunctionLikeLines(fn);
99
- const startLine = fn.getStartLineNumber();
100
- if (lines > 50) {
101
- if (hasIgnoreComment(file, startLine))
102
- continue;
103
- issues.push({
104
- rule: 'large-function',
105
- severity: 'error',
106
- message: `Function spans ${lines} lines (threshold: 50). AI tends to dump logic into single functions.`,
107
- line: startLine,
108
- column: fn.getStartLinePos(),
109
- snippet: getSnippet(fn, file),
110
- });
111
- }
112
- }
113
- return issues;
114
- }
115
- function detectDebugLeftovers(file) {
116
- const issues = [];
117
- for (const call of file.getDescendantsOfKind(SyntaxKind.CallExpression)) {
118
- const expr = call.getExpression().getText();
119
- const line = call.getStartLineNumber();
120
- if (/^console\.(log|warn|error|debug|info)\b/.test(expr)) {
121
- if (hasIgnoreComment(file, line))
122
- continue;
123
- issues.push({
124
- rule: 'debug-leftover',
125
- severity: 'warning',
126
- message: `console.${expr.split('.')[1]} left in production code.`,
127
- line,
128
- column: call.getStartLinePos(),
129
- snippet: getSnippet(call, file),
130
- });
131
- }
132
- }
133
- const lines = file.getFullText().split('\n');
134
- lines.forEach((lineContent, i) => {
135
- if (/\/\/\s*(TODO|FIXME|HACK|XXX|TEMP)\b/i.test(lineContent)) {
136
- if (hasIgnoreComment(file, i + 1))
137
- return;
138
- issues.push({
139
- rule: 'debug-leftover',
140
- severity: 'warning',
141
- message: `Unresolved marker found: ${lineContent.trim().slice(0, 60)}`,
142
- line: i + 1,
143
- column: 1,
144
- snippet: lineContent.trim().slice(0, 120),
145
- });
146
- }
147
- });
148
- return issues;
149
- }
150
- function detectDeadCode(file) {
151
- const issues = [];
152
- for (const imp of file.getImportDeclarations()) {
153
- for (const named of imp.getNamedImports()) {
154
- const name = named.getName();
155
- const refs = file.getDescendantsOfKind(SyntaxKind.Identifier).filter((id) => id.getText() === name && id !== named.getNameNode());
156
- if (refs.length === 0) {
157
- issues.push({
158
- rule: 'dead-code',
159
- severity: 'warning',
160
- message: `Unused import '${name}'. AI often imports more than it uses.`,
161
- line: imp.getStartLineNumber(),
162
- column: imp.getStartLinePos(),
163
- snippet: getSnippet(imp, file),
164
- });
165
- }
166
- }
167
- }
168
- return issues;
169
- }
170
- function detectDuplicateFunctionNames(file) {
171
- const issues = [];
172
- const seen = new Map();
173
- const fns = file.getFunctions();
174
- for (const fn of fns) {
175
- const name = fn.getName();
176
- if (!name)
65
+ const AI_SMELL_SIGNALS = new Set([
66
+ 'over-commented',
67
+ 'hardcoded-config',
68
+ 'inconsistent-error-handling',
69
+ 'unnecessary-abstraction',
70
+ 'naming-inconsistency',
71
+ 'comment-contradiction',
72
+ 'promise-style-mix',
73
+ 'any-abuse',
74
+ ]);
75
+ function detectAICodeSmell(issues, filePath) {
76
+ const signalCounts = new Map();
77
+ for (const issue of issues) {
78
+ if (!AI_SMELL_SIGNALS.has(issue.rule))
177
79
  continue;
178
- const normalized = name.toLowerCase().replace(/[_-]/g, '');
179
- if (seen.has(normalized)) {
180
- issues.push({
181
- rule: 'duplicate-function-name',
182
- severity: 'error',
183
- message: `Function '${name}' looks like a duplicate of a previously defined function. AI often generates near-identical helpers.`,
184
- line: fn.getStartLineNumber(),
185
- column: fn.getStartLinePos(),
186
- snippet: getSnippet(fn, file),
187
- });
188
- }
189
- else {
190
- seen.set(normalized, fn.getStartLineNumber());
191
- }
80
+ signalCounts.set(issue.rule, (signalCounts.get(issue.rule) ?? 0) + 1);
192
81
  }
193
- return issues;
194
- }
195
- function detectAnyAbuse(file) {
196
- const issues = [];
197
- for (const node of file.getDescendantsOfKind(SyntaxKind.AnyKeyword)) {
198
- issues.push({
199
- rule: 'any-abuse',
82
+ const totalSignals = [...signalCounts.values()].reduce((sum, count) => sum + count, 0);
83
+ if (totalSignals < 3)
84
+ return [];
85
+ const triggers = [...signalCounts.entries()]
86
+ .sort((a, b) => b[1] - a[1])
87
+ .slice(0, 3)
88
+ .map(([rule, count]) => `${rule} x${count}`);
89
+ return [{
90
+ rule: 'ai-code-smell',
200
91
  severity: 'warning',
201
- message: `Explicit 'any' type detected. AI defaults to 'any' when it can't infer types properly.`,
202
- line: node.getStartLineNumber(),
203
- column: node.getStartLinePos(),
204
- snippet: getSnippet(node, file),
205
- });
206
- }
207
- return issues;
208
- }
209
- function detectCatchSwallow(file) {
210
- const issues = [];
211
- for (const tryCatch of file.getDescendantsOfKind(SyntaxKind.TryStatement)) {
212
- const catchClause = tryCatch.getCatchClause();
213
- if (!catchClause)
214
- continue;
215
- const block = catchClause.getBlock();
216
- const stmts = block.getStatements();
217
- if (stmts.length === 0) {
218
- issues.push({
219
- rule: 'catch-swallow',
220
- severity: 'warning',
221
- message: `Empty catch block silently swallows errors. Classic AI pattern to make code "not throw".`,
222
- line: catchClause.getStartLineNumber(),
223
- column: catchClause.getStartLinePos(),
224
- snippet: getSnippet(catchClause, file),
225
- });
226
- }
227
- }
228
- return issues;
229
- }
230
- function detectMissingReturnTypes(file) {
231
- const issues = [];
232
- for (const fn of file.getFunctions()) {
233
- if (!fn.getReturnTypeNode()) {
234
- issues.push({
235
- rule: 'no-return-type',
236
- severity: 'info',
237
- message: `Function '${fn.getName() ?? 'anonymous'}' has no explicit return type.`,
238
- line: fn.getStartLineNumber(),
239
- column: fn.getStartLinePos(),
240
- snippet: getSnippet(fn, file),
241
- });
242
- }
243
- }
244
- return issues;
245
- }
246
- // ---------------------------------------------------------------------------
247
- // Phase 1: complexity detection rules
248
- // ---------------------------------------------------------------------------
249
- /**
250
- * Cyclomatic complexity: count decision points in a function.
251
- * Each if/else if/ternary/?:/for/while/do/case/catch/&&/|| adds 1.
252
- * Threshold: > 10 is considered high complexity.
253
- */
254
- function getCyclomaticComplexity(fn) {
255
- let complexity = 1; // base path
256
- const incrementKinds = [
257
- SyntaxKind.IfStatement,
258
- SyntaxKind.ForStatement,
259
- SyntaxKind.ForInStatement,
260
- SyntaxKind.ForOfStatement,
261
- SyntaxKind.WhileStatement,
262
- SyntaxKind.DoStatement,
263
- SyntaxKind.CaseClause,
264
- SyntaxKind.CatchClause,
265
- SyntaxKind.ConditionalExpression, // ternary
266
- SyntaxKind.AmpersandAmpersandToken,
267
- SyntaxKind.BarBarToken,
268
- SyntaxKind.QuestionQuestionToken, // ??
269
- ];
270
- for (const kind of incrementKinds) {
271
- complexity += fn.getDescendantsOfKind(kind).length;
272
- }
273
- return complexity;
274
- }
275
- function detectHighComplexity(file) {
276
- const issues = [];
277
- const fns = [
278
- ...file.getFunctions(),
279
- ...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
280
- ...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
281
- ...file.getClasses().flatMap((c) => c.getMethods()),
282
- ];
283
- for (const fn of fns) {
284
- const complexity = getCyclomaticComplexity(fn);
285
- if (complexity > 10) {
286
- const startLine = fn.getStartLineNumber();
287
- if (hasIgnoreComment(file, startLine))
288
- continue;
289
- issues.push({
290
- rule: 'high-complexity',
291
- severity: 'error',
292
- message: `Cyclomatic complexity is ${complexity} (threshold: 10). AI generates correct code, not simple code.`,
293
- line: startLine,
294
- column: fn.getStartLinePos(),
295
- snippet: getSnippet(fn, file),
296
- });
297
- }
298
- }
299
- return issues;
300
- }
301
- /**
302
- * Deep nesting: count the maximum nesting depth of control flow inside a function.
303
- * Counts: if, for, while, do, try, switch.
304
- * Threshold: > 3 levels.
305
- */
306
- function getMaxNestingDepth(fn) {
307
- const nestingKinds = new Set([
308
- SyntaxKind.IfStatement,
309
- SyntaxKind.ForStatement,
310
- SyntaxKind.ForInStatement,
311
- SyntaxKind.ForOfStatement,
312
- SyntaxKind.WhileStatement,
313
- SyntaxKind.DoStatement,
314
- SyntaxKind.TryStatement,
315
- SyntaxKind.SwitchStatement,
316
- ]);
317
- let maxDepth = 0;
318
- function walk(node, depth) {
319
- if (nestingKinds.has(node.getKind())) {
320
- depth++;
321
- if (depth > maxDepth)
322
- maxDepth = depth;
323
- }
324
- for (const child of node.getChildren()) {
325
- walk(child, depth);
326
- }
327
- }
328
- walk(fn, 0);
329
- return maxDepth;
330
- }
331
- function detectDeepNesting(file) {
332
- const issues = [];
333
- const fns = [
334
- ...file.getFunctions(),
335
- ...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
336
- ...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
337
- ...file.getClasses().flatMap((c) => c.getMethods()),
338
- ];
339
- for (const fn of fns) {
340
- const depth = getMaxNestingDepth(fn);
341
- if (depth > 3) {
342
- const startLine = fn.getStartLineNumber();
343
- if (hasIgnoreComment(file, startLine))
344
- continue;
345
- issues.push({
346
- rule: 'deep-nesting',
347
- severity: 'warning',
348
- message: `Maximum nesting depth is ${depth} (threshold: 3). Deep nesting is the #1 readability killer.`,
349
- line: startLine,
350
- column: fn.getStartLinePos(),
351
- snippet: getSnippet(fn, file),
352
- });
353
- }
354
- }
355
- return issues;
356
- }
357
- /**
358
- * Too many parameters: functions with more than 4 parameters.
359
- * AI avoids refactoring parameters into objects/options bags.
360
- */
361
- function detectTooManyParams(file) {
362
- const issues = [];
363
- const fns = [
364
- ...file.getFunctions(),
365
- ...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
366
- ...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
367
- ...file.getClasses().flatMap((c) => c.getMethods()),
368
- ];
369
- for (const fn of fns) {
370
- const paramCount = fn.getParameters().length;
371
- if (paramCount > 4) {
372
- const startLine = fn.getStartLineNumber();
373
- if (hasIgnoreComment(file, startLine))
374
- continue;
375
- issues.push({
376
- rule: 'too-many-params',
377
- severity: 'warning',
378
- message: `Function has ${paramCount} parameters (threshold: 4). AI avoids refactoring into options objects.`,
379
- line: startLine,
380
- column: fn.getStartLinePos(),
381
- snippet: getSnippet(fn, file),
382
- });
383
- }
384
- }
385
- return issues;
386
- }
387
- /**
388
- * High coupling: files with more than 10 distinct import sources.
389
- * AI imports broadly without considering module cohesion.
390
- */
391
- function detectHighCoupling(file) {
392
- const imports = file.getImportDeclarations();
393
- const sources = new Set(imports.map((i) => i.getModuleSpecifierValue()));
394
- if (sources.size > 10) {
395
- return [
396
- {
397
- rule: 'high-coupling',
398
- severity: 'warning',
399
- message: `File imports from ${sources.size} distinct modules (threshold: 10). High coupling makes refactoring dangerous.`,
400
- line: 1,
401
- column: 1,
402
- snippet: `// ${sources.size} import sources`,
403
- },
404
- ];
405
- }
406
- return [];
407
- }
408
- /**
409
- * Promise style mix: async/await and .then()/.catch() used in the same file.
410
- * AI generates both styles without consistency.
411
- */
412
- function detectPromiseStyleMix(file) {
413
- const text = file.getFullText();
414
- // detect .then( or .catch( calls (property access on a promise)
415
- const hasThen = file.getDescendantsOfKind(SyntaxKind.PropertyAccessExpression).some((node) => {
416
- const name = node.getName();
417
- return name === 'then' || name === 'catch';
418
- });
419
- // detect async keyword usage
420
- const hasAsync = file.getDescendantsOfKind(SyntaxKind.AsyncKeyword).length > 0 ||
421
- /\bawait\b/.test(text);
422
- if (hasThen && hasAsync) {
423
- return [
424
- {
425
- rule: 'promise-style-mix',
426
- severity: 'warning',
427
- message: `File mixes async/await with .then()/.catch(). AI generates both styles without picking one.`,
428
- line: 1,
429
- column: 1,
430
- snippet: `// mixed promise styles detected`,
431
- },
432
- ];
433
- }
434
- return [];
435
- }
436
- /**
437
- * Magic numbers: numeric literals used directly in logic outside of named constants.
438
- * Excludes 0, 1, -1 (universally understood) and array indices in obvious patterns.
439
- */
440
- function detectMagicNumbers(file) {
441
- const issues = [];
442
- const ALLOWED = new Set([0, 1, -1, 2, 100]);
443
- for (const node of file.getDescendantsOfKind(SyntaxKind.NumericLiteral)) {
444
- const value = Number(node.getLiteralValue());
445
- if (ALLOWED.has(value))
446
- continue;
447
- // Skip: variable/const initializers at top level (those ARE the named constants)
448
- const parent = node.getParent();
449
- if (!parent)
450
- continue;
451
- const parentKind = parent.getKind();
452
- if (parentKind === SyntaxKind.VariableDeclaration ||
453
- parentKind === SyntaxKind.PropertyAssignment ||
454
- parentKind === SyntaxKind.EnumMember ||
455
- parentKind === SyntaxKind.Parameter)
456
- continue;
457
- const line = node.getStartLineNumber();
458
- if (hasIgnoreComment(file, line))
459
- continue;
460
- issues.push({
461
- rule: 'magic-number',
462
- severity: 'info',
463
- message: `Magic number ${value} used directly in logic. Extract to a named constant.`,
464
- line,
465
- column: node.getStartLinePos(),
466
- snippet: getSnippet(node, file),
467
- });
468
- }
469
- return issues;
470
- }
471
- /**
472
- * Comment contradiction: comments that restate exactly what the code does.
473
- * Classic AI pattern — documents the obvious instead of the why.
474
- * Detects: "// increment counter" above counter++, "// return x" above return x, etc.
475
- */
476
- function detectCommentContradiction(file) {
477
- const issues = [];
478
- const lines = file.getFullText().split('\n');
479
- // Patterns: comment that is a near-literal restatement of the next line
480
- const trivialCommentPatterns = [
481
- // "// return ..." above a return statement
482
- { comment: /\/\/\s*return\b/i, code: /^\s*return\b/ },
483
- // "// increment ..." or "// increase ..." above x++ or x += 1
484
- { comment: /\/\/\s*(increment|increase|add\s+1|plus\s+1)\b/i, code: /\+\+|(\+= ?1)\b/ },
485
- // "// decrement ..." above x-- or x -= 1
486
- { comment: /\/\/\s*(decrement|decrease|subtract\s+1|minus\s+1)\b/i, code: /--|(-= ?1)\b/ },
487
- // "// log ..." above console.log
488
- { comment: /\/\/\s*log\b/i, code: /console\.(log|warn|error)/ },
489
- // "// set ... to ..." or "// assign ..." above assignment
490
- { comment: /\/\/\s*(set|assign)\b/i, code: /^\s*\w[\w.[\]]*\s*=(?!=)/ },
491
- // "// call ..." above a function call
492
- { comment: /\/\/\s*call\b/i, code: /^\s*\w[\w.]*\(/ },
493
- // "// declare ..." or "// define ..." or "// create ..." above const/let/var
494
- { comment: /\/\/\s*(declare|define|create|initialize)\b/i, code: /^\s*(const|let|var)\b/ },
495
- // "// check if ..." above an if statement
496
- { comment: /\/\/\s*check\s+if\b/i, code: /^\s*if\s*\(/ },
497
- // "// loop ..." or "// iterate ..." above for/while
498
- { comment: /\/\/\s*(loop|iterate|for each|foreach)\b/i, code: /^\s*(for|while)\b/ },
499
- // "// import ..." above an import
500
- { comment: /\/\/\s*import\b/i, code: /^\s*import\b/ },
501
- ];
502
- for (let i = 0; i < lines.length - 1; i++) {
503
- const commentLine = lines[i].trim();
504
- const nextLine = lines[i + 1];
505
- for (const { comment, code } of trivialCommentPatterns) {
506
- if (comment.test(commentLine) && code.test(nextLine)) {
507
- if (hasIgnoreComment(file, i + 1))
508
- continue;
509
- issues.push({
510
- rule: 'comment-contradiction',
511
- severity: 'warning',
512
- message: `Comment restates what the code already says. AI documents the obvious instead of the why.`,
513
- line: i + 1,
514
- column: 1,
515
- snippet: `${commentLine.slice(0, 60)}\n${nextLine.trim().slice(0, 60)}`,
516
- });
517
- break; // one issue per comment line max
518
- }
519
- }
520
- }
521
- return issues;
92
+ message: `Aggregated AI smell signals detected (${totalSignals}): ${triggers.join(', ')}`,
93
+ line: 1,
94
+ column: 1,
95
+ snippet: path.basename(filePath),
96
+ }];
522
97
  }
523
- // ---------------------------------------------------------------------------
524
- // Phase 5: AI authorship heuristics
525
- // ---------------------------------------------------------------------------
526
- function detectOverCommented(file) {
98
+ function runPluginRules(file, loadedPlugins, config, projectRoot) {
99
+ if (loadedPlugins.length === 0)
100
+ return [];
101
+ const context = {
102
+ projectRoot,
103
+ filePath: file.getFilePath(),
104
+ config,
105
+ };
527
106
  const issues = [];
528
- for (const fn of file.getFunctions()) {
529
- const body = fn.getBody();
530
- if (!body)
531
- continue;
532
- const bodyText = body.getText();
533
- const lines = bodyText.split('\n');
534
- const totalLines = lines.length;
535
- if (totalLines < 6)
536
- continue;
537
- let commentLines = 0;
538
- for (const line of lines) {
539
- const trimmed = line.trim();
540
- if (trimmed.startsWith('//') || trimmed.startsWith('*') || trimmed.startsWith('/*') || trimmed.startsWith('*/')) {
541
- commentLines++;
542
- }
543
- }
544
- const ratio = commentLines / totalLines;
545
- if (ratio >= 0.4) {
546
- issues.push({
547
- rule: 'over-commented',
548
- severity: 'info',
549
- message: `Function has ${Math.round(ratio * 100)}% comment density (${commentLines}/${totalLines} lines). AI documents the obvious instead of the why.`,
550
- line: fn.getStartLineNumber(),
551
- column: fn.getStartLinePos(),
552
- snippet: fn.getName() ? `function ${fn.getName()}` : '(anonymous function)',
553
- });
554
- }
555
- }
556
- for (const cls of file.getClasses()) {
557
- for (const method of cls.getMethods()) {
558
- const body = method.getBody();
559
- if (!body)
560
- continue;
561
- const bodyText = body.getText();
562
- const lines = bodyText.split('\n');
563
- const totalLines = lines.length;
564
- if (totalLines < 6)
565
- continue;
566
- let commentLines = 0;
567
- for (const line of lines) {
568
- const trimmed = line.trim();
569
- if (trimmed.startsWith('//') || trimmed.startsWith('*') || trimmed.startsWith('/*') || trimmed.startsWith('*/')) {
570
- commentLines++;
107
+ for (const loaded of loadedPlugins) {
108
+ for (const rule of loaded.plugin.rules) {
109
+ try {
110
+ const detected = rule.detect(file, context) ?? [];
111
+ for (const issue of detected) {
112
+ issues.push({
113
+ ...issue,
114
+ rule: issue.rule || `${loaded.plugin.name}/${rule.name}`,
115
+ severity: issue.severity ?? (rule.severity ?? 'warning'),
116
+ });
571
117
  }
572
118
  }
573
- const ratio = commentLines / totalLines;
574
- if (ratio >= 0.4) {
119
+ catch (error) {
575
120
  issues.push({
576
- rule: 'over-commented',
577
- severity: 'info',
578
- message: `Method '${method.getName()}' has ${Math.round(ratio * 100)}% comment density (${commentLines}/${totalLines} lines). AI documents the obvious instead of the why.`,
579
- line: method.getStartLineNumber(),
580
- column: method.getStartLinePos(),
581
- snippet: `${cls.getName()}.${method.getName()}`,
582
- });
583
- }
584
- }
585
- }
586
- return issues;
587
- }
588
- function detectHardcodedConfig(file) {
589
- const issues = [];
590
- const CONFIG_PATTERNS = [
591
- { pattern: /^https?:\/\//i, label: 'HTTP/HTTPS URL' },
592
- { pattern: /^wss?:\/\//i, label: 'WebSocket URL' },
593
- { pattern: /^mongodb(\+srv)?:\/\//i, label: 'MongoDB connection string' },
594
- { pattern: /^postgres(?:ql)?:\/\//i, label: 'PostgreSQL connection string' },
595
- { pattern: /^mysql:\/\//i, label: 'MySQL connection string' },
596
- { pattern: /^redis:\/\//i, label: 'Redis connection string' },
597
- { pattern: /^amqps?:\/\//i, label: 'AMQP connection string' },
598
- { pattern: /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/, label: 'IP address' },
599
- { pattern: /^:[0-9]{2,5}$/, label: 'Port number in string' },
600
- { pattern: /^\/[a-z]/i, label: 'Absolute file path' },
601
- { pattern: /localhost(:[0-9]+)?/i, label: 'localhost reference' },
602
- ];
603
- const filePath = file.getFilePath().replace(/\\/g, '/');
604
- if (filePath.includes('.test.') || filePath.includes('.spec.') || filePath.includes('__tests__')) {
605
- return issues;
606
- }
607
- for (const node of file.getDescendantsOfKind(SyntaxKind.StringLiteral)) {
608
- const value = node.getLiteralValue();
609
- if (!value || value.length < 4)
610
- continue;
611
- const parent = node.getParent();
612
- if (!parent)
613
- continue;
614
- const parentKind = parent.getKindName();
615
- if (parentKind === 'ImportDeclaration' ||
616
- parentKind === 'ExportDeclaration' ||
617
- (parentKind === 'CallExpression' && parent.getText().startsWith('import(')))
618
- continue;
619
- for (const { pattern, label } of CONFIG_PATTERNS) {
620
- if (pattern.test(value)) {
621
- issues.push({
622
- rule: 'hardcoded-config',
121
+ rule: 'plugin-error',
623
122
  severity: 'warning',
624
- message: `Hardcoded ${label} detected. AI skips environment variables extract to process.env or a config module.`,
625
- line: node.getStartLineNumber(),
626
- column: node.getStartLinePos(),
627
- snippet: value.length > 60 ? value.slice(0, 60) + '...' : value,
123
+ message: `Plugin '${loaded.id}' rule '${rule.name}' failed: ${error instanceof Error ? error.message : String(error)}`,
124
+ line: 1,
125
+ column: 1,
126
+ snippet: file.getBaseName(),
628
127
  });
629
- break;
630
- }
631
- }
632
- }
633
- return issues;
634
- }
635
- function detectInconsistentErrorHandling(file) {
636
- const issues = [];
637
- let hasTryCatch = false;
638
- let hasDotCatch = false;
639
- let hasThenErrorHandler = false;
640
- let firstLine = 0;
641
- // Detectar try/catch
642
- const tryCatches = file.getDescendantsOfKind(SyntaxKind.TryStatement);
643
- if (tryCatches.length > 0) {
644
- hasTryCatch = true;
645
- firstLine = firstLine || tryCatches[0].getStartLineNumber();
646
- }
647
- // Detectar .catch(handler) en call expressions
648
- for (const call of file.getDescendantsOfKind(SyntaxKind.CallExpression)) {
649
- const expr = call.getExpression();
650
- if (expr.getKindName() === 'PropertyAccessExpression') {
651
- const propAccess = expr.asKindOrThrow(SyntaxKind.PropertyAccessExpression);
652
- const propName = propAccess.getName();
653
- if (propName === 'catch') {
654
- // Verificar que tiene al menos un argumento (handler real, no .catch() vacío)
655
- if (call.getArguments().length > 0) {
656
- hasDotCatch = true;
657
- if (!firstLine)
658
- firstLine = call.getStartLineNumber();
659
- }
660
- }
661
- // Detectar .then(onFulfilled, onRejected) — segundo argumento = error handler
662
- if (propName === 'then' && call.getArguments().length >= 2) {
663
- hasThenErrorHandler = true;
664
- if (!firstLine)
665
- firstLine = call.getStartLineNumber();
666
- }
667
- }
668
- }
669
- const stylesUsed = [hasTryCatch, hasDotCatch, hasThenErrorHandler].filter(Boolean).length;
670
- if (stylesUsed >= 2) {
671
- const styles = [];
672
- if (hasTryCatch)
673
- styles.push('try/catch');
674
- if (hasDotCatch)
675
- styles.push('.catch()');
676
- if (hasThenErrorHandler)
677
- styles.push('.then(_, handler)');
678
- issues.push({
679
- rule: 'inconsistent-error-handling',
680
- severity: 'warning',
681
- message: `Mixed error handling styles: ${styles.join(', ')}. AI uses whatever pattern it saw last — pick one and stick to it.`,
682
- line: firstLine || 1,
683
- column: 1,
684
- snippet: styles.join(' + '),
685
- });
686
- }
687
- return issues;
688
- }
689
- function detectUnnecessaryAbstraction(file) {
690
- const issues = [];
691
- const fileText = file.getFullText();
692
- // Interfaces con un solo método
693
- for (const iface of file.getInterfaces()) {
694
- const methods = iface.getMethods();
695
- const properties = iface.getProperties();
696
- // Solo reportar si tiene exactamente 1 método y 0 propiedades (abstracción pura de comportamiento)
697
- if (methods.length !== 1 || properties.length !== 0)
698
- continue;
699
- const ifaceName = iface.getName();
700
- // Contar cuántas veces aparece el nombre en el archivo (excluyendo la declaración misma)
701
- const usageCount = (fileText.match(new RegExp(`\\b${ifaceName}\\b`, 'g')) ?? []).length;
702
- // La declaración misma cuenta como 1 uso, implementaciones cuentan como 1 cada una
703
- // Si usageCount <= 2 (declaración + 1 uso), es candidata a innecesaria
704
- if (usageCount <= 2) {
705
- issues.push({
706
- rule: 'unnecessary-abstraction',
707
- severity: 'warning',
708
- message: `Interface '${ifaceName}' has 1 method and is used only once. AI creates abstractions preemptively — YAGNI.`,
709
- line: iface.getStartLineNumber(),
710
- column: iface.getStartLinePos(),
711
- snippet: `interface ${ifaceName} { ${methods[0].getName()}(...) }`,
712
- });
713
- }
714
- }
715
- // Clases abstractas con un solo método abstracto y sin implementaciones en el archivo
716
- for (const cls of file.getClasses()) {
717
- if (!cls.isAbstract())
718
- continue;
719
- const abstractMethods = cls.getMethods().filter(m => m.isAbstract());
720
- const concreteMethods = cls.getMethods().filter(m => !m.isAbstract());
721
- if (abstractMethods.length !== 1 || concreteMethods.length !== 0)
722
- continue;
723
- const clsName = cls.getName() ?? '';
724
- const usageCount = (fileText.match(new RegExp(`\\b${clsName}\\b`, 'g')) ?? []).length;
725
- if (usageCount <= 2) {
726
- issues.push({
727
- rule: 'unnecessary-abstraction',
728
- severity: 'warning',
729
- message: `Abstract class '${clsName}' has 1 abstract method and is extended nowhere in this file. AI over-engineers single-use code.`,
730
- line: cls.getStartLineNumber(),
731
- column: cls.getStartLinePos(),
732
- snippet: `abstract class ${clsName}`,
733
- });
734
- }
735
- }
736
- return issues;
737
- }
738
- function detectNamingInconsistency(file) {
739
- const issues = [];
740
- const isCamelCase = (name) => /^[a-z][a-zA-Z0-9]*$/.test(name) && /[A-Z]/.test(name);
741
- const isSnakeCase = (name) => /^[a-z][a-z0-9]*(_[a-z0-9]+)+$/.test(name);
742
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
743
- function checkFunction(fn) {
744
- const vars = fn.getVariableDeclarations();
745
- if (vars.length < 3)
746
- return; // muy pocas vars para ser significativo
747
- let camelCount = 0;
748
- let snakeCount = 0;
749
- const snakeExamples = [];
750
- const camelExamples = [];
751
- for (const v of vars) {
752
- const name = v.getName();
753
- if (isCamelCase(name)) {
754
- camelCount++;
755
- if (camelExamples.length < 2)
756
- camelExamples.push(name);
757
- }
758
- else if (isSnakeCase(name)) {
759
- snakeCount++;
760
- if (snakeExamples.length < 2)
761
- snakeExamples.push(name);
762
128
  }
763
129
  }
764
- if (camelCount >= 1 && snakeCount >= 1) {
765
- issues.push({
766
- rule: 'naming-inconsistency',
767
- severity: 'warning',
768
- message: `Mixed naming conventions: camelCase (${camelExamples.join(', ')}) and snake_case (${snakeExamples.join(', ')}) in the same scope. AI mixes conventions from different training examples.`,
769
- line: fn.getStartLineNumber(),
770
- column: fn.getStartLinePos(),
771
- snippet: `camelCase: ${camelExamples[0]} / snake_case: ${snakeExamples[0]}`,
772
- });
773
- }
774
- }
775
- for (const fn of file.getFunctions()) {
776
- checkFunction(fn);
777
- }
778
- for (const cls of file.getClasses()) {
779
- for (const method of cls.getMethods()) {
780
- checkFunction(method);
781
- }
782
130
  }
783
131
  return issues;
784
132
  }
785
133
  // ---------------------------------------------------------------------------
786
- // Score
787
- // ---------------------------------------------------------------------------
788
- function calculateScore(issues) {
789
- let raw = 0;
790
- for (const issue of issues) {
791
- raw += RULE_WEIGHTS[issue.rule]?.weight ?? 5;
792
- }
793
- return Math.min(100, raw);
794
- }
795
- /** Normalize a function body to a canonical string (Type-2 clone detection).
796
- * Variable names, parameter names, and numeric/string literals are replaced
797
- * with canonical tokens so that two functions with identical logic but
798
- * different identifiers produce the same fingerprint.
799
- */
800
- function normalizeFunctionBody(fn) {
801
- // Build a substitution map: localName → canonical token
802
- const subst = new Map();
803
- // Map parameters first
804
- for (const [i, param] of fn.getParameters().entries()) {
805
- const name = param.getName();
806
- if (name && name !== '_')
807
- subst.set(name, `P${i}`);
808
- }
809
- // Map locally declared variables (VariableDeclaration)
810
- let varIdx = 0;
811
- fn.forEachDescendant(node => {
812
- if (node.getKind() === SyntaxKind.VariableDeclaration) {
813
- const nameNode = node.getNameNode();
814
- // Support destructuring — getNameNode() may be a BindingPattern
815
- if (nameNode.getKind() === SyntaxKind.Identifier) {
816
- const name = nameNode.getText();
817
- if (!subst.has(name))
818
- subst.set(name, `V${varIdx++}`);
819
- }
820
- }
821
- });
822
- function serializeNode(node) {
823
- const kind = node.getKindName();
824
- switch (node.getKind()) {
825
- case SyntaxKind.Identifier: {
826
- const text = node.getText();
827
- return subst.get(text) ?? text; // external refs (Math, console) kept as-is
828
- }
829
- case SyntaxKind.NumericLiteral:
830
- return 'NL';
831
- case SyntaxKind.StringLiteral:
832
- case SyntaxKind.NoSubstitutionTemplateLiteral:
833
- return 'SL';
834
- case SyntaxKind.TrueKeyword:
835
- return 'TRUE';
836
- case SyntaxKind.FalseKeyword:
837
- return 'FALSE';
838
- case SyntaxKind.NullKeyword:
839
- return 'NULL';
840
- }
841
- const children = node.getChildren();
842
- if (children.length === 0)
843
- return kind;
844
- const childStr = children.map(serializeNode).join('|');
845
- return `${kind}(${childStr})`;
846
- }
847
- const body = fn.getBody();
848
- if (!body)
849
- return '';
850
- return serializeNode(body);
851
- }
852
- /** Return a SHA-256 fingerprint for a function body (normalized). */
853
- function fingerprintFunction(fn) {
854
- const normalized = normalizeFunctionBody(fn);
855
- return crypto.createHash('sha256').update(normalized).digest('hex');
856
- }
857
- /** Return all function-like nodes from a SourceFile that are worth comparing:
858
- * - At least MIN_LINES lines in their body
859
- * - Not test helpers (describe/it/test/beforeEach/afterEach)
860
- */
861
- const MIN_LINES = 8;
862
- function collectFunctions(sf) {
863
- const results = [];
864
- const kinds = [
865
- SyntaxKind.FunctionDeclaration,
866
- SyntaxKind.FunctionExpression,
867
- SyntaxKind.ArrowFunction,
868
- SyntaxKind.MethodDeclaration,
869
- ];
870
- for (const kind of kinds) {
871
- for (const node of sf.getDescendantsOfKind(kind)) {
872
- const body = node.getBody();
873
- if (!body)
874
- continue;
875
- const start = body.getStartLineNumber();
876
- const end = body.getEndLineNumber();
877
- if (end - start + 1 < MIN_LINES)
878
- continue;
879
- // Skip test-framework helpers
880
- const name = node.getKind() === SyntaxKind.FunctionDeclaration
881
- ? node.getName() ?? '<anonymous>'
882
- : node.getKind() === SyntaxKind.MethodDeclaration
883
- ? node.getName()
884
- : '<anonymous>';
885
- if (['describe', 'it', 'test', 'beforeEach', 'afterEach', 'beforeAll', 'afterAll'].includes(name))
886
- continue;
887
- const pos = node.getStart();
888
- const lineInfo = sf.getLineAndColumnAtPos(pos);
889
- results.push({ fn: node, name, line: lineInfo.line, col: lineInfo.column });
890
- }
891
- }
892
- return results;
893
- }
894
- // ---------------------------------------------------------------------------
895
- // Public API
134
+ // Per-file analysis
896
135
  // ---------------------------------------------------------------------------
897
- export function analyzeFile(file) {
136
+ export function analyzeFile(file, options) {
137
+ const normalizedOptions = (options && typeof options === 'object' && ('config' in options || 'loadedPlugins' in options || 'projectRoot' in options))
138
+ ? options
139
+ : { config: (options && typeof options === 'object' ? options : undefined) };
898
140
  if (isFileIgnored(file)) {
899
141
  return {
900
142
  path: file.getFilePath(),
@@ -917,7 +159,6 @@ export function analyzeFile(file) {
917
159
  ...detectTooManyParams(file),
918
160
  ...detectHighCoupling(file),
919
161
  ...detectPromiseStyleMix(file),
920
- // Stubs now implemented
921
162
  ...detectMagicNumbers(file),
922
163
  ...detectCommentContradiction(file),
923
164
  // Phase 5: AI authorship heuristics
@@ -926,17 +167,27 @@ export function analyzeFile(file) {
926
167
  ...detectInconsistentErrorHandling(file),
927
168
  ...detectUnnecessaryAbstraction(file),
928
169
  ...detectNamingInconsistency(file),
170
+ // Configurable architecture rules
171
+ ...detectControllerNoDb(file, normalizedOptions?.config),
172
+ ...detectServiceNoHttp(file, normalizedOptions?.config),
173
+ ...detectMaxFunctionLines(file, normalizedOptions?.config),
174
+ // Plugin rules
175
+ ...runPluginRules(file, normalizedOptions?.loadedPlugins ?? [], normalizedOptions?.config, normalizedOptions?.projectRoot ?? path.dirname(file.getFilePath())),
929
176
  ];
177
+ issues.push(...detectAICodeSmell(issues, file.getFilePath()));
930
178
  return {
931
179
  path: file.getFilePath(),
932
180
  issues,
933
- score: calculateScore(issues),
181
+ score: calculateScore(issues, RULE_WEIGHTS),
934
182
  };
935
183
  }
184
+ // ---------------------------------------------------------------------------
185
+ // Project-level analysis (phases 2, 3, 8 require the full file set)
186
+ // ---------------------------------------------------------------------------
936
187
  export function analyzeProject(targetPath, config) {
937
188
  const project = new Project({
938
189
  skipAddingFilesFromTsConfig: true,
939
- compilerOptions: { allowJs: true },
190
+ compilerOptions: { allowJs: true, jsx: 1 }, // 1 = JsxEmit.Preserve
940
191
  });
941
192
  project.addSourceFilesAtPaths([
942
193
  `${targetPath}/**/*.ts`,
@@ -952,31 +203,35 @@ export function analyzeProject(targetPath, config) {
952
203
  `!${targetPath}/**/*.spec.*`,
953
204
  ]);
954
205
  const sourceFiles = project.getSourceFiles();
206
+ const pluginRuntime = loadPlugins(targetPath, config?.plugins);
955
207
  // Phase 1: per-file analysis
956
- const reports = sourceFiles.map(analyzeFile);
208
+ const reports = sourceFiles.map((file) => analyzeFile(file, {
209
+ config,
210
+ loadedPlugins: pluginRuntime.plugins,
211
+ projectRoot: targetPath,
212
+ }));
957
213
  const reportByPath = new Map();
958
214
  for (const r of reports)
959
215
  reportByPath.set(r.path, r);
960
- // Phase 2: cross-file analysis build import graph first
961
- const allImportedPaths = new Set(); // absolute paths of files that are imported
962
- const allImportedNames = new Map(); // file path → set of imported names
963
- const allLiteralImports = new Set(); // raw module specifiers (for unused-dependency)
964
- const importGraph = new Map(); // Phase 3: filePath → Set of imported filePaths
216
+ // Build set of ignored paths so cross-file phases don't re-add issues
217
+ const ignoredPaths = new Set(sourceFiles.filter(sf => isFileIgnored(sf)).map(sf => sf.getFilePath()));
218
+ // ── Phase 2 setup: build import graph ──────────────────────────────────────
219
+ const allImportedPaths = new Set();
220
+ const allImportedNames = new Map();
221
+ const allLiteralImports = new Set();
222
+ const importGraph = new Map();
965
223
  for (const sf of sourceFiles) {
966
224
  const sfPath = sf.getFilePath();
967
225
  for (const decl of sf.getImportDeclarations()) {
968
226
  const moduleSpecifier = decl.getModuleSpecifierValue();
969
227
  allLiteralImports.add(moduleSpecifier);
970
- // Resolve to absolute path for dead-file / unused-export
971
228
  const resolved = decl.getModuleSpecifierSourceFile();
972
229
  if (resolved) {
973
230
  const resolvedPath = resolved.getFilePath();
974
231
  allImportedPaths.add(resolvedPath);
975
- // Phase 3: populate directed import graph
976
232
  if (!importGraph.has(sfPath))
977
233
  importGraph.set(sfPath, new Set());
978
234
  importGraph.get(sfPath).add(resolvedPath);
979
- // Collect named imports { A, B } and default imports
980
235
  const named = decl.getNamedImports().map(n => n.getName());
981
236
  const def = decl.getDefaultImport()?.getText();
982
237
  const ns = decl.getNamespaceImport()?.getText();
@@ -989,11 +244,9 @@ export function analyzeProject(targetPath, config) {
989
244
  if (def)
990
245
  nameSet.add('default');
991
246
  if (ns)
992
- nameSet.add('*'); // namespace import — counts all exports as used
247
+ nameSet.add('*');
993
248
  }
994
249
  }
995
- // Also register re-exports: export { X, Y } from './module'
996
- // These count as "using" X and Y from the source module
997
250
  for (const exportDecl of sf.getExportDeclarations()) {
998
251
  const reExportedModule = exportDecl.getModuleSpecifierSourceFile();
999
252
  if (!reExportedModule)
@@ -1006,7 +259,6 @@ export function analyzeProject(targetPath, config) {
1006
259
  const nameSet = allImportedNames.get(reExportedPath);
1007
260
  const namedExports = exportDecl.getNamedExports();
1008
261
  if (namedExports.length === 0) {
1009
- // export * from './module' — namespace re-export, all names used
1010
262
  nameSet.add('*');
1011
263
  }
1012
264
  else {
@@ -1015,262 +267,102 @@ export function analyzeProject(targetPath, config) {
1015
267
  }
1016
268
  }
1017
269
  }
1018
- // Detect unused-export and dead-file per source file
1019
- for (const sf of sourceFiles) {
1020
- const sfPath = sf.getFilePath();
1021
- const report = reportByPath.get(sfPath);
1022
- if (!report)
1023
- continue;
1024
- // dead-file: file is never imported by anyone
1025
- // Exclude entry-point candidates: index.ts, main.ts, cli.ts, app.ts, bin files
1026
- const basename = path.basename(sfPath);
1027
- const isBinFile = sfPath.replace(/\\/g, '/').includes('/bin/');
1028
- const isEntryPoint = /^(index|main|cli|app)\.(ts|tsx|js|jsx)$/.test(basename) || isBinFile;
1029
- if (!isEntryPoint && !allImportedPaths.has(sfPath)) {
1030
- const issue = {
1031
- rule: 'dead-file',
1032
- severity: RULE_WEIGHTS['dead-file'].severity,
1033
- message: 'File is never imported — may be dead code',
270
+ // Plugin load failures are surfaced as synthetic report entries.
271
+ if (pluginRuntime.errors.length > 0) {
272
+ for (const err of pluginRuntime.errors) {
273
+ const pluginIssue = {
274
+ rule: 'plugin-error',
275
+ severity: 'warning',
276
+ message: `Failed to load plugin '${err.pluginId}': ${err.message}`,
1034
277
  line: 1,
1035
278
  column: 1,
1036
- snippet: basename,
279
+ snippet: err.pluginId,
1037
280
  };
1038
- report.issues.push(issue);
1039
- report.score = calculateScore(report.issues);
1040
- }
1041
- // unused-export: named exports not imported anywhere
1042
- // Skip barrel files (index.ts) — their entire surface is the public API
1043
- const isBarrel = /^index\.(ts|tsx|js|jsx)$/.test(basename);
1044
- const importedNamesForFile = allImportedNames.get(sfPath);
1045
- const hasNamespaceImport = importedNamesForFile?.has('*') ?? false;
1046
- if (!isBarrel && !hasNamespaceImport) {
1047
- for (const exportDecl of sf.getExportDeclarations()) {
1048
- for (const namedExport of exportDecl.getNamedExports()) {
1049
- const name = namedExport.getName();
1050
- if (!importedNamesForFile?.has(name)) {
1051
- const line = namedExport.getStartLineNumber();
1052
- const issue = {
1053
- rule: 'unused-export',
1054
- severity: RULE_WEIGHTS['unused-export'].severity,
1055
- message: `'${name}' is exported but never imported`,
1056
- line,
1057
- column: 1,
1058
- snippet: namedExport.getText().slice(0, 80),
1059
- };
1060
- report.issues.push(issue);
1061
- report.score = calculateScore(report.issues);
1062
- }
1063
- }
1064
- }
1065
- // Also check inline export declarations (export function foo, export const bar)
1066
- for (const exportSymbol of sf.getExportedDeclarations()) {
1067
- const [exportName, declarations] = [exportSymbol[0], exportSymbol[1]];
1068
- if (exportName === 'default')
1069
- continue;
1070
- if (importedNamesForFile?.has(exportName))
1071
- continue;
1072
- for (const decl of declarations) {
1073
- // Skip if this is a re-export from another file
1074
- if (decl.getSourceFile().getFilePath() !== sfPath)
1075
- continue;
1076
- const line = decl.getStartLineNumber();
1077
- const issue = {
1078
- rule: 'unused-export',
1079
- severity: RULE_WEIGHTS['unused-export'].severity,
1080
- message: `'${exportName}' is exported but never imported`,
1081
- line,
1082
- column: 1,
1083
- snippet: decl.getText().split('\n')[0].slice(0, 80),
1084
- };
1085
- report.issues.push(issue);
1086
- report.score = calculateScore(report.issues);
1087
- break; // one issue per export name is enough
1088
- }
1089
- }
1090
- }
1091
- }
1092
- // Detect unused-dependency: packages in package.json never imported
1093
- const pkgPath = path.join(targetPath, 'package.json');
1094
- if (fs.existsSync(pkgPath)) {
1095
- let pkg;
1096
- try {
1097
- pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
1098
- }
1099
- catch {
1100
- pkg = {};
1101
- }
1102
- const deps = {
1103
- ...(pkg.dependencies ?? {}),
1104
- };
1105
- const unusedDeps = [];
1106
- for (const depName of Object.keys(deps)) {
1107
- // Skip type-only packages (@types/*)
1108
- if (depName.startsWith('@types/'))
1109
- continue;
1110
- // A dependency is "used" if any import specifier starts with the package name
1111
- // (handles sub-paths like 'lodash/merge', 'date-fns/format', etc.)
1112
- const isUsed = [...allLiteralImports].some(imp => imp === depName || imp.startsWith(depName + '/'));
1113
- if (!isUsed)
1114
- unusedDeps.push(depName);
1115
- }
1116
- if (unusedDeps.length > 0) {
1117
- const pkgIssues = unusedDeps.map(dep => ({
1118
- rule: 'unused-dependency',
1119
- severity: RULE_WEIGHTS['unused-dependency'].severity,
1120
- message: `'${dep}' is in package.json but never imported`,
1121
- line: 1,
1122
- column: 1,
1123
- snippet: `"${dep}"`,
1124
- }));
1125
281
  reports.push({
1126
- path: pkgPath,
1127
- issues: pkgIssues,
1128
- score: calculateScore(pkgIssues),
282
+ path: path.join(targetPath, '.drift-plugin-errors', `${err.pluginId}.plugin`),
283
+ issues: [pluginIssue],
284
+ score: calculateScore([pluginIssue], RULE_WEIGHTS),
1129
285
  });
1130
286
  }
1131
287
  }
1132
- // Phase 3: circular-dependency DFS cycle detection
1133
- function findCycles(graph) {
1134
- const visited = new Set();
1135
- const inStack = new Set();
1136
- const cycles = [];
1137
- function dfs(node, stack) {
1138
- visited.add(node);
1139
- inStack.add(node);
1140
- stack.push(node);
1141
- for (const neighbor of graph.get(node) ?? []) {
1142
- if (!visited.has(neighbor)) {
1143
- dfs(neighbor, stack);
1144
- }
1145
- else if (inStack.has(neighbor)) {
1146
- // Found a cycle — extract the cycle portion from the stack
1147
- const cycleStart = stack.indexOf(neighbor);
1148
- cycles.push(stack.slice(cycleStart));
1149
- }
1150
- }
1151
- stack.pop();
1152
- inStack.delete(node);
288
+ // ── Phase 2: dead-file + unused-export + unused-dependency ─────────────────
289
+ const deadFiles = detectDeadFiles(sourceFiles, allImportedPaths, RULE_WEIGHTS);
290
+ for (const [sfPath, issue] of deadFiles) {
291
+ if (ignoredPaths.has(sfPath))
292
+ continue;
293
+ const report = reportByPath.get(sfPath);
294
+ if (report) {
295
+ report.issues.push(issue);
296
+ report.score = calculateScore(report.issues, RULE_WEIGHTS);
1153
297
  }
1154
- for (const node of graph.keys()) {
1155
- if (!visited.has(node)) {
1156
- dfs(node, []);
298
+ }
299
+ const unusedExports = detectUnusedExports(sourceFiles, allImportedNames, RULE_WEIGHTS);
300
+ for (const [sfPath, issues] of unusedExports) {
301
+ if (ignoredPaths.has(sfPath))
302
+ continue;
303
+ const report = reportByPath.get(sfPath);
304
+ if (report) {
305
+ for (const issue of issues) {
306
+ report.issues.push(issue);
1157
307
  }
308
+ report.score = calculateScore(report.issues, RULE_WEIGHTS);
1158
309
  }
1159
- return cycles;
1160
310
  }
1161
- const cycles = findCycles(importGraph);
1162
- // De-duplicate: each unique cycle (regardless of starting node) reported once per file
1163
- const reportedCycleKeys = new Set();
1164
- for (const cycle of cycles) {
1165
- const cycleKey = [...cycle].sort().join('|');
1166
- if (reportedCycleKeys.has(cycleKey))
1167
- continue;
1168
- reportedCycleKeys.add(cycleKey);
1169
- // Report on the first file in the cycle
1170
- const firstFile = cycle[0];
1171
- const report = reportByPath.get(firstFile);
1172
- if (!report)
311
+ const unusedDepIssues = detectUnusedDependencies(targetPath, allLiteralImports, RULE_WEIGHTS);
312
+ if (unusedDepIssues.length > 0) {
313
+ const pkgPath = path.join(targetPath, 'package.json');
314
+ reports.push({
315
+ path: pkgPath,
316
+ issues: unusedDepIssues,
317
+ score: calculateScore(unusedDepIssues, RULE_WEIGHTS),
318
+ });
319
+ }
320
+ // ── Phase 3: circular-dependency ────────────────────────────────────────────
321
+ const circularIssues = detectCircularDependencies(importGraph, RULE_WEIGHTS);
322
+ for (const [filePath, issue] of circularIssues) {
323
+ if (ignoredPaths.has(filePath))
1173
324
  continue;
1174
- const cycleDisplay = cycle
1175
- .map(p => path.basename(p))
1176
- .concat(path.basename(cycle[0])) // close the loop visually: A → B → C → A
1177
- .join(' → ');
1178
- const issue = {
1179
- rule: 'circular-dependency',
1180
- severity: RULE_WEIGHTS['circular-dependency'].severity,
1181
- message: `Circular dependency detected: ${cycleDisplay}`,
1182
- line: 1,
1183
- column: 1,
1184
- snippet: cycleDisplay,
1185
- };
1186
- report.issues.push(issue);
1187
- report.score = calculateScore(report.issues);
325
+ const report = reportByPath.get(filePath);
326
+ if (report) {
327
+ report.issues.push(issue);
328
+ report.score = calculateScore(report.issues, RULE_WEIGHTS);
329
+ }
1188
330
  }
1189
- // ── Phase 3b: layer-violation ──────────────────────────────────────────
331
+ // ── Phase 3b: layer-violation ────────────────────────────────────────────────
1190
332
  if (config?.layers && config.layers.length > 0) {
1191
- const { layers } = config;
1192
- function getLayer(filePath) {
1193
- const rel = filePath.replace(/\\/g, '/');
1194
- return layers.find(layer => layer.patterns.some(pattern => {
1195
- const regexStr = pattern
1196
- .replace(/\\/g, '/')
1197
- .replace(/[.+^${}()|[\]]/g, '\\$&')
1198
- .replace(/\*\*/g, '###DOUBLESTAR###')
1199
- .replace(/\*/g, '[^/]*')
1200
- .replace(/###DOUBLESTAR###/g, '.*');
1201
- return new RegExp(`^${regexStr}`).test(rel);
1202
- }));
1203
- }
1204
- for (const [filePath, imports] of importGraph.entries()) {
1205
- const fileLayer = getLayer(filePath);
1206
- if (!fileLayer)
333
+ const layerIssues = detectLayerViolations(importGraph, config.layers, targetPath, RULE_WEIGHTS);
334
+ for (const [filePath, issues] of layerIssues) {
335
+ if (ignoredPaths.has(filePath))
1207
336
  continue;
1208
- for (const importedPath of imports) {
1209
- const importedLayer = getLayer(importedPath);
1210
- if (!importedLayer)
1211
- continue;
1212
- if (importedLayer.name === fileLayer.name)
1213
- continue;
1214
- if (!fileLayer.canImportFrom.includes(importedLayer.name)) {
1215
- const report = reportByPath.get(filePath);
1216
- if (report) {
1217
- const weight = RULE_WEIGHTS['layer-violation']?.weight ?? 5;
1218
- report.issues.push({
1219
- rule: 'layer-violation',
1220
- severity: 'error',
1221
- message: `Layer '${fileLayer.name}' must not import from layer '${importedLayer.name}'`,
1222
- line: 1,
1223
- column: 1,
1224
- snippet: `import from '${path.relative(targetPath, importedPath).replace(/\\/g, '/')}'`,
1225
- });
1226
- report.score = Math.min(100, report.score + weight);
1227
- }
337
+ const report = reportByPath.get(filePath);
338
+ if (report) {
339
+ for (const issue of issues) {
340
+ report.issues.push(issue);
341
+ report.score = Math.min(100, report.score + (RULE_WEIGHTS['layer-violation']?.weight ?? 5));
1228
342
  }
1229
343
  }
1230
344
  }
1231
345
  }
1232
- // ── Phase 3c: cross-boundary-import ────────────────────────────────────
346
+ // ── Phase 3c: cross-boundary-import ─────────────────────────────────────────
1233
347
  if (config?.modules && config.modules.length > 0) {
1234
- const { modules } = config;
1235
- function getModule(filePath) {
1236
- const rel = filePath.replace(/\\/g, '/');
1237
- return modules.find(m => rel.startsWith(m.root.replace(/\\/g, '/')));
1238
- }
1239
- for (const [filePath, imports] of importGraph.entries()) {
1240
- const fileModule = getModule(filePath);
1241
- if (!fileModule)
348
+ const boundaryIssues = detectCrossBoundaryImports(importGraph, config.modules, targetPath, RULE_WEIGHTS);
349
+ for (const [filePath, issues] of boundaryIssues) {
350
+ if (ignoredPaths.has(filePath))
1242
351
  continue;
1243
- for (const importedPath of imports) {
1244
- const importedModule = getModule(importedPath);
1245
- if (!importedModule)
1246
- continue;
1247
- if (importedModule.name === fileModule.name)
1248
- continue;
1249
- const allowedImports = fileModule.allowedExternalImports ?? [];
1250
- const relImported = importedPath.replace(/\\/g, '/');
1251
- const isAllowed = allowedImports.some(allowed => relImported.startsWith(allowed.replace(/\\/g, '/')));
1252
- if (!isAllowed) {
1253
- const report = reportByPath.get(filePath);
1254
- if (report) {
1255
- const weight = RULE_WEIGHTS['cross-boundary-import']?.weight ?? 5;
1256
- report.issues.push({
1257
- rule: 'cross-boundary-import',
1258
- severity: 'warning',
1259
- message: `Module '${fileModule.name}' must not import from module '${importedModule.name}'`,
1260
- line: 1,
1261
- column: 1,
1262
- snippet: `import from '${path.relative(targetPath, importedPath).replace(/\\/g, '/')}'`,
1263
- });
1264
- report.score = Math.min(100, report.score + weight);
1265
- }
352
+ const report = reportByPath.get(filePath);
353
+ if (report) {
354
+ for (const issue of issues) {
355
+ report.issues.push(issue);
356
+ report.score = Math.min(100, report.score + (RULE_WEIGHTS['cross-boundary-import']?.weight ?? 5));
1266
357
  }
1267
358
  }
1268
359
  }
1269
360
  }
1270
- // ── Phase 8: semantic-duplication ────────────────────────────────────────
1271
- // Build a fingerprint → [{filePath, fnName, line, col}] map across all files
361
+ // ── Phase 8: semantic-duplication ───────────────────────────────────────────
1272
362
  const fingerprintMap = new Map();
1273
363
  for (const sf of sourceFiles) {
364
+ if (isFileIgnored(sf))
365
+ continue;
1274
366
  const sfPath = sf.getFilePath();
1275
367
  for (const { fn, name, line, col } of collectFunctions(sf)) {
1276
368
  const fp = fingerprintFunction(fn);
@@ -1279,7 +371,6 @@ export function analyzeProject(targetPath, config) {
1279
371
  fingerprintMap.get(fp).push({ filePath: sfPath, name, line, col });
1280
372
  }
1281
373
  }
1282
- // For each fingerprint with 2+ functions: report each as a duplicate of the others
1283
374
  for (const [, entries] of fingerprintMap) {
1284
375
  if (entries.length < 2)
1285
376
  continue;
@@ -1287,7 +378,6 @@ export function analyzeProject(targetPath, config) {
1287
378
  const report = reportByPath.get(entry.filePath);
1288
379
  if (!report)
1289
380
  continue;
1290
- // Build the "duplicated in" list (all other locations)
1291
381
  const others = entries
1292
382
  .filter(e => e !== entry)
1293
383
  .map(e => {
@@ -1309,439 +399,4 @@ export function analyzeProject(targetPath, config) {
1309
399
  }
1310
400
  return reports;
1311
401
  }
1312
- // ---------------------------------------------------------------------------
1313
- // Git helpers
1314
- // ---------------------------------------------------------------------------
1315
- /** Analyse a file given its absolute path string (wraps analyzeFile). */
1316
- function analyzeFilePath(filePath) {
1317
- const proj = new Project({
1318
- skipAddingFilesFromTsConfig: true,
1319
- compilerOptions: { allowJs: true },
1320
- });
1321
- const sf = proj.addSourceFileAtPath(filePath);
1322
- return analyzeFile(sf);
1323
- }
1324
- /**
1325
- * Execute a git command synchronously and return stdout.
1326
- * Throws a descriptive error if the command fails or git is not available.
1327
- */
1328
- function execGit(cmd, cwd) {
1329
- try {
1330
- return execSync(cmd, { cwd, encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] }).trim();
1331
- }
1332
- catch (err) {
1333
- const msg = err instanceof Error ? err.message : String(err);
1334
- throw new Error(`Git command failed: ${cmd}\n${msg}`);
1335
- }
1336
- }
1337
- /**
1338
- * Verify the given directory is a git repository.
1339
- * Throws if git is not available or the directory is not a repo.
1340
- */
1341
- function assertGitRepo(cwd) {
1342
- try {
1343
- execGit('git rev-parse --is-inside-work-tree', cwd);
1344
- }
1345
- catch {
1346
- throw new Error(`Directory is not a git repository: ${cwd}`);
1347
- }
1348
- }
1349
- // ---------------------------------------------------------------------------
1350
- // Historical analysis helpers
1351
- // ---------------------------------------------------------------------------
1352
- /**
1353
- * Analyse a single file as it existed at a given commit hash.
1354
- * Writes the blob to a temp file, runs analyzeFile, then cleans up.
1355
- */
1356
- async function analyzeFileAtCommit(filePath, commitHash, projectRoot) {
1357
- const relPath = path.relative(projectRoot, filePath).replace(/\\/g, '/');
1358
- const blob = execGit(`git show ${commitHash}:${relPath}`, projectRoot);
1359
- const tmpFile = path.join(os.tmpdir(), `drift-${crypto.randomBytes(8).toString('hex')}.ts`);
1360
- try {
1361
- fs.writeFileSync(tmpFile, blob, 'utf8');
1362
- const report = analyzeFilePath(tmpFile);
1363
- // Replace temp path with original for readable output
1364
- return { ...report, path: filePath };
1365
- }
1366
- finally {
1367
- try {
1368
- fs.unlinkSync(tmpFile);
1369
- }
1370
- catch { /* ignore cleanup errors */ }
1371
- }
1372
- }
1373
- /**
1374
- * Analyse ALL TypeScript files in the project snapshot at a given commit.
1375
- * Uses `git ls-tree` to enumerate every file in the tree, writes them to a
1376
- * temp directory, then runs `analyzeProject` on that full snapshot so that
1377
- * the resulting `averageScore` reflects the complete project health rather
1378
- * than only the files touched in that diff.
1379
- */
1380
- async function analyzeSingleCommit(commitHash, targetPath, config) {
1381
- // 1. Commit metadata
1382
- const meta = execGit(`git show --no-patch --format="%H|%aI|%an|%s" ${commitHash}`, targetPath);
1383
- const [hash, dateStr, author, ...msgParts] = meta.split('|');
1384
- const message = msgParts.join('|').trim();
1385
- const commitDate = new Date(dateStr ?? '');
1386
- // 2. All .ts/.tsx files tracked at this commit (no diffs, full tree)
1387
- const allFiles = execGit(`git ls-tree -r ${commitHash} --name-only`, targetPath)
1388
- .split('\n')
1389
- .filter(f => (f.endsWith('.ts') || f.endsWith('.tsx')) &&
1390
- !f.endsWith('.d.ts') &&
1391
- !f.includes('node_modules') &&
1392
- !f.startsWith('dist/'));
1393
- if (allFiles.length === 0) {
1394
- return {
1395
- commitHash: hash ?? commitHash,
1396
- commitDate,
1397
- author: author ?? '',
1398
- message,
1399
- files: [],
1400
- totalScore: 0,
1401
- averageScore: 0,
1402
- };
1403
- }
1404
- // 3. Write snapshot to temp directory
1405
- const tmpDir = path.join(os.tmpdir(), `drift-${(hash ?? commitHash).slice(0, 8)}`);
1406
- fs.mkdirSync(tmpDir, { recursive: true });
1407
- for (const relPath of allFiles) {
1408
- try {
1409
- const content = execGit(`git show ${commitHash}:${relPath}`, targetPath);
1410
- const destPath = path.join(tmpDir, relPath);
1411
- fs.mkdirSync(path.dirname(destPath), { recursive: true });
1412
- fs.writeFileSync(destPath, content, 'utf-8');
1413
- }
1414
- catch {
1415
- // skip files that can't be read (binary, deleted in partial clone, etc.)
1416
- }
1417
- }
1418
- // 4. Analyse the full project snapshot
1419
- const fileReports = analyzeProject(tmpDir, config);
1420
- const totalScore = fileReports.reduce((sum, r) => sum + r.score, 0);
1421
- const averageScore = fileReports.length > 0 ? totalScore / fileReports.length : 0;
1422
- // 5. Cleanup
1423
- try {
1424
- fs.rmSync(tmpDir, { recursive: true, force: true });
1425
- }
1426
- catch {
1427
- // non-fatal — temp dirs are cleaned by the OS eventually
1428
- }
1429
- return {
1430
- commitHash: hash ?? commitHash,
1431
- commitDate,
1432
- author: author ?? '',
1433
- message,
1434
- files: fileReports,
1435
- totalScore,
1436
- averageScore,
1437
- };
1438
- }
1439
- /**
1440
- * Run historical analysis over all commits since a given date.
1441
- * Returns results ordered chronologically (oldest first).
1442
- */
1443
- async function analyzeHistoricalCommits(sinceDate, targetPath, maxCommits, config, maxSamples = 10) {
1444
- assertGitRepo(targetPath);
1445
- const isoDate = sinceDate.toISOString();
1446
- const raw = execGit(`git log --since="${isoDate}" --format="%H" --max-count=${maxCommits}`, targetPath);
1447
- if (!raw)
1448
- return [];
1449
- const hashes = raw.split('\n').filter(Boolean);
1450
- // Sample: distribute evenly across the range
1451
- // E.g. 122 commits, maxSamples=10 → pick index 0, 13, 26, 39, 52, 65, 78, 91, 104, 121
1452
- const sampled = hashes.length <= maxSamples
1453
- ? hashes
1454
- : Array.from({ length: maxSamples }, (_, i) => hashes[Math.floor(i * (hashes.length - 1) / (maxSamples - 1))]);
1455
- const analyses = await Promise.all(sampled.map(h => analyzeSingleCommit(h, targetPath, config).catch(() => null)));
1456
- return analyses
1457
- .filter((a) => a !== null)
1458
- .sort((a, b) => a.commitDate.getTime() - b.commitDate.getTime());
1459
- }
1460
- // ---------------------------------------------------------------------------
1461
- // TrendAnalyzer
1462
- // ---------------------------------------------------------------------------
1463
- export class TrendAnalyzer {
1464
- projectPath;
1465
- config;
1466
- constructor(projectPath, config) {
1467
- this.projectPath = projectPath;
1468
- this.config = config;
1469
- }
1470
- // --- Static utility methods -----------------------------------------------
1471
- static calculateMovingAverage(data, windowSize) {
1472
- return data.map((_, i) => {
1473
- const start = Math.max(0, i - windowSize + 1);
1474
- const window = data.slice(start, i + 1);
1475
- return window.reduce((s, p) => s + p.score, 0) / window.length;
1476
- });
1477
- }
1478
- static linearRegression(data) {
1479
- const n = data.length;
1480
- if (n < 2)
1481
- return { slope: 0, intercept: data[0]?.score ?? 0, r2: 0 };
1482
- const xs = data.map((_, i) => i);
1483
- const ys = data.map(p => p.score);
1484
- const xMean = xs.reduce((s, x) => s + x, 0) / n;
1485
- const yMean = ys.reduce((s, y) => s + y, 0) / n;
1486
- const ssXX = xs.reduce((s, x) => s + (x - xMean) ** 2, 0);
1487
- const ssXY = xs.reduce((s, x, i) => s + (x - xMean) * (ys[i] - yMean), 0);
1488
- const ssYY = ys.reduce((s, y) => s + (y - yMean) ** 2, 0);
1489
- const slope = ssXX === 0 ? 0 : ssXY / ssXX;
1490
- const intercept = yMean - slope * xMean;
1491
- const r2 = ssYY === 0 ? 1 : (ssXY ** 2) / (ssXX * ssYY);
1492
- return { slope, intercept, r2 };
1493
- }
1494
- /** Generate a simple horizontal ASCII bar chart (one bar per data point). */
1495
- static generateTrendChart(data) {
1496
- if (data.length === 0)
1497
- return '(no data)';
1498
- const maxScore = Math.max(...data.map(p => p.score), 1);
1499
- const chartWidth = 40;
1500
- const lines = data.map(p => {
1501
- const barLen = Math.round((p.score / maxScore) * chartWidth);
1502
- const bar = '█'.repeat(barLen);
1503
- const dateStr = p.date.toISOString().slice(0, 10);
1504
- return `${dateStr} │${bar.padEnd(chartWidth)} ${p.score.toFixed(1)}`;
1505
- });
1506
- return lines.join('\n');
1507
- }
1508
- // --- Instance method -------------------------------------------------------
1509
- async analyzeTrend(options) {
1510
- assertGitRepo(this.projectPath);
1511
- const periodDays = {
1512
- week: 7, month: 30, quarter: 90, year: 365,
1513
- };
1514
- const days = periodDays[options.period ?? 'month'] ?? 30;
1515
- const sinceDate = options.since
1516
- ? new Date(options.since)
1517
- : new Date(Date.now() - days * 24 * 60 * 60 * 1000);
1518
- const historicalAnalyses = await analyzeHistoricalCommits(sinceDate, this.projectPath, 100, this.config, 10);
1519
- const trendPoints = historicalAnalyses.map(h => ({
1520
- date: h.commitDate,
1521
- score: h.averageScore,
1522
- fileCount: h.files.length,
1523
- avgIssuesPerFile: h.files.length > 0
1524
- ? h.files.reduce((s, f) => s + f.issues.length, 0) / h.files.length
1525
- : 0,
1526
- }));
1527
- const regression = TrendAnalyzer.linearRegression(trendPoints);
1528
- // Current state report
1529
- const currentFiles = analyzeProject(this.projectPath, this.config);
1530
- const baseReport = buildReport(this.projectPath, currentFiles);
1531
- return {
1532
- ...baseReport,
1533
- trend: trendPoints,
1534
- regression,
1535
- };
1536
- }
1537
- }
1538
- function parseGitBlame(blameOutput) {
1539
- const entries = [];
1540
- const lines = blameOutput.split('\n');
1541
- let i = 0;
1542
- while (i < lines.length) {
1543
- const headerLine = lines[i];
1544
- if (!headerLine || headerLine.trim() === '') {
1545
- i++;
1546
- continue;
1547
- }
1548
- // Porcelain blame format: first line is "<hash> <orig-line> <final-line> [<num-lines>]"
1549
- const headerMatch = headerLine.match(/^([0-9a-f]{40})\s/);
1550
- if (!headerMatch) {
1551
- i++;
1552
- continue;
1553
- }
1554
- const hash = headerMatch[1];
1555
- let author = '';
1556
- let email = '';
1557
- let codeLine = '';
1558
- i++;
1559
- while (i < lines.length && !lines[i].match(/^[0-9a-f]{40}\s/)) {
1560
- const l = lines[i];
1561
- if (l.startsWith('author '))
1562
- author = l.slice(7).trim();
1563
- else if (l.startsWith('author-mail '))
1564
- email = l.slice(12).replace(/[<>]/g, '').trim();
1565
- else if (l.startsWith('\t'))
1566
- codeLine = l.slice(1);
1567
- i++;
1568
- }
1569
- entries.push({ hash, author, email, line: codeLine });
1570
- }
1571
- return entries;
1572
- }
1573
- export class BlameAnalyzer {
1574
- projectPath;
1575
- config;
1576
- constructor(projectPath, config) {
1577
- this.projectPath = projectPath;
1578
- this.config = config;
1579
- }
1580
- /** Blame a single file: returns per-author attribution. */
1581
- static async analyzeFileBlame(filePath) {
1582
- const dir = path.dirname(filePath);
1583
- assertGitRepo(dir);
1584
- const blameOutput = execGit(`git blame --porcelain "${filePath}"`, dir);
1585
- const entries = parseGitBlame(blameOutput);
1586
- // Analyse issues in the file
1587
- const report = analyzeFilePath(filePath);
1588
- // Map line numbers of issues to authors
1589
- const issuesByLine = new Map();
1590
- for (const issue of report.issues) {
1591
- issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
1592
- }
1593
- // Aggregate by author
1594
- const byAuthor = new Map();
1595
- entries.forEach((entry, idx) => {
1596
- const key = entry.email || entry.author;
1597
- if (!byAuthor.has(key)) {
1598
- byAuthor.set(key, {
1599
- author: entry.author,
1600
- email: entry.email,
1601
- commits: 0,
1602
- linesChanged: 0,
1603
- issuesIntroduced: 0,
1604
- avgScoreImpact: 0,
1605
- });
1606
- }
1607
- const attr = byAuthor.get(key);
1608
- attr.linesChanged++;
1609
- const lineNum = idx + 1;
1610
- if (issuesByLine.has(lineNum)) {
1611
- attr.issuesIntroduced += issuesByLine.get(lineNum);
1612
- }
1613
- });
1614
- // Count unique commits per author
1615
- const commitsByAuthor = new Map();
1616
- for (const entry of entries) {
1617
- const key = entry.email || entry.author;
1618
- if (!commitsByAuthor.has(key))
1619
- commitsByAuthor.set(key, new Set());
1620
- commitsByAuthor.get(key).add(entry.hash);
1621
- }
1622
- const total = entries.length || 1;
1623
- const results = [];
1624
- for (const [key, attr] of byAuthor) {
1625
- attr.commits = commitsByAuthor.get(key)?.size ?? 0;
1626
- attr.avgScoreImpact = (attr.linesChanged / total) * report.score;
1627
- results.push(attr);
1628
- }
1629
- return results.sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
1630
- }
1631
- /** Blame for a specific rule across all files in targetPath. */
1632
- static async analyzeRuleBlame(rule, targetPath) {
1633
- assertGitRepo(targetPath);
1634
- const tsFiles = fs
1635
- .readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
1636
- .filter((f) => (f.endsWith('.ts') || f.endsWith('.tsx')) && !f.includes('node_modules'))
1637
- .map(f => path.join(targetPath, f));
1638
- const combined = new Map();
1639
- for (const file of tsFiles) {
1640
- const report = analyzeFilePath(file);
1641
- const ruleIssues = report.issues.filter(i => i.rule === rule);
1642
- if (ruleIssues.length === 0)
1643
- continue;
1644
- let blameEntries = [];
1645
- try {
1646
- const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath);
1647
- blameEntries = parseGitBlame(blameOutput);
1648
- }
1649
- catch {
1650
- continue;
1651
- }
1652
- for (const issue of ruleIssues) {
1653
- const entry = blameEntries[issue.line - 1];
1654
- if (!entry)
1655
- continue;
1656
- const key = entry.email || entry.author;
1657
- if (!combined.has(key)) {
1658
- combined.set(key, {
1659
- author: entry.author,
1660
- email: entry.email,
1661
- commits: 0,
1662
- linesChanged: 0,
1663
- issuesIntroduced: 0,
1664
- avgScoreImpact: 0,
1665
- });
1666
- }
1667
- const attr = combined.get(key);
1668
- attr.issuesIntroduced++;
1669
- attr.avgScoreImpact += RULE_WEIGHTS[rule]?.weight ?? 5;
1670
- }
1671
- }
1672
- return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
1673
- }
1674
- /** Overall blame across all files and rules. */
1675
- static async analyzeOverallBlame(targetPath) {
1676
- assertGitRepo(targetPath);
1677
- const tsFiles = fs
1678
- .readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
1679
- .filter((f) => (f.endsWith('.ts') || f.endsWith('.tsx')) && !f.includes('node_modules'))
1680
- .map(f => path.join(targetPath, f));
1681
- const combined = new Map();
1682
- const commitsByAuthor = new Map();
1683
- for (const file of tsFiles) {
1684
- let blameEntries = [];
1685
- try {
1686
- const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath);
1687
- blameEntries = parseGitBlame(blameOutput);
1688
- }
1689
- catch {
1690
- continue;
1691
- }
1692
- const report = analyzeFilePath(file);
1693
- const issuesByLine = new Map();
1694
- for (const issue of report.issues) {
1695
- issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1);
1696
- }
1697
- blameEntries.forEach((entry, idx) => {
1698
- const key = entry.email || entry.author;
1699
- if (!combined.has(key)) {
1700
- combined.set(key, {
1701
- author: entry.author,
1702
- email: entry.email,
1703
- commits: 0,
1704
- linesChanged: 0,
1705
- issuesIntroduced: 0,
1706
- avgScoreImpact: 0,
1707
- });
1708
- commitsByAuthor.set(key, new Set());
1709
- }
1710
- const attr = combined.get(key);
1711
- attr.linesChanged++;
1712
- commitsByAuthor.get(key).add(entry.hash);
1713
- const lineNum = idx + 1;
1714
- if (issuesByLine.has(lineNum)) {
1715
- attr.issuesIntroduced += issuesByLine.get(lineNum);
1716
- attr.avgScoreImpact += report.score * (1 / (blameEntries.length || 1));
1717
- }
1718
- });
1719
- }
1720
- for (const [key, attr] of combined) {
1721
- attr.commits = commitsByAuthor.get(key)?.size ?? 0;
1722
- }
1723
- return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced);
1724
- }
1725
- // --- Instance method -------------------------------------------------------
1726
- async analyzeBlame(options) {
1727
- assertGitRepo(this.projectPath);
1728
- let blame = [];
1729
- const mode = options.target ?? 'overall';
1730
- if (mode === 'file' && options.filePath) {
1731
- blame = await BlameAnalyzer.analyzeFileBlame(options.filePath);
1732
- }
1733
- else if (mode === 'rule' && options.rule) {
1734
- blame = await BlameAnalyzer.analyzeRuleBlame(options.rule, this.projectPath);
1735
- }
1736
- else {
1737
- blame = await BlameAnalyzer.analyzeOverallBlame(this.projectPath);
1738
- }
1739
- if (options.top) {
1740
- blame = blame.slice(0, options.top);
1741
- }
1742
- const currentFiles = analyzeProject(this.projectPath, this.config);
1743
- const baseReport = buildReport(this.projectPath, currentFiles);
1744
- return { ...baseReport, blame };
1745
- }
1746
- }
1747
402
  //# sourceMappingURL=analyzer.js.map