@eduardbar/drift 0.9.1 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/actions/drift-scan/README.md +61 -0
- package/.github/actions/drift-scan/action.yml +65 -0
- package/.github/workflows/publish-vscode.yml +78 -0
- package/AGENTS.md +83 -23
- package/README.md +69 -2
- package/ROADMAP.md +130 -98
- package/dist/analyzer.d.ts +8 -38
- package/dist/analyzer.js +181 -1526
- package/dist/badge.js +40 -22
- package/dist/ci.js +32 -18
- package/dist/cli.js +125 -4
- package/dist/config.js +1 -1
- package/dist/diff.d.ts +0 -7
- package/dist/diff.js +26 -25
- package/dist/fix.d.ts +17 -0
- package/dist/fix.js +132 -0
- package/dist/git/blame.d.ts +22 -0
- package/dist/git/blame.js +227 -0
- package/dist/git/helpers.d.ts +36 -0
- package/dist/git/helpers.js +152 -0
- package/dist/git/trend.d.ts +21 -0
- package/dist/git/trend.js +81 -0
- package/dist/git.d.ts +0 -13
- package/dist/git.js +27 -21
- package/dist/index.d.ts +5 -1
- package/dist/index.js +3 -0
- package/dist/map.d.ts +3 -0
- package/dist/map.js +103 -0
- package/dist/metrics.d.ts +4 -0
- package/dist/metrics.js +176 -0
- package/dist/plugins.d.ts +6 -0
- package/dist/plugins.js +74 -0
- package/dist/printer.js +20 -0
- package/dist/report.js +654 -293
- package/dist/reporter.js +85 -2
- package/dist/review.d.ts +15 -0
- package/dist/review.js +80 -0
- package/dist/rules/comments.d.ts +4 -0
- package/dist/rules/comments.js +45 -0
- package/dist/rules/complexity.d.ts +4 -0
- package/dist/rules/complexity.js +51 -0
- package/dist/rules/coupling.d.ts +4 -0
- package/dist/rules/coupling.js +19 -0
- package/dist/rules/magic.d.ts +4 -0
- package/dist/rules/magic.js +33 -0
- package/dist/rules/nesting.d.ts +5 -0
- package/dist/rules/nesting.js +82 -0
- package/dist/rules/phase0-basic.d.ts +11 -0
- package/dist/rules/phase0-basic.js +183 -0
- package/dist/rules/phase1-complexity.d.ts +7 -0
- package/dist/rules/phase1-complexity.js +8 -0
- package/dist/rules/phase2-crossfile.d.ts +23 -0
- package/dist/rules/phase2-crossfile.js +135 -0
- package/dist/rules/phase3-arch.d.ts +23 -0
- package/dist/rules/phase3-arch.js +151 -0
- package/dist/rules/phase3-configurable.d.ts +6 -0
- package/dist/rules/phase3-configurable.js +97 -0
- package/dist/rules/phase5-ai.d.ts +8 -0
- package/dist/rules/phase5-ai.js +262 -0
- package/dist/rules/phase8-semantic.d.ts +17 -0
- package/dist/rules/phase8-semantic.js +110 -0
- package/dist/rules/promise.d.ts +4 -0
- package/dist/rules/promise.js +24 -0
- package/dist/rules/shared.d.ts +7 -0
- package/dist/rules/shared.js +27 -0
- package/dist/snapshot.d.ts +19 -0
- package/dist/snapshot.js +119 -0
- package/dist/types.d.ts +69 -0
- package/dist/utils.d.ts +2 -1
- package/dist/utils.js +1 -0
- package/docs/AGENTS.md +146 -0
- package/docs/PRD.md +208 -0
- package/package.json +8 -3
- package/packages/eslint-plugin-drift/src/index.ts +1 -1
- package/packages/vscode-drift/.vscodeignore +9 -0
- package/packages/vscode-drift/LICENSE +21 -0
- package/packages/vscode-drift/README.md +64 -0
- package/packages/vscode-drift/images/icon.png +0 -0
- package/packages/vscode-drift/images/icon.svg +30 -0
- package/packages/vscode-drift/package-lock.json +485 -0
- package/packages/vscode-drift/package.json +119 -0
- package/packages/vscode-drift/src/analyzer.ts +40 -0
- package/packages/vscode-drift/src/diagnostics.ts +55 -0
- package/packages/vscode-drift/src/extension.ts +135 -0
- package/packages/vscode-drift/src/statusbar.ts +55 -0
- package/packages/vscode-drift/src/treeview.ts +110 -0
- package/packages/vscode-drift/tsconfig.json +18 -0
- package/packages/vscode-drift/vscode-drift-0.1.0.vsix +0 -0
- package/packages/vscode-drift/vscode-drift-0.1.1.vsix +0 -0
- package/src/analyzer.ts +248 -1765
- package/src/badge.ts +38 -16
- package/src/ci.ts +38 -17
- package/src/cli.ts +143 -4
- package/src/config.ts +1 -1
- package/src/diff.ts +36 -30
- package/src/fix.ts +178 -0
- package/src/git/blame.ts +279 -0
- package/src/git/helpers.ts +198 -0
- package/src/git/trend.ts +117 -0
- package/src/git.ts +33 -24
- package/src/index.ts +16 -1
- package/src/map.ts +117 -0
- package/src/metrics.ts +200 -0
- package/src/plugins.ts +76 -0
- package/src/printer.ts +20 -0
- package/src/report.ts +666 -296
- package/src/reporter.ts +95 -2
- package/src/review.ts +98 -0
- package/src/rules/comments.ts +56 -0
- package/src/rules/complexity.ts +57 -0
- package/src/rules/coupling.ts +23 -0
- package/src/rules/magic.ts +38 -0
- package/src/rules/nesting.ts +88 -0
- package/src/rules/phase0-basic.ts +194 -0
- package/src/rules/phase1-complexity.ts +8 -0
- package/src/rules/phase2-crossfile.ts +177 -0
- package/src/rules/phase3-arch.ts +183 -0
- package/src/rules/phase3-configurable.ts +132 -0
- package/src/rules/phase5-ai.ts +292 -0
- package/src/rules/phase8-semantic.ts +136 -0
- package/src/rules/promise.ts +29 -0
- package/src/rules/shared.ts +39 -0
- package/src/snapshot.ts +175 -0
- package/src/types.ts +75 -1
- package/src/utils.ts +3 -1
- package/tests/helpers.ts +45 -0
- package/tests/new-features.test.ts +153 -0
- package/tests/rules.test.ts +1269 -0
- package/vitest.config.ts +15 -0
package/src/analyzer.ts
CHANGED
|
@@ -1,25 +1,63 @@
|
|
|
1
|
-
|
|
2
|
-
import * as crypto from 'node:crypto'
|
|
1
|
+
// drift-ignore-file
|
|
3
2
|
import * as path from 'node:path'
|
|
4
|
-
import
|
|
5
|
-
import {
|
|
3
|
+
import { Project } from 'ts-morph'
|
|
4
|
+
import type { DriftIssue, FileReport, DriftConfig, LoadedPlugin, PluginRuleContext } from './types.js'
|
|
5
|
+
|
|
6
|
+
// Rules
|
|
7
|
+
import { isFileIgnored } from './rules/shared.js'
|
|
8
|
+
import {
|
|
9
|
+
detectLargeFile,
|
|
10
|
+
detectLargeFunctions,
|
|
11
|
+
detectDebugLeftovers,
|
|
12
|
+
detectDeadCode,
|
|
13
|
+
detectDuplicateFunctionNames,
|
|
14
|
+
detectAnyAbuse,
|
|
15
|
+
detectCatchSwallow,
|
|
16
|
+
detectMissingReturnTypes,
|
|
17
|
+
} from './rules/phase0-basic.js'
|
|
18
|
+
import { detectHighComplexity } from './rules/complexity.js'
|
|
19
|
+
import { detectDeepNesting, detectTooManyParams } from './rules/nesting.js'
|
|
20
|
+
import { detectHighCoupling } from './rules/coupling.js'
|
|
21
|
+
import { detectPromiseStyleMix } from './rules/promise.js'
|
|
22
|
+
import { detectMagicNumbers } from './rules/magic.js'
|
|
23
|
+
import { detectCommentContradiction } from './rules/comments.js'
|
|
24
|
+
import {
|
|
25
|
+
detectDeadFiles,
|
|
26
|
+
detectUnusedExports,
|
|
27
|
+
detectUnusedDependencies,
|
|
28
|
+
} from './rules/phase2-crossfile.js'
|
|
29
|
+
import {
|
|
30
|
+
detectCircularDependencies,
|
|
31
|
+
detectLayerViolations,
|
|
32
|
+
detectCrossBoundaryImports,
|
|
33
|
+
} from './rules/phase3-arch.js'
|
|
34
|
+
import {
|
|
35
|
+
detectControllerNoDb,
|
|
36
|
+
detectServiceNoHttp,
|
|
37
|
+
detectMaxFunctionLines,
|
|
38
|
+
} from './rules/phase3-configurable.js'
|
|
39
|
+
import {
|
|
40
|
+
detectOverCommented,
|
|
41
|
+
detectHardcodedConfig,
|
|
42
|
+
detectInconsistentErrorHandling,
|
|
43
|
+
detectUnnecessaryAbstraction,
|
|
44
|
+
detectNamingInconsistency,
|
|
45
|
+
} from './rules/phase5-ai.js'
|
|
6
46
|
import {
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
} from '
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
import { buildReport } from './reporter.js'
|
|
47
|
+
collectFunctions,
|
|
48
|
+
fingerprintFunction,
|
|
49
|
+
calculateScore,
|
|
50
|
+
} from './rules/phase8-semantic.js'
|
|
51
|
+
import { loadPlugins } from './plugins.js'
|
|
52
|
+
|
|
53
|
+
// Git analyzers (re-exported as part of the public API)
|
|
54
|
+
export { TrendAnalyzer } from './git/trend.js'
|
|
55
|
+
export { BlameAnalyzer } from './git/blame.js'
|
|
56
|
+
|
|
57
|
+
// ---------------------------------------------------------------------------
|
|
58
|
+
// Rule weights — single source of truth for severities and drift score weights
|
|
59
|
+
// ---------------------------------------------------------------------------
|
|
21
60
|
|
|
22
|
-
// Rules and their drift score weight
|
|
23
61
|
export const RULE_WEIGHTS: Record<string, { severity: DriftIssue['severity']; weight: number }> = {
|
|
24
62
|
'large-file': { severity: 'error', weight: 20 },
|
|
25
63
|
'large-function': { severity: 'error', weight: 15 },
|
|
@@ -46,964 +84,113 @@ export const RULE_WEIGHTS: Record<string, { severity: DriftIssue['severity']; we
|
|
|
46
84
|
// Phase 3b/c: layer and module boundary enforcement (require drift.config.ts)
|
|
47
85
|
'layer-violation': { severity: 'error', weight: 16 },
|
|
48
86
|
'cross-boundary-import': { severity: 'warning', weight: 10 },
|
|
87
|
+
'controller-no-db': { severity: 'warning', weight: 11 },
|
|
88
|
+
'service-no-http': { severity: 'warning', weight: 11 },
|
|
89
|
+
'max-function-lines': { severity: 'warning', weight: 9 },
|
|
49
90
|
// Phase 5: AI authorship heuristics
|
|
50
91
|
'over-commented': { severity: 'info', weight: 4 },
|
|
51
92
|
'hardcoded-config': { severity: 'warning', weight: 10 },
|
|
52
93
|
'inconsistent-error-handling': { severity: 'warning', weight: 8 },
|
|
53
|
-
'unnecessary-abstraction': { severity: 'warning', weight: 7 },
|
|
54
|
-
'naming-inconsistency': { severity: 'warning', weight: 6 },
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
}
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
const startLine = node.getStartLineNumber()
|
|
78
|
-
const lines = file.getFullText().split('\n')
|
|
79
|
-
return lines
|
|
80
|
-
.slice(Math.max(0, startLine - 1), startLine + 1)
|
|
81
|
-
.join('\n')
|
|
82
|
-
.trim()
|
|
83
|
-
.slice(0, 120)
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
function getFunctionLikeLines(node: FunctionLike): number {
|
|
87
|
-
return node.getEndLineNumber() - node.getStartLineNumber()
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
// ---------------------------------------------------------------------------
|
|
91
|
-
// Existing rules
|
|
92
|
-
// ---------------------------------------------------------------------------
|
|
93
|
-
|
|
94
|
-
function detectLargeFile(file: SourceFile): DriftIssue[] {
|
|
95
|
-
const lineCount = file.getEndLineNumber()
|
|
96
|
-
if (lineCount > 300) {
|
|
97
|
-
return [
|
|
98
|
-
{
|
|
99
|
-
rule: 'large-file',
|
|
100
|
-
severity: 'error',
|
|
101
|
-
message: `File has ${lineCount} lines (threshold: 300). Large files are the #1 sign of AI-generated structural drift.`,
|
|
102
|
-
line: 1,
|
|
103
|
-
column: 1,
|
|
104
|
-
snippet: `// ${lineCount} lines total`,
|
|
105
|
-
},
|
|
106
|
-
]
|
|
107
|
-
}
|
|
108
|
-
return []
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
function detectLargeFunctions(file: SourceFile): DriftIssue[] {
|
|
112
|
-
const issues: DriftIssue[] = []
|
|
113
|
-
const fns: FunctionLike[] = [
|
|
114
|
-
...file.getFunctions(),
|
|
115
|
-
...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
|
|
116
|
-
...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
|
|
117
|
-
...file.getClasses().flatMap((c) => c.getMethods()),
|
|
118
|
-
]
|
|
119
|
-
|
|
120
|
-
for (const fn of fns) {
|
|
121
|
-
const lines = getFunctionLikeLines(fn)
|
|
122
|
-
const startLine = fn.getStartLineNumber()
|
|
123
|
-
if (lines > 50) {
|
|
124
|
-
if (hasIgnoreComment(file, startLine)) continue
|
|
125
|
-
issues.push({
|
|
126
|
-
rule: 'large-function',
|
|
127
|
-
severity: 'error',
|
|
128
|
-
message: `Function spans ${lines} lines (threshold: 50). AI tends to dump logic into single functions.`,
|
|
129
|
-
line: startLine,
|
|
130
|
-
column: fn.getStartLinePos(),
|
|
131
|
-
snippet: getSnippet(fn, file),
|
|
132
|
-
})
|
|
133
|
-
}
|
|
134
|
-
}
|
|
135
|
-
return issues
|
|
136
|
-
}
|
|
137
|
-
|
|
138
|
-
function detectDebugLeftovers(file: SourceFile): DriftIssue[] {
|
|
139
|
-
const issues: DriftIssue[] = []
|
|
140
|
-
|
|
141
|
-
for (const call of file.getDescendantsOfKind(SyntaxKind.CallExpression)) {
|
|
142
|
-
const expr = call.getExpression().getText()
|
|
143
|
-
const line = call.getStartLineNumber()
|
|
144
|
-
if (/^console\.(log|warn|error|debug|info)\b/.test(expr)) {
|
|
145
|
-
if (hasIgnoreComment(file, line)) continue
|
|
146
|
-
issues.push({
|
|
147
|
-
rule: 'debug-leftover',
|
|
148
|
-
severity: 'warning',
|
|
149
|
-
message: `console.${expr.split('.')[1]} left in production code.`,
|
|
150
|
-
line,
|
|
151
|
-
column: call.getStartLinePos(),
|
|
152
|
-
snippet: getSnippet(call, file),
|
|
153
|
-
})
|
|
154
|
-
}
|
|
155
|
-
}
|
|
156
|
-
|
|
157
|
-
const lines = file.getFullText().split('\n')
|
|
158
|
-
lines.forEach((lineContent, i) => {
|
|
159
|
-
if (/\/\/\s*(TODO|FIXME|HACK|XXX|TEMP)\b/i.test(lineContent)) {
|
|
160
|
-
if (hasIgnoreComment(file, i + 1)) return
|
|
161
|
-
issues.push({
|
|
162
|
-
rule: 'debug-leftover',
|
|
163
|
-
severity: 'warning',
|
|
164
|
-
message: `Unresolved marker found: ${lineContent.trim().slice(0, 60)}`,
|
|
165
|
-
line: i + 1,
|
|
166
|
-
column: 1,
|
|
167
|
-
snippet: lineContent.trim().slice(0, 120),
|
|
168
|
-
})
|
|
169
|
-
}
|
|
170
|
-
})
|
|
171
|
-
|
|
172
|
-
return issues
|
|
173
|
-
}
|
|
174
|
-
|
|
175
|
-
function detectDeadCode(file: SourceFile): DriftIssue[] {
|
|
176
|
-
const issues: DriftIssue[] = []
|
|
177
|
-
|
|
178
|
-
for (const imp of file.getImportDeclarations()) {
|
|
179
|
-
for (const named of imp.getNamedImports()) {
|
|
180
|
-
const name = named.getName()
|
|
181
|
-
const refs = file.getDescendantsOfKind(SyntaxKind.Identifier).filter(
|
|
182
|
-
(id) => id.getText() === name && id !== named.getNameNode()
|
|
183
|
-
)
|
|
184
|
-
if (refs.length === 0) {
|
|
185
|
-
issues.push({
|
|
186
|
-
rule: 'dead-code',
|
|
187
|
-
severity: 'warning',
|
|
188
|
-
message: `Unused import '${name}'. AI often imports more than it uses.`,
|
|
189
|
-
line: imp.getStartLineNumber(),
|
|
190
|
-
column: imp.getStartLinePos(),
|
|
191
|
-
snippet: getSnippet(imp, file),
|
|
192
|
-
})
|
|
193
|
-
}
|
|
194
|
-
}
|
|
195
|
-
}
|
|
196
|
-
|
|
197
|
-
return issues
|
|
198
|
-
}
|
|
199
|
-
|
|
200
|
-
function detectDuplicateFunctionNames(file: SourceFile): DriftIssue[] {
|
|
201
|
-
const issues: DriftIssue[] = []
|
|
202
|
-
const seen = new Map<string, number>()
|
|
203
|
-
|
|
204
|
-
const fns = file.getFunctions()
|
|
205
|
-
for (const fn of fns) {
|
|
206
|
-
const name = fn.getName()
|
|
207
|
-
if (!name) continue
|
|
208
|
-
const normalized = name.toLowerCase().replace(/[_-]/g, '')
|
|
209
|
-
if (seen.has(normalized)) {
|
|
210
|
-
issues.push({
|
|
211
|
-
rule: 'duplicate-function-name',
|
|
212
|
-
severity: 'error',
|
|
213
|
-
message: `Function '${name}' looks like a duplicate of a previously defined function. AI often generates near-identical helpers.`,
|
|
214
|
-
line: fn.getStartLineNumber(),
|
|
215
|
-
column: fn.getStartLinePos(),
|
|
216
|
-
snippet: getSnippet(fn, file),
|
|
217
|
-
})
|
|
218
|
-
} else {
|
|
219
|
-
seen.set(normalized, fn.getStartLineNumber())
|
|
220
|
-
}
|
|
221
|
-
}
|
|
222
|
-
return issues
|
|
223
|
-
}
|
|
224
|
-
|
|
225
|
-
function detectAnyAbuse(file: SourceFile): DriftIssue[] {
|
|
226
|
-
const issues: DriftIssue[] = []
|
|
227
|
-
for (const node of file.getDescendantsOfKind(SyntaxKind.AnyKeyword)) {
|
|
228
|
-
issues.push({
|
|
229
|
-
rule: 'any-abuse',
|
|
230
|
-
severity: 'warning',
|
|
231
|
-
message: `Explicit 'any' type detected. AI defaults to 'any' when it can't infer types properly.`,
|
|
232
|
-
line: node.getStartLineNumber(),
|
|
233
|
-
column: node.getStartLinePos(),
|
|
234
|
-
snippet: getSnippet(node, file),
|
|
235
|
-
})
|
|
236
|
-
}
|
|
237
|
-
return issues
|
|
238
|
-
}
|
|
239
|
-
|
|
240
|
-
function detectCatchSwallow(file: SourceFile): DriftIssue[] {
|
|
241
|
-
const issues: DriftIssue[] = []
|
|
242
|
-
for (const tryCatch of file.getDescendantsOfKind(SyntaxKind.TryStatement)) {
|
|
243
|
-
const catchClause = tryCatch.getCatchClause()
|
|
244
|
-
if (!catchClause) continue
|
|
245
|
-
const block = catchClause.getBlock()
|
|
246
|
-
const stmts = block.getStatements()
|
|
247
|
-
if (stmts.length === 0) {
|
|
248
|
-
issues.push({
|
|
249
|
-
rule: 'catch-swallow',
|
|
250
|
-
severity: 'warning',
|
|
251
|
-
message: `Empty catch block silently swallows errors. Classic AI pattern to make code "not throw".`,
|
|
252
|
-
line: catchClause.getStartLineNumber(),
|
|
253
|
-
column: catchClause.getStartLinePos(),
|
|
254
|
-
snippet: getSnippet(catchClause, file),
|
|
255
|
-
})
|
|
256
|
-
}
|
|
257
|
-
}
|
|
258
|
-
return issues
|
|
259
|
-
}
|
|
260
|
-
|
|
261
|
-
function detectMissingReturnTypes(file: SourceFile): DriftIssue[] {
|
|
262
|
-
const issues: DriftIssue[] = []
|
|
263
|
-
for (const fn of file.getFunctions()) {
|
|
264
|
-
if (!fn.getReturnTypeNode()) {
|
|
265
|
-
issues.push({
|
|
266
|
-
rule: 'no-return-type',
|
|
267
|
-
severity: 'info',
|
|
268
|
-
message: `Function '${fn.getName() ?? 'anonymous'}' has no explicit return type.`,
|
|
269
|
-
line: fn.getStartLineNumber(),
|
|
270
|
-
column: fn.getStartLinePos(),
|
|
271
|
-
snippet: getSnippet(fn, file),
|
|
272
|
-
})
|
|
273
|
-
}
|
|
274
|
-
}
|
|
275
|
-
return issues
|
|
276
|
-
}
|
|
277
|
-
|
|
278
|
-
// ---------------------------------------------------------------------------
|
|
279
|
-
// Phase 1: complexity detection rules
|
|
280
|
-
// ---------------------------------------------------------------------------
|
|
281
|
-
|
|
282
|
-
/**
|
|
283
|
-
* Cyclomatic complexity: count decision points in a function.
|
|
284
|
-
* Each if/else if/ternary/?:/for/while/do/case/catch/&&/|| adds 1.
|
|
285
|
-
* Threshold: > 10 is considered high complexity.
|
|
286
|
-
*/
|
|
287
|
-
function getCyclomaticComplexity(fn: FunctionLike): number {
|
|
288
|
-
let complexity = 1 // base path
|
|
289
|
-
|
|
290
|
-
const incrementKinds = [
|
|
291
|
-
SyntaxKind.IfStatement,
|
|
292
|
-
SyntaxKind.ForStatement,
|
|
293
|
-
SyntaxKind.ForInStatement,
|
|
294
|
-
SyntaxKind.ForOfStatement,
|
|
295
|
-
SyntaxKind.WhileStatement,
|
|
296
|
-
SyntaxKind.DoStatement,
|
|
297
|
-
SyntaxKind.CaseClause,
|
|
298
|
-
SyntaxKind.CatchClause,
|
|
299
|
-
SyntaxKind.ConditionalExpression, // ternary
|
|
300
|
-
SyntaxKind.AmpersandAmpersandToken,
|
|
301
|
-
SyntaxKind.BarBarToken,
|
|
302
|
-
SyntaxKind.QuestionQuestionToken, // ??
|
|
303
|
-
]
|
|
304
|
-
|
|
305
|
-
for (const kind of incrementKinds) {
|
|
306
|
-
complexity += fn.getDescendantsOfKind(kind).length
|
|
307
|
-
}
|
|
308
|
-
|
|
309
|
-
return complexity
|
|
310
|
-
}
|
|
311
|
-
|
|
312
|
-
function detectHighComplexity(file: SourceFile): DriftIssue[] {
|
|
313
|
-
const issues: DriftIssue[] = []
|
|
314
|
-
const fns: FunctionLike[] = [
|
|
315
|
-
...file.getFunctions(),
|
|
316
|
-
...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
|
|
317
|
-
...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
|
|
318
|
-
...file.getClasses().flatMap((c) => c.getMethods()),
|
|
319
|
-
]
|
|
320
|
-
|
|
321
|
-
for (const fn of fns) {
|
|
322
|
-
const complexity = getCyclomaticComplexity(fn)
|
|
323
|
-
if (complexity > 10) {
|
|
324
|
-
const startLine = fn.getStartLineNumber()
|
|
325
|
-
if (hasIgnoreComment(file, startLine)) continue
|
|
326
|
-
issues.push({
|
|
327
|
-
rule: 'high-complexity',
|
|
328
|
-
severity: 'error',
|
|
329
|
-
message: `Cyclomatic complexity is ${complexity} (threshold: 10). AI generates correct code, not simple code.`,
|
|
330
|
-
line: startLine,
|
|
331
|
-
column: fn.getStartLinePos(),
|
|
332
|
-
snippet: getSnippet(fn, file),
|
|
333
|
-
})
|
|
334
|
-
}
|
|
335
|
-
}
|
|
336
|
-
return issues
|
|
337
|
-
}
|
|
338
|
-
|
|
339
|
-
/**
|
|
340
|
-
* Deep nesting: count the maximum nesting depth of control flow inside a function.
|
|
341
|
-
* Counts: if, for, while, do, try, switch.
|
|
342
|
-
* Threshold: > 3 levels.
|
|
343
|
-
*/
|
|
344
|
-
function getMaxNestingDepth(fn: FunctionLike): number {
|
|
345
|
-
const nestingKinds = new Set([
|
|
346
|
-
SyntaxKind.IfStatement,
|
|
347
|
-
SyntaxKind.ForStatement,
|
|
348
|
-
SyntaxKind.ForInStatement,
|
|
349
|
-
SyntaxKind.ForOfStatement,
|
|
350
|
-
SyntaxKind.WhileStatement,
|
|
351
|
-
SyntaxKind.DoStatement,
|
|
352
|
-
SyntaxKind.TryStatement,
|
|
353
|
-
SyntaxKind.SwitchStatement,
|
|
354
|
-
])
|
|
355
|
-
|
|
356
|
-
let maxDepth = 0
|
|
357
|
-
|
|
358
|
-
function walk(node: Node, depth: number): void {
|
|
359
|
-
if (nestingKinds.has(node.getKind())) {
|
|
360
|
-
depth++
|
|
361
|
-
if (depth > maxDepth) maxDepth = depth
|
|
362
|
-
}
|
|
363
|
-
for (const child of node.getChildren()) {
|
|
364
|
-
walk(child, depth)
|
|
365
|
-
}
|
|
366
|
-
}
|
|
367
|
-
|
|
368
|
-
walk(fn, 0)
|
|
369
|
-
return maxDepth
|
|
370
|
-
}
|
|
371
|
-
|
|
372
|
-
function detectDeepNesting(file: SourceFile): DriftIssue[] {
|
|
373
|
-
const issues: DriftIssue[] = []
|
|
374
|
-
const fns: FunctionLike[] = [
|
|
375
|
-
...file.getFunctions(),
|
|
376
|
-
...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
|
|
377
|
-
...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
|
|
378
|
-
...file.getClasses().flatMap((c) => c.getMethods()),
|
|
379
|
-
]
|
|
380
|
-
|
|
381
|
-
for (const fn of fns) {
|
|
382
|
-
const depth = getMaxNestingDepth(fn)
|
|
383
|
-
if (depth > 3) {
|
|
384
|
-
const startLine = fn.getStartLineNumber()
|
|
385
|
-
if (hasIgnoreComment(file, startLine)) continue
|
|
386
|
-
issues.push({
|
|
387
|
-
rule: 'deep-nesting',
|
|
388
|
-
severity: 'warning',
|
|
389
|
-
message: `Maximum nesting depth is ${depth} (threshold: 3). Deep nesting is the #1 readability killer.`,
|
|
390
|
-
line: startLine,
|
|
391
|
-
column: fn.getStartLinePos(),
|
|
392
|
-
snippet: getSnippet(fn, file),
|
|
393
|
-
})
|
|
394
|
-
}
|
|
395
|
-
}
|
|
396
|
-
return issues
|
|
397
|
-
}
|
|
398
|
-
|
|
399
|
-
/**
|
|
400
|
-
* Too many parameters: functions with more than 4 parameters.
|
|
401
|
-
* AI avoids refactoring parameters into objects/options bags.
|
|
402
|
-
*/
|
|
403
|
-
function detectTooManyParams(file: SourceFile): DriftIssue[] {
|
|
404
|
-
const issues: DriftIssue[] = []
|
|
405
|
-
const fns: FunctionLike[] = [
|
|
406
|
-
...file.getFunctions(),
|
|
407
|
-
...file.getDescendantsOfKind(SyntaxKind.ArrowFunction),
|
|
408
|
-
...file.getDescendantsOfKind(SyntaxKind.FunctionExpression),
|
|
409
|
-
...file.getClasses().flatMap((c) => c.getMethods()),
|
|
410
|
-
]
|
|
411
|
-
|
|
412
|
-
for (const fn of fns) {
|
|
413
|
-
const paramCount = fn.getParameters().length
|
|
414
|
-
if (paramCount > 4) {
|
|
415
|
-
const startLine = fn.getStartLineNumber()
|
|
416
|
-
if (hasIgnoreComment(file, startLine)) continue
|
|
417
|
-
issues.push({
|
|
418
|
-
rule: 'too-many-params',
|
|
419
|
-
severity: 'warning',
|
|
420
|
-
message: `Function has ${paramCount} parameters (threshold: 4). AI avoids refactoring into options objects.`,
|
|
421
|
-
line: startLine,
|
|
422
|
-
column: fn.getStartLinePos(),
|
|
423
|
-
snippet: getSnippet(fn, file),
|
|
424
|
-
})
|
|
425
|
-
}
|
|
426
|
-
}
|
|
427
|
-
return issues
|
|
428
|
-
}
|
|
429
|
-
|
|
430
|
-
/**
|
|
431
|
-
* High coupling: files with more than 10 distinct import sources.
|
|
432
|
-
* AI imports broadly without considering module cohesion.
|
|
433
|
-
*/
|
|
434
|
-
function detectHighCoupling(file: SourceFile): DriftIssue[] {
|
|
435
|
-
const imports = file.getImportDeclarations()
|
|
436
|
-
const sources = new Set(imports.map((i) => i.getModuleSpecifierValue()))
|
|
437
|
-
|
|
438
|
-
if (sources.size > 10) {
|
|
439
|
-
return [
|
|
440
|
-
{
|
|
441
|
-
rule: 'high-coupling',
|
|
442
|
-
severity: 'warning',
|
|
443
|
-
message: `File imports from ${sources.size} distinct modules (threshold: 10). High coupling makes refactoring dangerous.`,
|
|
444
|
-
line: 1,
|
|
445
|
-
column: 1,
|
|
446
|
-
snippet: `// ${sources.size} import sources`,
|
|
447
|
-
},
|
|
448
|
-
]
|
|
449
|
-
}
|
|
450
|
-
return []
|
|
451
|
-
}
|
|
452
|
-
|
|
453
|
-
/**
|
|
454
|
-
* Promise style mix: async/await and .then()/.catch() used in the same file.
|
|
455
|
-
* AI generates both styles without consistency.
|
|
456
|
-
*/
|
|
457
|
-
function detectPromiseStyleMix(file: SourceFile): DriftIssue[] {
|
|
458
|
-
const text = file.getFullText()
|
|
459
|
-
|
|
460
|
-
// detect .then( or .catch( calls (property access on a promise)
|
|
461
|
-
const hasThen = file.getDescendantsOfKind(SyntaxKind.PropertyAccessExpression).some((node) => {
|
|
462
|
-
const name = node.getName()
|
|
463
|
-
return name === 'then' || name === 'catch'
|
|
464
|
-
})
|
|
465
|
-
|
|
466
|
-
// detect async keyword usage
|
|
467
|
-
const hasAsync =
|
|
468
|
-
file.getDescendantsOfKind(SyntaxKind.AsyncKeyword).length > 0 ||
|
|
469
|
-
/\bawait\b/.test(text)
|
|
470
|
-
|
|
471
|
-
if (hasThen && hasAsync) {
|
|
472
|
-
return [
|
|
473
|
-
{
|
|
474
|
-
rule: 'promise-style-mix',
|
|
475
|
-
severity: 'warning',
|
|
476
|
-
message: `File mixes async/await with .then()/.catch(). AI generates both styles without picking one.`,
|
|
477
|
-
line: 1,
|
|
478
|
-
column: 1,
|
|
479
|
-
snippet: `// mixed promise styles detected`,
|
|
480
|
-
},
|
|
481
|
-
]
|
|
482
|
-
}
|
|
483
|
-
return []
|
|
484
|
-
}
|
|
485
|
-
|
|
486
|
-
/**
|
|
487
|
-
* Magic numbers: numeric literals used directly in logic outside of named constants.
|
|
488
|
-
* Excludes 0, 1, -1 (universally understood) and array indices in obvious patterns.
|
|
489
|
-
*/
|
|
490
|
-
function detectMagicNumbers(file: SourceFile): DriftIssue[] {
|
|
491
|
-
const issues: DriftIssue[] = []
|
|
492
|
-
const ALLOWED = new Set([0, 1, -1, 2, 100])
|
|
493
|
-
|
|
494
|
-
for (const node of file.getDescendantsOfKind(SyntaxKind.NumericLiteral)) {
|
|
495
|
-
const value = Number(node.getLiteralValue())
|
|
496
|
-
if (ALLOWED.has(value)) continue
|
|
497
|
-
|
|
498
|
-
// Skip: variable/const initializers at top level (those ARE the named constants)
|
|
499
|
-
const parent = node.getParent()
|
|
500
|
-
if (!parent) continue
|
|
501
|
-
const parentKind = parent.getKind()
|
|
502
|
-
if (
|
|
503
|
-
parentKind === SyntaxKind.VariableDeclaration ||
|
|
504
|
-
parentKind === SyntaxKind.PropertyAssignment ||
|
|
505
|
-
parentKind === SyntaxKind.EnumMember ||
|
|
506
|
-
parentKind === SyntaxKind.Parameter
|
|
507
|
-
) continue
|
|
508
|
-
|
|
509
|
-
const line = node.getStartLineNumber()
|
|
510
|
-
if (hasIgnoreComment(file, line)) continue
|
|
511
|
-
|
|
512
|
-
issues.push({
|
|
513
|
-
rule: 'magic-number',
|
|
514
|
-
severity: 'info',
|
|
515
|
-
message: `Magic number ${value} used directly in logic. Extract to a named constant.`,
|
|
516
|
-
line,
|
|
517
|
-
column: node.getStartLinePos(),
|
|
518
|
-
snippet: getSnippet(node, file),
|
|
519
|
-
})
|
|
520
|
-
}
|
|
521
|
-
return issues
|
|
522
|
-
}
|
|
523
|
-
|
|
524
|
-
/**
|
|
525
|
-
* Comment contradiction: comments that restate exactly what the code does.
|
|
526
|
-
* Classic AI pattern — documents the obvious instead of the why.
|
|
527
|
-
* Detects: "// increment counter" above counter++, "// return x" above return x, etc.
|
|
528
|
-
*/
|
|
529
|
-
function detectCommentContradiction(file: SourceFile): DriftIssue[] {
|
|
530
|
-
const issues: DriftIssue[] = []
|
|
531
|
-
const lines = file.getFullText().split('\n')
|
|
532
|
-
|
|
533
|
-
// Patterns: comment that is a near-literal restatement of the next line
|
|
534
|
-
const trivialCommentPatterns = [
|
|
535
|
-
// "// return ..." above a return statement
|
|
536
|
-
{ comment: /\/\/\s*return\b/i, code: /^\s*return\b/ },
|
|
537
|
-
// "// increment ..." or "// increase ..." above x++ or x += 1
|
|
538
|
-
{ comment: /\/\/\s*(increment|increase|add\s+1|plus\s+1)\b/i, code: /\+\+|(\+= ?1)\b/ },
|
|
539
|
-
// "// decrement ..." above x-- or x -= 1
|
|
540
|
-
{ comment: /\/\/\s*(decrement|decrease|subtract\s+1|minus\s+1)\b/i, code: /--|(-= ?1)\b/ },
|
|
541
|
-
// "// log ..." above console.log
|
|
542
|
-
{ comment: /\/\/\s*log\b/i, code: /console\.(log|warn|error)/ },
|
|
543
|
-
// "// set ... to ..." or "// assign ..." above assignment
|
|
544
|
-
{ comment: /\/\/\s*(set|assign)\b/i, code: /^\s*\w[\w.[\]]*\s*=(?!=)/ },
|
|
545
|
-
// "// call ..." above a function call
|
|
546
|
-
{ comment: /\/\/\s*call\b/i, code: /^\s*\w[\w.]*\(/ },
|
|
547
|
-
// "// declare ..." or "// define ..." or "// create ..." above const/let/var
|
|
548
|
-
{ comment: /\/\/\s*(declare|define|create|initialize)\b/i, code: /^\s*(const|let|var)\b/ },
|
|
549
|
-
// "// check if ..." above an if statement
|
|
550
|
-
{ comment: /\/\/\s*check\s+if\b/i, code: /^\s*if\s*\(/ },
|
|
551
|
-
// "// loop ..." or "// iterate ..." above for/while
|
|
552
|
-
{ comment: /\/\/\s*(loop|iterate|for each|foreach)\b/i, code: /^\s*(for|while)\b/ },
|
|
553
|
-
// "// import ..." above an import
|
|
554
|
-
{ comment: /\/\/\s*import\b/i, code: /^\s*import\b/ },
|
|
555
|
-
]
|
|
556
|
-
|
|
557
|
-
for (let i = 0; i < lines.length - 1; i++) {
|
|
558
|
-
const commentLine = lines[i].trim()
|
|
559
|
-
const nextLine = lines[i + 1]
|
|
560
|
-
|
|
561
|
-
for (const { comment, code } of trivialCommentPatterns) {
|
|
562
|
-
if (comment.test(commentLine) && code.test(nextLine)) {
|
|
563
|
-
if (hasIgnoreComment(file, i + 1)) continue
|
|
564
|
-
issues.push({
|
|
565
|
-
rule: 'comment-contradiction',
|
|
566
|
-
severity: 'warning',
|
|
567
|
-
message: `Comment restates what the code already says. AI documents the obvious instead of the why.`,
|
|
568
|
-
line: i + 1,
|
|
569
|
-
column: 1,
|
|
570
|
-
snippet: `${commentLine.slice(0, 60)}\n${nextLine.trim().slice(0, 60)}`,
|
|
571
|
-
})
|
|
572
|
-
break // one issue per comment line max
|
|
573
|
-
}
|
|
574
|
-
}
|
|
575
|
-
}
|
|
576
|
-
|
|
577
|
-
return issues
|
|
578
|
-
}
|
|
579
|
-
|
|
580
|
-
// ---------------------------------------------------------------------------
|
|
581
|
-
// Phase 5: AI authorship heuristics
|
|
582
|
-
// ---------------------------------------------------------------------------
|
|
583
|
-
|
|
584
|
-
function detectOverCommented(file: SourceFile): DriftIssue[] {
|
|
585
|
-
const issues: DriftIssue[] = []
|
|
586
|
-
|
|
587
|
-
for (const fn of file.getFunctions()) {
|
|
588
|
-
const body = fn.getBody()
|
|
589
|
-
if (!body) continue
|
|
590
|
-
|
|
591
|
-
const bodyText = body.getText()
|
|
592
|
-
const lines = bodyText.split('\n')
|
|
593
|
-
const totalLines = lines.length
|
|
594
|
-
|
|
595
|
-
if (totalLines < 6) continue
|
|
596
|
-
|
|
597
|
-
let commentLines = 0
|
|
598
|
-
for (const line of lines) {
|
|
599
|
-
const trimmed = line.trim()
|
|
600
|
-
if (trimmed.startsWith('//') || trimmed.startsWith('*') || trimmed.startsWith('/*') || trimmed.startsWith('*/')) {
|
|
601
|
-
commentLines++
|
|
602
|
-
}
|
|
603
|
-
}
|
|
604
|
-
|
|
605
|
-
const ratio = commentLines / totalLines
|
|
606
|
-
if (ratio >= 0.4) {
|
|
607
|
-
issues.push({
|
|
608
|
-
rule: 'over-commented',
|
|
609
|
-
severity: 'info',
|
|
610
|
-
message: `Function has ${Math.round(ratio * 100)}% comment density (${commentLines}/${totalLines} lines). AI documents the obvious instead of the why.`,
|
|
611
|
-
line: fn.getStartLineNumber(),
|
|
612
|
-
column: fn.getStartLinePos(),
|
|
613
|
-
snippet: fn.getName() ? `function ${fn.getName()}` : '(anonymous function)',
|
|
614
|
-
})
|
|
615
|
-
}
|
|
616
|
-
}
|
|
617
|
-
|
|
618
|
-
for (const cls of file.getClasses()) {
|
|
619
|
-
for (const method of cls.getMethods()) {
|
|
620
|
-
const body = method.getBody()
|
|
621
|
-
if (!body) continue
|
|
622
|
-
|
|
623
|
-
const bodyText = body.getText()
|
|
624
|
-
const lines = bodyText.split('\n')
|
|
625
|
-
const totalLines = lines.length
|
|
626
|
-
|
|
627
|
-
if (totalLines < 6) continue
|
|
628
|
-
|
|
629
|
-
let commentLines = 0
|
|
630
|
-
for (const line of lines) {
|
|
631
|
-
const trimmed = line.trim()
|
|
632
|
-
if (trimmed.startsWith('//') || trimmed.startsWith('*') || trimmed.startsWith('/*') || trimmed.startsWith('*/')) {
|
|
633
|
-
commentLines++
|
|
634
|
-
}
|
|
635
|
-
}
|
|
636
|
-
|
|
637
|
-
const ratio = commentLines / totalLines
|
|
638
|
-
if (ratio >= 0.4) {
|
|
639
|
-
issues.push({
|
|
640
|
-
rule: 'over-commented',
|
|
641
|
-
severity: 'info',
|
|
642
|
-
message: `Method '${method.getName()}' has ${Math.round(ratio * 100)}% comment density (${commentLines}/${totalLines} lines). AI documents the obvious instead of the why.`,
|
|
643
|
-
line: method.getStartLineNumber(),
|
|
644
|
-
column: method.getStartLinePos(),
|
|
645
|
-
snippet: `${cls.getName()}.${method.getName()}`,
|
|
646
|
-
})
|
|
647
|
-
}
|
|
648
|
-
}
|
|
649
|
-
}
|
|
650
|
-
|
|
651
|
-
return issues
|
|
652
|
-
}
|
|
653
|
-
|
|
654
|
-
function detectHardcodedConfig(file: SourceFile): DriftIssue[] {
|
|
655
|
-
const issues: DriftIssue[] = []
|
|
656
|
-
|
|
657
|
-
const CONFIG_PATTERNS: Array<{ pattern: RegExp; label: string }> = [
|
|
658
|
-
{ pattern: /^https?:\/\//i, label: 'HTTP/HTTPS URL' },
|
|
659
|
-
{ pattern: /^wss?:\/\//i, label: 'WebSocket URL' },
|
|
660
|
-
{ pattern: /^mongodb(\+srv)?:\/\//i, label: 'MongoDB connection string' },
|
|
661
|
-
{ pattern: /^postgres(?:ql)?:\/\//i, label: 'PostgreSQL connection string' },
|
|
662
|
-
{ pattern: /^mysql:\/\//i, label: 'MySQL connection string' },
|
|
663
|
-
{ pattern: /^redis:\/\//i, label: 'Redis connection string' },
|
|
664
|
-
{ pattern: /^amqps?:\/\//i, label: 'AMQP connection string' },
|
|
665
|
-
{ pattern: /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/, label: 'IP address' },
|
|
666
|
-
{ pattern: /^:[0-9]{2,5}$/, label: 'Port number in string' },
|
|
667
|
-
{ pattern: /^\/[a-z]/i, label: 'Absolute file path' },
|
|
668
|
-
{ pattern: /localhost(:[0-9]+)?/i, label: 'localhost reference' },
|
|
669
|
-
]
|
|
670
|
-
|
|
671
|
-
const filePath = file.getFilePath().replace(/\\/g, '/')
|
|
672
|
-
if (filePath.includes('.test.') || filePath.includes('.spec.') || filePath.includes('__tests__')) {
|
|
673
|
-
return issues
|
|
674
|
-
}
|
|
675
|
-
|
|
676
|
-
for (const node of file.getDescendantsOfKind(SyntaxKind.StringLiteral)) {
|
|
677
|
-
const value = node.getLiteralValue()
|
|
678
|
-
if (!value || value.length < 4) continue
|
|
679
|
-
|
|
680
|
-
const parent = node.getParent()
|
|
681
|
-
if (!parent) continue
|
|
682
|
-
const parentKind = parent.getKindName()
|
|
683
|
-
if (
|
|
684
|
-
parentKind === 'ImportDeclaration' ||
|
|
685
|
-
parentKind === 'ExportDeclaration' ||
|
|
686
|
-
(parentKind === 'CallExpression' && parent.getText().startsWith('import('))
|
|
687
|
-
) continue
|
|
688
|
-
|
|
689
|
-
for (const { pattern, label } of CONFIG_PATTERNS) {
|
|
690
|
-
if (pattern.test(value)) {
|
|
691
|
-
issues.push({
|
|
692
|
-
rule: 'hardcoded-config',
|
|
693
|
-
severity: 'warning',
|
|
694
|
-
message: `Hardcoded ${label} detected. AI skips environment variables — extract to process.env or a config module.`,
|
|
695
|
-
line: node.getStartLineNumber(),
|
|
696
|
-
column: node.getStartLinePos(),
|
|
697
|
-
snippet: value.length > 60 ? value.slice(0, 60) + '...' : value,
|
|
698
|
-
})
|
|
699
|
-
break
|
|
700
|
-
}
|
|
701
|
-
}
|
|
702
|
-
}
|
|
703
|
-
|
|
704
|
-
return issues
|
|
705
|
-
}
|
|
706
|
-
|
|
707
|
-
function detectInconsistentErrorHandling(file: SourceFile): DriftIssue[] {
|
|
708
|
-
const issues: DriftIssue[] = []
|
|
709
|
-
|
|
710
|
-
let hasTryCatch = false
|
|
711
|
-
let hasDotCatch = false
|
|
712
|
-
let hasThenErrorHandler = false
|
|
713
|
-
let firstLine = 0
|
|
714
|
-
|
|
715
|
-
// Detectar try/catch
|
|
716
|
-
const tryCatches = file.getDescendantsOfKind(SyntaxKind.TryStatement)
|
|
717
|
-
if (tryCatches.length > 0) {
|
|
718
|
-
hasTryCatch = true
|
|
719
|
-
firstLine = firstLine || tryCatches[0].getStartLineNumber()
|
|
720
|
-
}
|
|
721
|
-
|
|
722
|
-
// Detectar .catch(handler) en call expressions
|
|
723
|
-
for (const call of file.getDescendantsOfKind(SyntaxKind.CallExpression)) {
|
|
724
|
-
const expr = call.getExpression()
|
|
725
|
-
if (expr.getKindName() === 'PropertyAccessExpression') {
|
|
726
|
-
const propAccess = expr.asKindOrThrow(SyntaxKind.PropertyAccessExpression)
|
|
727
|
-
const propName = propAccess.getName()
|
|
728
|
-
if (propName === 'catch') {
|
|
729
|
-
// Verificar que tiene al menos un argumento (handler real, no .catch() vacío)
|
|
730
|
-
if (call.getArguments().length > 0) {
|
|
731
|
-
hasDotCatch = true
|
|
732
|
-
if (!firstLine) firstLine = call.getStartLineNumber()
|
|
733
|
-
}
|
|
734
|
-
}
|
|
735
|
-
// Detectar .then(onFulfilled, onRejected) — segundo argumento = error handler
|
|
736
|
-
if (propName === 'then' && call.getArguments().length >= 2) {
|
|
737
|
-
hasThenErrorHandler = true
|
|
738
|
-
if (!firstLine) firstLine = call.getStartLineNumber()
|
|
739
|
-
}
|
|
740
|
-
}
|
|
741
|
-
}
|
|
742
|
-
|
|
743
|
-
const stylesUsed = [hasTryCatch, hasDotCatch, hasThenErrorHandler].filter(Boolean).length
|
|
744
|
-
|
|
745
|
-
if (stylesUsed >= 2) {
|
|
746
|
-
const styles: string[] = []
|
|
747
|
-
if (hasTryCatch) styles.push('try/catch')
|
|
748
|
-
if (hasDotCatch) styles.push('.catch()')
|
|
749
|
-
if (hasThenErrorHandler) styles.push('.then(_, handler)')
|
|
750
|
-
|
|
751
|
-
issues.push({
|
|
752
|
-
rule: 'inconsistent-error-handling',
|
|
753
|
-
severity: 'warning',
|
|
754
|
-
message: `Mixed error handling styles: ${styles.join(', ')}. AI uses whatever pattern it saw last — pick one and stick to it.`,
|
|
755
|
-
line: firstLine || 1,
|
|
756
|
-
column: 1,
|
|
757
|
-
snippet: styles.join(' + '),
|
|
758
|
-
})
|
|
759
|
-
}
|
|
760
|
-
|
|
761
|
-
return issues
|
|
762
|
-
}
|
|
763
|
-
|
|
764
|
-
function detectUnnecessaryAbstraction(file: SourceFile): DriftIssue[] {
|
|
765
|
-
const issues: DriftIssue[] = []
|
|
766
|
-
const fileText = file.getFullText()
|
|
767
|
-
|
|
768
|
-
// Interfaces con un solo método
|
|
769
|
-
for (const iface of file.getInterfaces()) {
|
|
770
|
-
const methods = iface.getMethods()
|
|
771
|
-
const properties = iface.getProperties()
|
|
772
|
-
|
|
773
|
-
// Solo reportar si tiene exactamente 1 método y 0 propiedades (abstracción pura de comportamiento)
|
|
774
|
-
if (methods.length !== 1 || properties.length !== 0) continue
|
|
775
|
-
|
|
776
|
-
const ifaceName = iface.getName()
|
|
777
|
-
|
|
778
|
-
// Contar cuántas veces aparece el nombre en el archivo (excluyendo la declaración misma)
|
|
779
|
-
const usageCount = (fileText.match(new RegExp(`\\b${ifaceName}\\b`, 'g')) ?? []).length
|
|
780
|
-
// La declaración misma cuenta como 1 uso, implementaciones cuentan como 1 cada una
|
|
781
|
-
// Si usageCount <= 2 (declaración + 1 uso), es candidata a innecesaria
|
|
782
|
-
if (usageCount <= 2) {
|
|
783
|
-
issues.push({
|
|
784
|
-
rule: 'unnecessary-abstraction',
|
|
785
|
-
severity: 'warning',
|
|
786
|
-
message: `Interface '${ifaceName}' has 1 method and is used only once. AI creates abstractions preemptively — YAGNI.`,
|
|
787
|
-
line: iface.getStartLineNumber(),
|
|
788
|
-
column: iface.getStartLinePos(),
|
|
789
|
-
snippet: `interface ${ifaceName} { ${methods[0].getName()}(...) }`,
|
|
790
|
-
})
|
|
791
|
-
}
|
|
792
|
-
}
|
|
793
|
-
|
|
794
|
-
// Clases abstractas con un solo método abstracto y sin implementaciones en el archivo
|
|
795
|
-
for (const cls of file.getClasses()) {
|
|
796
|
-
if (!cls.isAbstract()) continue
|
|
797
|
-
|
|
798
|
-
const abstractMethods = cls.getMethods().filter(m => m.isAbstract())
|
|
799
|
-
const concreteMethods = cls.getMethods().filter(m => !m.isAbstract())
|
|
800
|
-
|
|
801
|
-
if (abstractMethods.length !== 1 || concreteMethods.length !== 0) continue
|
|
802
|
-
|
|
803
|
-
const clsName = cls.getName() ?? ''
|
|
804
|
-
const usageCount = (fileText.match(new RegExp(`\\b${clsName}\\b`, 'g')) ?? []).length
|
|
805
|
-
|
|
806
|
-
if (usageCount <= 2) {
|
|
807
|
-
issues.push({
|
|
808
|
-
rule: 'unnecessary-abstraction',
|
|
809
|
-
severity: 'warning',
|
|
810
|
-
message: `Abstract class '${clsName}' has 1 abstract method and is extended nowhere in this file. AI over-engineers single-use code.`,
|
|
811
|
-
line: cls.getStartLineNumber(),
|
|
812
|
-
column: cls.getStartLinePos(),
|
|
813
|
-
snippet: `abstract class ${clsName}`,
|
|
814
|
-
})
|
|
815
|
-
}
|
|
816
|
-
}
|
|
817
|
-
|
|
818
|
-
return issues
|
|
819
|
-
}
|
|
820
|
-
|
|
821
|
-
function detectNamingInconsistency(file: SourceFile): DriftIssue[] {
|
|
822
|
-
const issues: DriftIssue[] = []
|
|
823
|
-
|
|
824
|
-
const isCamelCase = (name: string) => /^[a-z][a-zA-Z0-9]*$/.test(name) && /[A-Z]/.test(name)
|
|
825
|
-
const isSnakeCase = (name: string) => /^[a-z][a-z0-9]*(_[a-z0-9]+)+$/.test(name)
|
|
826
|
-
|
|
827
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
828
|
-
function checkFunction(fn: any): void {
|
|
829
|
-
const vars = fn.getVariableDeclarations()
|
|
830
|
-
if (vars.length < 3) return // muy pocas vars para ser significativo
|
|
831
|
-
|
|
832
|
-
let camelCount = 0
|
|
833
|
-
let snakeCount = 0
|
|
834
|
-
const snakeExamples: string[] = []
|
|
835
|
-
const camelExamples: string[] = []
|
|
836
|
-
|
|
837
|
-
for (const v of vars) {
|
|
838
|
-
const name = v.getName()
|
|
839
|
-
if (isCamelCase(name)) {
|
|
840
|
-
camelCount++
|
|
841
|
-
if (camelExamples.length < 2) camelExamples.push(name)
|
|
842
|
-
} else if (isSnakeCase(name)) {
|
|
843
|
-
snakeCount++
|
|
844
|
-
if (snakeExamples.length < 2) snakeExamples.push(name)
|
|
845
|
-
}
|
|
846
|
-
}
|
|
847
|
-
|
|
848
|
-
if (camelCount >= 1 && snakeCount >= 1) {
|
|
849
|
-
issues.push({
|
|
850
|
-
rule: 'naming-inconsistency',
|
|
851
|
-
severity: 'warning',
|
|
852
|
-
message: `Mixed naming conventions: camelCase (${camelExamples.join(', ')}) and snake_case (${snakeExamples.join(', ')}) in the same scope. AI mixes conventions from different training examples.`,
|
|
853
|
-
line: fn.getStartLineNumber(),
|
|
854
|
-
column: fn.getStartLinePos(),
|
|
855
|
-
snippet: `camelCase: ${camelExamples[0]} / snake_case: ${snakeExamples[0]}`,
|
|
856
|
-
})
|
|
857
|
-
}
|
|
858
|
-
}
|
|
859
|
-
|
|
860
|
-
for (const fn of file.getFunctions()) {
|
|
861
|
-
checkFunction(fn)
|
|
862
|
-
}
|
|
863
|
-
|
|
864
|
-
for (const cls of file.getClasses()) {
|
|
865
|
-
for (const method of cls.getMethods()) {
|
|
866
|
-
checkFunction(method)
|
|
867
|
-
}
|
|
868
|
-
}
|
|
869
|
-
|
|
870
|
-
return issues
|
|
871
|
-
}
|
|
872
|
-
|
|
873
|
-
// ---------------------------------------------------------------------------
|
|
874
|
-
// Score
|
|
875
|
-
// ---------------------------------------------------------------------------
|
|
876
|
-
|
|
877
|
-
function calculateScore(issues: DriftIssue[]): number {
|
|
878
|
-
let raw = 0
|
|
879
|
-
for (const issue of issues) {
|
|
880
|
-
raw += RULE_WEIGHTS[issue.rule]?.weight ?? 5
|
|
881
|
-
}
|
|
882
|
-
return Math.min(100, raw)
|
|
883
|
-
}
|
|
884
|
-
|
|
885
|
-
// ---------------------------------------------------------------------------
|
|
886
|
-
// Phase 8: Semantic duplication — AST fingerprinting helpers
|
|
887
|
-
// ---------------------------------------------------------------------------
|
|
888
|
-
|
|
889
|
-
type FunctionLikeNode = FunctionDeclaration | ArrowFunction | FunctionExpression | MethodDeclaration
|
|
890
|
-
|
|
891
|
-
/** Normalize a function body to a canonical string (Type-2 clone detection).
|
|
892
|
-
* Variable names, parameter names, and numeric/string literals are replaced
|
|
893
|
-
* with canonical tokens so that two functions with identical logic but
|
|
894
|
-
* different identifiers produce the same fingerprint.
|
|
895
|
-
*/
|
|
896
|
-
function normalizeFunctionBody(fn: FunctionLikeNode): string {
|
|
897
|
-
// Build a substitution map: localName → canonical token
|
|
898
|
-
const subst = new Map<string, string>()
|
|
899
|
-
|
|
900
|
-
// Map parameters first
|
|
901
|
-
for (const [i, param] of fn.getParameters().entries()) {
|
|
902
|
-
const name = param.getName()
|
|
903
|
-
if (name && name !== '_') subst.set(name, `P${i}`)
|
|
904
|
-
}
|
|
905
|
-
|
|
906
|
-
// Map locally declared variables (VariableDeclaration)
|
|
907
|
-
let varIdx = 0
|
|
908
|
-
fn.forEachDescendant(node => {
|
|
909
|
-
if (node.getKind() === SyntaxKind.VariableDeclaration) {
|
|
910
|
-
const nameNode = (node as import('ts-morph').VariableDeclaration).getNameNode()
|
|
911
|
-
// Support destructuring — getNameNode() may be a BindingPattern
|
|
912
|
-
if (nameNode.getKind() === SyntaxKind.Identifier) {
|
|
913
|
-
const name = nameNode.getText()
|
|
914
|
-
if (!subst.has(name)) subst.set(name, `V${varIdx++}`)
|
|
915
|
-
}
|
|
916
|
-
}
|
|
917
|
-
})
|
|
918
|
-
|
|
919
|
-
function serializeNode(node: Node): string {
|
|
920
|
-
const kind = node.getKindName()
|
|
921
|
-
|
|
922
|
-
switch (node.getKind()) {
|
|
923
|
-
case SyntaxKind.Identifier: {
|
|
924
|
-
const text = node.getText()
|
|
925
|
-
return subst.get(text) ?? text // external refs (Math, console) kept as-is
|
|
926
|
-
}
|
|
927
|
-
case SyntaxKind.NumericLiteral:
|
|
928
|
-
return 'NL'
|
|
929
|
-
case SyntaxKind.StringLiteral:
|
|
930
|
-
case SyntaxKind.NoSubstitutionTemplateLiteral:
|
|
931
|
-
return 'SL'
|
|
932
|
-
case SyntaxKind.TrueKeyword:
|
|
933
|
-
return 'TRUE'
|
|
934
|
-
case SyntaxKind.FalseKeyword:
|
|
935
|
-
return 'FALSE'
|
|
936
|
-
case SyntaxKind.NullKeyword:
|
|
937
|
-
return 'NULL'
|
|
938
|
-
}
|
|
939
|
-
|
|
940
|
-
const children = node.getChildren()
|
|
941
|
-
if (children.length === 0) return kind
|
|
942
|
-
|
|
943
|
-
const childStr = children.map(serializeNode).join('|')
|
|
944
|
-
return `${kind}(${childStr})`
|
|
94
|
+
'unnecessary-abstraction': { severity: 'warning', weight: 7 },
|
|
95
|
+
'naming-inconsistency': { severity: 'warning', weight: 6 },
|
|
96
|
+
'ai-code-smell': { severity: 'warning', weight: 12 },
|
|
97
|
+
// Phase 8: semantic duplication
|
|
98
|
+
'semantic-duplication': { severity: 'warning', weight: 12 },
|
|
99
|
+
'plugin-error': { severity: 'warning', weight: 4 },
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
// Rules whose findings count as individual "AI authorship" signals.
// detectAICodeSmell tallies issues from these rules and emits one aggregated
// 'ai-code-smell' issue once a file accumulates three or more hits.
const AI_SMELL_SIGNALS = new Set([
  'over-commented',
  'hardcoded-config',
  'inconsistent-error-handling',
  'unnecessary-abstraction',
  'naming-inconsistency',
  'comment-contradiction',
  'promise-style-mix',
  'any-abuse',
])
|
|
112
|
+
|
|
113
|
+
function detectAICodeSmell(issues: DriftIssue[], filePath: string): DriftIssue[] {
|
|
114
|
+
const signalCounts = new Map<string, number>()
|
|
115
|
+
for (const issue of issues) {
|
|
116
|
+
if (!AI_SMELL_SIGNALS.has(issue.rule)) continue
|
|
117
|
+
signalCounts.set(issue.rule, (signalCounts.get(issue.rule) ?? 0) + 1)
|
|
945
118
|
}
|
|
946
119
|
|
|
947
|
-
const
|
|
948
|
-
if (
|
|
949
|
-
return serializeNode(body)
|
|
950
|
-
}
|
|
951
|
-
|
|
952
|
-
/** Return a SHA-256 fingerprint for a function body (normalized). */
|
|
953
|
-
function fingerprintFunction(fn: FunctionLikeNode): string {
|
|
954
|
-
const normalized = normalizeFunctionBody(fn)
|
|
955
|
-
return crypto.createHash('sha256').update(normalized).digest('hex')
|
|
956
|
-
}
|
|
957
|
-
|
|
958
|
-
/** Return all function-like nodes from a SourceFile that are worth comparing:
|
|
959
|
-
* - At least MIN_LINES lines in their body
|
|
960
|
-
* - Not test helpers (describe/it/test/beforeEach/afterEach)
|
|
961
|
-
*/
|
|
962
|
-
const MIN_LINES = 8
|
|
963
|
-
|
|
964
|
-
function collectFunctions(sf: SourceFile): Array<{ fn: FunctionLikeNode; name: string; line: number; col: number }> {
|
|
965
|
-
const results: Array<{ fn: FunctionLikeNode; name: string; line: number; col: number }> = []
|
|
966
|
-
|
|
967
|
-
const kinds = [
|
|
968
|
-
SyntaxKind.FunctionDeclaration,
|
|
969
|
-
SyntaxKind.FunctionExpression,
|
|
970
|
-
SyntaxKind.ArrowFunction,
|
|
971
|
-
SyntaxKind.MethodDeclaration,
|
|
972
|
-
] as const
|
|
973
|
-
|
|
974
|
-
for (const kind of kinds) {
|
|
975
|
-
for (const node of sf.getDescendantsOfKind(kind)) {
|
|
976
|
-
const body = (node as FunctionLikeNode).getBody()
|
|
977
|
-
if (!body) continue
|
|
120
|
+
const totalSignals = [...signalCounts.values()].reduce((sum, count) => sum + count, 0)
|
|
121
|
+
if (totalSignals < 3) return []
|
|
978
122
|
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
|
|
123
|
+
const triggers = [...signalCounts.entries()]
|
|
124
|
+
.sort((a, b) => b[1] - a[1])
|
|
125
|
+
.slice(0, 3)
|
|
126
|
+
.map(([rule, count]) => `${rule} x${count}`)
|
|
982
127
|
|
|
983
|
-
|
|
984
|
-
|
|
985
|
-
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
128
|
+
return [{
|
|
129
|
+
rule: 'ai-code-smell',
|
|
130
|
+
severity: 'warning',
|
|
131
|
+
message: `Aggregated AI smell signals detected (${totalSignals}): ${triggers.join(', ')}`,
|
|
132
|
+
line: 1,
|
|
133
|
+
column: 1,
|
|
134
|
+
snippet: path.basename(filePath),
|
|
135
|
+
}]
|
|
136
|
+
}
|
|
991
137
|
|
|
992
|
-
|
|
993
|
-
|
|
138
|
+
function runPluginRules(
|
|
139
|
+
file: import('ts-morph').SourceFile,
|
|
140
|
+
loadedPlugins: LoadedPlugin[],
|
|
141
|
+
config: DriftConfig | undefined,
|
|
142
|
+
projectRoot: string,
|
|
143
|
+
): DriftIssue[] {
|
|
144
|
+
if (loadedPlugins.length === 0) return []
|
|
145
|
+
const context: PluginRuleContext = {
|
|
146
|
+
projectRoot,
|
|
147
|
+
filePath: file.getFilePath(),
|
|
148
|
+
config,
|
|
149
|
+
}
|
|
994
150
|
|
|
995
|
-
|
|
151
|
+
const issues: DriftIssue[] = []
|
|
152
|
+
for (const loaded of loadedPlugins) {
|
|
153
|
+
for (const rule of loaded.plugin.rules) {
|
|
154
|
+
try {
|
|
155
|
+
const detected = rule.detect(file, context) ?? []
|
|
156
|
+
for (const issue of detected) {
|
|
157
|
+
issues.push({
|
|
158
|
+
...issue,
|
|
159
|
+
rule: issue.rule || `${loaded.plugin.name}/${rule.name}`,
|
|
160
|
+
severity: issue.severity ?? (rule.severity ?? 'warning'),
|
|
161
|
+
})
|
|
162
|
+
}
|
|
163
|
+
} catch (error) {
|
|
164
|
+
issues.push({
|
|
165
|
+
rule: 'plugin-error',
|
|
166
|
+
severity: 'warning',
|
|
167
|
+
message: `Plugin '${loaded.id}' rule '${rule.name}' failed: ${error instanceof Error ? error.message : String(error)}`,
|
|
168
|
+
line: 1,
|
|
169
|
+
column: 1,
|
|
170
|
+
snippet: file.getBaseName(),
|
|
171
|
+
})
|
|
172
|
+
}
|
|
996
173
|
}
|
|
997
174
|
}
|
|
998
|
-
|
|
999
|
-
return results
|
|
175
|
+
return issues
|
|
1000
176
|
}
|
|
1001
177
|
|
|
1002
178
|
// ---------------------------------------------------------------------------
|
|
1003
|
-
//
|
|
179
|
+
// Per-file analysis
|
|
1004
180
|
// ---------------------------------------------------------------------------
|
|
1005
181
|
|
|
1006
|
-
export function analyzeFile(
|
|
182
|
+
export function analyzeFile(
|
|
183
|
+
file: import('ts-morph').SourceFile,
|
|
184
|
+
options?: DriftConfig | {
|
|
185
|
+
config?: DriftConfig
|
|
186
|
+
loadedPlugins?: LoadedPlugin[]
|
|
187
|
+
projectRoot?: string
|
|
188
|
+
},
|
|
189
|
+
): FileReport {
|
|
190
|
+
const normalizedOptions = (options && typeof options === 'object' && ('config' in options || 'loadedPlugins' in options || 'projectRoot' in options))
|
|
191
|
+
? options
|
|
192
|
+
: { config: (options && typeof options === 'object' ? options : undefined) as DriftConfig | undefined }
|
|
193
|
+
|
|
1007
194
|
if (isFileIgnored(file)) {
|
|
1008
195
|
return {
|
|
1009
196
|
path: file.getFilePath(),
|
|
@@ -1027,7 +214,6 @@ export function analyzeFile(file: SourceFile): FileReport {
|
|
|
1027
214
|
...detectTooManyParams(file),
|
|
1028
215
|
...detectHighCoupling(file),
|
|
1029
216
|
...detectPromiseStyleMix(file),
|
|
1030
|
-
// Stubs now implemented
|
|
1031
217
|
...detectMagicNumbers(file),
|
|
1032
218
|
...detectCommentContradiction(file),
|
|
1033
219
|
// Phase 5: AI authorship heuristics
|
|
@@ -1036,19 +222,36 @@ export function analyzeFile(file: SourceFile): FileReport {
|
|
|
1036
222
|
...detectInconsistentErrorHandling(file),
|
|
1037
223
|
...detectUnnecessaryAbstraction(file),
|
|
1038
224
|
...detectNamingInconsistency(file),
|
|
225
|
+
// Configurable architecture rules
|
|
226
|
+
...detectControllerNoDb(file, normalizedOptions?.config),
|
|
227
|
+
...detectServiceNoHttp(file, normalizedOptions?.config),
|
|
228
|
+
...detectMaxFunctionLines(file, normalizedOptions?.config),
|
|
229
|
+
// Plugin rules
|
|
230
|
+
...runPluginRules(
|
|
231
|
+
file,
|
|
232
|
+
normalizedOptions?.loadedPlugins ?? [],
|
|
233
|
+
normalizedOptions?.config,
|
|
234
|
+
normalizedOptions?.projectRoot ?? path.dirname(file.getFilePath()),
|
|
235
|
+
),
|
|
1039
236
|
]
|
|
1040
237
|
|
|
238
|
+
issues.push(...detectAICodeSmell(issues, file.getFilePath()))
|
|
239
|
+
|
|
1041
240
|
return {
|
|
1042
241
|
path: file.getFilePath(),
|
|
1043
242
|
issues,
|
|
1044
|
-
score: calculateScore(issues),
|
|
243
|
+
score: calculateScore(issues, RULE_WEIGHTS),
|
|
1045
244
|
}
|
|
1046
245
|
}
|
|
1047
246
|
|
|
247
|
+
// ---------------------------------------------------------------------------
|
|
248
|
+
// Project-level analysis (phases 2, 3, 8 require the full file set)
|
|
249
|
+
// ---------------------------------------------------------------------------
|
|
250
|
+
|
|
1048
251
|
export function analyzeProject(targetPath: string, config?: DriftConfig): FileReport[] {
|
|
1049
252
|
const project = new Project({
|
|
1050
253
|
skipAddingFilesFromTsConfig: true,
|
|
1051
|
-
compilerOptions: { allowJs: true },
|
|
254
|
+
compilerOptions: { allowJs: true, jsx: 1 }, // 1 = JsxEmit.Preserve
|
|
1052
255
|
})
|
|
1053
256
|
|
|
1054
257
|
project.addSourceFilesAtPaths([
|
|
@@ -1066,17 +269,27 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1066
269
|
])
|
|
1067
270
|
|
|
1068
271
|
const sourceFiles = project.getSourceFiles()
|
|
272
|
+
const pluginRuntime = loadPlugins(targetPath, config?.plugins)
|
|
1069
273
|
|
|
1070
274
|
// Phase 1: per-file analysis
|
|
1071
|
-
const reports: FileReport[] = sourceFiles.map(analyzeFile
|
|
275
|
+
const reports: FileReport[] = sourceFiles.map((file) => analyzeFile(file, {
|
|
276
|
+
config,
|
|
277
|
+
loadedPlugins: pluginRuntime.plugins,
|
|
278
|
+
projectRoot: targetPath,
|
|
279
|
+
}))
|
|
1072
280
|
const reportByPath = new Map<string, FileReport>()
|
|
1073
281
|
for (const r of reports) reportByPath.set(r.path, r)
|
|
1074
282
|
|
|
1075
|
-
//
|
|
1076
|
-
const
|
|
1077
|
-
|
|
1078
|
-
|
|
1079
|
-
|
|
283
|
+
// Build set of ignored paths so cross-file phases don't re-add issues
|
|
284
|
+
const ignoredPaths = new Set<string>(
|
|
285
|
+
sourceFiles.filter(sf => isFileIgnored(sf)).map(sf => sf.getFilePath())
|
|
286
|
+
)
|
|
287
|
+
|
|
288
|
+
// ── Phase 2 setup: build import graph ──────────────────────────────────────
|
|
289
|
+
const allImportedPaths = new Set<string>()
|
|
290
|
+
const allImportedNames = new Map<string, Set<string>>()
|
|
291
|
+
const allLiteralImports = new Set<string>()
|
|
292
|
+
const importGraph = new Map<string, Set<string>>()
|
|
1080
293
|
|
|
1081
294
|
for (const sf of sourceFiles) {
|
|
1082
295
|
const sfPath = sf.getFilePath()
|
|
@@ -1084,17 +297,14 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1084
297
|
const moduleSpecifier = decl.getModuleSpecifierValue()
|
|
1085
298
|
allLiteralImports.add(moduleSpecifier)
|
|
1086
299
|
|
|
1087
|
-
// Resolve to absolute path for dead-file / unused-export
|
|
1088
300
|
const resolved = decl.getModuleSpecifierSourceFile()
|
|
1089
301
|
if (resolved) {
|
|
1090
302
|
const resolvedPath = resolved.getFilePath()
|
|
1091
303
|
allImportedPaths.add(resolvedPath)
|
|
1092
304
|
|
|
1093
|
-
// Phase 3: populate directed import graph
|
|
1094
305
|
if (!importGraph.has(sfPath)) importGraph.set(sfPath, new Set())
|
|
1095
306
|
importGraph.get(sfPath)!.add(resolvedPath)
|
|
1096
307
|
|
|
1097
|
-
// Collect named imports { A, B } and default imports
|
|
1098
308
|
const named = decl.getNamedImports().map(n => n.getName())
|
|
1099
309
|
const def = decl.getDefaultImport()?.getText()
|
|
1100
310
|
const ns = decl.getNamespaceImport()?.getText()
|
|
@@ -1105,12 +315,10 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1105
315
|
const nameSet = allImportedNames.get(resolvedPath)!
|
|
1106
316
|
for (const n of named) nameSet.add(n)
|
|
1107
317
|
if (def) nameSet.add('default')
|
|
1108
|
-
if (ns) nameSet.add('*')
|
|
318
|
+
if (ns) nameSet.add('*')
|
|
1109
319
|
}
|
|
1110
320
|
}
|
|
1111
321
|
|
|
1112
|
-
// Also register re-exports: export { X, Y } from './module'
|
|
1113
|
-
// These count as "using" X and Y from the source module
|
|
1114
322
|
for (const exportDecl of sf.getExportDeclarations()) {
|
|
1115
323
|
const reExportedModule = exportDecl.getModuleSpecifierSourceFile()
|
|
1116
324
|
if (!reExportedModule) continue
|
|
@@ -1125,7 +333,6 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1125
333
|
|
|
1126
334
|
const namedExports = exportDecl.getNamedExports()
|
|
1127
335
|
if (namedExports.length === 0) {
|
|
1128
|
-
// export * from './module' — namespace re-export, all names used
|
|
1129
336
|
nameSet.add('*')
|
|
1130
337
|
} else {
|
|
1131
338
|
for (const ne of namedExports) nameSet.add(ne.getName())
|
|
@@ -1133,288 +340,104 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1133
340
|
}
|
|
1134
341
|
}
|
|
1135
342
|
|
|
1136
|
-
//
|
|
1137
|
-
|
|
1138
|
-
const
|
|
1139
|
-
|
|
1140
|
-
|
|
1141
|
-
|
|
1142
|
-
|
|
1143
|
-
// Exclude entry-point candidates: index.ts, main.ts, cli.ts, app.ts, bin files
|
|
1144
|
-
const basename = path.basename(sfPath)
|
|
1145
|
-
const isBinFile = sfPath.replace(/\\/g, '/').includes('/bin/')
|
|
1146
|
-
const isEntryPoint = /^(index|main|cli|app)\.(ts|tsx|js|jsx)$/.test(basename) || isBinFile
|
|
1147
|
-
if (!isEntryPoint && !allImportedPaths.has(sfPath)) {
|
|
1148
|
-
const issue: DriftIssue = {
|
|
1149
|
-
rule: 'dead-file',
|
|
1150
|
-
severity: RULE_WEIGHTS['dead-file'].severity,
|
|
1151
|
-
message: 'File is never imported — may be dead code',
|
|
343
|
+
// Plugin load failures are surfaced as synthetic report entries.
|
|
344
|
+
if (pluginRuntime.errors.length > 0) {
|
|
345
|
+
for (const err of pluginRuntime.errors) {
|
|
346
|
+
const pluginIssue: DriftIssue = {
|
|
347
|
+
rule: 'plugin-error',
|
|
348
|
+
severity: 'warning',
|
|
349
|
+
message: `Failed to load plugin '${err.pluginId}': ${err.message}`,
|
|
1152
350
|
line: 1,
|
|
1153
351
|
column: 1,
|
|
1154
|
-
snippet:
|
|
1155
|
-
}
|
|
1156
|
-
report.issues.push(issue)
|
|
1157
|
-
report.score = calculateScore(report.issues)
|
|
1158
|
-
}
|
|
1159
|
-
|
|
1160
|
-
// unused-export: named exports not imported anywhere
|
|
1161
|
-
// Skip barrel files (index.ts) — their entire surface is the public API
|
|
1162
|
-
const isBarrel = /^index\.(ts|tsx|js|jsx)$/.test(basename)
|
|
1163
|
-
const importedNamesForFile = allImportedNames.get(sfPath)
|
|
1164
|
-
const hasNamespaceImport = importedNamesForFile?.has('*') ?? false
|
|
1165
|
-
if (!isBarrel && !hasNamespaceImport) {
|
|
1166
|
-
for (const exportDecl of sf.getExportDeclarations()) {
|
|
1167
|
-
for (const namedExport of exportDecl.getNamedExports()) {
|
|
1168
|
-
const name = namedExport.getName()
|
|
1169
|
-
if (!importedNamesForFile?.has(name)) {
|
|
1170
|
-
const line = namedExport.getStartLineNumber()
|
|
1171
|
-
const issue: DriftIssue = {
|
|
1172
|
-
rule: 'unused-export',
|
|
1173
|
-
severity: RULE_WEIGHTS['unused-export'].severity,
|
|
1174
|
-
message: `'${name}' is exported but never imported`,
|
|
1175
|
-
line,
|
|
1176
|
-
column: 1,
|
|
1177
|
-
snippet: namedExport.getText().slice(0, 80),
|
|
1178
|
-
}
|
|
1179
|
-
report.issues.push(issue)
|
|
1180
|
-
report.score = calculateScore(report.issues)
|
|
1181
|
-
}
|
|
1182
|
-
}
|
|
1183
|
-
}
|
|
1184
|
-
|
|
1185
|
-
// Also check inline export declarations (export function foo, export const bar)
|
|
1186
|
-
for (const exportSymbol of sf.getExportedDeclarations()) {
|
|
1187
|
-
const [exportName, declarations] = [exportSymbol[0], exportSymbol[1]]
|
|
1188
|
-
if (exportName === 'default') continue
|
|
1189
|
-
if (importedNamesForFile?.has(exportName)) continue
|
|
1190
|
-
|
|
1191
|
-
for (const decl of declarations) {
|
|
1192
|
-
// Skip if this is a re-export from another file
|
|
1193
|
-
if (decl.getSourceFile().getFilePath() !== sfPath) continue
|
|
1194
|
-
|
|
1195
|
-
const line = decl.getStartLineNumber()
|
|
1196
|
-
const issue: DriftIssue = {
|
|
1197
|
-
rule: 'unused-export',
|
|
1198
|
-
severity: RULE_WEIGHTS['unused-export'].severity,
|
|
1199
|
-
message: `'${exportName}' is exported but never imported`,
|
|
1200
|
-
line,
|
|
1201
|
-
column: 1,
|
|
1202
|
-
snippet: decl.getText().split('\n')[0].slice(0, 80),
|
|
1203
|
-
}
|
|
1204
|
-
report.issues.push(issue)
|
|
1205
|
-
report.score = calculateScore(report.issues)
|
|
1206
|
-
break // one issue per export name is enough
|
|
1207
|
-
}
|
|
352
|
+
snippet: err.pluginId,
|
|
1208
353
|
}
|
|
1209
|
-
}
|
|
1210
|
-
}
|
|
1211
|
-
|
|
1212
|
-
// Detect unused-dependency: packages in package.json never imported
|
|
1213
|
-
const pkgPath = path.join(targetPath, 'package.json')
|
|
1214
|
-
if (fs.existsSync(pkgPath)) {
|
|
1215
|
-
let pkg: Record<string, unknown>
|
|
1216
|
-
try {
|
|
1217
|
-
pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'))
|
|
1218
|
-
} catch {
|
|
1219
|
-
pkg = {}
|
|
1220
|
-
}
|
|
1221
|
-
|
|
1222
|
-
const deps = {
|
|
1223
|
-
...((pkg.dependencies as Record<string, string>) ?? {}),
|
|
1224
|
-
}
|
|
1225
|
-
|
|
1226
|
-
const unusedDeps: string[] = []
|
|
1227
|
-
for (const depName of Object.keys(deps)) {
|
|
1228
|
-
// Skip type-only packages (@types/*)
|
|
1229
|
-
if (depName.startsWith('@types/')) continue
|
|
1230
|
-
|
|
1231
|
-
// A dependency is "used" if any import specifier starts with the package name
|
|
1232
|
-
// (handles sub-paths like 'lodash/merge', 'date-fns/format', etc.)
|
|
1233
|
-
const isUsed = [...allLiteralImports].some(
|
|
1234
|
-
imp => imp === depName || imp.startsWith(depName + '/')
|
|
1235
|
-
)
|
|
1236
|
-
if (!isUsed) unusedDeps.push(depName)
|
|
1237
|
-
}
|
|
1238
|
-
|
|
1239
|
-
if (unusedDeps.length > 0) {
|
|
1240
|
-
const pkgIssues: DriftIssue[] = unusedDeps.map(dep => ({
|
|
1241
|
-
rule: 'unused-dependency',
|
|
1242
|
-
severity: RULE_WEIGHTS['unused-dependency'].severity,
|
|
1243
|
-
message: `'${dep}' is in package.json but never imported`,
|
|
1244
|
-
line: 1,
|
|
1245
|
-
column: 1,
|
|
1246
|
-
snippet: `"${dep}"`,
|
|
1247
|
-
}))
|
|
1248
|
-
|
|
1249
354
|
reports.push({
|
|
1250
|
-
path:
|
|
1251
|
-
issues:
|
|
1252
|
-
score: calculateScore(
|
|
355
|
+
path: path.join(targetPath, '.drift-plugin-errors', `${err.pluginId}.plugin`),
|
|
356
|
+
issues: [pluginIssue],
|
|
357
|
+
score: calculateScore([pluginIssue], RULE_WEIGHTS),
|
|
1253
358
|
})
|
|
1254
359
|
}
|
|
1255
360
|
}
|
|
1256
361
|
|
|
1257
|
-
// Phase
|
|
1258
|
-
|
|
1259
|
-
|
|
1260
|
-
|
|
1261
|
-
const
|
|
1262
|
-
|
|
1263
|
-
|
|
1264
|
-
|
|
1265
|
-
inStack.add(node)
|
|
1266
|
-
stack.push(node)
|
|
1267
|
-
|
|
1268
|
-
for (const neighbor of graph.get(node) ?? []) {
|
|
1269
|
-
if (!visited.has(neighbor)) {
|
|
1270
|
-
dfs(neighbor, stack)
|
|
1271
|
-
} else if (inStack.has(neighbor)) {
|
|
1272
|
-
// Found a cycle — extract the cycle portion from the stack
|
|
1273
|
-
const cycleStart = stack.indexOf(neighbor)
|
|
1274
|
-
cycles.push(stack.slice(cycleStart))
|
|
1275
|
-
}
|
|
1276
|
-
}
|
|
1277
|
-
|
|
1278
|
-
stack.pop()
|
|
1279
|
-
inStack.delete(node)
|
|
362
|
+
// ── Phase 2: dead-file + unused-export + unused-dependency ─────────────────
|
|
363
|
+
const deadFiles = detectDeadFiles(sourceFiles, allImportedPaths, RULE_WEIGHTS)
|
|
364
|
+
for (const [sfPath, issue] of deadFiles) {
|
|
365
|
+
if (ignoredPaths.has(sfPath)) continue
|
|
366
|
+
const report = reportByPath.get(sfPath)
|
|
367
|
+
if (report) {
|
|
368
|
+
report.issues.push(issue)
|
|
369
|
+
report.score = calculateScore(report.issues, RULE_WEIGHTS)
|
|
1280
370
|
}
|
|
371
|
+
}
|
|
1281
372
|
|
|
1282
|
-
|
|
1283
|
-
|
|
1284
|
-
|
|
373
|
+
const unusedExports = detectUnusedExports(sourceFiles, allImportedNames, RULE_WEIGHTS)
|
|
374
|
+
for (const [sfPath, issues] of unusedExports) {
|
|
375
|
+
if (ignoredPaths.has(sfPath)) continue
|
|
376
|
+
const report = reportByPath.get(sfPath)
|
|
377
|
+
if (report) {
|
|
378
|
+
for (const issue of issues) {
|
|
379
|
+
report.issues.push(issue)
|
|
1285
380
|
}
|
|
381
|
+
report.score = calculateScore(report.issues, RULE_WEIGHTS)
|
|
1286
382
|
}
|
|
1287
|
-
|
|
1288
|
-
return cycles
|
|
1289
383
|
}
|
|
1290
384
|
|
|
1291
|
-
const
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
|
|
1296
|
-
|
|
1297
|
-
|
|
1298
|
-
|
|
1299
|
-
|
|
1300
|
-
|
|
1301
|
-
// Report on the first file in the cycle
|
|
1302
|
-
const firstFile = cycle[0]
|
|
1303
|
-
const report = reportByPath.get(firstFile)
|
|
1304
|
-
if (!report) continue
|
|
1305
|
-
|
|
1306
|
-
const cycleDisplay = cycle
|
|
1307
|
-
.map(p => path.basename(p))
|
|
1308
|
-
.concat(path.basename(cycle[0])) // close the loop visually: A → B → C → A
|
|
1309
|
-
.join(' → ')
|
|
385
|
+
const unusedDepIssues = detectUnusedDependencies(targetPath, allLiteralImports, RULE_WEIGHTS)
|
|
386
|
+
if (unusedDepIssues.length > 0) {
|
|
387
|
+
const pkgPath = path.join(targetPath, 'package.json')
|
|
388
|
+
reports.push({
|
|
389
|
+
path: pkgPath,
|
|
390
|
+
issues: unusedDepIssues,
|
|
391
|
+
score: calculateScore(unusedDepIssues, RULE_WEIGHTS),
|
|
392
|
+
})
|
|
393
|
+
}
|
|
1310
394
|
|
|
1311
|
-
|
|
1312
|
-
|
|
1313
|
-
|
|
1314
|
-
|
|
1315
|
-
|
|
1316
|
-
|
|
1317
|
-
|
|
395
|
+
// ── Phase 3: circular-dependency ────────────────────────────────────────────
|
|
396
|
+
const circularIssues = detectCircularDependencies(importGraph, RULE_WEIGHTS)
|
|
397
|
+
for (const [filePath, issue] of circularIssues) {
|
|
398
|
+
if (ignoredPaths.has(filePath)) continue
|
|
399
|
+
const report = reportByPath.get(filePath)
|
|
400
|
+
if (report) {
|
|
401
|
+
report.issues.push(issue)
|
|
402
|
+
report.score = calculateScore(report.issues, RULE_WEIGHTS)
|
|
1318
403
|
}
|
|
1319
|
-
report.issues.push(issue)
|
|
1320
|
-
report.score = calculateScore(report.issues)
|
|
1321
404
|
}
|
|
1322
405
|
|
|
1323
|
-
// ── Phase 3b: layer-violation
|
|
406
|
+
// ── Phase 3b: layer-violation ────────────────────────────────────────────────
|
|
1324
407
|
if (config?.layers && config.layers.length > 0) {
|
|
1325
|
-
const
|
|
1326
|
-
|
|
1327
|
-
|
|
1328
|
-
const
|
|
1329
|
-
|
|
1330
|
-
|
|
1331
|
-
|
|
1332
|
-
|
|
1333
|
-
.replace(/[.+^${}()|[\]]/g, '\\$&')
|
|
1334
|
-
.replace(/\*\*/g, '###DOUBLESTAR###')
|
|
1335
|
-
.replace(/\*/g, '[^/]*')
|
|
1336
|
-
.replace(/###DOUBLESTAR###/g, '.*')
|
|
1337
|
-
return new RegExp(`^${regexStr}`).test(rel)
|
|
1338
|
-
})
|
|
1339
|
-
)
|
|
1340
|
-
}
|
|
1341
|
-
|
|
1342
|
-
for (const [filePath, imports] of importGraph.entries()) {
|
|
1343
|
-
const fileLayer = getLayer(filePath)
|
|
1344
|
-
if (!fileLayer) continue
|
|
1345
|
-
|
|
1346
|
-
for (const importedPath of imports) {
|
|
1347
|
-
const importedLayer = getLayer(importedPath)
|
|
1348
|
-
if (!importedLayer) continue
|
|
1349
|
-
if (importedLayer.name === fileLayer.name) continue
|
|
1350
|
-
|
|
1351
|
-
if (!fileLayer.canImportFrom.includes(importedLayer.name)) {
|
|
1352
|
-
const report = reportByPath.get(filePath)
|
|
1353
|
-
if (report) {
|
|
1354
|
-
const weight = RULE_WEIGHTS['layer-violation']?.weight ?? 5
|
|
1355
|
-
report.issues.push({
|
|
1356
|
-
rule: 'layer-violation',
|
|
1357
|
-
severity: 'error',
|
|
1358
|
-
message: `Layer '${fileLayer.name}' must not import from layer '${importedLayer.name}'`,
|
|
1359
|
-
line: 1,
|
|
1360
|
-
column: 1,
|
|
1361
|
-
snippet: `import from '${path.relative(targetPath, importedPath).replace(/\\/g, '/')}'`,
|
|
1362
|
-
})
|
|
1363
|
-
report.score = Math.min(100, report.score + weight)
|
|
1364
|
-
}
|
|
408
|
+
const layerIssues = detectLayerViolations(importGraph, config.layers, targetPath, RULE_WEIGHTS)
|
|
409
|
+
for (const [filePath, issues] of layerIssues) {
|
|
410
|
+
if (ignoredPaths.has(filePath)) continue
|
|
411
|
+
const report = reportByPath.get(filePath)
|
|
412
|
+
if (report) {
|
|
413
|
+
for (const issue of issues) {
|
|
414
|
+
report.issues.push(issue)
|
|
415
|
+
report.score = Math.min(100, report.score + (RULE_WEIGHTS['layer-violation']?.weight ?? 5))
|
|
1365
416
|
}
|
|
1366
417
|
}
|
|
1367
418
|
}
|
|
1368
419
|
}
|
|
1369
420
|
|
|
1370
|
-
// ── Phase 3c: cross-boundary-import
|
|
421
|
+
// ── Phase 3c: cross-boundary-import ─────────────────────────────────────────
|
|
1371
422
|
if (config?.modules && config.modules.length > 0) {
|
|
1372
|
-
const
|
|
1373
|
-
|
|
1374
|
-
|
|
1375
|
-
const
|
|
1376
|
-
|
|
1377
|
-
|
|
1378
|
-
|
|
1379
|
-
|
|
1380
|
-
const fileModule = getModule(filePath)
|
|
1381
|
-
if (!fileModule) continue
|
|
1382
|
-
|
|
1383
|
-
for (const importedPath of imports) {
|
|
1384
|
-
const importedModule = getModule(importedPath)
|
|
1385
|
-
if (!importedModule) continue
|
|
1386
|
-
if (importedModule.name === fileModule.name) continue
|
|
1387
|
-
|
|
1388
|
-
const allowedImports = fileModule.allowedExternalImports ?? []
|
|
1389
|
-
const relImported = importedPath.replace(/\\/g, '/')
|
|
1390
|
-
const isAllowed = allowedImports.some(allowed =>
|
|
1391
|
-
relImported.startsWith(allowed.replace(/\\/g, '/'))
|
|
1392
|
-
)
|
|
1393
|
-
|
|
1394
|
-
if (!isAllowed) {
|
|
1395
|
-
const report = reportByPath.get(filePath)
|
|
1396
|
-
if (report) {
|
|
1397
|
-
const weight = RULE_WEIGHTS['cross-boundary-import']?.weight ?? 5
|
|
1398
|
-
report.issues.push({
|
|
1399
|
-
rule: 'cross-boundary-import',
|
|
1400
|
-
severity: 'warning',
|
|
1401
|
-
message: `Module '${fileModule.name}' must not import from module '${importedModule.name}'`,
|
|
1402
|
-
line: 1,
|
|
1403
|
-
column: 1,
|
|
1404
|
-
snippet: `import from '${path.relative(targetPath, importedPath).replace(/\\/g, '/')}'`,
|
|
1405
|
-
})
|
|
1406
|
-
report.score = Math.min(100, report.score + weight)
|
|
1407
|
-
}
|
|
423
|
+
const boundaryIssues = detectCrossBoundaryImports(importGraph, config.modules, targetPath, RULE_WEIGHTS)
|
|
424
|
+
for (const [filePath, issues] of boundaryIssues) {
|
|
425
|
+
if (ignoredPaths.has(filePath)) continue
|
|
426
|
+
const report = reportByPath.get(filePath)
|
|
427
|
+
if (report) {
|
|
428
|
+
for (const issue of issues) {
|
|
429
|
+
report.issues.push(issue)
|
|
430
|
+
report.score = Math.min(100, report.score + (RULE_WEIGHTS['cross-boundary-import']?.weight ?? 5))
|
|
1408
431
|
}
|
|
1409
432
|
}
|
|
1410
433
|
}
|
|
1411
434
|
}
|
|
1412
435
|
|
|
1413
|
-
// ── Phase 8: semantic-duplication
|
|
1414
|
-
// Build a fingerprint → [{filePath, fnName, line, col}] map across all files
|
|
436
|
+
// ── Phase 8: semantic-duplication ───────────────────────────────────────────
|
|
1415
437
|
const fingerprintMap = new Map<string, Array<{ filePath: string; name: string; line: number; col: number }>>()
|
|
1416
438
|
|
|
1417
439
|
for (const sf of sourceFiles) {
|
|
440
|
+
if (isFileIgnored(sf)) continue
|
|
1418
441
|
const sfPath = sf.getFilePath()
|
|
1419
442
|
for (const { fn, name, line, col } of collectFunctions(sf)) {
|
|
1420
443
|
const fp = fingerprintFunction(fn)
|
|
@@ -1423,7 +446,6 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1423
446
|
}
|
|
1424
447
|
}
|
|
1425
448
|
|
|
1426
|
-
// For each fingerprint with 2+ functions: report each as a duplicate of the others
|
|
1427
449
|
for (const [, entries] of fingerprintMap) {
|
|
1428
450
|
if (entries.length < 2) continue
|
|
1429
451
|
|
|
@@ -1431,7 +453,6 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1431
453
|
const report = reportByPath.get(entry.filePath)
|
|
1432
454
|
if (!report) continue
|
|
1433
455
|
|
|
1434
|
-
// Build the "duplicated in" list (all other locations)
|
|
1435
456
|
const others = entries
|
|
1436
457
|
.filter(e => e !== entry)
|
|
1437
458
|
.map(e => {
|
|
@@ -1455,541 +476,3 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
|
|
|
1455
476
|
|
|
1456
477
|
return reports
|
|
1457
478
|
}
|
|
1458
|
-
|
|
1459
|
-
// ---------------------------------------------------------------------------
|
|
1460
|
-
// Git helpers
|
|
1461
|
-
// ---------------------------------------------------------------------------
|
|
1462
|
-
|
|
1463
|
-
/** Analyse a file given its absolute path string (wraps analyzeFile). */
|
|
1464
|
-
function analyzeFilePath(filePath: string): FileReport {
|
|
1465
|
-
const proj = new Project({
|
|
1466
|
-
skipAddingFilesFromTsConfig: true,
|
|
1467
|
-
compilerOptions: { allowJs: true },
|
|
1468
|
-
})
|
|
1469
|
-
const sf = proj.addSourceFileAtPath(filePath)
|
|
1470
|
-
return analyzeFile(sf)
|
|
1471
|
-
}
|
|
1472
|
-
|
|
1473
|
-
/**
|
|
1474
|
-
* Execute a git command synchronously and return stdout.
|
|
1475
|
-
* Throws a descriptive error if the command fails or git is not available.
|
|
1476
|
-
*/
|
|
1477
|
-
function execGit(cmd: string, cwd: string): string {
|
|
1478
|
-
try {
|
|
1479
|
-
return execSync(cmd, { cwd, encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] }).trim()
|
|
1480
|
-
} catch (err) {
|
|
1481
|
-
const msg = err instanceof Error ? err.message : String(err)
|
|
1482
|
-
throw new Error(`Git command failed: ${cmd}\n${msg}`)
|
|
1483
|
-
}
|
|
1484
|
-
}
|
|
1485
|
-
|
|
1486
|
-
/**
|
|
1487
|
-
* Verify the given directory is a git repository.
|
|
1488
|
-
* Throws if git is not available or the directory is not a repo.
|
|
1489
|
-
*/
|
|
1490
|
-
function assertGitRepo(cwd: string): void {
|
|
1491
|
-
try {
|
|
1492
|
-
execGit('git rev-parse --is-inside-work-tree', cwd)
|
|
1493
|
-
} catch {
|
|
1494
|
-
throw new Error(`Directory is not a git repository: ${cwd}`)
|
|
1495
|
-
}
|
|
1496
|
-
}
|
|
1497
|
-
|
|
1498
|
-
// ---------------------------------------------------------------------------
|
|
1499
|
-
// Historical analysis helpers
|
|
1500
|
-
// ---------------------------------------------------------------------------
|
|
1501
|
-
|
|
1502
|
-
/**
|
|
1503
|
-
* Analyse a single file as it existed at a given commit hash.
|
|
1504
|
-
* Writes the blob to a temp file, runs analyzeFile, then cleans up.
|
|
1505
|
-
*/
|
|
1506
|
-
async function analyzeFileAtCommit(
|
|
1507
|
-
filePath: string,
|
|
1508
|
-
commitHash: string,
|
|
1509
|
-
projectRoot: string,
|
|
1510
|
-
): Promise<FileReport> {
|
|
1511
|
-
const relPath = path.relative(projectRoot, filePath).replace(/\\/g, '/')
|
|
1512
|
-
const blob = execGit(`git show ${commitHash}:${relPath}`, projectRoot)
|
|
1513
|
-
|
|
1514
|
-
const tmpFile = path.join(os.tmpdir(), `drift-${crypto.randomBytes(8).toString('hex')}.ts`)
|
|
1515
|
-
try {
|
|
1516
|
-
fs.writeFileSync(tmpFile, blob, 'utf8')
|
|
1517
|
-
const report = analyzeFilePath(tmpFile)
|
|
1518
|
-
// Replace temp path with original for readable output
|
|
1519
|
-
return { ...report, path: filePath }
|
|
1520
|
-
} finally {
|
|
1521
|
-
try { fs.unlinkSync(tmpFile) } catch { /* ignore cleanup errors */ }
|
|
1522
|
-
}
|
|
1523
|
-
}
|
|
1524
|
-
|
|
1525
|
-
/**
|
|
1526
|
-
* Analyse ALL TypeScript files in the project snapshot at a given commit.
|
|
1527
|
-
* Uses `git ls-tree` to enumerate every file in the tree, writes them to a
|
|
1528
|
-
* temp directory, then runs `analyzeProject` on that full snapshot so that
|
|
1529
|
-
* the resulting `averageScore` reflects the complete project health rather
|
|
1530
|
-
* than only the files touched in that diff.
|
|
1531
|
-
*/
|
|
1532
|
-
async function analyzeSingleCommit(
|
|
1533
|
-
commitHash: string,
|
|
1534
|
-
targetPath: string,
|
|
1535
|
-
config?: DriftConfig,
|
|
1536
|
-
): Promise<HistoricalAnalysis> {
|
|
1537
|
-
// 1. Commit metadata
|
|
1538
|
-
const meta = execGit(
|
|
1539
|
-
`git show --no-patch --format="%H|%aI|%an|%s" ${commitHash}`,
|
|
1540
|
-
targetPath,
|
|
1541
|
-
)
|
|
1542
|
-
const [hash, dateStr, author, ...msgParts] = meta.split('|')
|
|
1543
|
-
const message = msgParts.join('|').trim()
|
|
1544
|
-
const commitDate = new Date(dateStr ?? '')
|
|
1545
|
-
|
|
1546
|
-
// 2. All .ts/.tsx files tracked at this commit (no diffs, full tree)
|
|
1547
|
-
const allFiles = execGit(
|
|
1548
|
-
`git ls-tree -r ${commitHash} --name-only`,
|
|
1549
|
-
targetPath,
|
|
1550
|
-
)
|
|
1551
|
-
.split('\n')
|
|
1552
|
-
.filter(
|
|
1553
|
-
f =>
|
|
1554
|
-
(f.endsWith('.ts') || f.endsWith('.tsx')) &&
|
|
1555
|
-
!f.endsWith('.d.ts') &&
|
|
1556
|
-
!f.includes('node_modules') &&
|
|
1557
|
-
!f.startsWith('dist/'),
|
|
1558
|
-
)
|
|
1559
|
-
|
|
1560
|
-
if (allFiles.length === 0) {
|
|
1561
|
-
return {
|
|
1562
|
-
commitHash: hash ?? commitHash,
|
|
1563
|
-
commitDate,
|
|
1564
|
-
author: author ?? '',
|
|
1565
|
-
message,
|
|
1566
|
-
files: [],
|
|
1567
|
-
totalScore: 0,
|
|
1568
|
-
averageScore: 0,
|
|
1569
|
-
}
|
|
1570
|
-
}
|
|
1571
|
-
|
|
1572
|
-
// 3. Write snapshot to temp directory
|
|
1573
|
-
const tmpDir = path.join(os.tmpdir(), `drift-${(hash ?? commitHash).slice(0, 8)}`)
|
|
1574
|
-
fs.mkdirSync(tmpDir, { recursive: true })
|
|
1575
|
-
|
|
1576
|
-
for (const relPath of allFiles) {
|
|
1577
|
-
try {
|
|
1578
|
-
const content = execGit(`git show ${commitHash}:${relPath}`, targetPath)
|
|
1579
|
-
const destPath = path.join(tmpDir, relPath)
|
|
1580
|
-
fs.mkdirSync(path.dirname(destPath), { recursive: true })
|
|
1581
|
-
fs.writeFileSync(destPath, content, 'utf-8')
|
|
1582
|
-
} catch {
|
|
1583
|
-
// skip files that can't be read (binary, deleted in partial clone, etc.)
|
|
1584
|
-
}
|
|
1585
|
-
}
|
|
1586
|
-
|
|
1587
|
-
// 4. Analyse the full project snapshot
|
|
1588
|
-
const fileReports = analyzeProject(tmpDir, config)
|
|
1589
|
-
const totalScore = fileReports.reduce((sum, r) => sum + r.score, 0)
|
|
1590
|
-
const averageScore = fileReports.length > 0 ? totalScore / fileReports.length : 0
|
|
1591
|
-
|
|
1592
|
-
// 5. Cleanup
|
|
1593
|
-
try {
|
|
1594
|
-
fs.rmSync(tmpDir, { recursive: true, force: true })
|
|
1595
|
-
} catch {
|
|
1596
|
-
// non-fatal — temp dirs are cleaned by the OS eventually
|
|
1597
|
-
}
|
|
1598
|
-
|
|
1599
|
-
return {
|
|
1600
|
-
commitHash: hash ?? commitHash,
|
|
1601
|
-
commitDate,
|
|
1602
|
-
author: author ?? '',
|
|
1603
|
-
message,
|
|
1604
|
-
files: fileReports,
|
|
1605
|
-
totalScore,
|
|
1606
|
-
averageScore,
|
|
1607
|
-
}
|
|
1608
|
-
}
|
|
1609
|
-
|
|
1610
|
-
/**
|
|
1611
|
-
* Run historical analysis over all commits since a given date.
|
|
1612
|
-
* Returns results ordered chronologically (oldest first).
|
|
1613
|
-
*/
|
|
1614
|
-
async function analyzeHistoricalCommits(
|
|
1615
|
-
sinceDate: Date,
|
|
1616
|
-
targetPath: string,
|
|
1617
|
-
maxCommits: number,
|
|
1618
|
-
config?: DriftConfig,
|
|
1619
|
-
maxSamples: number = 10,
|
|
1620
|
-
): Promise<HistoricalAnalysis[]> {
|
|
1621
|
-
assertGitRepo(targetPath)
|
|
1622
|
-
|
|
1623
|
-
const isoDate = sinceDate.toISOString()
|
|
1624
|
-
const raw = execGit(
|
|
1625
|
-
`git log --since="${isoDate}" --format="%H" --max-count=${maxCommits}`,
|
|
1626
|
-
targetPath,
|
|
1627
|
-
)
|
|
1628
|
-
|
|
1629
|
-
if (!raw) return []
|
|
1630
|
-
|
|
1631
|
-
const hashes = raw.split('\n').filter(Boolean)
|
|
1632
|
-
|
|
1633
|
-
// Sample: distribute evenly across the range
|
|
1634
|
-
// E.g. 122 commits, maxSamples=10 → pick index 0, 13, 26, 39, 52, 65, 78, 91, 104, 121
|
|
1635
|
-
const sampled = hashes.length <= maxSamples
|
|
1636
|
-
? hashes
|
|
1637
|
-
: Array.from({ length: maxSamples }, (_, i) =>
|
|
1638
|
-
hashes[Math.floor(i * (hashes.length - 1) / (maxSamples - 1))]
|
|
1639
|
-
)
|
|
1640
|
-
|
|
1641
|
-
const analyses = await Promise.all(
|
|
1642
|
-
sampled.map(h => analyzeSingleCommit(h, targetPath, config).catch(() => null)),
|
|
1643
|
-
)
|
|
1644
|
-
|
|
1645
|
-
return analyses
|
|
1646
|
-
.filter((a): a is HistoricalAnalysis => a !== null)
|
|
1647
|
-
.sort((a, b) => a.commitDate.getTime() - b.commitDate.getTime())
|
|
1648
|
-
}
|
|
1649
|
-
|
|
1650
|
-
// ---------------------------------------------------------------------------
|
|
1651
|
-
// TrendAnalyzer
|
|
1652
|
-
// ---------------------------------------------------------------------------
|
|
1653
|
-
|
|
1654
|
-
export class TrendAnalyzer {
|
|
1655
|
-
private readonly projectPath: string
|
|
1656
|
-
private readonly config: DriftConfig | undefined
|
|
1657
|
-
|
|
1658
|
-
constructor(projectPath: string, config?: DriftConfig) {
|
|
1659
|
-
this.projectPath = projectPath
|
|
1660
|
-
this.config = config
|
|
1661
|
-
}
|
|
1662
|
-
|
|
1663
|
-
// --- Static utility methods -----------------------------------------------
|
|
1664
|
-
|
|
1665
|
-
static calculateMovingAverage(data: TrendDataPoint[], windowSize: number): number[] {
|
|
1666
|
-
return data.map((_, i) => {
|
|
1667
|
-
const start = Math.max(0, i - windowSize + 1)
|
|
1668
|
-
const window = data.slice(start, i + 1)
|
|
1669
|
-
return window.reduce((s, p) => s + p.score, 0) / window.length
|
|
1670
|
-
})
|
|
1671
|
-
}
|
|
1672
|
-
|
|
1673
|
-
static linearRegression(data: TrendDataPoint[]): { slope: number; intercept: number; r2: number } {
|
|
1674
|
-
const n = data.length
|
|
1675
|
-
if (n < 2) return { slope: 0, intercept: data[0]?.score ?? 0, r2: 0 }
|
|
1676
|
-
|
|
1677
|
-
const xs = data.map((_, i) => i)
|
|
1678
|
-
const ys = data.map(p => p.score)
|
|
1679
|
-
|
|
1680
|
-
const xMean = xs.reduce((s, x) => s + x, 0) / n
|
|
1681
|
-
const yMean = ys.reduce((s, y) => s + y, 0) / n
|
|
1682
|
-
|
|
1683
|
-
const ssXX = xs.reduce((s, x) => s + (x - xMean) ** 2, 0)
|
|
1684
|
-
const ssXY = xs.reduce((s, x, i) => s + (x - xMean) * (ys[i]! - yMean), 0)
|
|
1685
|
-
const ssYY = ys.reduce((s, y) => s + (y - yMean) ** 2, 0)
|
|
1686
|
-
|
|
1687
|
-
const slope = ssXX === 0 ? 0 : ssXY / ssXX
|
|
1688
|
-
const intercept = yMean - slope * xMean
|
|
1689
|
-
const r2 = ssYY === 0 ? 1 : (ssXY ** 2) / (ssXX * ssYY)
|
|
1690
|
-
|
|
1691
|
-
return { slope, intercept, r2 }
|
|
1692
|
-
}
|
|
1693
|
-
|
|
1694
|
-
/** Generate a simple horizontal ASCII bar chart (one bar per data point). */
|
|
1695
|
-
static generateTrendChart(data: TrendDataPoint[]): string {
|
|
1696
|
-
if (data.length === 0) return '(no data)'
|
|
1697
|
-
|
|
1698
|
-
const maxScore = Math.max(...data.map(p => p.score), 1)
|
|
1699
|
-
const chartWidth = 40
|
|
1700
|
-
|
|
1701
|
-
const lines = data.map(p => {
|
|
1702
|
-
const barLen = Math.round((p.score / maxScore) * chartWidth)
|
|
1703
|
-
const bar = '█'.repeat(barLen)
|
|
1704
|
-
const dateStr = p.date.toISOString().slice(0, 10)
|
|
1705
|
-
return `${dateStr} │${bar.padEnd(chartWidth)} ${p.score.toFixed(1)}`
|
|
1706
|
-
})
|
|
1707
|
-
|
|
1708
|
-
return lines.join('\n')
|
|
1709
|
-
}
|
|
1710
|
-
|
|
1711
|
-
// --- Instance method -------------------------------------------------------
|
|
1712
|
-
|
|
1713
|
-
async analyzeTrend(options: {
|
|
1714
|
-
period?: 'week' | 'month' | 'quarter' | 'year'
|
|
1715
|
-
since?: string
|
|
1716
|
-
until?: string
|
|
1717
|
-
}): Promise<DriftTrendReport> {
|
|
1718
|
-
assertGitRepo(this.projectPath)
|
|
1719
|
-
|
|
1720
|
-
const periodDays: Record<string, number> = {
|
|
1721
|
-
week: 7, month: 30, quarter: 90, year: 365,
|
|
1722
|
-
}
|
|
1723
|
-
const days = periodDays[options.period ?? 'month'] ?? 30
|
|
1724
|
-
const sinceDate = options.since
|
|
1725
|
-
? new Date(options.since)
|
|
1726
|
-
: new Date(Date.now() - days * 24 * 60 * 60 * 1000)
|
|
1727
|
-
|
|
1728
|
-
const historicalAnalyses = await analyzeHistoricalCommits(sinceDate, this.projectPath, 100, this.config, 10)
|
|
1729
|
-
|
|
1730
|
-
const trendPoints: TrendDataPoint[] = historicalAnalyses.map(h => ({
|
|
1731
|
-
date: h.commitDate,
|
|
1732
|
-
score: h.averageScore,
|
|
1733
|
-
fileCount: h.files.length,
|
|
1734
|
-
avgIssuesPerFile: h.files.length > 0
|
|
1735
|
-
? h.files.reduce((s, f) => s + f.issues.length, 0) / h.files.length
|
|
1736
|
-
: 0,
|
|
1737
|
-
}))
|
|
1738
|
-
|
|
1739
|
-
const regression = TrendAnalyzer.linearRegression(trendPoints)
|
|
1740
|
-
|
|
1741
|
-
// Current state report
|
|
1742
|
-
const currentFiles = analyzeProject(this.projectPath, this.config)
|
|
1743
|
-
const baseReport = buildReport(this.projectPath, currentFiles)
|
|
1744
|
-
|
|
1745
|
-
return {
|
|
1746
|
-
...baseReport,
|
|
1747
|
-
trend: trendPoints,
|
|
1748
|
-
regression,
|
|
1749
|
-
}
|
|
1750
|
-
}
|
|
1751
|
-
}
|
|
1752
|
-
|
|
1753
|
-
// ---------------------------------------------------------------------------
|
|
1754
|
-
// BlameAnalyzer
|
|
1755
|
-
// ---------------------------------------------------------------------------
|
|
1756
|
-
|
|
1757
|
-
interface GitBlameEntry {
|
|
1758
|
-
hash: string
|
|
1759
|
-
author: string
|
|
1760
|
-
email: string
|
|
1761
|
-
line: string
|
|
1762
|
-
}
|
|
1763
|
-
|
|
1764
|
-
function parseGitBlame(blameOutput: string): GitBlameEntry[] {
|
|
1765
|
-
const entries: GitBlameEntry[] = []
|
|
1766
|
-
const lines = blameOutput.split('\n')
|
|
1767
|
-
let i = 0
|
|
1768
|
-
|
|
1769
|
-
while (i < lines.length) {
|
|
1770
|
-
const headerLine = lines[i]
|
|
1771
|
-
if (!headerLine || headerLine.trim() === '') { i++; continue }
|
|
1772
|
-
|
|
1773
|
-
// Porcelain blame format: first line is "<hash> <orig-line> <final-line> [<num-lines>]"
|
|
1774
|
-
const headerMatch = headerLine.match(/^([0-9a-f]{40})\s/)
|
|
1775
|
-
if (!headerMatch) { i++; continue }
|
|
1776
|
-
|
|
1777
|
-
const hash = headerMatch[1]!
|
|
1778
|
-
let author = ''
|
|
1779
|
-
let email = ''
|
|
1780
|
-
let codeLine = ''
|
|
1781
|
-
i++
|
|
1782
|
-
|
|
1783
|
-
while (i < lines.length && !lines[i]!.match(/^[0-9a-f]{40}\s/)) {
|
|
1784
|
-
const l = lines[i]!
|
|
1785
|
-
if (l.startsWith('author ')) author = l.slice(7).trim()
|
|
1786
|
-
else if (l.startsWith('author-mail ')) email = l.slice(12).replace(/[<>]/g, '').trim()
|
|
1787
|
-
else if (l.startsWith('\t')) codeLine = l.slice(1)
|
|
1788
|
-
i++
|
|
1789
|
-
}
|
|
1790
|
-
|
|
1791
|
-
entries.push({ hash, author, email, line: codeLine })
|
|
1792
|
-
}
|
|
1793
|
-
|
|
1794
|
-
return entries
|
|
1795
|
-
}
|
|
1796
|
-
|
|
1797
|
-
export class BlameAnalyzer {
|
|
1798
|
-
private readonly projectPath: string
|
|
1799
|
-
private readonly config: DriftConfig | undefined
|
|
1800
|
-
|
|
1801
|
-
constructor(projectPath: string, config?: DriftConfig) {
|
|
1802
|
-
this.projectPath = projectPath
|
|
1803
|
-
this.config = config
|
|
1804
|
-
}
|
|
1805
|
-
|
|
1806
|
-
/** Blame a single file: returns per-author attribution. */
|
|
1807
|
-
static async analyzeFileBlame(filePath: string): Promise<BlameAttribution[]> {
|
|
1808
|
-
const dir = path.dirname(filePath)
|
|
1809
|
-
assertGitRepo(dir)
|
|
1810
|
-
|
|
1811
|
-
const blameOutput = execGit(`git blame --porcelain "${filePath}"`, dir)
|
|
1812
|
-
const entries = parseGitBlame(blameOutput)
|
|
1813
|
-
|
|
1814
|
-
// Analyse issues in the file
|
|
1815
|
-
const report = analyzeFilePath(filePath)
|
|
1816
|
-
|
|
1817
|
-
// Map line numbers of issues to authors
|
|
1818
|
-
const issuesByLine = new Map<number, number>()
|
|
1819
|
-
for (const issue of report.issues) {
|
|
1820
|
-
issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1)
|
|
1821
|
-
}
|
|
1822
|
-
|
|
1823
|
-
// Aggregate by author
|
|
1824
|
-
const byAuthor = new Map<string, BlameAttribution>()
|
|
1825
|
-
entries.forEach((entry, idx) => {
|
|
1826
|
-
const key = entry.email || entry.author
|
|
1827
|
-
if (!byAuthor.has(key)) {
|
|
1828
|
-
byAuthor.set(key, {
|
|
1829
|
-
author: entry.author,
|
|
1830
|
-
email: entry.email,
|
|
1831
|
-
commits: 0,
|
|
1832
|
-
linesChanged: 0,
|
|
1833
|
-
issuesIntroduced: 0,
|
|
1834
|
-
avgScoreImpact: 0,
|
|
1835
|
-
})
|
|
1836
|
-
}
|
|
1837
|
-
const attr = byAuthor.get(key)!
|
|
1838
|
-
attr.linesChanged++
|
|
1839
|
-
const lineNum = idx + 1
|
|
1840
|
-
if (issuesByLine.has(lineNum)) {
|
|
1841
|
-
attr.issuesIntroduced += issuesByLine.get(lineNum)!
|
|
1842
|
-
}
|
|
1843
|
-
})
|
|
1844
|
-
|
|
1845
|
-
// Count unique commits per author
|
|
1846
|
-
const commitsByAuthor = new Map<string, Set<string>>()
|
|
1847
|
-
for (const entry of entries) {
|
|
1848
|
-
const key = entry.email || entry.author
|
|
1849
|
-
if (!commitsByAuthor.has(key)) commitsByAuthor.set(key, new Set())
|
|
1850
|
-
commitsByAuthor.get(key)!.add(entry.hash)
|
|
1851
|
-
}
|
|
1852
|
-
|
|
1853
|
-
const total = entries.length || 1
|
|
1854
|
-
const results: BlameAttribution[] = []
|
|
1855
|
-
for (const [key, attr] of byAuthor) {
|
|
1856
|
-
attr.commits = commitsByAuthor.get(key)?.size ?? 0
|
|
1857
|
-
attr.avgScoreImpact = (attr.linesChanged / total) * report.score
|
|
1858
|
-
results.push(attr)
|
|
1859
|
-
}
|
|
1860
|
-
|
|
1861
|
-
return results.sort((a, b) => b.issuesIntroduced - a.issuesIntroduced)
|
|
1862
|
-
}
|
|
1863
|
-
|
|
1864
|
-
/** Blame for a specific rule across all files in targetPath. */
|
|
1865
|
-
static async analyzeRuleBlame(rule: string, targetPath: string): Promise<BlameAttribution[]> {
|
|
1866
|
-
assertGitRepo(targetPath)
|
|
1867
|
-
|
|
1868
|
-
const tsFiles = fs
|
|
1869
|
-
.readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
|
|
1870
|
-
.filter((f): f is string => (f.endsWith('.ts') || f.endsWith('.tsx')) && !f.includes('node_modules'))
|
|
1871
|
-
.map(f => path.join(targetPath, f))
|
|
1872
|
-
|
|
1873
|
-
const combined = new Map<string, BlameAttribution>()
|
|
1874
|
-
|
|
1875
|
-
for (const file of tsFiles) {
|
|
1876
|
-
const report = analyzeFilePath(file)
|
|
1877
|
-
const ruleIssues = report.issues.filter(i => i.rule === rule)
|
|
1878
|
-
if (ruleIssues.length === 0) continue
|
|
1879
|
-
|
|
1880
|
-
let blameEntries: GitBlameEntry[] = []
|
|
1881
|
-
try {
|
|
1882
|
-
const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath)
|
|
1883
|
-
blameEntries = parseGitBlame(blameOutput)
|
|
1884
|
-
} catch { continue }
|
|
1885
|
-
|
|
1886
|
-
for (const issue of ruleIssues) {
|
|
1887
|
-
const entry = blameEntries[issue.line - 1]
|
|
1888
|
-
if (!entry) continue
|
|
1889
|
-
const key = entry.email || entry.author
|
|
1890
|
-
if (!combined.has(key)) {
|
|
1891
|
-
combined.set(key, {
|
|
1892
|
-
author: entry.author,
|
|
1893
|
-
email: entry.email,
|
|
1894
|
-
commits: 0,
|
|
1895
|
-
linesChanged: 0,
|
|
1896
|
-
issuesIntroduced: 0,
|
|
1897
|
-
avgScoreImpact: 0,
|
|
1898
|
-
})
|
|
1899
|
-
}
|
|
1900
|
-
const attr = combined.get(key)!
|
|
1901
|
-
attr.issuesIntroduced++
|
|
1902
|
-
attr.avgScoreImpact += RULE_WEIGHTS[rule]?.weight ?? 5
|
|
1903
|
-
}
|
|
1904
|
-
}
|
|
1905
|
-
|
|
1906
|
-
return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced)
|
|
1907
|
-
}
|
|
1908
|
-
|
|
1909
|
-
/** Overall blame across all files and rules. */
|
|
1910
|
-
static async analyzeOverallBlame(targetPath: string): Promise<BlameAttribution[]> {
|
|
1911
|
-
assertGitRepo(targetPath)
|
|
1912
|
-
|
|
1913
|
-
const tsFiles = fs
|
|
1914
|
-
.readdirSync(targetPath, { recursive: true, encoding: 'utf8' })
|
|
1915
|
-
.filter((f): f is string => (f.endsWith('.ts') || f.endsWith('.tsx')) && !f.includes('node_modules'))
|
|
1916
|
-
.map(f => path.join(targetPath, f))
|
|
1917
|
-
|
|
1918
|
-
const combined = new Map<string, BlameAttribution>()
|
|
1919
|
-
const commitsByAuthor = new Map<string, Set<string>>()
|
|
1920
|
-
|
|
1921
|
-
for (const file of tsFiles) {
|
|
1922
|
-
let blameEntries: GitBlameEntry[] = []
|
|
1923
|
-
try {
|
|
1924
|
-
const blameOutput = execGit(`git blame --porcelain "${file}"`, targetPath)
|
|
1925
|
-
blameEntries = parseGitBlame(blameOutput)
|
|
1926
|
-
} catch { continue }
|
|
1927
|
-
|
|
1928
|
-
const report = analyzeFilePath(file)
|
|
1929
|
-
const issuesByLine = new Map<number, number>()
|
|
1930
|
-
for (const issue of report.issues) {
|
|
1931
|
-
issuesByLine.set(issue.line, (issuesByLine.get(issue.line) ?? 0) + 1)
|
|
1932
|
-
}
|
|
1933
|
-
|
|
1934
|
-
blameEntries.forEach((entry, idx) => {
|
|
1935
|
-
const key = entry.email || entry.author
|
|
1936
|
-
if (!combined.has(key)) {
|
|
1937
|
-
combined.set(key, {
|
|
1938
|
-
author: entry.author,
|
|
1939
|
-
email: entry.email,
|
|
1940
|
-
commits: 0,
|
|
1941
|
-
linesChanged: 0,
|
|
1942
|
-
issuesIntroduced: 0,
|
|
1943
|
-
avgScoreImpact: 0,
|
|
1944
|
-
})
|
|
1945
|
-
commitsByAuthor.set(key, new Set())
|
|
1946
|
-
}
|
|
1947
|
-
const attr = combined.get(key)!
|
|
1948
|
-
attr.linesChanged++
|
|
1949
|
-
commitsByAuthor.get(key)!.add(entry.hash)
|
|
1950
|
-
const lineNum = idx + 1
|
|
1951
|
-
if (issuesByLine.has(lineNum)) {
|
|
1952
|
-
attr.issuesIntroduced += issuesByLine.get(lineNum)!
|
|
1953
|
-
attr.avgScoreImpact += report.score * (1 / (blameEntries.length || 1))
|
|
1954
|
-
}
|
|
1955
|
-
})
|
|
1956
|
-
}
|
|
1957
|
-
|
|
1958
|
-
for (const [key, attr] of combined) {
|
|
1959
|
-
attr.commits = commitsByAuthor.get(key)?.size ?? 0
|
|
1960
|
-
}
|
|
1961
|
-
|
|
1962
|
-
return Array.from(combined.values()).sort((a, b) => b.issuesIntroduced - a.issuesIntroduced)
|
|
1963
|
-
}
|
|
1964
|
-
|
|
1965
|
-
// --- Instance method -------------------------------------------------------
|
|
1966
|
-
|
|
1967
|
-
async analyzeBlame(options: {
|
|
1968
|
-
target?: 'file' | 'rule' | 'overall'
|
|
1969
|
-
top?: number
|
|
1970
|
-
filePath?: string
|
|
1971
|
-
rule?: string
|
|
1972
|
-
}): Promise<DriftBlameReport> {
|
|
1973
|
-
assertGitRepo(this.projectPath)
|
|
1974
|
-
|
|
1975
|
-
let blame: BlameAttribution[] = []
|
|
1976
|
-
const mode = options.target ?? 'overall'
|
|
1977
|
-
|
|
1978
|
-
if (mode === 'file' && options.filePath) {
|
|
1979
|
-
blame = await BlameAnalyzer.analyzeFileBlame(options.filePath)
|
|
1980
|
-
} else if (mode === 'rule' && options.rule) {
|
|
1981
|
-
blame = await BlameAnalyzer.analyzeRuleBlame(options.rule, this.projectPath)
|
|
1982
|
-
} else {
|
|
1983
|
-
blame = await BlameAnalyzer.analyzeOverallBlame(this.projectPath)
|
|
1984
|
-
}
|
|
1985
|
-
|
|
1986
|
-
if (options.top) {
|
|
1987
|
-
blame = blame.slice(0, options.top)
|
|
1988
|
-
}
|
|
1989
|
-
|
|
1990
|
-
const currentFiles = analyzeProject(this.projectPath, this.config)
|
|
1991
|
-
const baseReport = buildReport(this.projectPath, currentFiles)
|
|
1992
|
-
|
|
1993
|
-
return { ...baseReport, blame }
|
|
1994
|
-
}
|
|
1995
|
-
}
|