@eduardbar/drift 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/.github/workflows/publish-vscode.yml +3 -3
  2. package/.github/workflows/publish.yml +3 -3
  3. package/.github/workflows/review-pr.yml +98 -6
  4. package/AGENTS.md +6 -0
  5. package/README.md +160 -10
  6. package/ROADMAP.md +6 -5
  7. package/dist/analyzer.d.ts +2 -2
  8. package/dist/analyzer.js +420 -159
  9. package/dist/benchmark.d.ts +2 -0
  10. package/dist/benchmark.js +185 -0
  11. package/dist/cli.js +453 -62
  12. package/dist/diff.js +74 -10
  13. package/dist/git.js +12 -0
  14. package/dist/index.d.ts +5 -3
  15. package/dist/index.js +3 -1
  16. package/dist/plugins.d.ts +2 -1
  17. package/dist/plugins.js +177 -28
  18. package/dist/printer.js +4 -0
  19. package/dist/review.js +2 -2
  20. package/dist/rules/comments.js +2 -2
  21. package/dist/rules/complexity.js +2 -7
  22. package/dist/rules/nesting.js +3 -13
  23. package/dist/rules/phase0-basic.js +10 -10
  24. package/dist/rules/shared.d.ts +2 -0
  25. package/dist/rules/shared.js +27 -3
  26. package/dist/saas.d.ts +143 -7
  27. package/dist/saas.js +478 -37
  28. package/dist/trust-kpi.d.ts +9 -0
  29. package/dist/trust-kpi.js +445 -0
  30. package/dist/trust.d.ts +65 -0
  31. package/dist/trust.js +571 -0
  32. package/dist/types.d.ts +154 -0
  33. package/docs/PRD.md +187 -109
  34. package/docs/plugin-contract.md +61 -0
  35. package/docs/trust-core-release-checklist.md +55 -0
  36. package/package.json +5 -3
  37. package/src/analyzer.ts +484 -155
  38. package/src/benchmark.ts +244 -0
  39. package/src/cli.ts +562 -79
  40. package/src/diff.ts +75 -10
  41. package/src/git.ts +16 -0
  42. package/src/index.ts +48 -0
  43. package/src/plugins.ts +354 -26
  44. package/src/printer.ts +4 -0
  45. package/src/review.ts +2 -2
  46. package/src/rules/comments.ts +2 -2
  47. package/src/rules/complexity.ts +2 -7
  48. package/src/rules/nesting.ts +3 -13
  49. package/src/rules/phase0-basic.ts +11 -12
  50. package/src/rules/shared.ts +31 -3
  51. package/src/saas.ts +641 -43
  52. package/src/trust-kpi.ts +518 -0
  53. package/src/trust.ts +774 -0
  54. package/src/types.ts +171 -0
  55. package/tests/diff.test.ts +124 -0
  56. package/tests/new-features.test.ts +71 -0
  57. package/tests/plugins.test.ts +219 -0
  58. package/tests/rules.test.ts +23 -1
  59. package/tests/saas-foundation.test.ts +358 -1
  60. package/tests/trust-kpi.test.ts +120 -0
  61. package/tests/trust.test.ts +584 -0
package/src/analyzer.ts CHANGED
@@ -1,7 +1,18 @@
1
1
  // drift-ignore-file
2
2
  import * as path from 'node:path'
3
+ import { readdirSync, statSync } from 'node:fs'
3
4
  import { Project } from 'ts-morph'
4
- import type { DriftIssue, FileReport, DriftConfig, LoadedPlugin, PluginRuleContext } from './types.js'
5
+ import type {
6
+ DriftIssue,
7
+ FileReport,
8
+ DriftConfig,
9
+ DriftAnalysisOptions,
10
+ DriftPerformanceConfig,
11
+ LoadedPlugin,
12
+ PluginRuleContext,
13
+ PluginLoadError,
14
+ PluginLoadWarning,
15
+ } from './types.js'
5
16
 
6
17
  // Rules
7
18
  import { isFileIgnored } from './rules/shared.js'
@@ -97,6 +108,9 @@ export const RULE_WEIGHTS: Record<string, { severity: DriftIssue['severity']; we
97
108
  // Phase 8: semantic duplication
98
109
  'semantic-duplication': { severity: 'warning', weight: 12 },
99
110
  'plugin-error': { severity: 'warning', weight: 4 },
111
+ 'plugin-warning': { severity: 'info', weight: 0 },
112
+ 'analysis-skip-max-files': { severity: 'info', weight: 0 },
113
+ 'analysis-skip-file-size': { severity: 'info', weight: 0 },
100
114
  }
101
115
 
102
116
  const AI_SMELL_SIGNALS = new Set([
@@ -152,12 +166,43 @@ function runPluginRules(
152
166
  for (const loaded of loadedPlugins) {
153
167
  for (const rule of loaded.plugin.rules) {
154
168
  try {
155
- const detected = rule.detect(file, context) ?? []
156
- for (const issue of detected) {
169
+ const detected = rule.detect(file, context)
170
+ if (detected == null) continue
171
+ if (!Array.isArray(detected)) {
172
+ throw new Error(`detect() must return DriftIssue[], got ${typeof detected}`)
173
+ }
174
+
175
+ for (const [issueIndex, issue] of detected.entries()) {
176
+ if (!issue || typeof issue !== 'object') {
177
+ issues.push({
178
+ rule: 'plugin-error',
179
+ severity: 'warning',
180
+ message: `Plugin '${loaded.plugin.name}' rule '${rule.name}' returned a non-object issue at index ${issueIndex}`,
181
+ line: 1,
182
+ column: 1,
183
+ snippet: file.getBaseName(),
184
+ })
185
+ continue
186
+ }
187
+
188
+ const line = typeof issue.line === 'number' ? issue.line : 1
189
+ const column = typeof issue.column === 'number' ? issue.column : 1
190
+ const message = typeof issue.message === 'string'
191
+ ? issue.message
192
+ : `Invalid plugin issue at index ${issueIndex}: missing string 'message'`
193
+ const snippet = typeof issue.snippet === 'string' ? issue.snippet : file.getBaseName()
194
+ const severity = issue.severity === 'error' || issue.severity === 'warning' || issue.severity === 'info'
195
+ ? issue.severity
196
+ : (rule.severity ?? 'warning')
197
+
157
198
  issues.push({
158
199
  ...issue,
159
200
  rule: issue.rule || `${loaded.plugin.name}/${rule.name}`,
160
- severity: issue.severity ?? (rule.severity ?? 'warning'),
201
+ severity,
202
+ line,
203
+ column,
204
+ message,
205
+ snippet,
161
206
  })
162
207
  }
163
208
  } catch (error) {
@@ -175,6 +220,271 @@ function runPluginRules(
175
220
  return issues
176
221
  }
177
222
 
223
+ function normalizeDiagnosticFilePart(value: string): string {
224
+ return value.replace(/[^a-zA-Z0-9._-]+/g, '_')
225
+ }
226
+
227
+ function pluginDiagnosticHint(code: string | undefined): string {
228
+ switch (code) {
229
+ case 'plugin-api-version-implicit':
230
+ return 'Add apiVersion: 1 to make plugin compatibility explicit.'
231
+ case 'plugin-api-version-invalid':
232
+ return 'Use a positive integer apiVersion (for example: 1).'
233
+ case 'plugin-api-version-unsupported':
234
+ return 'Upgrade/downgrade the plugin to the currently supported API version.'
235
+ case 'plugin-rule-id-invalid':
236
+ return 'Rename the rule id to lowercase/kebab-case format.'
237
+ case 'plugin-rule-id-duplicate':
238
+ return 'Ensure each rule id is unique within the plugin.'
239
+ case 'plugin-capabilities-invalid':
240
+ case 'plugin-capabilities-value-invalid':
241
+ return 'Set capabilities as an object map with primitive values only.'
242
+ default:
243
+ return 'Review plugin contract docs and adjust exported metadata and rule shape.'
244
+ }
245
+ }
246
+
247
+ function pluginDiagnosticToIssue(
248
+ targetPath: string,
249
+ diagnostic: PluginLoadError | PluginLoadWarning,
250
+ kind: 'error' | 'warning',
251
+ ): FileReport {
252
+ const prefix = kind === 'error' ? 'Failed to load plugin' : 'Plugin validation warning'
253
+ const ruleLabel = diagnostic.ruleId ? ` rule '${diagnostic.ruleId}'` : ''
254
+ const codeLabel = diagnostic.code ? ` [${diagnostic.code}]` : ''
255
+ const hint = pluginDiagnosticHint(diagnostic.code)
256
+ const pluginLabel = diagnostic.pluginName
257
+ ? `'${diagnostic.pluginId}' (${diagnostic.pluginName})`
258
+ : `'${diagnostic.pluginId}'`
259
+
260
+ const issue: DriftIssue = {
261
+ rule: kind === 'error' ? 'plugin-error' : 'plugin-warning',
262
+ severity: kind === 'error' ? 'warning' : 'info',
263
+ message: `${prefix}${codeLabel} ${pluginLabel}${ruleLabel}: ${diagnostic.message} Next: ${hint}`,
264
+ line: 1,
265
+ column: 1,
266
+ snippet: diagnostic.pluginId,
267
+ }
268
+
269
+ const safePluginId = normalizeDiagnosticFilePart(diagnostic.pluginId)
270
+ const safeRuleId = diagnostic.ruleId ? `.${normalizeDiagnosticFilePart(diagnostic.ruleId)}` : ''
271
+ const kindDir = kind === 'error' ? '.drift-plugin-errors' : '.drift-plugin-warnings'
272
+
273
+ return {
274
+ path: path.join(targetPath, kindDir, `${safePluginId}${safeRuleId}.plugin`),
275
+ issues: [issue],
276
+ score: calculateScore([issue], RULE_WEIGHTS),
277
+ }
278
+ }
279
+
280
+ const ANALYZABLE_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx'])
281
+ const EXCLUDED_DIR_NAMES = new Set(['node_modules', 'dist', '.next', 'build'])
282
+
283
+ function shouldAnalyzeFile(fileName: string): boolean {
284
+ if (fileName.endsWith('.d.ts')) return false
285
+ if (/\.test\.[^.]+$/.test(fileName)) return false
286
+ if (/\.spec\.[^.]+$/.test(fileName)) return false
287
+ return ANALYZABLE_EXTENSIONS.has(path.extname(fileName))
288
+ }
289
+
290
+ interface AnalyzableSource {
291
+ path: string
292
+ sizeBytes: number
293
+ }
294
+
295
+ interface ResolvedAnalysisOptions {
296
+ lowMemory: boolean
297
+ chunkSize: number
298
+ maxFiles?: number
299
+ maxFileSizeKb?: number
300
+ includeSemanticDuplication: boolean
301
+ }
302
+
303
+ interface SourceSelection {
304
+ selectedPaths: string[]
305
+ skippedReports: FileReport[]
306
+ }
307
+
308
+ function collectAnalyzableSources(targetPath: string): AnalyzableSource[] {
309
+ const sourcePaths: AnalyzableSource[] = []
310
+ const queue: string[] = [targetPath]
311
+
312
+ while (queue.length > 0) {
313
+ const currentDir = queue.pop()
314
+ if (!currentDir) continue
315
+
316
+ let entries: Array<import('node:fs').Dirent<string>>
317
+ try {
318
+ entries = readdirSync(currentDir, { withFileTypes: true })
319
+ } catch {
320
+ continue
321
+ }
322
+
323
+ for (const entry of entries) {
324
+ const entryPath = path.join(currentDir, entry.name)
325
+ if (entry.isDirectory()) {
326
+ if (EXCLUDED_DIR_NAMES.has(entry.name)) continue
327
+ queue.push(entryPath)
328
+ continue
329
+ }
330
+
331
+ if (!entry.isFile()) continue
332
+ if (!shouldAnalyzeFile(entry.name)) continue
333
+
334
+ let sizeBytes = 0
335
+ try {
336
+ sizeBytes = statSync(entryPath).size
337
+ } catch {
338
+ sizeBytes = 0
339
+ }
340
+
341
+ sourcePaths.push({ path: entryPath, sizeBytes })
342
+ }
343
+ }
344
+
345
+ sourcePaths.sort((a, b) => a.path.localeCompare(b.path))
346
+ return sourcePaths
347
+ }
348
+
349
+ function resolveAnalysisOptions(config?: DriftConfig, options?: DriftAnalysisOptions): ResolvedAnalysisOptions {
350
+ const performance: DriftPerformanceConfig | undefined = config?.performance
351
+ const lowMemory = options?.lowMemory ?? performance?.lowMemory ?? false
352
+ const chunkSize = Math.max(1, options?.chunkSize ?? performance?.chunkSize ?? (lowMemory ? 40 : 200))
353
+ const includeSemanticDuplication = options?.includeSemanticDuplication
354
+ ?? performance?.includeSemanticDuplication
355
+ ?? !lowMemory
356
+
357
+ return {
358
+ lowMemory,
359
+ chunkSize,
360
+ maxFiles: options?.maxFiles ?? performance?.maxFiles,
361
+ maxFileSizeKb: options?.maxFileSizeKb ?? performance?.maxFileSizeKb,
362
+ includeSemanticDuplication,
363
+ }
364
+ }
365
+
366
+ function chunkPaths(paths: string[], chunkSize: number): string[][] {
367
+ if (paths.length === 0) return []
368
+ const chunks: string[][] = []
369
+ for (let i = 0; i < paths.length; i += chunkSize) {
370
+ chunks.push(paths.slice(i, i + chunkSize))
371
+ }
372
+ return chunks
373
+ }
374
+
375
+ function toPathKey(filePath: string): string {
376
+ let normalized = path.normalize(filePath)
377
+ if (process.platform === 'win32' && /^\\[A-Za-z]:\\/.test(normalized)) {
378
+ normalized = normalized.slice(1)
379
+ }
380
+ return process.platform === 'win32' ? normalized.toLowerCase() : normalized
381
+ }
382
+
383
+ function createAnalysisSkipReport(filePath: string, rule: 'analysis-skip-max-files' | 'analysis-skip-file-size', message: string): FileReport {
384
+ const issue: DriftIssue = {
385
+ rule,
386
+ severity: RULE_WEIGHTS[rule].severity,
387
+ message,
388
+ line: 1,
389
+ column: 1,
390
+ snippet: path.basename(filePath),
391
+ }
392
+ return {
393
+ path: filePath,
394
+ issues: [issue],
395
+ score: calculateScore([issue], RULE_WEIGHTS),
396
+ }
397
+ }
398
+
399
+ function selectSourcesForAnalysis(sources: AnalyzableSource[], options: ResolvedAnalysisOptions): SourceSelection {
400
+ let selected = sources
401
+ const skippedReports: FileReport[] = []
402
+
403
+ if (typeof options.maxFiles === 'number' && options.maxFiles >= 0 && selected.length > options.maxFiles) {
404
+ const allowed = selected.slice(0, options.maxFiles)
405
+ const skipped = selected.slice(options.maxFiles)
406
+ selected = allowed
407
+
408
+ for (const source of skipped) {
409
+ skippedReports.push(createAnalysisSkipReport(
410
+ source.path,
411
+ 'analysis-skip-max-files',
412
+ `Skipped by maxFiles guardrail (${options.maxFiles})`,
413
+ ))
414
+ }
415
+ }
416
+
417
+ if (typeof options.maxFileSizeKb === 'number' && options.maxFileSizeKb > 0) {
418
+ const maxBytes = options.maxFileSizeKb * 1024
419
+ const keep: AnalyzableSource[] = []
420
+ for (const source of selected) {
421
+ if (source.sizeBytes > maxBytes) {
422
+ const fileSizeKb = Math.ceil(source.sizeBytes / 1024)
423
+ skippedReports.push(createAnalysisSkipReport(
424
+ source.path,
425
+ 'analysis-skip-file-size',
426
+ `Skipped by maxFileSizeKb guardrail (${fileSizeKb}KB > ${options.maxFileSizeKb}KB)`,
427
+ ))
428
+ } else {
429
+ keep.push(source)
430
+ }
431
+ }
432
+ selected = keep
433
+ }
434
+
435
+ return {
436
+ selectedPaths: selected.map((source) => source.path),
437
+ skippedReports,
438
+ }
439
+ }
440
+
441
+ function resolveImportTargetPath(
442
+ importerPath: string,
443
+ moduleSpecifier: string,
444
+ sourcePathMap: Map<string, string>,
445
+ ): string | undefined {
446
+ if (!moduleSpecifier.startsWith('.') && !path.isAbsolute(moduleSpecifier)) {
447
+ return undefined
448
+ }
449
+
450
+ const normalizedSpecifier = moduleSpecifier.replace(/\\/g, '/')
451
+ const basePath = path.resolve(path.dirname(importerPath), normalizedSpecifier)
452
+ const ext = path.extname(basePath)
453
+ const candidates = new Set<string>()
454
+
455
+ const addCandidate = (candidate: string) => {
456
+ candidates.add(path.normalize(candidate))
457
+ }
458
+
459
+ if (ext.length > 0) {
460
+ addCandidate(basePath)
461
+ if (ext === '.js' || ext === '.jsx' || ext === '.ts' || ext === '.tsx') {
462
+ const withoutExt = basePath.slice(0, -ext.length)
463
+ addCandidate(`${withoutExt}.ts`)
464
+ addCandidate(`${withoutExt}.tsx`)
465
+ addCandidate(`${withoutExt}.js`)
466
+ addCandidate(`${withoutExt}.jsx`)
467
+ }
468
+ } else {
469
+ addCandidate(basePath)
470
+ addCandidate(`${basePath}.ts`)
471
+ addCandidate(`${basePath}.tsx`)
472
+ addCandidate(`${basePath}.js`)
473
+ addCandidate(`${basePath}.jsx`)
474
+ addCandidate(path.join(basePath, 'index.ts'))
475
+ addCandidate(path.join(basePath, 'index.tsx'))
476
+ addCandidate(path.join(basePath, 'index.js'))
477
+ addCandidate(path.join(basePath, 'index.jsx'))
478
+ }
479
+
480
+ for (const candidate of candidates) {
481
+ const resolved = sourcePathMap.get(toPathKey(candidate))
482
+ if (resolved) return resolved
483
+ }
484
+
485
+ return undefined
486
+ }
487
+
178
488
  // ---------------------------------------------------------------------------
179
489
  // Per-file analysis
180
490
  // ---------------------------------------------------------------------------
@@ -248,137 +558,172 @@ export function analyzeFile(
248
558
  // Project-level analysis (phases 2, 3, 8 require the full file set)
249
559
  // ---------------------------------------------------------------------------
250
560
 
251
- export function analyzeProject(targetPath: string, config?: DriftConfig): FileReport[] {
252
- const project = new Project({
253
- skipAddingFilesFromTsConfig: true,
254
- compilerOptions: { allowJs: true, jsx: 1 }, // 1 = JsxEmit.Preserve
255
- })
256
-
257
- project.addSourceFilesAtPaths([
258
- `${targetPath}/**/*.ts`,
259
- `${targetPath}/**/*.tsx`,
260
- `${targetPath}/**/*.js`,
261
- `${targetPath}/**/*.jsx`,
262
- `!${targetPath}/**/node_modules/**`,
263
- `!${targetPath}/**/dist/**`,
264
- `!${targetPath}/**/.next/**`,
265
- `!${targetPath}/**/build/**`,
266
- `!${targetPath}/**/*.d.ts`,
267
- `!${targetPath}/**/*.test.*`,
268
- `!${targetPath}/**/*.spec.*`,
269
- ])
270
-
271
- const sourceFiles = project.getSourceFiles()
561
+ export function analyzeProject(targetPath: string, config?: DriftConfig, options?: DriftAnalysisOptions): FileReport[] {
562
+ const analysisOptions = resolveAnalysisOptions(config, options)
563
+ const discoveredSources = collectAnalyzableSources(targetPath)
564
+ const { selectedPaths: sourcePaths, skippedReports } = selectSourcesForAnalysis(discoveredSources, analysisOptions)
565
+ const sourcePathMap = new Map<string, string>(sourcePaths.map((filePath) => [toPathKey(filePath), filePath]))
272
566
  const pluginRuntime = loadPlugins(targetPath, config?.plugins)
273
567
 
274
- // Phase 1: per-file analysis
275
- const reports: FileReport[] = sourceFiles.map((file) => analyzeFile(file, {
276
- config,
277
- loadedPlugins: pluginRuntime.plugins,
278
- projectRoot: targetPath,
279
- }))
568
+ const reports: FileReport[] = [...skippedReports]
280
569
  const reportByPath = new Map<string, FileReport>()
281
- for (const r of reports) reportByPath.set(r.path, r)
282
-
283
- // Build set of ignored paths so cross-file phases don't re-add issues
284
- const ignoredPaths = new Set<string>(
285
- sourceFiles.filter(sf => isFileIgnored(sf)).map(sf => sf.getFilePath())
286
- )
287
-
288
- // ── Phase 2 setup: build import graph ──────────────────────────────────────
289
- const allImportedPaths = new Set<string>()
290
- const allImportedNames = new Map<string, Set<string>>()
570
+ const ignoredPaths = new Set<string>()
571
+ const allImportedPathKeys = new Set<string>()
572
+ const allImportedNamesByKey = new Map<string, Set<string>>()
291
573
  const allLiteralImports = new Set<string>()
292
574
  const importGraph = new Map<string, Set<string>>()
575
+ const fingerprintMap = new Map<string, Array<{ filePath: string; name: string; line: number; col: number }>>()
293
576
 
294
- for (const sf of sourceFiles) {
295
- const sfPath = sf.getFilePath()
296
- for (const decl of sf.getImportDeclarations()) {
577
+ const getReport = (filePath: string): FileReport | undefined => {
578
+ const fileKey = toPathKey(filePath)
579
+ if (ignoredPaths.has(fileKey)) return undefined
580
+ return reportByPath.get(fileKey)
581
+ }
582
+
583
+ const addImportedName = (resolvedPath: string, name: string) => {
584
+ const resolvedKey = toPathKey(resolvedPath)
585
+ if (!allImportedNamesByKey.has(resolvedKey)) {
586
+ allImportedNamesByKey.set(resolvedKey, new Set())
587
+ }
588
+ allImportedNamesByKey.get(resolvedKey)!.add(name)
589
+ }
590
+
591
+ const collectCrossFileMetadata = (sourceFile: import('ts-morph').SourceFile) => {
592
+ const sourceFilePath = sourceFile.getFilePath()
593
+ const sourceFileKey = toPathKey(sourceFilePath)
594
+ const sourceFilePathCanonical = sourcePathMap.get(sourceFileKey) ?? sourceFilePath
595
+
596
+ for (const decl of sourceFile.getImportDeclarations()) {
297
597
  const moduleSpecifier = decl.getModuleSpecifierValue()
298
598
  allLiteralImports.add(moduleSpecifier)
299
599
 
300
- const resolved = decl.getModuleSpecifierSourceFile()
301
- if (resolved) {
302
- const resolvedPath = resolved.getFilePath()
303
- allImportedPaths.add(resolvedPath)
600
+ const resolvedPath = analysisOptions.lowMemory
601
+ ? resolveImportTargetPath(sourceFilePath, moduleSpecifier, sourcePathMap)
602
+ : decl.getModuleSpecifierSourceFile()?.getFilePath()
304
603
 
305
- if (!importGraph.has(sfPath)) importGraph.set(sfPath, new Set())
306
- importGraph.get(sfPath)!.add(resolvedPath)
604
+ if (!resolvedPath) continue
605
+ const resolvedPathKey = toPathKey(resolvedPath)
606
+ const resolvedPathCanonical = sourcePathMap.get(resolvedPathKey) ?? resolvedPath
607
+ allImportedPathKeys.add(resolvedPathKey)
307
608
 
308
- const named = decl.getNamedImports().map(n => n.getName())
309
- const def = decl.getDefaultImport()?.getText()
310
- const ns = decl.getNamespaceImport()?.getText()
609
+ if (!importGraph.has(sourceFilePathCanonical)) importGraph.set(sourceFilePathCanonical, new Set())
610
+ importGraph.get(sourceFilePathCanonical)!.add(resolvedPathCanonical)
311
611
 
312
- if (!allImportedNames.has(resolvedPath)) {
313
- allImportedNames.set(resolvedPath, new Set())
314
- }
315
- const nameSet = allImportedNames.get(resolvedPath)!
316
- for (const n of named) nameSet.add(n)
317
- if (def) nameSet.add('default')
318
- if (ns) nameSet.add('*')
612
+ for (const named of decl.getNamedImports().map((namedImport) => namedImport.getName())) {
613
+ addImportedName(resolvedPathCanonical, named)
319
614
  }
615
+ if (decl.getDefaultImport()) addImportedName(resolvedPathCanonical, 'default')
616
+ if (decl.getNamespaceImport()) addImportedName(resolvedPathCanonical, '*')
320
617
  }
321
618
 
322
- for (const exportDecl of sf.getExportDeclarations()) {
323
- const reExportedModule = exportDecl.getModuleSpecifierSourceFile()
324
- if (!reExportedModule) continue
619
+ for (const exportDecl of sourceFile.getExportDeclarations()) {
620
+ const moduleSpecifier = exportDecl.getModuleSpecifierValue()
621
+ if (!moduleSpecifier) continue
325
622
 
326
- const reExportedPath = reExportedModule.getFilePath()
327
- allImportedPaths.add(reExportedPath)
623
+ const reExportedPath = analysisOptions.lowMemory
624
+ ? resolveImportTargetPath(sourceFilePath, moduleSpecifier, sourcePathMap)
625
+ : exportDecl.getModuleSpecifierSourceFile()?.getFilePath()
328
626
 
329
- if (!allImportedNames.has(reExportedPath)) {
330
- allImportedNames.set(reExportedPath, new Set())
331
- }
332
- const nameSet = allImportedNames.get(reExportedPath)!
627
+ if (!reExportedPath) continue
628
+ const reExportedPathKey = toPathKey(reExportedPath)
629
+ const reExportedPathCanonical = sourcePathMap.get(reExportedPathKey) ?? reExportedPath
630
+ allImportedPathKeys.add(reExportedPathKey)
333
631
 
334
632
  const namedExports = exportDecl.getNamedExports()
335
633
  if (namedExports.length === 0) {
336
- nameSet.add('*')
634
+ addImportedName(reExportedPathCanonical, '*')
337
635
  } else {
338
- for (const ne of namedExports) nameSet.add(ne.getName())
636
+ for (const namedExport of namedExports) {
637
+ addImportedName(reExportedPathCanonical, namedExport.getName())
638
+ }
339
639
  }
340
640
  }
641
+
642
+ if (!analysisOptions.includeSemanticDuplication || ignoredPaths.has(sourceFileKey)) {
643
+ return
644
+ }
645
+
646
+ for (const { fn, name, line, col } of collectFunctions(sourceFile)) {
647
+ const fp = fingerprintFunction(fn)
648
+ if (!fingerprintMap.has(fp)) fingerprintMap.set(fp, [])
649
+ fingerprintMap.get(fp)!.push({ filePath: sourceFilePathCanonical, name, line, col })
650
+ }
341
651
  }
342
652
 
343
- // Plugin load failures are surfaced as synthetic report entries.
653
+ const analyzeChunk = (chunk: string[]) => {
654
+ const project = new Project({
655
+ skipAddingFilesFromTsConfig: true,
656
+ compilerOptions: { allowJs: true, jsx: 1 },
657
+ })
658
+ project.addSourceFilesAtPaths(chunk)
659
+
660
+ for (const sourceFile of project.getSourceFiles()) {
661
+ const sourceFilePath = sourceFile.getFilePath()
662
+ const sourceFileKey = toPathKey(sourceFilePath)
663
+ const sourceFilePathCanonical = sourcePathMap.get(sourceFileKey) ?? sourceFilePath
664
+ const report = analyzeFile(sourceFile, {
665
+ config,
666
+ loadedPlugins: pluginRuntime.plugins,
667
+ projectRoot: targetPath,
668
+ })
669
+ report.path = sourceFilePathCanonical
670
+
671
+ reports.push(report)
672
+ reportByPath.set(sourceFileKey, report)
673
+ if (isFileIgnored(sourceFile)) ignoredPaths.add(sourceFileKey)
674
+ collectCrossFileMetadata(sourceFile)
675
+ }
676
+ }
677
+
678
+ const chunks = chunkPaths(sourcePaths, analysisOptions.lowMemory ? analysisOptions.chunkSize : sourcePaths.length || 1)
679
+ for (const chunk of chunks) {
680
+ analyzeChunk(chunk)
681
+ }
682
+
683
+ // Plugin diagnostics are surfaced as synthetic report entries.
344
684
  if (pluginRuntime.errors.length > 0) {
345
685
  for (const err of pluginRuntime.errors) {
346
- const pluginIssue: DriftIssue = {
347
- rule: 'plugin-error',
348
- severity: 'warning',
349
- message: `Failed to load plugin '${err.pluginId}': ${err.message}`,
350
- line: 1,
351
- column: 1,
352
- snippet: err.pluginId,
353
- }
354
- reports.push({
355
- path: path.join(targetPath, '.drift-plugin-errors', `${err.pluginId}.plugin`),
356
- issues: [pluginIssue],
357
- score: calculateScore([pluginIssue], RULE_WEIGHTS),
358
- })
686
+ reports.push(pluginDiagnosticToIssue(targetPath, err, 'error'))
359
687
  }
360
688
  }
361
689
 
362
- // ── Phase 2: dead-file + unused-export + unused-dependency ─────────────────
363
- const deadFiles = detectDeadFiles(sourceFiles, allImportedPaths, RULE_WEIGHTS)
364
- for (const [sfPath, issue] of deadFiles) {
365
- if (ignoredPaths.has(sfPath)) continue
366
- const report = reportByPath.get(sfPath)
367
- if (report) {
368
- report.issues.push(issue)
369
- report.score = calculateScore(report.issues, RULE_WEIGHTS)
690
+ if (pluginRuntime.warnings.length > 0) {
691
+ for (const warning of pluginRuntime.warnings) {
692
+ reports.push(pluginDiagnosticToIssue(targetPath, warning, 'warning'))
370
693
  }
371
694
  }
372
695
 
373
- const unusedExports = detectUnusedExports(sourceFiles, allImportedNames, RULE_WEIGHTS)
374
- for (const [sfPath, issues] of unusedExports) {
375
- if (ignoredPaths.has(sfPath)) continue
376
- const report = reportByPath.get(sfPath)
377
- if (report) {
696
+ for (const chunk of chunks) {
697
+ const project = new Project({
698
+ skipAddingFilesFromTsConfig: true,
699
+ compilerOptions: { allowJs: true, jsx: 1 },
700
+ })
701
+ project.addSourceFilesAtPaths(chunk)
702
+ const sourceFiles = project.getSourceFiles()
703
+
704
+ const importedPathsForChunk = new Set<string>()
705
+ const importedNamesForChunk = new Map<string, Set<string>>()
706
+ for (const sourceFile of sourceFiles) {
707
+ const sfPath = sourceFile.getFilePath()
708
+ const sfKey = toPathKey(sfPath)
709
+ if (allImportedPathKeys.has(sfKey)) importedPathsForChunk.add(sfPath)
710
+ const importedNames = allImportedNamesByKey.get(sfKey)
711
+ if (importedNames) importedNamesForChunk.set(sfPath, new Set(importedNames))
712
+ }
713
+
714
+ const deadFiles = detectDeadFiles(sourceFiles, importedPathsForChunk, RULE_WEIGHTS)
715
+ for (const [sfPath, issue] of deadFiles) {
716
+ const report = getReport(sfPath)
717
+ if (report) report.issues.push(issue)
718
+ }
719
+
720
+ const unusedExports = detectUnusedExports(sourceFiles, importedNamesForChunk, RULE_WEIGHTS)
721
+ for (const [sfPath, issues] of unusedExports) {
722
+ const report = getReport(sfPath)
723
+ if (!report) continue
378
724
  for (const issue of issues) {
379
725
  report.issues.push(issue)
380
726
  }
381
- report.score = calculateScore(report.issues, RULE_WEIGHTS)
382
727
  }
383
728
  }
384
729
 
@@ -392,87 +737,71 @@ export function analyzeProject(targetPath: string, config?: DriftConfig): FileRe
392
737
  })
393
738
  }
394
739
 
395
- // ── Phase 3: circular-dependency ────────────────────────────────────────────
396
740
  const circularIssues = detectCircularDependencies(importGraph, RULE_WEIGHTS)
397
741
  for (const [filePath, issue] of circularIssues) {
398
- if (ignoredPaths.has(filePath)) continue
399
- const report = reportByPath.get(filePath)
400
- if (report) {
401
- report.issues.push(issue)
402
- report.score = calculateScore(report.issues, RULE_WEIGHTS)
403
- }
742
+ const report = getReport(filePath)
743
+ if (report) report.issues.push(issue)
404
744
  }
405
745
 
406
- // ── Phase 3b: layer-violation ────────────────────────────────────────────────
407
746
  if (config?.layers && config.layers.length > 0) {
408
747
  const layerIssues = detectLayerViolations(importGraph, config.layers, targetPath, RULE_WEIGHTS)
409
748
  for (const [filePath, issues] of layerIssues) {
410
- if (ignoredPaths.has(filePath)) continue
411
- const report = reportByPath.get(filePath)
412
- if (report) {
413
- for (const issue of issues) {
414
- report.issues.push(issue)
415
- report.score = Math.min(100, report.score + (RULE_WEIGHTS['layer-violation']?.weight ?? 5))
416
- }
749
+ const report = getReport(filePath)
750
+ if (!report) continue
751
+ for (const issue of issues) {
752
+ report.issues.push(issue)
417
753
  }
418
754
  }
419
755
  }
420
756
 
421
- // ── Phase 3c: cross-boundary-import ─────────────────────────────────────────
422
757
  if (config?.modules && config.modules.length > 0) {
423
758
  const boundaryIssues = detectCrossBoundaryImports(importGraph, config.modules, targetPath, RULE_WEIGHTS)
424
759
  for (const [filePath, issues] of boundaryIssues) {
425
- if (ignoredPaths.has(filePath)) continue
426
- const report = reportByPath.get(filePath)
427
- if (report) {
428
- for (const issue of issues) {
429
- report.issues.push(issue)
430
- report.score = Math.min(100, report.score + (RULE_WEIGHTS['cross-boundary-import']?.weight ?? 5))
431
- }
760
+ const report = getReport(filePath)
761
+ if (!report) continue
762
+ for (const issue of issues) {
763
+ report.issues.push(issue)
432
764
  }
433
765
  }
434
766
  }
435
767
 
436
- // ── Phase 8: semantic-duplication ───────────────────────────────────────────
437
- const fingerprintMap = new Map<string, Array<{ filePath: string; name: string; line: number; col: number }>>()
438
-
439
- for (const sf of sourceFiles) {
440
- if (isFileIgnored(sf)) continue
441
- const sfPath = sf.getFilePath()
442
- for (const { fn, name, line, col } of collectFunctions(sf)) {
443
- const fp = fingerprintFunction(fn)
444
- if (!fingerprintMap.has(fp)) fingerprintMap.set(fp, [])
445
- fingerprintMap.get(fp)!.push({ filePath: sfPath, name, line, col })
768
+ if (analysisOptions.includeSemanticDuplication) {
769
+ const relativePathCache = new Map<string, string>()
770
+ const toRelativePath = (filePath: string): string => {
771
+ const cached = relativePathCache.get(filePath)
772
+ if (cached) return cached
773
+ const value = path.relative(targetPath, filePath).replace(/\\/g, '/')
774
+ relativePathCache.set(filePath, value)
775
+ return value
446
776
  }
447
- }
448
777
 
449
- for (const [, entries] of fingerprintMap) {
450
- if (entries.length < 2) continue
778
+ for (const [, entries] of fingerprintMap) {
779
+ if (entries.length < 2) continue
451
780
 
452
- for (const entry of entries) {
453
- const report = reportByPath.get(entry.filePath)
454
- if (!report) continue
781
+ for (const entry of entries) {
782
+ const report = getReport(entry.filePath)
783
+ if (!report) continue
455
784
 
456
- const others = entries
457
- .filter(e => e !== entry)
458
- .map(e => {
459
- const rel = path.relative(targetPath, e.filePath).replace(/\\/g, '/')
460
- return `${rel}:${e.line} (${e.name})`
785
+ const others = entries
786
+ .filter((other) => other !== entry)
787
+ .map((other) => `${toRelativePath(other.filePath)}:${other.line} (${other.name})`)
788
+ .join(', ')
789
+
790
+ report.issues.push({
791
+ rule: 'semantic-duplication',
792
+ severity: 'warning',
793
+ message: `Function '${entry.name}' is semantically identical to: ${others}`,
794
+ line: entry.line,
795
+ column: entry.col,
796
+ snippet: `function ${entry.name} - duplicated in ${entries.length - 1} other location${entries.length > 2 ? 's' : ''}`,
461
797
  })
462
- .join(', ')
463
-
464
- const weight = RULE_WEIGHTS['semantic-duplication']?.weight ?? 12
465
- report.issues.push({
466
- rule: 'semantic-duplication',
467
- severity: 'warning',
468
- message: `Function '${entry.name}' is semantically identical to: ${others}`,
469
- line: entry.line,
470
- column: entry.col,
471
- snippet: `function ${entry.name} — duplicated in ${entries.length - 1} other location${entries.length > 2 ? 's' : ''}`,
472
- })
473
- report.score = Math.min(100, report.score + weight)
798
+ }
474
799
  }
475
800
  }
476
801
 
802
+ for (const report of reportByPath.values()) {
803
+ report.score = calculateScore(report.issues, RULE_WEIGHTS)
804
+ }
805
+
477
806
  return reports
478
807
  }