@eduardbar/drift 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/.github/workflows/publish-vscode.yml +3 -3
  2. package/.github/workflows/publish.yml +3 -3
  3. package/.github/workflows/review-pr.yml +98 -6
  4. package/AGENTS.md +6 -0
  5. package/README.md +160 -10
  6. package/ROADMAP.md +6 -5
  7. package/dist/analyzer.d.ts +2 -2
  8. package/dist/analyzer.js +420 -159
  9. package/dist/benchmark.d.ts +2 -0
  10. package/dist/benchmark.js +185 -0
  11. package/dist/cli.js +453 -62
  12. package/dist/diff.js +74 -10
  13. package/dist/git.js +12 -0
  14. package/dist/index.d.ts +5 -3
  15. package/dist/index.js +3 -1
  16. package/dist/plugins.d.ts +2 -1
  17. package/dist/plugins.js +177 -28
  18. package/dist/printer.js +4 -0
  19. package/dist/review.js +2 -2
  20. package/dist/rules/comments.js +2 -2
  21. package/dist/rules/complexity.js +2 -7
  22. package/dist/rules/nesting.js +3 -13
  23. package/dist/rules/phase0-basic.js +10 -10
  24. package/dist/rules/shared.d.ts +2 -0
  25. package/dist/rules/shared.js +27 -3
  26. package/dist/saas.d.ts +143 -7
  27. package/dist/saas.js +478 -37
  28. package/dist/trust-kpi.d.ts +9 -0
  29. package/dist/trust-kpi.js +445 -0
  30. package/dist/trust.d.ts +65 -0
  31. package/dist/trust.js +571 -0
  32. package/dist/types.d.ts +154 -0
  33. package/docs/PRD.md +187 -109
  34. package/docs/plugin-contract.md +61 -0
  35. package/docs/trust-core-release-checklist.md +55 -0
  36. package/package.json +5 -3
  37. package/src/analyzer.ts +484 -155
  38. package/src/benchmark.ts +244 -0
  39. package/src/cli.ts +562 -79
  40. package/src/diff.ts +75 -10
  41. package/src/git.ts +16 -0
  42. package/src/index.ts +48 -0
  43. package/src/plugins.ts +354 -26
  44. package/src/printer.ts +4 -0
  45. package/src/review.ts +2 -2
  46. package/src/rules/comments.ts +2 -2
  47. package/src/rules/complexity.ts +2 -7
  48. package/src/rules/nesting.ts +3 -13
  49. package/src/rules/phase0-basic.ts +11 -12
  50. package/src/rules/shared.ts +31 -3
  51. package/src/saas.ts +641 -43
  52. package/src/trust-kpi.ts +518 -0
  53. package/src/trust.ts +774 -0
  54. package/src/types.ts +171 -0
  55. package/tests/diff.test.ts +124 -0
  56. package/tests/new-features.test.ts +71 -0
  57. package/tests/plugins.test.ts +219 -0
  58. package/tests/rules.test.ts +23 -1
  59. package/tests/saas-foundation.test.ts +358 -1
  60. package/tests/trust-kpi.test.ts +120 -0
  61. package/tests/trust.test.ts +584 -0
package/dist/analyzer.js CHANGED
@@ -1,5 +1,6 @@
1
1
  // drift-ignore-file
2
2
  import * as path from 'node:path';
3
+ import { readdirSync, statSync } from 'node:fs';
3
4
  import { Project } from 'ts-morph';
4
5
  // Rules
5
6
  import { isFileIgnored } from './rules/shared.js';
@@ -61,6 +62,9 @@ export const RULE_WEIGHTS = {
61
62
  // Phase 8: semantic duplication
62
63
  'semantic-duplication': { severity: 'warning', weight: 12 },
63
64
  'plugin-error': { severity: 'warning', weight: 4 },
65
+ 'plugin-warning': { severity: 'info', weight: 0 },
66
+ 'analysis-skip-max-files': { severity: 'info', weight: 0 },
67
+ 'analysis-skip-file-size': { severity: 'info', weight: 0 },
64
68
  };
65
69
  const AI_SMELL_SIGNALS = new Set([
66
70
  'over-commented',
@@ -107,12 +111,41 @@ function runPluginRules(file, loadedPlugins, config, projectRoot) {
107
111
  for (const loaded of loadedPlugins) {
108
112
  for (const rule of loaded.plugin.rules) {
109
113
  try {
110
- const detected = rule.detect(file, context) ?? [];
111
- for (const issue of detected) {
114
+ const detected = rule.detect(file, context);
115
+ if (detected == null)
116
+ continue;
117
+ if (!Array.isArray(detected)) {
118
+ throw new Error(`detect() must return DriftIssue[], got ${typeof detected}`);
119
+ }
120
+ for (const [issueIndex, issue] of detected.entries()) {
121
+ if (!issue || typeof issue !== 'object') {
122
+ issues.push({
123
+ rule: 'plugin-error',
124
+ severity: 'warning',
125
+ message: `Plugin '${loaded.plugin.name}' rule '${rule.name}' returned a non-object issue at index ${issueIndex}`,
126
+ line: 1,
127
+ column: 1,
128
+ snippet: file.getBaseName(),
129
+ });
130
+ continue;
131
+ }
132
+ const line = typeof issue.line === 'number' ? issue.line : 1;
133
+ const column = typeof issue.column === 'number' ? issue.column : 1;
134
+ const message = typeof issue.message === 'string'
135
+ ? issue.message
136
+ : `Invalid plugin issue at index ${issueIndex}: missing string 'message'`;
137
+ const snippet = typeof issue.snippet === 'string' ? issue.snippet : file.getBaseName();
138
+ const severity = issue.severity === 'error' || issue.severity === 'warning' || issue.severity === 'info'
139
+ ? issue.severity
140
+ : (rule.severity ?? 'warning');
112
141
  issues.push({
113
142
  ...issue,
114
143
  rule: issue.rule || `${loaded.plugin.name}/${rule.name}`,
115
- severity: issue.severity ?? (rule.severity ?? 'warning'),
144
+ severity,
145
+ line,
146
+ column,
147
+ message,
148
+ snippet,
116
149
  });
117
150
  }
118
151
  }
@@ -130,6 +163,218 @@ function runPluginRules(file, loadedPlugins, config, projectRoot) {
130
163
  }
131
164
  return issues;
132
165
  }
166
/**
 * Replace every run of characters that is not filesystem-safe
 * (letters, digits, '.', '_', '-') with a single underscore, so the
 * value can be embedded in a synthetic diagnostic file name.
 */
function normalizeDiagnosticFilePart(value) {
    const unsafeRun = /[^a-zA-Z0-9._-]+/g;
    return value.replace(unsafeRun, '_');
}
169
/**
 * Map a plugin diagnostic code to a short actionable "next step" hint.
 * Unknown (or missing) codes fall back to a generic contract-review hint.
 * A Map is used instead of an object literal so hostile or odd codes
 * (e.g. '__proto__') cannot accidentally hit inherited properties.
 */
function pluginDiagnosticHint(code) {
    const hintsByCode = new Map([
        ['plugin-api-version-implicit', 'Add apiVersion: 1 to make plugin compatibility explicit.'],
        ['plugin-api-version-invalid', 'Use a positive integer apiVersion (for example: 1).'],
        ['plugin-api-version-unsupported', 'Upgrade/downgrade the plugin to the currently supported API version.'],
        ['plugin-rule-id-invalid', 'Rename the rule id to lowercase/kebab-case format.'],
        ['plugin-rule-id-duplicate', 'Ensure each rule id is unique within the plugin.'],
        ['plugin-capabilities-invalid', 'Set capabilities as an object map with primitive values only.'],
        ['plugin-capabilities-value-invalid', 'Set capabilities as an object map with primitive values only.'],
    ]);
    return hintsByCode.get(code)
        ?? 'Review plugin contract docs and adjust exported metadata and rule shape.';
}
188
/**
 * Convert a plugin loader diagnostic into a synthetic per-file report so it
 * surfaces alongside regular analysis results.
 *
 * kind 'error'   -> rule 'plugin-error',   severity 'warning', dir '.drift-plugin-errors'
 * kind (other)   -> rule 'plugin-warning', severity 'info',    dir '.drift-plugin-warnings'
 *
 * The report path is built from sanitized plugin/rule ids under the kind
 * directory; the message embeds the diagnostic code, plugin label, optional
 * rule id, and an actionable hint.
 */
function pluginDiagnosticToIssue(targetPath, diagnostic, kind) {
    const isError = kind === 'error';
    const prefix = isError ? 'Failed to load plugin' : 'Plugin validation warning';
    const pluginLabel = diagnostic.pluginName
        ? `'${diagnostic.pluginId}' (${diagnostic.pluginName})`
        : `'${diagnostic.pluginId}'`;
    const ruleLabel = diagnostic.ruleId ? ` rule '${diagnostic.ruleId}'` : '';
    const codeLabel = diagnostic.code ? ` [${diagnostic.code}]` : '';
    const hint = pluginDiagnosticHint(diagnostic.code);
    const issue = {
        rule: isError ? 'plugin-error' : 'plugin-warning',
        severity: isError ? 'warning' : 'info',
        message: `${prefix}${codeLabel} ${pluginLabel}${ruleLabel}: ${diagnostic.message} Next: ${hint}`,
        line: 1,
        column: 1,
        snippet: diagnostic.pluginId,
    };
    // File name: "<pluginId>.plugin" or "<pluginId>.<ruleId>.plugin", sanitized.
    const fileParts = [normalizeDiagnosticFilePart(diagnostic.pluginId)];
    if (diagnostic.ruleId) {
        fileParts.push(normalizeDiagnosticFilePart(diagnostic.ruleId));
    }
    const kindDir = isError ? '.drift-plugin-errors' : '.drift-plugin-warnings';
    return {
        path: path.join(targetPath, kindDir, `${fileParts.join('.')}.plugin`),
        issues: [issue],
        score: calculateScore([issue], RULE_WEIGHTS),
    };
}
213
// File extensions the analyzer is able to parse.
const ANALYZABLE_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx']);
// Directory names that are never traversed during source discovery.
const EXCLUDED_DIR_NAMES = new Set(['node_modules', 'dist', '.next', 'build']);
/**
 * Decide whether a file name belongs in the analysis set.
 * Declaration files (*.d.ts) and test/spec files (*.test.*, *.spec.*) are
 * excluded; everything else is gated on its extension.
 */
function shouldAnalyzeFile(fileName) {
    if (fileName.endsWith('.d.ts')) {
        return false;
    }
    if (/\.(test|spec)\.[^.]+$/.test(fileName)) {
        return false;
    }
    return ANALYZABLE_EXTENSIONS.has(path.extname(fileName));
}
224
/**
 * Walk targetPath recursively and collect every analyzable source file
 * together with its size in bytes, sorted by path for deterministic output.
 * Excluded directory names are pruned; unreadable directories are skipped
 * silently (best-effort discovery), and a failed stat records size 0.
 */
function collectAnalyzableSources(targetPath) {
    const collected = [];
    const visitDirectory = (dirPath) => {
        let entries;
        try {
            entries = readdirSync(dirPath, { withFileTypes: true });
        }
        catch {
            // Unreadable/vanished directory: skip rather than abort the scan.
            return;
        }
        for (const entry of entries) {
            const entryPath = path.join(dirPath, entry.name);
            if (entry.isDirectory()) {
                if (!EXCLUDED_DIR_NAMES.has(entry.name)) {
                    visitDirectory(entryPath);
                }
            }
            else if (entry.isFile() && shouldAnalyzeFile(entry.name)) {
                let sizeBytes;
                try {
                    sizeBytes = statSync(entryPath).size;
                }
                catch {
                    sizeBytes = 0;
                }
                collected.push({ path: entryPath, sizeBytes });
            }
        }
    };
    visitDirectory(targetPath);
    collected.sort((a, b) => a.path.localeCompare(b.path));
    return collected;
}
263
/**
 * Merge explicit analysis options with config.performance defaults.
 * Precedence for every field: options > config.performance > built-in default.
 * Defaults: lowMemory false; chunkSize 40 when lowMemory else 200 (clamped to
 * at least 1); semantic duplication on unless lowMemory; maxFiles and
 * maxFileSizeKb undefined (guardrails disabled).
 */
function resolveAnalysisOptions(config, options) {
    const perf = config?.performance;
    const lowMemory = options?.lowMemory ?? perf?.lowMemory ?? false;
    const requestedChunkSize = options?.chunkSize ?? perf?.chunkSize ?? (lowMemory ? 40 : 200);
    const includeSemanticDuplication = options?.includeSemanticDuplication
        ?? perf?.includeSemanticDuplication
        ?? !lowMemory;
    return {
        lowMemory,
        chunkSize: Math.max(1, requestedChunkSize),
        maxFiles: options?.maxFiles ?? perf?.maxFiles,
        maxFileSizeKb: options?.maxFileSizeKb ?? perf?.maxFileSizeKb,
        includeSemanticDuplication,
    };
}
278
/**
 * Split `paths` into consecutive chunks of at most `chunkSize` entries.
 * Returns [] for empty input; the final chunk may be shorter.
 *
 * Fix: a chunkSize < 1 (0, negative, or NaN) previously made `i += chunkSize`
 * never advance, looping forever. Callers happen to clamp via
 * resolveAnalysisOptions, but the helper is now safe on its own; behavior for
 * valid (>= 1) sizes is unchanged.
 */
function chunkPaths(paths, chunkSize) {
    if (paths.length === 0)
        return [];
    // Defensive clamp; NaN compares false and also falls back to 1.
    const step = chunkSize >= 1 ? chunkSize : 1;
    const chunks = [];
    for (let i = 0; i < paths.length; i += step) {
        chunks.push(paths.slice(i, i + step));
    }
    return chunks;
}
287
/**
 * Produce a canonical map key for a file path: normalized, and on Windows
 * lower-cased (case-insensitive filesystem) with a stray leading backslash
 * before a drive letter stripped (presumably how the AST library reports
 * Windows paths — TODO confirm against ts-morph output).
 */
function toPathKey(filePath) {
    const isWindows = process.platform === 'win32';
    let key = path.normalize(filePath);
    if (isWindows && /^\\[A-Za-z]:\\/.test(key)) {
        key = key.slice(1);
    }
    return isWindows ? key.toLowerCase() : key;
}
294
/**
 * Build a synthetic one-issue report for a file excluded by a performance
 * guardrail (maxFiles / maxFileSizeKb). Severity comes from the rule's entry
 * in RULE_WEIGHTS; the snippet is just the file's base name.
 */
function createAnalysisSkipReport(filePath, rule, message) {
    const skipIssue = {
        rule,
        severity: RULE_WEIGHTS[rule].severity,
        message,
        line: 1,
        column: 1,
        snippet: path.basename(filePath),
    };
    return {
        path: filePath,
        issues: [skipIssue],
        score: calculateScore([skipIssue], RULE_WEIGHTS),
    };
}
309
/**
 * Apply the maxFiles and maxFileSizeKb guardrails to the discovered sources.
 * Returns the surviving file paths plus a synthetic skip report for every
 * excluded file (maxFiles skips first, then size skips, preserving order).
 * maxFiles applies only when it is a number >= 0; maxFileSizeKb only when > 0.
 */
function selectSourcesForAnalysis(sources, options) {
    const skippedReports = [];
    const { maxFiles, maxFileSizeKb } = options;
    let remaining = sources;
    // Guardrail 1: cap the total number of analyzed files.
    if (typeof maxFiles === 'number' && maxFiles >= 0 && remaining.length > maxFiles) {
        for (const source of remaining.slice(maxFiles)) {
            skippedReports.push(createAnalysisSkipReport(source.path, 'analysis-skip-max-files', `Skipped by maxFiles guardrail (${maxFiles})`));
        }
        remaining = remaining.slice(0, maxFiles);
    }
    // Guardrail 2: drop files larger than the configured size limit.
    if (typeof maxFileSizeKb === 'number' && maxFileSizeKb > 0) {
        const maxBytes = maxFileSizeKb * 1024;
        remaining = remaining.filter((source) => {
            if (source.sizeBytes <= maxBytes) {
                return true;
            }
            const fileSizeKb = Math.ceil(source.sizeBytes / 1024);
            skippedReports.push(createAnalysisSkipReport(source.path, 'analysis-skip-file-size', `Skipped by maxFileSizeKb guardrail (${fileSizeKb}KB > ${maxFileSizeKb}KB)`));
            return false;
        });
    }
    return {
        selectedPaths: remaining.map((source) => source.path),
        skippedReports,
    };
}
339
/**
 * Resolve a relative (or absolute) import specifier to a known source file
 * in low-memory mode, without asking the compiler to resolve modules.
 * Bare package specifiers return undefined. Candidates are tried in order:
 * the literal path; for a .js/.jsx/.ts/.tsx specifier, the same stem with
 * each source extension; for an extensionless specifier, each extension and
 * then index.<ext> inside the directory. The first candidate present in
 * sourcePathMap wins.
 */
function resolveImportTargetPath(importerPath, moduleSpecifier, sourcePathMap) {
    if (!moduleSpecifier.startsWith('.') && !path.isAbsolute(moduleSpecifier)) {
        // Bare specifier (npm package): not part of the project file set.
        return undefined;
    }
    const specifier = moduleSpecifier.replace(/\\/g, '/');
    const basePath = path.resolve(path.dirname(importerPath), specifier);
    const ext = path.extname(basePath);
    const sourceExts = ['.ts', '.tsx', '.js', '.jsx'];
    const candidates = [];
    const pushCandidate = (candidate) => {
        const normalized = path.normalize(candidate);
        if (!candidates.includes(normalized)) {
            candidates.push(normalized);
        }
    };
    if (ext.length > 0) {
        pushCandidate(basePath);
        // TS convention: './foo.js' in source may actually live as foo.ts etc.
        if (sourceExts.includes(ext)) {
            const stem = basePath.slice(0, -ext.length);
            for (const candidateExt of sourceExts) {
                pushCandidate(stem + candidateExt);
            }
        }
    }
    else {
        pushCandidate(basePath);
        for (const candidateExt of sourceExts) {
            pushCandidate(basePath + candidateExt);
        }
        for (const candidateExt of sourceExts) {
            pushCandidate(path.join(basePath, `index${candidateExt}`));
        }
    }
    for (const candidate of candidates) {
        const resolved = sourcePathMap.get(toPathKey(candidate));
        if (resolved) {
            return resolved;
        }
    }
    return undefined;
}
133
378
  // ---------------------------------------------------------------------------
134
379
  // Per-file analysis
135
380
  // ---------------------------------------------------------------------------
@@ -184,128 +429,161 @@ export function analyzeFile(file, options) {
184
429
  // ---------------------------------------------------------------------------
185
430
  // Project-level analysis (phases 2, 3, 8 require the full file set)
186
431
  // ---------------------------------------------------------------------------
187
- export function analyzeProject(targetPath, config) {
188
- const project = new Project({
189
- skipAddingFilesFromTsConfig: true,
190
- compilerOptions: { allowJs: true, jsx: 1 }, // 1 = JsxEmit.Preserve
191
- });
192
- project.addSourceFilesAtPaths([
193
- `${targetPath}/**/*.ts`,
194
- `${targetPath}/**/*.tsx`,
195
- `${targetPath}/**/*.js`,
196
- `${targetPath}/**/*.jsx`,
197
- `!${targetPath}/**/node_modules/**`,
198
- `!${targetPath}/**/dist/**`,
199
- `!${targetPath}/**/.next/**`,
200
- `!${targetPath}/**/build/**`,
201
- `!${targetPath}/**/*.d.ts`,
202
- `!${targetPath}/**/*.test.*`,
203
- `!${targetPath}/**/*.spec.*`,
204
- ]);
205
- const sourceFiles = project.getSourceFiles();
432
+ export function analyzeProject(targetPath, config, options) {
433
+ const analysisOptions = resolveAnalysisOptions(config, options);
434
+ const discoveredSources = collectAnalyzableSources(targetPath);
435
+ const { selectedPaths: sourcePaths, skippedReports } = selectSourcesForAnalysis(discoveredSources, analysisOptions);
436
+ const sourcePathMap = new Map(sourcePaths.map((filePath) => [toPathKey(filePath), filePath]));
206
437
  const pluginRuntime = loadPlugins(targetPath, config?.plugins);
207
- // Phase 1: per-file analysis
208
- const reports = sourceFiles.map((file) => analyzeFile(file, {
209
- config,
210
- loadedPlugins: pluginRuntime.plugins,
211
- projectRoot: targetPath,
212
- }));
438
+ const reports = [...skippedReports];
213
439
  const reportByPath = new Map();
214
- for (const r of reports)
215
- reportByPath.set(r.path, r);
216
- // Build set of ignored paths so cross-file phases don't re-add issues
217
- const ignoredPaths = new Set(sourceFiles.filter(sf => isFileIgnored(sf)).map(sf => sf.getFilePath()));
218
- // ── Phase 2 setup: build import graph ──────────────────────────────────────
219
- const allImportedPaths = new Set();
220
- const allImportedNames = new Map();
440
+ const ignoredPaths = new Set();
441
+ const allImportedPathKeys = new Set();
442
+ const allImportedNamesByKey = new Map();
221
443
  const allLiteralImports = new Set();
222
444
  const importGraph = new Map();
223
- for (const sf of sourceFiles) {
224
- const sfPath = sf.getFilePath();
225
- for (const decl of sf.getImportDeclarations()) {
445
+ const fingerprintMap = new Map();
446
+ const getReport = (filePath) => {
447
+ const fileKey = toPathKey(filePath);
448
+ if (ignoredPaths.has(fileKey))
449
+ return undefined;
450
+ return reportByPath.get(fileKey);
451
+ };
452
+ const addImportedName = (resolvedPath, name) => {
453
+ const resolvedKey = toPathKey(resolvedPath);
454
+ if (!allImportedNamesByKey.has(resolvedKey)) {
455
+ allImportedNamesByKey.set(resolvedKey, new Set());
456
+ }
457
+ allImportedNamesByKey.get(resolvedKey).add(name);
458
+ };
459
+ const collectCrossFileMetadata = (sourceFile) => {
460
+ const sourceFilePath = sourceFile.getFilePath();
461
+ const sourceFileKey = toPathKey(sourceFilePath);
462
+ const sourceFilePathCanonical = sourcePathMap.get(sourceFileKey) ?? sourceFilePath;
463
+ for (const decl of sourceFile.getImportDeclarations()) {
226
464
  const moduleSpecifier = decl.getModuleSpecifierValue();
227
465
  allLiteralImports.add(moduleSpecifier);
228
- const resolved = decl.getModuleSpecifierSourceFile();
229
- if (resolved) {
230
- const resolvedPath = resolved.getFilePath();
231
- allImportedPaths.add(resolvedPath);
232
- if (!importGraph.has(sfPath))
233
- importGraph.set(sfPath, new Set());
234
- importGraph.get(sfPath).add(resolvedPath);
235
- const named = decl.getNamedImports().map(n => n.getName());
236
- const def = decl.getDefaultImport()?.getText();
237
- const ns = decl.getNamespaceImport()?.getText();
238
- if (!allImportedNames.has(resolvedPath)) {
239
- allImportedNames.set(resolvedPath, new Set());
240
- }
241
- const nameSet = allImportedNames.get(resolvedPath);
242
- for (const n of named)
243
- nameSet.add(n);
244
- if (def)
245
- nameSet.add('default');
246
- if (ns)
247
- nameSet.add('*');
466
+ const resolvedPath = analysisOptions.lowMemory
467
+ ? resolveImportTargetPath(sourceFilePath, moduleSpecifier, sourcePathMap)
468
+ : decl.getModuleSpecifierSourceFile()?.getFilePath();
469
+ if (!resolvedPath)
470
+ continue;
471
+ const resolvedPathKey = toPathKey(resolvedPath);
472
+ const resolvedPathCanonical = sourcePathMap.get(resolvedPathKey) ?? resolvedPath;
473
+ allImportedPathKeys.add(resolvedPathKey);
474
+ if (!importGraph.has(sourceFilePathCanonical))
475
+ importGraph.set(sourceFilePathCanonical, new Set());
476
+ importGraph.get(sourceFilePathCanonical).add(resolvedPathCanonical);
477
+ for (const named of decl.getNamedImports().map((namedImport) => namedImport.getName())) {
478
+ addImportedName(resolvedPathCanonical, named);
248
479
  }
480
+ if (decl.getDefaultImport())
481
+ addImportedName(resolvedPathCanonical, 'default');
482
+ if (decl.getNamespaceImport())
483
+ addImportedName(resolvedPathCanonical, '*');
249
484
  }
250
- for (const exportDecl of sf.getExportDeclarations()) {
251
- const reExportedModule = exportDecl.getModuleSpecifierSourceFile();
252
- if (!reExportedModule)
485
+ for (const exportDecl of sourceFile.getExportDeclarations()) {
486
+ const moduleSpecifier = exportDecl.getModuleSpecifierValue();
487
+ if (!moduleSpecifier)
253
488
  continue;
254
- const reExportedPath = reExportedModule.getFilePath();
255
- allImportedPaths.add(reExportedPath);
256
- if (!allImportedNames.has(reExportedPath)) {
257
- allImportedNames.set(reExportedPath, new Set());
258
- }
259
- const nameSet = allImportedNames.get(reExportedPath);
489
+ const reExportedPath = analysisOptions.lowMemory
490
+ ? resolveImportTargetPath(sourceFilePath, moduleSpecifier, sourcePathMap)
491
+ : exportDecl.getModuleSpecifierSourceFile()?.getFilePath();
492
+ if (!reExportedPath)
493
+ continue;
494
+ const reExportedPathKey = toPathKey(reExportedPath);
495
+ const reExportedPathCanonical = sourcePathMap.get(reExportedPathKey) ?? reExportedPath;
496
+ allImportedPathKeys.add(reExportedPathKey);
260
497
  const namedExports = exportDecl.getNamedExports();
261
498
  if (namedExports.length === 0) {
262
- nameSet.add('*');
499
+ addImportedName(reExportedPathCanonical, '*');
263
500
  }
264
501
  else {
265
- for (const ne of namedExports)
266
- nameSet.add(ne.getName());
502
+ for (const namedExport of namedExports) {
503
+ addImportedName(reExportedPathCanonical, namedExport.getName());
504
+ }
267
505
  }
268
506
  }
507
+ if (!analysisOptions.includeSemanticDuplication || ignoredPaths.has(sourceFileKey)) {
508
+ return;
509
+ }
510
+ for (const { fn, name, line, col } of collectFunctions(sourceFile)) {
511
+ const fp = fingerprintFunction(fn);
512
+ if (!fingerprintMap.has(fp))
513
+ fingerprintMap.set(fp, []);
514
+ fingerprintMap.get(fp).push({ filePath: sourceFilePathCanonical, name, line, col });
515
+ }
516
+ };
517
+ const analyzeChunk = (chunk) => {
518
+ const project = new Project({
519
+ skipAddingFilesFromTsConfig: true,
520
+ compilerOptions: { allowJs: true, jsx: 1 },
521
+ });
522
+ project.addSourceFilesAtPaths(chunk);
523
+ for (const sourceFile of project.getSourceFiles()) {
524
+ const sourceFilePath = sourceFile.getFilePath();
525
+ const sourceFileKey = toPathKey(sourceFilePath);
526
+ const sourceFilePathCanonical = sourcePathMap.get(sourceFileKey) ?? sourceFilePath;
527
+ const report = analyzeFile(sourceFile, {
528
+ config,
529
+ loadedPlugins: pluginRuntime.plugins,
530
+ projectRoot: targetPath,
531
+ });
532
+ report.path = sourceFilePathCanonical;
533
+ reports.push(report);
534
+ reportByPath.set(sourceFileKey, report);
535
+ if (isFileIgnored(sourceFile))
536
+ ignoredPaths.add(sourceFileKey);
537
+ collectCrossFileMetadata(sourceFile);
538
+ }
539
+ };
540
+ const chunks = chunkPaths(sourcePaths, analysisOptions.lowMemory ? analysisOptions.chunkSize : sourcePaths.length || 1);
541
+ for (const chunk of chunks) {
542
+ analyzeChunk(chunk);
269
543
  }
270
- // Plugin load failures are surfaced as synthetic report entries.
544
+ // Plugin diagnostics are surfaced as synthetic report entries.
271
545
  if (pluginRuntime.errors.length > 0) {
272
546
  for (const err of pluginRuntime.errors) {
273
- const pluginIssue = {
274
- rule: 'plugin-error',
275
- severity: 'warning',
276
- message: `Failed to load plugin '${err.pluginId}': ${err.message}`,
277
- line: 1,
278
- column: 1,
279
- snippet: err.pluginId,
280
- };
281
- reports.push({
282
- path: path.join(targetPath, '.drift-plugin-errors', `${err.pluginId}.plugin`),
283
- issues: [pluginIssue],
284
- score: calculateScore([pluginIssue], RULE_WEIGHTS),
285
- });
547
+ reports.push(pluginDiagnosticToIssue(targetPath, err, 'error'));
286
548
  }
287
549
  }
288
- // ── Phase 2: dead-file + unused-export + unused-dependency ─────────────────
289
- const deadFiles = detectDeadFiles(sourceFiles, allImportedPaths, RULE_WEIGHTS);
290
- for (const [sfPath, issue] of deadFiles) {
291
- if (ignoredPaths.has(sfPath))
292
- continue;
293
- const report = reportByPath.get(sfPath);
294
- if (report) {
295
- report.issues.push(issue);
296
- report.score = calculateScore(report.issues, RULE_WEIGHTS);
550
+ if (pluginRuntime.warnings.length > 0) {
551
+ for (const warning of pluginRuntime.warnings) {
552
+ reports.push(pluginDiagnosticToIssue(targetPath, warning, 'warning'));
297
553
  }
298
554
  }
299
- const unusedExports = detectUnusedExports(sourceFiles, allImportedNames, RULE_WEIGHTS);
300
- for (const [sfPath, issues] of unusedExports) {
301
- if (ignoredPaths.has(sfPath))
302
- continue;
303
- const report = reportByPath.get(sfPath);
304
- if (report) {
555
+ for (const chunk of chunks) {
556
+ const project = new Project({
557
+ skipAddingFilesFromTsConfig: true,
558
+ compilerOptions: { allowJs: true, jsx: 1 },
559
+ });
560
+ project.addSourceFilesAtPaths(chunk);
561
+ const sourceFiles = project.getSourceFiles();
562
+ const importedPathsForChunk = new Set();
563
+ const importedNamesForChunk = new Map();
564
+ for (const sourceFile of sourceFiles) {
565
+ const sfPath = sourceFile.getFilePath();
566
+ const sfKey = toPathKey(sfPath);
567
+ if (allImportedPathKeys.has(sfKey))
568
+ importedPathsForChunk.add(sfPath);
569
+ const importedNames = allImportedNamesByKey.get(sfKey);
570
+ if (importedNames)
571
+ importedNamesForChunk.set(sfPath, new Set(importedNames));
572
+ }
573
+ const deadFiles = detectDeadFiles(sourceFiles, importedPathsForChunk, RULE_WEIGHTS);
574
+ for (const [sfPath, issue] of deadFiles) {
575
+ const report = getReport(sfPath);
576
+ if (report)
577
+ report.issues.push(issue);
578
+ }
579
+ const unusedExports = detectUnusedExports(sourceFiles, importedNamesForChunk, RULE_WEIGHTS);
580
+ for (const [sfPath, issues] of unusedExports) {
581
+ const report = getReport(sfPath);
582
+ if (!report)
583
+ continue;
305
584
  for (const issue of issues) {
306
585
  report.issues.push(issue);
307
586
  }
308
- report.score = calculateScore(report.issues, RULE_WEIGHTS);
309
587
  }
310
588
  }
311
589
  const unusedDepIssues = detectUnusedDependencies(targetPath, allLiteralImports, RULE_WEIGHTS);
@@ -317,86 +595,69 @@ export function analyzeProject(targetPath, config) {
317
595
  score: calculateScore(unusedDepIssues, RULE_WEIGHTS),
318
596
  });
319
597
  }
320
- // ── Phase 3: circular-dependency ────────────────────────────────────────────
321
598
  const circularIssues = detectCircularDependencies(importGraph, RULE_WEIGHTS);
322
599
  for (const [filePath, issue] of circularIssues) {
323
- if (ignoredPaths.has(filePath))
324
- continue;
325
- const report = reportByPath.get(filePath);
326
- if (report) {
600
+ const report = getReport(filePath);
601
+ if (report)
327
602
  report.issues.push(issue);
328
- report.score = calculateScore(report.issues, RULE_WEIGHTS);
329
- }
330
603
  }
331
- // ── Phase 3b: layer-violation ────────────────────────────────────────────────
332
604
  if (config?.layers && config.layers.length > 0) {
333
605
  const layerIssues = detectLayerViolations(importGraph, config.layers, targetPath, RULE_WEIGHTS);
334
606
  for (const [filePath, issues] of layerIssues) {
335
- if (ignoredPaths.has(filePath))
607
+ const report = getReport(filePath);
608
+ if (!report)
336
609
  continue;
337
- const report = reportByPath.get(filePath);
338
- if (report) {
339
- for (const issue of issues) {
340
- report.issues.push(issue);
341
- report.score = Math.min(100, report.score + (RULE_WEIGHTS['layer-violation']?.weight ?? 5));
342
- }
610
+ for (const issue of issues) {
611
+ report.issues.push(issue);
343
612
  }
344
613
  }
345
614
  }
346
- // ── Phase 3c: cross-boundary-import ─────────────────────────────────────────
347
615
  if (config?.modules && config.modules.length > 0) {
348
616
  const boundaryIssues = detectCrossBoundaryImports(importGraph, config.modules, targetPath, RULE_WEIGHTS);
349
617
  for (const [filePath, issues] of boundaryIssues) {
350
- if (ignoredPaths.has(filePath))
618
+ const report = getReport(filePath);
619
+ if (!report)
351
620
  continue;
352
- const report = reportByPath.get(filePath);
353
- if (report) {
354
- for (const issue of issues) {
355
- report.issues.push(issue);
356
- report.score = Math.min(100, report.score + (RULE_WEIGHTS['cross-boundary-import']?.weight ?? 5));
357
- }
621
+ for (const issue of issues) {
622
+ report.issues.push(issue);
358
623
  }
359
624
  }
360
625
  }
361
- // ── Phase 8: semantic-duplication ───────────────────────────────────────────
362
- const fingerprintMap = new Map();
363
- for (const sf of sourceFiles) {
364
- if (isFileIgnored(sf))
365
- continue;
366
- const sfPath = sf.getFilePath();
367
- for (const { fn, name, line, col } of collectFunctions(sf)) {
368
- const fp = fingerprintFunction(fn);
369
- if (!fingerprintMap.has(fp))
370
- fingerprintMap.set(fp, []);
371
- fingerprintMap.get(fp).push({ filePath: sfPath, name, line, col });
372
- }
373
- }
374
- for (const [, entries] of fingerprintMap) {
375
- if (entries.length < 2)
376
- continue;
377
- for (const entry of entries) {
378
- const report = reportByPath.get(entry.filePath);
379
- if (!report)
626
+ if (analysisOptions.includeSemanticDuplication) {
627
+ const relativePathCache = new Map();
628
+ const toRelativePath = (filePath) => {
629
+ const cached = relativePathCache.get(filePath);
630
+ if (cached)
631
+ return cached;
632
+ const value = path.relative(targetPath, filePath).replace(/\\/g, '/');
633
+ relativePathCache.set(filePath, value);
634
+ return value;
635
+ };
636
+ for (const [, entries] of fingerprintMap) {
637
+ if (entries.length < 2)
380
638
  continue;
381
- const others = entries
382
- .filter(e => e !== entry)
383
- .map(e => {
384
- const rel = path.relative(targetPath, e.filePath).replace(/\\/g, '/');
385
- return `${rel}:${e.line} (${e.name})`;
386
- })
387
- .join(', ');
388
- const weight = RULE_WEIGHTS['semantic-duplication']?.weight ?? 12;
389
- report.issues.push({
390
- rule: 'semantic-duplication',
391
- severity: 'warning',
392
- message: `Function '${entry.name}' is semantically identical to: ${others}`,
393
- line: entry.line,
394
- column: entry.col,
395
- snippet: `function ${entry.name} duplicated in ${entries.length - 1} other location${entries.length > 2 ? 's' : ''}`,
396
- });
397
- report.score = Math.min(100, report.score + weight);
639
+ for (const entry of entries) {
640
+ const report = getReport(entry.filePath);
641
+ if (!report)
642
+ continue;
643
+ const others = entries
644
+ .filter((other) => other !== entry)
645
+ .map((other) => `${toRelativePath(other.filePath)}:${other.line} (${other.name})`)
646
+ .join(', ');
647
+ report.issues.push({
648
+ rule: 'semantic-duplication',
649
+ severity: 'warning',
650
+ message: `Function '${entry.name}' is semantically identical to: ${others}`,
651
+ line: entry.line,
652
+ column: entry.col,
653
+ snippet: `function ${entry.name} - duplicated in ${entries.length - 1} other location${entries.length > 2 ? 's' : ''}`,
654
+ });
655
+ }
398
656
  }
399
657
  }
658
+ for (const report of reportByPath.values()) {
659
+ report.score = calculateScore(report.issues, RULE_WEIGHTS);
660
+ }
400
661
  return reports;
401
662
  }
402
663
  //# sourceMappingURL=analyzer.js.map