@codesentinel/codesentinel 1.2.0 → 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -6,6 +6,104 @@ import { readFileSync as readFileSync2 } from "fs";
6
6
  import { dirname, resolve as resolve3 } from "path";
7
7
  import { fileURLToPath } from "url";
8
8
 
9
+ // src/application/format-analyze-output.ts
10
+ var createSummaryShape = (summary) => ({
11
+ targetPath: summary.structural.targetPath,
12
+ structural: summary.structural.metrics,
13
+ evolution: summary.evolution.available ? {
14
+ available: true,
15
+ metrics: summary.evolution.metrics,
16
+ hotspotsTop: summary.evolution.hotspots.slice(0, 5).map((hotspot) => hotspot.filePath)
17
+ } : {
18
+ available: false,
19
+ reason: summary.evolution.reason
20
+ },
21
+ external: summary.external.available ? {
22
+ available: true,
23
+ metrics: summary.external.metrics,
24
+ highRiskDependenciesTop: summary.external.highRiskDependencies.slice(0, 10)
25
+ } : {
26
+ available: false,
27
+ reason: summary.external.reason
28
+ },
29
+ risk: {
30
+ repositoryScore: summary.risk.repositoryScore,
31
+ normalizedScore: summary.risk.normalizedScore,
32
+ hotspotsTop: summary.risk.hotspots.slice(0, 5).map((hotspot) => ({
33
+ file: hotspot.file,
34
+ score: hotspot.score
35
+ })),
36
+ fragileClusterCount: summary.risk.fragileClusters.length,
37
+ dependencyAmplificationZoneCount: summary.risk.dependencyAmplificationZones.length
38
+ }
39
+ });
40
+ var formatAnalyzeOutput = (summary, mode) => mode === "json" ? JSON.stringify(summary, null, 2) : JSON.stringify(createSummaryShape(summary), null, 2);
41
+
42
+ // src/application/logger.ts
43
+ var logLevelRank = {
44
+ error: 0,
45
+ warn: 1,
46
+ info: 2,
47
+ debug: 3
48
+ };
49
+ var noop = () => {
50
+ };
51
+ var createSilentLogger = () => ({
52
+ error: noop,
53
+ warn: noop,
54
+ info: noop,
55
+ debug: noop
56
+ });
57
+ var shouldLog = (configuredLevel, messageLevel) => {
58
+ if (configuredLevel === "silent") {
59
+ return false;
60
+ }
61
+ return logLevelRank[messageLevel] <= logLevelRank[configuredLevel];
62
+ };
63
+ var write = (messageLevel, message) => {
64
+ process.stderr.write(`[codesentinel] ${messageLevel.toUpperCase()} ${message}
65
+ `);
66
+ };
67
+ var createStderrLogger = (level) => {
68
+ if (level === "silent") {
69
+ return createSilentLogger();
70
+ }
71
+ return {
72
+ error: (message) => {
73
+ if (shouldLog(level, "error")) {
74
+ write("error", message);
75
+ }
76
+ },
77
+ warn: (message) => {
78
+ if (shouldLog(level, "warn")) {
79
+ write("warn", message);
80
+ }
81
+ },
82
+ info: (message) => {
83
+ if (shouldLog(level, "info")) {
84
+ write("info", message);
85
+ }
86
+ },
87
+ debug: (message) => {
88
+ if (shouldLog(level, "debug")) {
89
+ write("debug", message);
90
+ }
91
+ }
92
+ };
93
+ };
94
+ var parseLogLevel = (value) => {
95
+ switch (value) {
96
+ case "silent":
97
+ case "error":
98
+ case "warn":
99
+ case "info":
100
+ case "debug":
101
+ return value;
102
+ default:
103
+ return "info";
104
+ }
105
+ };
106
+
9
107
  // src/application/run-analyze-command.ts
10
108
  import { resolve as resolve2 } from "path";
11
109
 
@@ -255,6 +353,29 @@ var createGraphAnalysisSummary = (targetPath, graph) => {
255
353
  };
256
354
  };
257
355
  var SOURCE_EXTENSIONS = /* @__PURE__ */ new Set([".ts", ".tsx", ".mts", ".cts", ".js", ".jsx", ".mjs", ".cjs"]);
356
+ var SCAN_EXCLUDES = [
357
+ "**/node_modules/**",
358
+ "**/.git/**",
359
+ "**/dist/**",
360
+ "**/build/**",
361
+ "**/.next/**",
362
+ "**/coverage/**",
363
+ "**/.turbo/**",
364
+ "**/.cache/**",
365
+ "**/out/**"
366
+ ];
367
+ var SCAN_INCLUDES = ["**/*"];
368
+ var IGNORED_SEGMENTS = /* @__PURE__ */ new Set([
369
+ "node_modules",
370
+ ".git",
371
+ "dist",
372
+ "build",
373
+ ".next",
374
+ "coverage",
375
+ ".turbo",
376
+ ".cache",
377
+ "out"
378
+ ]);
258
379
  var normalizePath = (pathValue) => pathValue.replaceAll("\\", "/");
259
380
  var isProjectSourceFile = (filePath, projectRoot) => {
260
381
  const extension = extname(filePath);
@@ -265,23 +386,20 @@ var isProjectSourceFile = (filePath, projectRoot) => {
265
386
  if (relativePath.startsWith("..")) {
266
387
  return false;
267
388
  }
268
- return !relativePath.includes("node_modules");
389
+ const normalizedRelativePath = normalizePath(relativePath);
390
+ const segments = normalizedRelativePath.split("/");
391
+ return !segments.some((segment) => IGNORED_SEGMENTS.has(segment));
269
392
  };
270
- var findProjectFiles = (projectRoot) => {
271
- const files = ts.sys.readDirectory(projectRoot, [...SOURCE_EXTENSIONS], void 0, void 0);
393
+ var discoverSourceFilesByScan = (projectRoot) => {
394
+ const files = ts.sys.readDirectory(
395
+ projectRoot,
396
+ [...SOURCE_EXTENSIONS],
397
+ SCAN_EXCLUDES,
398
+ SCAN_INCLUDES
399
+ );
272
400
  return files.map((filePath) => resolve(filePath));
273
401
  };
274
- var parseTsConfig = (projectRoot) => {
275
- const configPath = ts.findConfigFile(projectRoot, ts.sys.fileExists, "tsconfig.json");
276
- if (configPath === void 0) {
277
- return {
278
- fileNames: findProjectFiles(projectRoot),
279
- options: {
280
- allowJs: true,
281
- moduleResolution: ts.ModuleResolutionKind.NodeNext
282
- }
283
- };
284
- }
402
+ var parseTsConfigFile = (configPath) => {
285
403
  const parsedCommandLine = ts.getParsedCommandLineOfConfigFile(
286
404
  configPath,
287
405
  {},
@@ -295,16 +413,74 @@ var parseTsConfig = (projectRoot) => {
295
413
  if (parsedCommandLine === void 0) {
296
414
  throw new Error(`Failed to parse TypeScript configuration at ${configPath}`);
297
415
  }
298
- const fileNames = parsedCommandLine.fileNames.map((filePath) => resolve(filePath));
299
- if (fileNames.length === 0) {
416
+ return parsedCommandLine;
417
+ };
418
+ var collectFilesFromTsConfigGraph = (projectRoot) => {
419
+ const rootConfigPath = ts.findConfigFile(projectRoot, ts.sys.fileExists, "tsconfig.json");
420
+ if (rootConfigPath === void 0) {
421
+ return null;
422
+ }
423
+ const visitedConfigPaths = /* @__PURE__ */ new Set();
424
+ const collectedFiles = /* @__PURE__ */ new Set();
425
+ let rootOptions = null;
426
+ const visitConfig = (configPath) => {
427
+ const absoluteConfigPath = resolve(configPath);
428
+ if (visitedConfigPaths.has(absoluteConfigPath)) {
429
+ return;
430
+ }
431
+ visitedConfigPaths.add(absoluteConfigPath);
432
+ const parsed = parseTsConfigFile(absoluteConfigPath);
433
+ if (rootOptions === null) {
434
+ rootOptions = parsed.options;
435
+ }
436
+ for (const filePath of parsed.fileNames) {
437
+ collectedFiles.add(resolve(filePath));
438
+ }
439
+ for (const reference of parsed.projectReferences ?? []) {
440
+ const referencePath = resolve(reference.path);
441
+ const referenceConfigPath = ts.sys.directoryExists(referencePath) ? ts.findConfigFile(referencePath, ts.sys.fileExists, "tsconfig.json") : referencePath;
442
+ if (referenceConfigPath !== void 0 && ts.sys.fileExists(referenceConfigPath)) {
443
+ visitConfig(referenceConfigPath);
444
+ }
445
+ }
446
+ };
447
+ visitConfig(rootConfigPath);
448
+ return {
449
+ fileNames: [...collectedFiles],
450
+ rootOptions: rootOptions ?? {
451
+ moduleResolution: ts.ModuleResolutionKind.NodeNext
452
+ },
453
+ visitedConfigCount: visitedConfigPaths.size
454
+ };
455
+ };
456
+ var createCompilerOptions = (base) => ({
457
+ ...base,
458
+ allowJs: true,
459
+ moduleResolution: base?.moduleResolution ?? ts.ModuleResolutionKind.NodeNext
460
+ });
461
+ var parseTsConfig = (projectRoot) => {
462
+ const collected = collectFilesFromTsConfigGraph(projectRoot);
463
+ if (collected === null) {
464
+ return {
465
+ fileNames: discoverSourceFilesByScan(projectRoot),
466
+ options: createCompilerOptions(void 0),
467
+ tsconfigCount: 0,
468
+ usedFallbackScan: true
469
+ };
470
+ }
471
+ if (collected.fileNames.length === 0) {
300
472
  return {
301
- fileNames: findProjectFiles(projectRoot),
302
- options: parsedCommandLine.options
473
+ fileNames: discoverSourceFilesByScan(projectRoot),
474
+ options: createCompilerOptions(collected.rootOptions),
475
+ tsconfigCount: collected.visitedConfigCount,
476
+ usedFallbackScan: true
303
477
  };
304
478
  }
305
479
  return {
306
- fileNames,
307
- options: parsedCommandLine.options
480
+ fileNames: collected.fileNames,
481
+ options: createCompilerOptions(collected.rootOptions),
482
+ tsconfigCount: collected.visitedConfigCount,
483
+ usedFallbackScan: false
308
484
  };
309
485
  };
310
486
  var getSpecifierFromExpression = (expression) => {
@@ -385,16 +561,19 @@ var extractModuleSpecifiers = (sourceFile) => {
385
561
  visit(sourceFile);
386
562
  return [...specifiers];
387
563
  };
388
- var parseTypescriptProject = (projectPath) => {
564
+ var parseTypescriptProject = (projectPath, onProgress) => {
389
565
  const projectRoot = isAbsolute(projectPath) ? projectPath : resolve(projectPath);
390
- const { fileNames, options } = parseTsConfig(projectRoot);
566
+ const { fileNames, options, tsconfigCount, usedFallbackScan } = parseTsConfig(projectRoot);
567
+ onProgress?.({ stage: "config_resolved", tsconfigCount, usedFallbackScan });
391
568
  const sourceFilePaths = fileNames.filter((filePath) => isProjectSourceFile(filePath, projectRoot)).map((filePath) => normalizePath(resolve(filePath)));
392
569
  const uniqueSourceFilePaths = [...new Set(sourceFilePaths)].sort((a, b) => a.localeCompare(b));
393
570
  const sourceFilePathSet = new Set(uniqueSourceFilePaths);
571
+ onProgress?.({ stage: "files_discovered", totalSourceFiles: uniqueSourceFilePaths.length });
394
572
  const program2 = ts.createProgram({
395
573
  rootNames: uniqueSourceFilePaths,
396
574
  options
397
575
  });
576
+ onProgress?.({ stage: "program_created", totalSourceFiles: uniqueSourceFilePaths.length });
398
577
  const nodeByAbsolutePath = /* @__PURE__ */ new Map();
399
578
  for (const sourcePath of uniqueSourceFilePaths) {
400
579
  const relativePath = normalizePath(relative(projectRoot, sourcePath));
@@ -407,7 +586,7 @@ var parseTypescriptProject = (projectPath) => {
407
586
  }
408
587
  const resolverCache = /* @__PURE__ */ new Map();
409
588
  const edges = [];
410
- for (const sourcePath of uniqueSourceFilePaths) {
589
+ for (const [index, sourcePath] of uniqueSourceFilePaths.entries()) {
411
590
  const sourceFile = program2.getSourceFile(sourcePath);
412
591
  if (sourceFile === void 0) {
413
592
  continue;
@@ -436,14 +615,24 @@ var parseTypescriptProject = (projectPath) => {
436
615
  }
437
616
  edges.push({ from: fromNode.id, to: toNode.id });
438
617
  }
618
+ const processed = index + 1;
619
+ if (processed === 1 || processed === uniqueSourceFilePaths.length || processed % 50 === 0) {
620
+ onProgress?.({
621
+ stage: "file_processed",
622
+ processed,
623
+ total: uniqueSourceFilePaths.length,
624
+ filePath: fromNode.id
625
+ });
626
+ }
439
627
  }
628
+ onProgress?.({ stage: "edges_resolved", totalEdges: edges.length });
440
629
  return {
441
630
  nodes: [...nodeByAbsolutePath.values()],
442
631
  edges
443
632
  };
444
633
  };
445
634
  var buildProjectGraphSummary = (input) => {
446
- const parsedProject = parseTypescriptProject(input.projectPath);
635
+ const parsedProject = parseTypescriptProject(input.projectPath, input.onProgress);
447
636
  const graphData = createGraphData(parsedProject.nodes, parsedProject.edges);
448
637
  return createGraphAnalysisSummary(input.projectPath, graphData);
449
638
  };
@@ -1007,7 +1196,7 @@ var mapWithConcurrency = async (values, limit, handler) => {
1007
1196
  await Promise.all(workers);
1008
1197
  return results;
1009
1198
  };
1010
- var analyzeDependencyExposure = async (input, metadataProvider) => {
1199
+ var analyzeDependencyExposure = async (input, metadataProvider, onProgress) => {
1011
1200
  const packageJson = loadPackageJson(input.repositoryPath);
1012
1201
  if (packageJson === null) {
1013
1202
  return {
@@ -1016,6 +1205,7 @@ var analyzeDependencyExposure = async (input, metadataProvider) => {
1016
1205
  reason: "package_json_not_found"
1017
1206
  };
1018
1207
  }
1208
+ onProgress?.({ stage: "package_json_loaded" });
1019
1209
  const lockfile = selectLockfile(input.repositoryPath);
1020
1210
  if (lockfile === null) {
1021
1211
  return {
@@ -1024,23 +1214,50 @@ var analyzeDependencyExposure = async (input, metadataProvider) => {
1024
1214
  reason: "lockfile_not_found"
1025
1215
  };
1026
1216
  }
1217
+ onProgress?.({ stage: "lockfile_selected", kind: lockfile.kind });
1027
1218
  try {
1028
1219
  const directSpecs = parsePackageJson(packageJson.raw);
1029
1220
  const extraction = parseExtraction(lockfile.kind, lockfile.raw, directSpecs);
1030
1221
  const config = withDefaults(input.config);
1222
+ onProgress?.({
1223
+ stage: "lockfile_parsed",
1224
+ dependencyNodes: extraction.nodes.length,
1225
+ directDependencies: extraction.directDependencies.length
1226
+ });
1227
+ onProgress?.({ stage: "metadata_fetch_started", total: extraction.nodes.length });
1228
+ let completed = 0;
1031
1229
  const metadataEntries = await mapWithConcurrency(
1032
1230
  extraction.nodes,
1033
1231
  config.metadataRequestConcurrency,
1034
- async (node) => ({
1035
- key: `${node.name}@${node.version}`,
1036
- metadata: await metadataProvider.getMetadata(node.name, node.version)
1037
- })
1232
+ async (node) => {
1233
+ const result = {
1234
+ key: `${node.name}@${node.version}`,
1235
+ metadata: await metadataProvider.getMetadata(node.name, node.version)
1236
+ };
1237
+ completed += 1;
1238
+ onProgress?.({
1239
+ stage: "metadata_fetch_progress",
1240
+ completed,
1241
+ total: extraction.nodes.length,
1242
+ packageName: node.name
1243
+ });
1244
+ return result;
1245
+ }
1038
1246
  );
1247
+ onProgress?.({ stage: "metadata_fetch_completed", total: extraction.nodes.length });
1039
1248
  const metadataByKey = /* @__PURE__ */ new Map();
1040
1249
  for (const entry of metadataEntries) {
1041
1250
  metadataByKey.set(entry.key, entry.metadata);
1042
1251
  }
1043
- return buildExternalAnalysisSummary(input.repositoryPath, extraction, metadataByKey, config);
1252
+ const summary = buildExternalAnalysisSummary(input.repositoryPath, extraction, metadataByKey, config);
1253
+ if (summary.available) {
1254
+ onProgress?.({
1255
+ stage: "summary_built",
1256
+ totalDependencies: summary.metrics.totalDependencies,
1257
+ directDependencies: summary.metrics.directDependencies
1258
+ });
1259
+ }
1260
+ return summary;
1044
1261
  } catch (error) {
1045
1262
  const message = error instanceof Error ? error.message : "unknown";
1046
1263
  if (message.includes("unsupported_lockfile_format")) {
@@ -1123,9 +1340,9 @@ var NoopMetadataProvider = class {
1123
1340
  return null;
1124
1341
  }
1125
1342
  };
1126
- var analyzeDependencyExposureFromProject = async (input) => {
1343
+ var analyzeDependencyExposureFromProject = async (input, onProgress) => {
1127
1344
  const metadataProvider = process.env["CODESENTINEL_EXTERNAL_METADATA"] === "none" ? new NoopMetadataProvider() : new NpmRegistryMetadataProvider();
1128
- return analyzeDependencyExposure(input, metadataProvider);
1345
+ return analyzeDependencyExposure(input, metadataProvider, onProgress);
1129
1346
  };
1130
1347
 
1131
1348
  // ../git-analyzer/dist/index.js
@@ -1422,17 +1639,26 @@ var createEffectiveConfig = (overrides) => ({
1422
1639
  ...DEFAULT_EVOLUTION_CONFIG,
1423
1640
  ...overrides
1424
1641
  });
1425
- var analyzeRepositoryEvolution = (input, historyProvider) => {
1642
+ var analyzeRepositoryEvolution = (input, historyProvider, onProgress) => {
1643
+ onProgress?.({ stage: "checking_git_repository" });
1426
1644
  if (!historyProvider.isGitRepository(input.repositoryPath)) {
1645
+ onProgress?.({ stage: "not_git_repository" });
1427
1646
  return {
1428
1647
  targetPath: input.repositoryPath,
1429
1648
  available: false,
1430
1649
  reason: "not_git_repository"
1431
1650
  };
1432
1651
  }
1433
- const commits = historyProvider.getCommitHistory(input.repositoryPath);
1652
+ onProgress?.({ stage: "loading_commit_history" });
1653
+ const commits = historyProvider.getCommitHistory(
1654
+ input.repositoryPath,
1655
+ (event) => onProgress?.({ stage: "history", event })
1656
+ );
1434
1657
  const config = createEffectiveConfig(input.config);
1435
- return computeRepositoryEvolutionSummary(input.repositoryPath, commits, config);
1658
+ onProgress?.({ stage: "computing_metrics" });
1659
+ const summary = computeRepositoryEvolutionSummary(input.repositoryPath, commits, config);
1660
+ onProgress?.({ stage: "analysis_completed", available: summary.available });
1661
+ return summary;
1436
1662
  };
1437
1663
  var GitCommandError = class extends Error {
1438
1664
  args;
@@ -1459,6 +1685,11 @@ var ExecGitCommandClient = class {
1459
1685
  var COMMIT_RECORD_SEPARATOR = "";
1460
1686
  var COMMIT_FIELD_SEPARATOR = "";
1461
1687
  var GIT_LOG_FORMAT = `%x1e%H%x1f%at%x1f%an%x1f%ae`;
1688
+ var mapParseProgressToHistoryProgress = (event) => ({
1689
+ stage: "git_log_parse_progress",
1690
+ parsedRecords: event.parsedRecords,
1691
+ totalRecords: event.totalRecords
1692
+ });
1462
1693
  var parseInteger = (value) => {
1463
1694
  if (value.length === 0) {
1464
1695
  return null;
@@ -1521,10 +1752,10 @@ var parseNumstatLine = (line) => {
1521
1752
  deletions
1522
1753
  };
1523
1754
  };
1524
- var parseGitLog = (rawLog) => {
1755
+ var parseGitLog = (rawLog, onProgress) => {
1525
1756
  const records = rawLog.split(COMMIT_RECORD_SEPARATOR).map((record) => record.trim()).filter((record) => record.length > 0);
1526
1757
  const commits = [];
1527
- for (const record of records) {
1758
+ for (const [index, record] of records.entries()) {
1528
1759
  const lines = record.split("\n").map((line) => line.trimEnd()).filter((line) => line.length > 0);
1529
1760
  if (lines.length === 0) {
1530
1761
  continue;
@@ -1555,6 +1786,10 @@ var parseGitLog = (rawLog) => {
1555
1786
  authoredAtUnix,
1556
1787
  fileChanges
1557
1788
  });
1789
+ const parsedRecords = index + 1;
1790
+ if (parsedRecords === 1 || parsedRecords === records.length || parsedRecords % 500 === 0) {
1791
+ onProgress?.({ parsedRecords, totalRecords: records.length });
1792
+ }
1558
1793
  }
1559
1794
  commits.sort((a, b) => a.authoredAtUnix - b.authoredAtUnix || a.hash.localeCompare(b.hash));
1560
1795
  return commits;
@@ -1579,7 +1814,7 @@ var GitCliHistoryProvider = class {
1579
1814
  throw error;
1580
1815
  }
1581
1816
  }
1582
- getCommitHistory(repositoryPath) {
1817
+ getCommitHistory(repositoryPath, onProgress) {
1583
1818
  const output = this.gitClient.run(repositoryPath, [
1584
1819
  "-c",
1585
1820
  "core.quotepath=false",
@@ -1591,31 +1826,819 @@ var GitCliHistoryProvider = class {
1591
1826
  "--numstat",
1592
1827
  "--find-renames"
1593
1828
  ]);
1594
- return parseGitLog(output);
1829
+ onProgress?.({ stage: "git_log_received", bytes: Buffer.byteLength(output, "utf8") });
1830
+ const commits = parseGitLog(output, (event) => onProgress?.(mapParseProgressToHistoryProgress(event)));
1831
+ onProgress?.({ stage: "git_log_parsed", commits: commits.length });
1832
+ return commits;
1595
1833
  }
1596
1834
  };
1597
- var analyzeRepositoryEvolutionFromGit = (input) => {
1835
+ var analyzeRepositoryEvolutionFromGit = (input, onProgress) => {
1598
1836
  const historyProvider = new GitCliHistoryProvider(new ExecGitCommandClient());
1599
- return analyzeRepositoryEvolution(input, historyProvider);
1837
+ return analyzeRepositoryEvolution(input, historyProvider, onProgress);
1838
+ };
1839
+
1840
+ // ../risk-engine/dist/index.js
1841
+ var DEFAULT_RISK_ENGINE_CONFIG = {
1842
+ // Base dimensional influence. Risk is never dominated by a single dimension by default.
1843
+ dimensionWeights: {
1844
+ structural: 0.44,
1845
+ evolution: 0.36,
1846
+ external: 0.2
1847
+ },
1848
+ // Interaction terms activate only when both related dimensions are high.
1849
+ interactionWeights: {
1850
+ structuralEvolution: 0.35,
1851
+ centralInstability: 0.25,
1852
+ dependencyAmplification: 0.2
1853
+ },
1854
+ structuralFactorWeights: {
1855
+ fanIn: 0.3,
1856
+ fanOut: 0.25,
1857
+ depth: 0.2,
1858
+ cycleParticipation: 0.25
1859
+ },
1860
+ evolutionFactorWeights: {
1861
+ frequency: 0.26,
1862
+ churn: 0.24,
1863
+ recentVolatility: 0.2,
1864
+ ownershipConcentration: 0.18,
1865
+ busFactorRisk: 0.12
1866
+ },
1867
+ dependencyFactorWeights: {
1868
+ signals: 0.38,
1869
+ staleness: 0.16,
1870
+ maintainerConcentration: 0.16,
1871
+ transitiveBurden: 0.1,
1872
+ centrality: 0.08,
1873
+ chainDepth: 0.06,
1874
+ busFactorRisk: 0.06
1875
+ },
1876
+ quantileClamp: {
1877
+ lower: 0.05,
1878
+ upper: 0.95
1879
+ },
1880
+ hotspotTopPercent: 0.12,
1881
+ hotspotMinFiles: 3,
1882
+ hotspotMaxFiles: 30,
1883
+ couplingCluster: {
1884
+ minCoChangeCommits: 2,
1885
+ percentileThreshold: 0.9,
1886
+ floorScore: 0.35
1887
+ },
1888
+ amplificationZone: {
1889
+ pressureFloor: 0.2,
1890
+ percentileThreshold: 0.85,
1891
+ maxZones: 20
1892
+ },
1893
+ module: {
1894
+ maxPrefixSegments: 2,
1895
+ rootLabel: "(root)",
1896
+ commonSourceRoots: ["src", "lib", "app", "packages"]
1897
+ },
1898
+ dependencySignals: {
1899
+ inheritedSignalMultiplier: 0.45,
1900
+ // At this age, staleness reaches 50% risk.
1901
+ abandonedHalfLifeDays: 540,
1902
+ missingMetadataPenalty: 0.5
1903
+ },
1904
+ externalDimension: {
1905
+ topDependencyPercentile: 0.85,
1906
+ dependencyDepthHalfLife: 6
1907
+ }
1908
+ };
1909
+ var clamp01 = (value) => {
1910
+ if (Number.isNaN(value)) {
1911
+ return 0;
1912
+ }
1913
+ if (value <= 0) {
1914
+ return 0;
1915
+ }
1916
+ if (value >= 1) {
1917
+ return 1;
1918
+ }
1919
+ return value;
1920
+ };
1921
+ var round44 = (value) => Number(value.toFixed(4));
1922
+ var average = (values) => {
1923
+ if (values.length === 0) {
1924
+ return 0;
1925
+ }
1926
+ const total = values.reduce((sum, current) => sum + current, 0);
1927
+ return total / values.length;
1928
+ };
1929
+ var percentile = (values, p) => {
1930
+ if (values.length === 0) {
1931
+ return 0;
1932
+ }
1933
+ if (values.length === 1) {
1934
+ return values[0] ?? 0;
1935
+ }
1936
+ const sorted = [...values].sort((a, b) => a - b);
1937
+ const position = clamp01(p) * (sorted.length - 1);
1938
+ const lowerIndex = Math.floor(position);
1939
+ const upperIndex = Math.ceil(position);
1940
+ const lower = sorted[lowerIndex] ?? 0;
1941
+ const upper = sorted[upperIndex] ?? lower;
1942
+ if (lowerIndex === upperIndex) {
1943
+ return lower;
1944
+ }
1945
+ const ratio = position - lowerIndex;
1946
+ return lower + (upper - lower) * ratio;
1947
+ };
1948
+ var saturatingComposite = (baseline, amplifications) => {
1949
+ let value = clamp01(baseline);
1950
+ for (const amplification of amplifications) {
1951
+ const boundedAmplification = clamp01(amplification);
1952
+ value += (1 - value) * boundedAmplification;
1953
+ }
1954
+ return clamp01(value);
1955
+ };
1956
+ var halfLifeRisk = (value, halfLife) => {
1957
+ if (value <= 0 || halfLife <= 0) {
1958
+ return 0;
1959
+ }
1960
+ return clamp01(value / (value + halfLife));
1961
+ };
1962
+ var normalizeWeights = (weights, enabled) => {
1963
+ let total = 0;
1964
+ const result = { ...weights };
1965
+ for (const key of Object.keys(result)) {
1966
+ const enabledValue = enabled[key];
1967
+ if (!enabledValue) {
1968
+ result[key] = 0;
1969
+ continue;
1970
+ }
1971
+ const value = Math.max(0, result[key]);
1972
+ result[key] = value;
1973
+ total += value;
1974
+ }
1975
+ if (total === 0) {
1976
+ const activeKeys = Object.keys(result).filter((key) => enabled[key]);
1977
+ if (activeKeys.length === 0) {
1978
+ return result;
1979
+ }
1980
+ const uniform = 1 / activeKeys.length;
1981
+ for (const key of activeKeys) {
1982
+ result[key] = uniform;
1983
+ }
1984
+ return result;
1985
+ }
1986
+ for (const key of Object.keys(result)) {
1987
+ if (enabled[key]) {
1988
+ result[key] = result[key] / total;
1989
+ }
1990
+ }
1991
+ return result;
1992
+ };
1993
+ var logScale = (value) => Math.log1p(Math.max(0, value));
1994
+ var buildQuantileScale = (values, lowerPercentile, upperPercentile) => {
1995
+ if (values.length === 0) {
1996
+ return { lower: 0, upper: 0 };
1997
+ }
1998
+ return {
1999
+ lower: percentile(values, lowerPercentile),
2000
+ upper: percentile(values, upperPercentile)
2001
+ };
2002
+ };
2003
+ var normalizeWithScale = (value, scale) => {
2004
+ if (scale.upper <= scale.lower) {
2005
+ return value > 0 ? 1 : 0;
2006
+ }
2007
+ return clamp01((value - scale.lower) / (scale.upper - scale.lower));
2008
+ };
2009
+ var normalizePath2 = (path) => path.replaceAll("\\", "/");
2010
+ var dependencySignalWeights = {
2011
+ single_maintainer: 0.3,
2012
+ abandoned: 0.3,
2013
+ high_centrality: 0.16,
2014
+ deep_chain: 0.14,
2015
+ high_fanout: 0.06,
2016
+ metadata_unavailable: 0.04
2017
+ };
2018
+ var dependencySignalWeightBudget = Object.values(dependencySignalWeights).reduce(
2019
+ (sum, value) => sum + value,
2020
+ 0
2021
+ );
2022
+ var computeDependencySignalScore = (ownSignals, inheritedSignals, inheritedSignalMultiplier) => {
2023
+ const ownWeight = ownSignals.reduce((sum, signal) => sum + (dependencySignalWeights[signal] ?? 0), 0);
2024
+ const inheritedWeight = inheritedSignals.reduce(
2025
+ (sum, signal) => sum + (dependencySignalWeights[signal] ?? 0),
2026
+ 0
2027
+ );
2028
+ const weightedTotal = ownWeight + inheritedWeight * inheritedSignalMultiplier;
2029
+ const maxWeightedTotal = dependencySignalWeightBudget * (1 + inheritedSignalMultiplier);
2030
+ if (maxWeightedTotal <= 0) {
2031
+ return 0;
2032
+ }
2033
+ return clamp01(weightedTotal / maxWeightedTotal);
2034
+ };
2035
+ var computeDependencyScores = (external, config) => {
2036
+ if (!external.available) {
2037
+ return {
2038
+ dependencyScores: [],
2039
+ repositoryExternalPressure: 0
2040
+ };
2041
+ }
2042
+ const transitiveCounts = external.dependencies.map(
2043
+ (dependency) => logScale(dependency.transitiveDependencies.length)
2044
+ );
2045
+ const dependentCounts = external.dependencies.map((dependency) => logScale(dependency.dependents));
2046
+ const chainDepths = external.dependencies.map((dependency) => dependency.dependencyDepth);
2047
+ const transitiveScale = buildQuantileScale(
2048
+ transitiveCounts,
2049
+ config.quantileClamp.lower,
2050
+ config.quantileClamp.upper
2051
+ );
2052
+ const dependentScale = buildQuantileScale(
2053
+ dependentCounts,
2054
+ config.quantileClamp.lower,
2055
+ config.quantileClamp.upper
2056
+ );
2057
+ const chainDepthScale = buildQuantileScale(
2058
+ chainDepths,
2059
+ config.quantileClamp.lower,
2060
+ config.quantileClamp.upper
2061
+ );
2062
+ const dependencyScores = external.dependencies.map((dependency) => {
2063
+ const signalScore = computeDependencySignalScore(
2064
+ dependency.ownRiskSignals,
2065
+ dependency.inheritedRiskSignals,
2066
+ config.dependencySignals.inheritedSignalMultiplier
2067
+ );
2068
+ const maintainerConcentrationRisk = dependency.maintainerCount === null ? config.dependencySignals.missingMetadataPenalty : clamp01(1 / Math.max(1, dependency.maintainerCount));
2069
+ const stalenessRisk = dependency.daysSinceLastRelease === null ? config.dependencySignals.missingMetadataPenalty : halfLifeRisk(
2070
+ dependency.daysSinceLastRelease,
2071
+ config.dependencySignals.abandonedHalfLifeDays
2072
+ );
2073
+ const transitiveBurdenRisk = normalizeWithScale(
2074
+ logScale(dependency.transitiveDependencies.length),
2075
+ transitiveScale
2076
+ );
2077
+ const centralityRisk = normalizeWithScale(logScale(dependency.dependents), dependentScale);
2078
+ const chainDepthRisk = normalizeWithScale(dependency.dependencyDepth, chainDepthScale);
2079
+ const busFactorRisk = dependency.busFactor === null ? config.dependencySignals.missingMetadataPenalty : clamp01(1 / Math.max(1, dependency.busFactor));
2080
+ const weights = config.dependencyFactorWeights;
2081
+ const normalizedScore = clamp01(
2082
+ signalScore * weights.signals + stalenessRisk * weights.staleness + maintainerConcentrationRisk * weights.maintainerConcentration + transitiveBurdenRisk * weights.transitiveBurden + centralityRisk * weights.centrality + chainDepthRisk * weights.chainDepth + busFactorRisk * weights.busFactorRisk
2083
+ );
2084
+ return {
2085
+ dependency: dependency.name,
2086
+ score: round44(normalizedScore * 100),
2087
+ normalizedScore: round44(normalizedScore),
2088
+ ownRiskSignals: dependency.ownRiskSignals,
2089
+ inheritedRiskSignals: dependency.inheritedRiskSignals
2090
+ };
2091
+ }).sort(
2092
+ (a, b) => b.normalizedScore - a.normalizedScore || a.dependency.localeCompare(b.dependency)
2093
+ );
2094
+ const normalizedValues = dependencyScores.map((score) => score.normalizedScore);
2095
+ const highDependencyRisk = dependencyScores.length === 0 ? 0 : percentile(normalizedValues, config.externalDimension.topDependencyPercentile);
2096
+ const averageDependencyRisk = average(normalizedValues);
2097
+ const depthRisk = halfLifeRisk(
2098
+ external.metrics.dependencyDepth,
2099
+ config.externalDimension.dependencyDepthHalfLife
2100
+ );
2101
+ const repositoryExternalPressure = clamp01(
2102
+ highDependencyRisk * 0.5 + averageDependencyRisk * 0.3 + depthRisk * 0.2
2103
+ );
2104
+ return {
2105
+ dependencyScores,
2106
+ repositoryExternalPressure: round44(repositoryExternalPressure)
2107
+ };
2108
+ };
2109
+ var mapEvolutionByFile = (evolution) => {
2110
+ if (!evolution.available) {
2111
+ return /* @__PURE__ */ new Map();
2112
+ }
2113
+ return new Map(
2114
+ evolution.files.map((fileMetrics) => [normalizePath2(fileMetrics.filePath), fileMetrics])
2115
+ );
2116
+ };
2117
+ var computeEvolutionScales = (evolutionByFile, config) => {
2118
+ const evolutionFiles = [...evolutionByFile.values()];
2119
+ return {
2120
+ commitCount: buildQuantileScale(
2121
+ evolutionFiles.map((metrics) => logScale(metrics.commitCount)),
2122
+ config.quantileClamp.lower,
2123
+ config.quantileClamp.upper
2124
+ ),
2125
+ churnTotal: buildQuantileScale(
2126
+ evolutionFiles.map((metrics) => logScale(metrics.churnTotal)),
2127
+ config.quantileClamp.lower,
2128
+ config.quantileClamp.upper
2129
+ ),
2130
+ busFactor: buildQuantileScale(
2131
+ evolutionFiles.map((metrics) => metrics.busFactor),
2132
+ config.quantileClamp.lower,
2133
+ config.quantileClamp.upper
2134
+ )
2135
+ };
2136
+ };
2137
+ var inferModuleName = (filePath, config) => {
2138
+ const normalized = normalizePath2(filePath);
2139
+ const parts = normalized.split("/").filter((part) => part.length > 0);
2140
+ if (parts.length <= 1) {
2141
+ return config.module.rootLabel;
2142
+ }
2143
+ const first = parts[0];
2144
+ if (first === void 0) {
2145
+ return config.module.rootLabel;
2146
+ }
2147
+ if (!config.module.commonSourceRoots.includes(first)) {
2148
+ return first;
2149
+ }
2150
+ if (parts.length <= config.module.maxPrefixSegments) {
2151
+ return first;
2152
+ }
2153
+ return parts.slice(0, config.module.maxPrefixSegments).join("/");
2154
+ };
2155
// Builds "fragile cluster" groupings from two independent signals:
//  1. structural import cycles (kind "structural_cycle"), and
//  2. git change-coupling connected components (kind "change_coupling").
// Each cluster gets a 0-100 score blending mean per-file risk with a
// cluster-specific signal; results are sorted by score desc, then kind,
// then id for deterministic output.
var buildFragileClusters = (structural, evolution, fileScoresByFile, config) => {
  const clusters = [];
  let cycleClusterCount = 0;
  for (const cycle of structural.cycles) {
    // Dedupe + normalize cycle members and drop files without a risk score.
    const files = [...new Set(cycle.nodes.map((node) => normalizePath2(node)))].filter(
      (filePath) => fileScoresByFile.has(filePath)
    );
    // A cluster needs at least two participating files to be meaningful.
    if (files.length < 2) {
      continue;
    }
    files.sort((a, b) => a.localeCompare(b));
    const averageRisk = average(
      files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
    );
    // Cycle size contributes risk, saturating at 6+ files ((6-1)/5 = 1).
    const cycleSizeRisk = clamp01((files.length - 1) / 5);
    // 75% mean file risk, 25% cycle size; scaled to 0-100.
    const score = round44(clamp01(averageRisk * 0.75 + cycleSizeRisk * 0.25) * 100);
    cycleClusterCount += 1;
    clusters.push({
      id: `cycle:${cycleClusterCount}`,
      kind: "structural_cycle",
      files,
      score
    });
  }
  if (evolution.available && evolution.coupling.pairs.length > 0) {
    // Only pairs with enough co-change commits are considered at all.
    const candidates = evolution.coupling.pairs.filter(
      (pair) => pair.coChangeCommits >= config.couplingCluster.minCoChangeCommits
    );
    // Adaptive threshold: at least floorScore, else the configured percentile
    // of candidate coupling scores.
    const threshold = Math.max(
      config.couplingCluster.floorScore,
      percentile(
        candidates.map((pair) => pair.couplingScore),
        config.couplingCluster.percentileThreshold
      )
    );
    // Keep strong pairs, normalize paths, and drop self-pairs or pairs whose
    // endpoints have no file score.
    const selectedPairs = candidates.filter((pair) => pair.couplingScore >= threshold).map((pair) => ({
      fileA: normalizePath2(pair.fileA),
      fileB: normalizePath2(pair.fileB),
      couplingScore: pair.couplingScore
    })).filter(
      (pair) => pair.fileA !== pair.fileB && fileScoresByFile.has(pair.fileA) && fileScoresByFile.has(pair.fileB)
    );
    // Undirected adjacency map: file -> set of coupled neighbors.
    const adjacency = /* @__PURE__ */ new Map();
    for (const pair of selectedPairs) {
      const aNeighbors = adjacency.get(pair.fileA) ?? /* @__PURE__ */ new Set();
      aNeighbors.add(pair.fileB);
      adjacency.set(pair.fileA, aNeighbors);
      const bNeighbors = adjacency.get(pair.fileB) ?? /* @__PURE__ */ new Set();
      bNeighbors.add(pair.fileA);
      adjacency.set(pair.fileB, bNeighbors);
    }
    const visited = /* @__PURE__ */ new Set();
    let couplingClusterCount = 0;
    // Sorted start order makes component discovery (and ids) deterministic.
    const orderedStarts = [...adjacency.keys()].sort((a, b) => a.localeCompare(b));
    for (const start of orderedStarts) {
      if (visited.has(start)) {
        continue;
      }
      // Iterative DFS collecting one connected component into `files`.
      const stack = [start];
      const files = [];
      while (stack.length > 0) {
        const current = stack.pop();
        if (current === void 0 || visited.has(current)) {
          continue;
        }
        visited.add(current);
        files.push(current);
        const neighbors = adjacency.get(current);
        if (neighbors === void 0) {
          continue;
        }
        for (const neighbor of neighbors) {
          if (!visited.has(neighbor)) {
            stack.push(neighbor);
          }
        }
      }
      if (files.length < 2) {
        continue;
      }
      files.sort((a, b) => a.localeCompare(b));
      const fileSet = new Set(files);
      // Pairs fully contained inside this component drive the coupling term.
      const componentPairs = selectedPairs.filter(
        (pair) => fileSet.has(pair.fileA) && fileSet.has(pair.fileB)
      );
      const meanFileRisk = average(
        files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
      );
      const meanCoupling = average(componentPairs.map((pair) => pair.couplingScore));
      // 65% mean file risk, 35% mean coupling strength; scaled to 0-100.
      const score = round44(clamp01(meanFileRisk * 0.65 + meanCoupling * 0.35) * 100);
      couplingClusterCount += 1;
      clusters.push({
        id: `coupling:${couplingClusterCount}`,
        kind: "change_coupling",
        files,
        score
      });
    }
  }
  // Deterministic ordering: score desc, then kind, then id.
  return clusters.sort(
    (a, b) => b.score - a.score || a.kind.localeCompare(b.kind) || a.id.localeCompare(b.id)
  );
};
2258
// Computes the repository risk summary from three signal dimensions
// (structural graph, git evolution, external dependencies) and the merged
// engine config. Produces per-file scores, hotspots, module scores, fragile
// clusters, dependency amplification zones, and a repository-level score.
// NOTE(review): helper semantics (buildQuantileScale, normalizeWithScale,
// saturatingComposite, logScale) are defined elsewhere in this bundle —
// comments below describe only what is visible at the call sites.
var computeRiskSummary = (structural, evolution, external, config) => {
  const dependencyComputation = computeDependencyScores(external, config);
  const evolutionByFile = mapEvolutionByFile(evolution);
  const evolutionScales = computeEvolutionScales(evolutionByFile, config);
  // Normalized set of every file participating in at least one import cycle.
  const cycleFileSet = new Set(
    structural.cycles.flatMap((cycle) => cycle.nodes.map((node) => normalizePath2(node)))
  );
  // Quantile scales (clamped by config) for log-scaled fan-in/fan-out and raw depth.
  const fanInScale = buildQuantileScale(
    structural.files.map((file) => logScale(file.fanIn)),
    config.quantileClamp.lower,
    config.quantileClamp.upper
  );
  const fanOutScale = buildQuantileScale(
    structural.files.map((file) => logScale(file.fanOut)),
    config.quantileClamp.lower,
    config.quantileClamp.upper
  );
  const depthScale = buildQuantileScale(
    structural.files.map((file) => file.depth),
    config.quantileClamp.lower,
    config.quantileClamp.upper
  );
  // Re-normalize dimension weights so unavailable dimensions do not dilute
  // the available ones (structural is always available).
  const dimensionWeights = normalizeWeights(config.dimensionWeights, {
    structural: true,
    evolution: evolution.available,
    external: external.available
  });
  const fileRiskContexts = structural.files.map((file) => {
    const filePath = normalizePath2(file.id);
    const inCycle = cycleFileSet.has(filePath) ? 1 : 0;
    const fanInRisk = normalizeWithScale(logScale(file.fanIn), fanInScale);
    const fanOutRisk = normalizeWithScale(logScale(file.fanOut), fanOutScale);
    const depthRisk = normalizeWithScale(file.depth, depthScale);
    const structuralWeights = config.structuralFactorWeights;
    // Weighted blend of the four structural signals, clamped to [0, 1].
    const structuralFactor = clamp01(
      fanInRisk * structuralWeights.fanIn + fanOutRisk * structuralWeights.fanOut + depthRisk * structuralWeights.depth + inCycle * structuralWeights.cycleParticipation
    );
    // Centrality = mean of fan-in/fan-out risk; feeds interaction terms below.
    const structuralCentrality = clamp01((fanInRisk + fanOutRisk) / 2);
    let evolutionFactor = 0;
    const evolutionMetrics = evolutionByFile.get(filePath);
    if (evolution.available && evolutionMetrics !== void 0) {
      const frequencyRisk = normalizeWithScale(
        logScale(evolutionMetrics.commitCount),
        evolutionScales.commitCount
      );
      const churnRisk = normalizeWithScale(
        logScale(evolutionMetrics.churnTotal),
        evolutionScales.churnTotal
      );
      const volatilityRisk = clamp01(evolutionMetrics.recentVolatility);
      const ownershipConcentrationRisk = clamp01(evolutionMetrics.topAuthorShare);
      // Low bus factor = high risk, hence the (1 - normalized) inversion.
      const busFactorRisk = clamp01(1 - normalizeWithScale(evolutionMetrics.busFactor, evolutionScales.busFactor));
      const evolutionWeights = config.evolutionFactorWeights;
      evolutionFactor = clamp01(
        frequencyRisk * evolutionWeights.frequency + churnRisk * evolutionWeights.churn + volatilityRisk * evolutionWeights.recentVolatility + ownershipConcentrationRisk * evolutionWeights.ownershipConcentration + busFactorRisk * evolutionWeights.busFactorRisk
      );
    }
    // External pressure is repository-wide; it is attributed to a file in
    // proportion to how central/unstable that file is (60/40 blend).
    const dependencyAffinity = clamp01(structuralCentrality * 0.6 + evolutionFactor * 0.4);
    const externalFactor = external.available ? clamp01(dependencyComputation.repositoryExternalPressure * dependencyAffinity) : 0;
    const baseline = structuralFactor * dimensionWeights.structural + evolutionFactor * dimensionWeights.evolution + externalFactor * dimensionWeights.external;
    // Multiplicative interaction terms amplify co-occurring risks.
    const interactions = [
      structuralFactor * evolutionFactor * config.interactionWeights.structuralEvolution,
      structuralCentrality * evolutionFactor * config.interactionWeights.centralInstability,
      externalFactor * Math.max(structuralFactor, evolutionFactor) * config.interactionWeights.dependencyAmplification
    ];
    const normalizedScore = saturatingComposite(baseline, interactions);
    return {
      file: filePath,
      score: round44(normalizedScore * 100),
      normalizedScore: round44(normalizedScore),
      factors: {
        structural: round44(structuralFactor),
        evolution: round44(evolutionFactor),
        external: round44(externalFactor)
      },
      structuralCentrality: round44(structuralCentrality)
    };
  }).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file));
  // Public per-file scores drop the internal structuralCentrality field.
  const fileScores = fileRiskContexts.map((context) => ({
    file: context.file,
    score: context.score,
    normalizedScore: context.normalizedScore,
    factors: context.factors
  }));
  const fileScoresByFile = new Map(fileScores.map((fileScore) => [fileScore.file, fileScore]));
  // Hotspot count: top percentage of files, bounded by configured min/max.
  const hotspotsCount = Math.min(
    config.hotspotMaxFiles,
    Math.max(config.hotspotMinFiles, Math.ceil(fileScores.length * config.hotspotTopPercent))
  );
  const hotspots = fileScores.slice(0, hotspotsCount).map((fileScore) => ({
    file: fileScore.file,
    score: fileScore.score,
    factors: fileScore.factors
  }));
  // Group normalized file scores by inferred module name.
  const moduleFiles = /* @__PURE__ */ new Map();
  for (const fileScore of fileScores) {
    const moduleName = inferModuleName(fileScore.file, config);
    const values = moduleFiles.get(moduleName) ?? [];
    values.push(fileScore.normalizedScore);
    moduleFiles.set(moduleName, values);
  }
  const moduleScores = [...moduleFiles.entries()].map(([module, values]) => {
    const averageScore = average(values);
    const peakScore = values.reduce((max, value) => Math.max(max, value), 0);
    // Module score: 65% average file risk, 35% worst file risk.
    const normalizedScore = clamp01(averageScore * 0.65 + peakScore * 0.35);
    return {
      module,
      score: round44(normalizedScore * 100),
      normalizedScore: round44(normalizedScore),
      fileCount: values.length
    };
  }).sort((a, b) => b.score - a.score || a.module.localeCompare(b.module));
  const fragileClusters = buildFragileClusters(structural, evolution, fileScoresByFile, config);
  const externalPressures = fileScores.map((fileScore) => fileScore.factors.external);
  // Zone threshold: at least the configured floor, else a percentile of the
  // observed per-file external pressures.
  const pressureThreshold = Math.max(
    config.amplificationZone.pressureFloor,
    percentile(externalPressures, config.amplificationZone.percentileThreshold)
  );
  const dependencyAmplificationZones = fileScores.map((fileScore) => {
    const intensity = clamp01(
      fileScore.factors.external * Math.max(fileScore.factors.structural, fileScore.factors.evolution)
    );
    const normalizedZoneScore = clamp01(intensity * 0.7 + fileScore.normalizedScore * 0.3);
    return {
      file: fileScore.file,
      score: round44(normalizedZoneScore * 100),
      externalPressure: fileScore.factors.external
    };
  }).filter((zone) => external.available && zone.externalPressure >= pressureThreshold).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file)).slice(0, config.amplificationZone.maxZones).map((zone) => ({
    ...zone,
    externalPressure: round44(zone.externalPressure)
  }));
  const structuralDimension = average(fileScores.map((fileScore) => fileScore.factors.structural));
  const evolutionDimension = average(fileScores.map((fileScore) => fileScore.factors.evolution));
  const externalDimension = dependencyComputation.repositoryExternalPressure;
  // Repository "critical instability": mean centrality*evolution product over
  // the top 10% most central-and-unstable files (at least one file).
  const topCentralSlice = Math.max(1, Math.ceil(fileRiskContexts.length * 0.1));
  const criticalInstability = average(
    [...fileRiskContexts].sort(
      (a, b) => b.structuralCentrality * b.factors.evolution - a.structuralCentrality * a.factors.evolution || a.file.localeCompare(b.file)
    ).slice(0, topCentralSlice).map((context) => context.structuralCentrality * context.factors.evolution)
  );
  const dependencyAmplification = average(
    dependencyAmplificationZones.map(
      (zone) => clamp01(zone.externalPressure * zone.score / 100)
    )
  );
  const repositoryBaseline = structuralDimension * dimensionWeights.structural + evolutionDimension * dimensionWeights.evolution + externalDimension * dimensionWeights.external;
  const repositoryNormalizedScore = saturatingComposite(repositoryBaseline, [
    structuralDimension * evolutionDimension * config.interactionWeights.structuralEvolution,
    criticalInstability * config.interactionWeights.centralInstability,
    dependencyAmplification * config.interactionWeights.dependencyAmplification
  ]);
  return {
    repositoryScore: round44(repositoryNormalizedScore * 100),
    normalizedScore: round44(repositoryNormalizedScore),
    hotspots,
    fragileClusters,
    dependencyAmplificationZones,
    fileScores,
    moduleScores,
    dependencyScores: dependencyComputation.dependencyScores
  };
};
2421
// Merges user-supplied overrides onto the default risk-engine config.
// Top-level scalars are shallow-merged; each nested section object is merged
// one level deep so a partial override does not wipe out sibling keys.
// Returns the defaults object itself when no overrides are given.
var mergeConfig = (overrides) => {
  if (overrides === void 0) {
    return DEFAULT_RISK_ENGINE_CONFIG;
  }
  // One-level-deep merge for a single named config section.
  const mergeSection = (key) => ({
    ...DEFAULT_RISK_ENGINE_CONFIG[key],
    ...overrides[key]
  });
  return {
    ...DEFAULT_RISK_ENGINE_CONFIG,
    ...overrides,
    dimensionWeights: mergeSection("dimensionWeights"),
    interactionWeights: mergeSection("interactionWeights"),
    structuralFactorWeights: mergeSection("structuralFactorWeights"),
    evolutionFactorWeights: mergeSection("evolutionFactorWeights"),
    dependencyFactorWeights: mergeSection("dependencyFactorWeights"),
    quantileClamp: mergeSection("quantileClamp"),
    couplingCluster: mergeSection("couplingCluster"),
    amplificationZone: mergeSection("amplificationZone"),
    module: mergeSection("module"),
    dependencySignals: mergeSection("dependencySignals"),
    externalDimension: mergeSection("externalDimension")
  };
};
2474
// Public entry point for risk computation: resolves the effective config
// (defaults + optional overrides) and delegates to computeRiskSummary.
var computeRepositoryRiskSummary = (input) => {
  const effectiveConfig = mergeConfig(input.config);
  return computeRiskSummary(input.structural, input.evolution, input.external, effectiveConfig);
};
1601
2478
 
1602
2479
  // src/application/run-analyze-command.ts
1603
2480
// Resolves the analysis target against the invocation directory; a missing
// path argument means "the current directory".
var resolveTargetPath = (inputPath, cwd) => {
  const requestedPath = inputPath ?? ".";
  return resolve2(cwd, requestedPath);
};
1604
- var runAnalyzeCommand = async (inputPath, authorIdentityMode) => {
2481
+ var createExternalProgressReporter = (logger) => {
2482
+ let lastLoggedProgress = 0;
2483
+ return (event) => {
2484
+ switch (event.stage) {
2485
+ case "package_json_loaded":
2486
+ logger.debug("external: package.json loaded");
2487
+ break;
2488
+ case "lockfile_selected":
2489
+ logger.info(`external: lockfile selected (${event.kind})`);
2490
+ break;
2491
+ case "lockfile_parsed":
2492
+ logger.info(
2493
+ `external: parsed ${event.dependencyNodes} locked dependencies (${event.directDependencies} direct)`
2494
+ );
2495
+ break;
2496
+ case "metadata_fetch_started":
2497
+ logger.info(`external: fetching dependency metadata (${event.total} packages)`);
2498
+ break;
2499
+ case "metadata_fetch_progress": {
2500
+ const currentPercent = event.total === 0 ? 100 : Math.floor(event.completed / event.total * 100);
2501
+ if (event.completed === event.total || event.completed === 1 || event.completed - lastLoggedProgress >= 25) {
2502
+ lastLoggedProgress = event.completed;
2503
+ logger.info(
2504
+ `external: metadata progress ${event.completed}/${event.total} (${currentPercent}%)`
2505
+ );
2506
+ logger.debug(`external: last package processed ${event.packageName}`);
2507
+ }
2508
+ break;
2509
+ }
2510
+ case "metadata_fetch_completed":
2511
+ logger.info(`external: metadata fetch completed (${event.total} packages)`);
2512
+ break;
2513
+ case "summary_built":
2514
+ logger.info(
2515
+ `external: summary built (${event.totalDependencies} total, ${event.directDependencies} direct)`
2516
+ );
2517
+ break;
2518
+ }
2519
+ };
2520
+ };
2521
+ var createStructuralProgressReporter = (logger) => {
2522
+ let lastProcessed = 0;
2523
+ return (event) => {
2524
+ switch (event.stage) {
2525
+ case "config_resolved":
2526
+ if (event.usedFallbackScan) {
2527
+ logger.info(
2528
+ `structural: using filesystem scan discovery (tsconfigs=${event.tsconfigCount})`
2529
+ );
2530
+ } else {
2531
+ logger.info(`structural: discovered tsconfig graph (${event.tsconfigCount} configs)`);
2532
+ }
2533
+ break;
2534
+ case "files_discovered":
2535
+ logger.info(`structural: discovered ${event.totalSourceFiles} source files`);
2536
+ break;
2537
+ case "program_created":
2538
+ logger.debug(`structural: TypeScript program created (${event.totalSourceFiles} files)`);
2539
+ break;
2540
+ case "file_processed":
2541
+ if (event.processed === event.total || event.processed === 1 || event.processed - lastProcessed >= 50) {
2542
+ lastProcessed = event.processed;
2543
+ logger.info(`structural: resolved ${event.processed}/${event.total} files`);
2544
+ logger.debug(`structural: last file processed ${event.filePath}`);
2545
+ }
2546
+ break;
2547
+ case "edges_resolved":
2548
+ logger.info(`structural: resolved ${event.totalEdges} dependency edges`);
2549
+ break;
2550
+ }
2551
+ };
2552
+ };
2553
+ var createEvolutionProgressReporter = (logger) => {
2554
+ let lastParsedRecords = 0;
2555
+ return (event) => {
2556
+ switch (event.stage) {
2557
+ case "checking_git_repository":
2558
+ logger.debug("evolution: checking git repository");
2559
+ break;
2560
+ case "not_git_repository":
2561
+ logger.warn("evolution: target path is not a git repository");
2562
+ break;
2563
+ case "loading_commit_history":
2564
+ logger.info("evolution: loading git history");
2565
+ break;
2566
+ case "history":
2567
+ if (event.event.stage === "git_log_received") {
2568
+ logger.info(`evolution: git log loaded (${event.event.bytes} bytes)`);
2569
+ break;
2570
+ }
2571
+ if (event.event.stage === "git_log_parsed") {
2572
+ logger.info(`evolution: parsed ${event.event.commits} commits`);
2573
+ break;
2574
+ }
2575
+ if (event.event.stage === "git_log_parse_progress" && (event.event.parsedRecords === event.event.totalRecords || event.event.parsedRecords === 1 || event.event.parsedRecords - lastParsedRecords >= 500)) {
2576
+ lastParsedRecords = event.event.parsedRecords;
2577
+ const currentPercent = event.event.totalRecords === 0 ? 100 : Math.floor(event.event.parsedRecords / event.event.totalRecords * 100);
2578
+ logger.info(
2579
+ `evolution: parse progress ${event.event.parsedRecords}/${event.event.totalRecords} (${currentPercent}%)`
2580
+ );
2581
+ }
2582
+ break;
2583
+ case "computing_metrics":
2584
+ logger.info("evolution: computing metrics");
2585
+ break;
2586
+ case "analysis_completed":
2587
+ logger.debug(`evolution: analysis completed (available=${event.available})`);
2588
+ break;
2589
+ }
2590
+ };
2591
+ };
2592
// Runs the full analysis pipeline (structural -> evolution -> external ->
// risk) against the resolved target path, logging progress through the
// injected logger (defaults to a silent logger for programmatic use).
// Returns the composite summary object; formatting is the caller's job.
var runAnalyzeCommand = async (inputPath, authorIdentityMode, logger = createSilentLogger()) => {
  // Prefer INIT_CWD (set by npm-style runners) so relative paths resolve
  // against the user's shell directory rather than the package directory.
  const invocationCwd = process.env["INIT_CWD"] ?? process.cwd();
  const targetPath = resolveTargetPath(inputPath, invocationCwd);
  logger.info(`analyzing repository: ${targetPath}`);
  logger.info("building structural graph");
  const structural = buildProjectGraphSummary({
    projectPath: targetPath,
    onProgress: createStructuralProgressReporter(logger)
  });
  logger.debug(
    `structural metrics: nodes=${structural.metrics.nodeCount}, edges=${structural.metrics.edgeCount}, cycles=${structural.metrics.cycleCount}`
  );
  logger.info(`analyzing git evolution (author identity: ${authorIdentityMode})`);
  const evolution = analyzeRepositoryEvolutionFromGit({
    repositoryPath: targetPath,
    config: { authorIdentityMode }
  }, createEvolutionProgressReporter(logger));
  // Evolution analysis is best-effort: warn (don't fail) when unavailable,
  // e.g. for non-git targets.
  if (evolution.available) {
    logger.debug(
      `evolution metrics: commits=${evolution.metrics.totalCommits}, files=${evolution.metrics.totalFiles}, hotspotThreshold=${evolution.metrics.hotspotThresholdCommitCount}`
    );
  } else {
    logger.warn(`evolution analysis unavailable: ${evolution.reason}`);
  }
  logger.info("analyzing external dependencies");
  const external = await analyzeDependencyExposureFromProject(
    { repositoryPath: targetPath },
    createExternalProgressReporter(logger)
  );
  // External analysis is likewise best-effort.
  if (external.available) {
    logger.debug(
      `external metrics: total=${external.metrics.totalDependencies}, direct=${external.metrics.directDependencies}, transitive=${external.metrics.transitiveDependencies}`
    );
  } else {
    logger.warn(`external analysis unavailable: ${external.reason}`);
  }
  logger.info("computing risk summary");
  const risk = computeRepositoryRiskSummary({
    structural,
    evolution,
    external
  });
  logger.info(`analysis completed (repositoryScore=${risk.repositoryScore})`);
  const summary = {
    structural,
    evolution,
    external,
    risk
  };
  return summary;
};
1620
2643
 
1621
2644
  // src/index.ts
@@ -1628,14 +2651,35 @@ program.command("analyze").argument("[path]", "path to the project to analyze").
1628
2651
  "--author-identity <mode>",
1629
2652
  "author identity mode: likely_merge (heuristic) or strict_email (deterministic)"
1630
2653
  ).choices(["likely_merge", "strict_email"]).default("likely_merge")
1631
- ).action(async (path, options) => {
1632
- const output = await runAnalyzeCommand(path, options.authorIdentity);
1633
- process.stdout.write(`${output}
2654
+ ).addOption(
2655
+ new Option(
2656
+ "--log-level <level>",
2657
+ "log verbosity: silent, error, warn, info, debug (logs are written to stderr)"
2658
+ ).choices(["silent", "error", "warn", "info", "debug"]).default(parseLogLevel(process.env["CODESENTINEL_LOG_LEVEL"]))
2659
+ ).addOption(
2660
+ new Option(
2661
+ "--output <mode>",
2662
+ "output mode: summary (default) or json (full analysis object)"
2663
+ ).choices(["summary", "json"]).default("summary")
2664
+ ).option("--json", "shortcut for --output json").action(
2665
+ async (path, options) => {
2666
+ const logger = createStderrLogger(options.logLevel);
2667
+ const summary = await runAnalyzeCommand(path, options.authorIdentity, logger);
2668
+ const outputMode = options.json === true ? "json" : options.output;
2669
+ process.stdout.write(`${formatAnalyzeOutput(summary, outputMode)}
1634
2670
  `);
1635
- });
2671
+ }
2672
+ );
1636
2673
// Show help and exit successfully when the CLI is invoked with no arguments.
if (process.argv.length <= 2) {
  program.outputHelp();
  process.exit(0);
}
var executablePath = process.argv[0] ?? "";
var scriptPath = process.argv[1] ?? "";
// Tolerate an argument-separator invocation (`... -- <args>`): drop a literal
// "--" at argv[2] so commander parses the arguments that follow it.
var argv = process.argv[2] === "--" ? [executablePath, scriptPath, ...process.argv.slice(3)] : process.argv;
// Re-check after stripping "--": a bare trailing "--" with nothing after it
// is equivalent to no arguments at all.
if (argv.length <= 2) {
  program.outputHelp();
  process.exit(0);
}
await program.parseAsync(argv);
1641
2685
  //# sourceMappingURL=index.js.map