@codesentinel/codesentinel 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -6,6 +6,104 @@ import { readFileSync as readFileSync2 } from "fs";
6
6
  import { dirname, resolve as resolve3 } from "path";
7
7
  import { fileURLToPath } from "url";
8
8
 
9
+ // src/application/format-analyze-output.ts
10
+ var createSummaryShape = (summary) => ({
11
+ targetPath: summary.structural.targetPath,
12
+ structural: summary.structural.metrics,
13
+ evolution: summary.evolution.available ? {
14
+ available: true,
15
+ metrics: summary.evolution.metrics,
16
+ hotspotsTop: summary.evolution.hotspots.slice(0, 5).map((hotspot) => hotspot.filePath)
17
+ } : {
18
+ available: false,
19
+ reason: summary.evolution.reason
20
+ },
21
+ external: summary.external.available ? {
22
+ available: true,
23
+ metrics: summary.external.metrics,
24
+ highRiskDependenciesTop: summary.external.highRiskDependencies.slice(0, 10)
25
+ } : {
26
+ available: false,
27
+ reason: summary.external.reason
28
+ },
29
+ risk: {
30
+ repositoryScore: summary.risk.repositoryScore,
31
+ normalizedScore: summary.risk.normalizedScore,
32
+ hotspotsTop: summary.risk.hotspots.slice(0, 5).map((hotspot) => ({
33
+ file: hotspot.file,
34
+ score: hotspot.score
35
+ })),
36
+ fragileClusterCount: summary.risk.fragileClusters.length,
37
+ dependencyAmplificationZoneCount: summary.risk.dependencyAmplificationZones.length
38
+ }
39
+ });
40
+ var formatAnalyzeOutput = (summary, mode) => mode === "json" ? JSON.stringify(summary, null, 2) : JSON.stringify(createSummaryShape(summary), null, 2);
41
+
42
+ // src/application/logger.ts
43
+ var logLevelRank = {
44
+ error: 0,
45
+ warn: 1,
46
+ info: 2,
47
+ debug: 3
48
+ };
49
+ var noop = () => {
50
+ };
51
+ var createSilentLogger = () => ({
52
+ error: noop,
53
+ warn: noop,
54
+ info: noop,
55
+ debug: noop
56
+ });
57
+ var shouldLog = (configuredLevel, messageLevel) => {
58
+ if (configuredLevel === "silent") {
59
+ return false;
60
+ }
61
+ return logLevelRank[messageLevel] <= logLevelRank[configuredLevel];
62
+ };
63
+ var write = (messageLevel, message) => {
64
+ process.stderr.write(`[codesentinel] ${messageLevel.toUpperCase()} ${message}
65
+ `);
66
+ };
67
+ var createStderrLogger = (level) => {
68
+ if (level === "silent") {
69
+ return createSilentLogger();
70
+ }
71
+ return {
72
+ error: (message) => {
73
+ if (shouldLog(level, "error")) {
74
+ write("error", message);
75
+ }
76
+ },
77
+ warn: (message) => {
78
+ if (shouldLog(level, "warn")) {
79
+ write("warn", message);
80
+ }
81
+ },
82
+ info: (message) => {
83
+ if (shouldLog(level, "info")) {
84
+ write("info", message);
85
+ }
86
+ },
87
+ debug: (message) => {
88
+ if (shouldLog(level, "debug")) {
89
+ write("debug", message);
90
+ }
91
+ }
92
+ };
93
+ };
94
+ var parseLogLevel = (value) => {
95
+ switch (value) {
96
+ case "silent":
97
+ case "error":
98
+ case "warn":
99
+ case "info":
100
+ case "debug":
101
+ return value;
102
+ default:
103
+ return "info";
104
+ }
105
+ };
106
+
9
107
  // src/application/run-analyze-command.ts
10
108
  import { resolve as resolve2 } from "path";
11
109
 
@@ -385,16 +483,18 @@ var extractModuleSpecifiers = (sourceFile) => {
385
483
  visit(sourceFile);
386
484
  return [...specifiers];
387
485
  };
388
- var parseTypescriptProject = (projectPath) => {
486
+ var parseTypescriptProject = (projectPath, onProgress) => {
389
487
  const projectRoot = isAbsolute(projectPath) ? projectPath : resolve(projectPath);
390
488
  const { fileNames, options } = parseTsConfig(projectRoot);
391
489
  const sourceFilePaths = fileNames.filter((filePath) => isProjectSourceFile(filePath, projectRoot)).map((filePath) => normalizePath(resolve(filePath)));
392
490
  const uniqueSourceFilePaths = [...new Set(sourceFilePaths)].sort((a, b) => a.localeCompare(b));
393
491
  const sourceFilePathSet = new Set(uniqueSourceFilePaths);
492
+ onProgress?.({ stage: "files_discovered", totalSourceFiles: uniqueSourceFilePaths.length });
394
493
  const program2 = ts.createProgram({
395
494
  rootNames: uniqueSourceFilePaths,
396
495
  options
397
496
  });
497
+ onProgress?.({ stage: "program_created", totalSourceFiles: uniqueSourceFilePaths.length });
398
498
  const nodeByAbsolutePath = /* @__PURE__ */ new Map();
399
499
  for (const sourcePath of uniqueSourceFilePaths) {
400
500
  const relativePath = normalizePath(relative(projectRoot, sourcePath));
@@ -407,7 +507,7 @@ var parseTypescriptProject = (projectPath) => {
407
507
  }
408
508
  const resolverCache = /* @__PURE__ */ new Map();
409
509
  const edges = [];
410
- for (const sourcePath of uniqueSourceFilePaths) {
510
+ for (const [index, sourcePath] of uniqueSourceFilePaths.entries()) {
411
511
  const sourceFile = program2.getSourceFile(sourcePath);
412
512
  if (sourceFile === void 0) {
413
513
  continue;
@@ -436,14 +536,24 @@ var parseTypescriptProject = (projectPath) => {
436
536
  }
437
537
  edges.push({ from: fromNode.id, to: toNode.id });
438
538
  }
539
+ const processed = index + 1;
540
+ if (processed === 1 || processed === uniqueSourceFilePaths.length || processed % 50 === 0) {
541
+ onProgress?.({
542
+ stage: "file_processed",
543
+ processed,
544
+ total: uniqueSourceFilePaths.length,
545
+ filePath: fromNode.id
546
+ });
547
+ }
439
548
  }
549
+ onProgress?.({ stage: "edges_resolved", totalEdges: edges.length });
440
550
  return {
441
551
  nodes: [...nodeByAbsolutePath.values()],
442
552
  edges
443
553
  };
444
554
  };
445
555
  var buildProjectGraphSummary = (input) => {
446
- const parsedProject = parseTypescriptProject(input.projectPath);
556
+ const parsedProject = parseTypescriptProject(input.projectPath, input.onProgress);
447
557
  const graphData = createGraphData(parsedProject.nodes, parsedProject.edges);
448
558
  return createGraphAnalysisSummary(input.projectPath, graphData);
449
559
  };
@@ -1007,7 +1117,7 @@ var mapWithConcurrency = async (values, limit, handler) => {
1007
1117
  await Promise.all(workers);
1008
1118
  return results;
1009
1119
  };
1010
- var analyzeDependencyExposure = async (input, metadataProvider) => {
1120
+ var analyzeDependencyExposure = async (input, metadataProvider, onProgress) => {
1011
1121
  const packageJson = loadPackageJson(input.repositoryPath);
1012
1122
  if (packageJson === null) {
1013
1123
  return {
@@ -1016,6 +1126,7 @@ var analyzeDependencyExposure = async (input, metadataProvider) => {
1016
1126
  reason: "package_json_not_found"
1017
1127
  };
1018
1128
  }
1129
+ onProgress?.({ stage: "package_json_loaded" });
1019
1130
  const lockfile = selectLockfile(input.repositoryPath);
1020
1131
  if (lockfile === null) {
1021
1132
  return {
@@ -1024,23 +1135,50 @@ var analyzeDependencyExposure = async (input, metadataProvider) => {
1024
1135
  reason: "lockfile_not_found"
1025
1136
  };
1026
1137
  }
1138
+ onProgress?.({ stage: "lockfile_selected", kind: lockfile.kind });
1027
1139
  try {
1028
1140
  const directSpecs = parsePackageJson(packageJson.raw);
1029
1141
  const extraction = parseExtraction(lockfile.kind, lockfile.raw, directSpecs);
1030
1142
  const config = withDefaults(input.config);
1143
+ onProgress?.({
1144
+ stage: "lockfile_parsed",
1145
+ dependencyNodes: extraction.nodes.length,
1146
+ directDependencies: extraction.directDependencies.length
1147
+ });
1148
+ onProgress?.({ stage: "metadata_fetch_started", total: extraction.nodes.length });
1149
+ let completed = 0;
1031
1150
  const metadataEntries = await mapWithConcurrency(
1032
1151
  extraction.nodes,
1033
1152
  config.metadataRequestConcurrency,
1034
- async (node) => ({
1035
- key: `${node.name}@${node.version}`,
1036
- metadata: await metadataProvider.getMetadata(node.name, node.version)
1037
- })
1153
+ async (node) => {
1154
+ const result = {
1155
+ key: `${node.name}@${node.version}`,
1156
+ metadata: await metadataProvider.getMetadata(node.name, node.version)
1157
+ };
1158
+ completed += 1;
1159
+ onProgress?.({
1160
+ stage: "metadata_fetch_progress",
1161
+ completed,
1162
+ total: extraction.nodes.length,
1163
+ packageName: node.name
1164
+ });
1165
+ return result;
1166
+ }
1038
1167
  );
1168
+ onProgress?.({ stage: "metadata_fetch_completed", total: extraction.nodes.length });
1039
1169
  const metadataByKey = /* @__PURE__ */ new Map();
1040
1170
  for (const entry of metadataEntries) {
1041
1171
  metadataByKey.set(entry.key, entry.metadata);
1042
1172
  }
1043
- return buildExternalAnalysisSummary(input.repositoryPath, extraction, metadataByKey, config);
1173
+ const summary = buildExternalAnalysisSummary(input.repositoryPath, extraction, metadataByKey, config);
1174
+ if (summary.available) {
1175
+ onProgress?.({
1176
+ stage: "summary_built",
1177
+ totalDependencies: summary.metrics.totalDependencies,
1178
+ directDependencies: summary.metrics.directDependencies
1179
+ });
1180
+ }
1181
+ return summary;
1044
1182
  } catch (error) {
1045
1183
  const message = error instanceof Error ? error.message : "unknown";
1046
1184
  if (message.includes("unsupported_lockfile_format")) {
@@ -1123,9 +1261,9 @@ var NoopMetadataProvider = class {
1123
1261
  return null;
1124
1262
  }
1125
1263
  };
1126
- var analyzeDependencyExposureFromProject = async (input) => {
1264
+ var analyzeDependencyExposureFromProject = async (input, onProgress) => {
1127
1265
  const metadataProvider = process.env["CODESENTINEL_EXTERNAL_METADATA"] === "none" ? new NoopMetadataProvider() : new NpmRegistryMetadataProvider();
1128
- return analyzeDependencyExposure(input, metadataProvider);
1266
+ return analyzeDependencyExposure(input, metadataProvider, onProgress);
1129
1267
  };
1130
1268
 
1131
1269
  // ../git-analyzer/dist/index.js
@@ -1422,17 +1560,26 @@ var createEffectiveConfig = (overrides) => ({
1422
1560
  ...DEFAULT_EVOLUTION_CONFIG,
1423
1561
  ...overrides
1424
1562
  });
1425
- var analyzeRepositoryEvolution = (input, historyProvider) => {
1563
+ var analyzeRepositoryEvolution = (input, historyProvider, onProgress) => {
1564
+ onProgress?.({ stage: "checking_git_repository" });
1426
1565
  if (!historyProvider.isGitRepository(input.repositoryPath)) {
1566
+ onProgress?.({ stage: "not_git_repository" });
1427
1567
  return {
1428
1568
  targetPath: input.repositoryPath,
1429
1569
  available: false,
1430
1570
  reason: "not_git_repository"
1431
1571
  };
1432
1572
  }
1433
- const commits = historyProvider.getCommitHistory(input.repositoryPath);
1573
+ onProgress?.({ stage: "loading_commit_history" });
1574
+ const commits = historyProvider.getCommitHistory(
1575
+ input.repositoryPath,
1576
+ (event) => onProgress?.({ stage: "history", event })
1577
+ );
1434
1578
  const config = createEffectiveConfig(input.config);
1435
- return computeRepositoryEvolutionSummary(input.repositoryPath, commits, config);
1579
+ onProgress?.({ stage: "computing_metrics" });
1580
+ const summary = computeRepositoryEvolutionSummary(input.repositoryPath, commits, config);
1581
+ onProgress?.({ stage: "analysis_completed", available: summary.available });
1582
+ return summary;
1436
1583
  };
1437
1584
  var GitCommandError = class extends Error {
1438
1585
  args;
@@ -1459,6 +1606,11 @@ var ExecGitCommandClient = class {
1459
1606
  var COMMIT_RECORD_SEPARATOR = "";
1460
1607
  var COMMIT_FIELD_SEPARATOR = "";
1461
1608
  var GIT_LOG_FORMAT = `%x1e%H%x1f%at%x1f%an%x1f%ae`;
1609
+ var mapParseProgressToHistoryProgress = (event) => ({
1610
+ stage: "git_log_parse_progress",
1611
+ parsedRecords: event.parsedRecords,
1612
+ totalRecords: event.totalRecords
1613
+ });
1462
1614
  var parseInteger = (value) => {
1463
1615
  if (value.length === 0) {
1464
1616
  return null;
@@ -1521,10 +1673,10 @@ var parseNumstatLine = (line) => {
1521
1673
  deletions
1522
1674
  };
1523
1675
  };
1524
- var parseGitLog = (rawLog) => {
1676
+ var parseGitLog = (rawLog, onProgress) => {
1525
1677
  const records = rawLog.split(COMMIT_RECORD_SEPARATOR).map((record) => record.trim()).filter((record) => record.length > 0);
1526
1678
  const commits = [];
1527
- for (const record of records) {
1679
+ for (const [index, record] of records.entries()) {
1528
1680
  const lines = record.split("\n").map((line) => line.trimEnd()).filter((line) => line.length > 0);
1529
1681
  if (lines.length === 0) {
1530
1682
  continue;
@@ -1555,6 +1707,10 @@ var parseGitLog = (rawLog) => {
1555
1707
  authoredAtUnix,
1556
1708
  fileChanges
1557
1709
  });
1710
+ const parsedRecords = index + 1;
1711
+ if (parsedRecords === 1 || parsedRecords === records.length || parsedRecords % 500 === 0) {
1712
+ onProgress?.({ parsedRecords, totalRecords: records.length });
1713
+ }
1558
1714
  }
1559
1715
  commits.sort((a, b) => a.authoredAtUnix - b.authoredAtUnix || a.hash.localeCompare(b.hash));
1560
1716
  return commits;
@@ -1579,7 +1735,7 @@ var GitCliHistoryProvider = class {
1579
1735
  throw error;
1580
1736
  }
1581
1737
  }
1582
- getCommitHistory(repositoryPath) {
1738
+ getCommitHistory(repositoryPath, onProgress) {
1583
1739
  const output = this.gitClient.run(repositoryPath, [
1584
1740
  "-c",
1585
1741
  "core.quotepath=false",
@@ -1591,31 +1747,810 @@ var GitCliHistoryProvider = class {
1591
1747
  "--numstat",
1592
1748
  "--find-renames"
1593
1749
  ]);
1594
- return parseGitLog(output);
1750
+ onProgress?.({ stage: "git_log_received", bytes: Buffer.byteLength(output, "utf8") });
1751
+ const commits = parseGitLog(output, (event) => onProgress?.(mapParseProgressToHistoryProgress(event)));
1752
+ onProgress?.({ stage: "git_log_parsed", commits: commits.length });
1753
+ return commits;
1595
1754
  }
1596
1755
  };
1597
- var analyzeRepositoryEvolutionFromGit = (input) => {
1756
+ var analyzeRepositoryEvolutionFromGit = (input, onProgress) => {
1598
1757
  const historyProvider = new GitCliHistoryProvider(new ExecGitCommandClient());
1599
- return analyzeRepositoryEvolution(input, historyProvider);
1758
+ return analyzeRepositoryEvolution(input, historyProvider, onProgress);
1759
+ };
1760
+
1761
+ // ../risk-engine/dist/index.js
1762
+ var DEFAULT_RISK_ENGINE_CONFIG = {
1763
+ // Base dimensional influence. Risk is never dominated by a single dimension by default.
1764
+ dimensionWeights: {
1765
+ structural: 0.44,
1766
+ evolution: 0.36,
1767
+ external: 0.2
1768
+ },
1769
+ // Interaction terms activate only when both related dimensions are high.
1770
+ interactionWeights: {
1771
+ structuralEvolution: 0.35,
1772
+ centralInstability: 0.25,
1773
+ dependencyAmplification: 0.2
1774
+ },
1775
+ structuralFactorWeights: {
1776
+ fanIn: 0.3,
1777
+ fanOut: 0.25,
1778
+ depth: 0.2,
1779
+ cycleParticipation: 0.25
1780
+ },
1781
+ evolutionFactorWeights: {
1782
+ frequency: 0.26,
1783
+ churn: 0.24,
1784
+ recentVolatility: 0.2,
1785
+ ownershipConcentration: 0.18,
1786
+ busFactorRisk: 0.12
1787
+ },
1788
+ dependencyFactorWeights: {
1789
+ signals: 0.38,
1790
+ staleness: 0.16,
1791
+ maintainerConcentration: 0.16,
1792
+ transitiveBurden: 0.1,
1793
+ centrality: 0.08,
1794
+ chainDepth: 0.06,
1795
+ busFactorRisk: 0.06
1796
+ },
1797
+ quantileClamp: {
1798
+ lower: 0.05,
1799
+ upper: 0.95
1800
+ },
1801
+ hotspotTopPercent: 0.12,
1802
+ hotspotMinFiles: 3,
1803
+ hotspotMaxFiles: 30,
1804
+ couplingCluster: {
1805
+ minCoChangeCommits: 2,
1806
+ percentileThreshold: 0.9,
1807
+ floorScore: 0.35
1808
+ },
1809
+ amplificationZone: {
1810
+ pressureFloor: 0.2,
1811
+ percentileThreshold: 0.85,
1812
+ maxZones: 20
1813
+ },
1814
+ module: {
1815
+ maxPrefixSegments: 2,
1816
+ rootLabel: "(root)",
1817
+ commonSourceRoots: ["src", "lib", "app", "packages"]
1818
+ },
1819
+ dependencySignals: {
1820
+ inheritedSignalMultiplier: 0.45,
1821
+ // At this age, staleness reaches 50% risk.
1822
+ abandonedHalfLifeDays: 540,
1823
+ missingMetadataPenalty: 0.5
1824
+ },
1825
+ externalDimension: {
1826
+ topDependencyPercentile: 0.85,
1827
+ dependencyDepthHalfLife: 6
1828
+ }
1829
+ };
1830
+ var clamp01 = (value) => {
1831
+ if (Number.isNaN(value)) {
1832
+ return 0;
1833
+ }
1834
+ if (value <= 0) {
1835
+ return 0;
1836
+ }
1837
+ if (value >= 1) {
1838
+ return 1;
1839
+ }
1840
+ return value;
1841
+ };
1842
+ var round44 = (value) => Number(value.toFixed(4));
1843
+ var average = (values) => {
1844
+ if (values.length === 0) {
1845
+ return 0;
1846
+ }
1847
+ const total = values.reduce((sum, current) => sum + current, 0);
1848
+ return total / values.length;
1849
+ };
1850
+ var percentile = (values, p) => {
1851
+ if (values.length === 0) {
1852
+ return 0;
1853
+ }
1854
+ if (values.length === 1) {
1855
+ return values[0] ?? 0;
1856
+ }
1857
+ const sorted = [...values].sort((a, b) => a - b);
1858
+ const position = clamp01(p) * (sorted.length - 1);
1859
+ const lowerIndex = Math.floor(position);
1860
+ const upperIndex = Math.ceil(position);
1861
+ const lower = sorted[lowerIndex] ?? 0;
1862
+ const upper = sorted[upperIndex] ?? lower;
1863
+ if (lowerIndex === upperIndex) {
1864
+ return lower;
1865
+ }
1866
+ const ratio = position - lowerIndex;
1867
+ return lower + (upper - lower) * ratio;
1868
+ };
1869
+ var saturatingComposite = (baseline, amplifications) => {
1870
+ let value = clamp01(baseline);
1871
+ for (const amplification of amplifications) {
1872
+ const boundedAmplification = clamp01(amplification);
1873
+ value += (1 - value) * boundedAmplification;
1874
+ }
1875
+ return clamp01(value);
1876
+ };
1877
+ var halfLifeRisk = (value, halfLife) => {
1878
+ if (value <= 0 || halfLife <= 0) {
1879
+ return 0;
1880
+ }
1881
+ return clamp01(value / (value + halfLife));
1882
+ };
1883
+ var normalizeWeights = (weights, enabled) => {
1884
+ let total = 0;
1885
+ const result = { ...weights };
1886
+ for (const key of Object.keys(result)) {
1887
+ const enabledValue = enabled[key];
1888
+ if (!enabledValue) {
1889
+ result[key] = 0;
1890
+ continue;
1891
+ }
1892
+ const value = Math.max(0, result[key]);
1893
+ result[key] = value;
1894
+ total += value;
1895
+ }
1896
+ if (total === 0) {
1897
+ const activeKeys = Object.keys(result).filter((key) => enabled[key]);
1898
+ if (activeKeys.length === 0) {
1899
+ return result;
1900
+ }
1901
+ const uniform = 1 / activeKeys.length;
1902
+ for (const key of activeKeys) {
1903
+ result[key] = uniform;
1904
+ }
1905
+ return result;
1906
+ }
1907
+ for (const key of Object.keys(result)) {
1908
+ if (enabled[key]) {
1909
+ result[key] = result[key] / total;
1910
+ }
1911
+ }
1912
+ return result;
1913
+ };
1914
+ var logScale = (value) => Math.log1p(Math.max(0, value));
1915
+ var buildQuantileScale = (values, lowerPercentile, upperPercentile) => {
1916
+ if (values.length === 0) {
1917
+ return { lower: 0, upper: 0 };
1918
+ }
1919
+ return {
1920
+ lower: percentile(values, lowerPercentile),
1921
+ upper: percentile(values, upperPercentile)
1922
+ };
1923
+ };
1924
+ var normalizeWithScale = (value, scale) => {
1925
+ if (scale.upper <= scale.lower) {
1926
+ return value > 0 ? 1 : 0;
1927
+ }
1928
+ return clamp01((value - scale.lower) / (scale.upper - scale.lower));
1929
+ };
1930
+ var normalizePath2 = (path) => path.replaceAll("\\", "/");
1931
+ var dependencySignalWeights = {
1932
+ single_maintainer: 0.3,
1933
+ abandoned: 0.3,
1934
+ high_centrality: 0.16,
1935
+ deep_chain: 0.14,
1936
+ high_fanout: 0.06,
1937
+ metadata_unavailable: 0.04
1938
+ };
1939
+ var dependencySignalWeightBudget = Object.values(dependencySignalWeights).reduce(
1940
+ (sum, value) => sum + value,
1941
+ 0
1942
+ );
1943
+ var computeDependencySignalScore = (ownSignals, inheritedSignals, inheritedSignalMultiplier) => {
1944
+ const ownWeight = ownSignals.reduce((sum, signal) => sum + (dependencySignalWeights[signal] ?? 0), 0);
1945
+ const inheritedWeight = inheritedSignals.reduce(
1946
+ (sum, signal) => sum + (dependencySignalWeights[signal] ?? 0),
1947
+ 0
1948
+ );
1949
+ const weightedTotal = ownWeight + inheritedWeight * inheritedSignalMultiplier;
1950
+ const maxWeightedTotal = dependencySignalWeightBudget * (1 + inheritedSignalMultiplier);
1951
+ if (maxWeightedTotal <= 0) {
1952
+ return 0;
1953
+ }
1954
+ return clamp01(weightedTotal / maxWeightedTotal);
1955
+ };
1956
+ var computeDependencyScores = (external, config) => {
1957
+ if (!external.available) {
1958
+ return {
1959
+ dependencyScores: [],
1960
+ repositoryExternalPressure: 0
1961
+ };
1962
+ }
1963
+ const transitiveCounts = external.dependencies.map(
1964
+ (dependency) => logScale(dependency.transitiveDependencies.length)
1965
+ );
1966
+ const dependentCounts = external.dependencies.map((dependency) => logScale(dependency.dependents));
1967
+ const chainDepths = external.dependencies.map((dependency) => dependency.dependencyDepth);
1968
+ const transitiveScale = buildQuantileScale(
1969
+ transitiveCounts,
1970
+ config.quantileClamp.lower,
1971
+ config.quantileClamp.upper
1972
+ );
1973
+ const dependentScale = buildQuantileScale(
1974
+ dependentCounts,
1975
+ config.quantileClamp.lower,
1976
+ config.quantileClamp.upper
1977
+ );
1978
+ const chainDepthScale = buildQuantileScale(
1979
+ chainDepths,
1980
+ config.quantileClamp.lower,
1981
+ config.quantileClamp.upper
1982
+ );
1983
+ const dependencyScores = external.dependencies.map((dependency) => {
1984
+ const signalScore = computeDependencySignalScore(
1985
+ dependency.ownRiskSignals,
1986
+ dependency.inheritedRiskSignals,
1987
+ config.dependencySignals.inheritedSignalMultiplier
1988
+ );
1989
+ const maintainerConcentrationRisk = dependency.maintainerCount === null ? config.dependencySignals.missingMetadataPenalty : clamp01(1 / Math.max(1, dependency.maintainerCount));
1990
+ const stalenessRisk = dependency.daysSinceLastRelease === null ? config.dependencySignals.missingMetadataPenalty : halfLifeRisk(
1991
+ dependency.daysSinceLastRelease,
1992
+ config.dependencySignals.abandonedHalfLifeDays
1993
+ );
1994
+ const transitiveBurdenRisk = normalizeWithScale(
1995
+ logScale(dependency.transitiveDependencies.length),
1996
+ transitiveScale
1997
+ );
1998
+ const centralityRisk = normalizeWithScale(logScale(dependency.dependents), dependentScale);
1999
+ const chainDepthRisk = normalizeWithScale(dependency.dependencyDepth, chainDepthScale);
2000
+ const busFactorRisk = dependency.busFactor === null ? config.dependencySignals.missingMetadataPenalty : clamp01(1 / Math.max(1, dependency.busFactor));
2001
+ const weights = config.dependencyFactorWeights;
2002
+ const normalizedScore = clamp01(
2003
+ signalScore * weights.signals + stalenessRisk * weights.staleness + maintainerConcentrationRisk * weights.maintainerConcentration + transitiveBurdenRisk * weights.transitiveBurden + centralityRisk * weights.centrality + chainDepthRisk * weights.chainDepth + busFactorRisk * weights.busFactorRisk
2004
+ );
2005
+ return {
2006
+ dependency: dependency.name,
2007
+ score: round44(normalizedScore * 100),
2008
+ normalizedScore: round44(normalizedScore),
2009
+ ownRiskSignals: dependency.ownRiskSignals,
2010
+ inheritedRiskSignals: dependency.inheritedRiskSignals
2011
+ };
2012
+ }).sort(
2013
+ (a, b) => b.normalizedScore - a.normalizedScore || a.dependency.localeCompare(b.dependency)
2014
+ );
2015
+ const normalizedValues = dependencyScores.map((score) => score.normalizedScore);
2016
+ const highDependencyRisk = dependencyScores.length === 0 ? 0 : percentile(normalizedValues, config.externalDimension.topDependencyPercentile);
2017
+ const averageDependencyRisk = average(normalizedValues);
2018
+ const depthRisk = halfLifeRisk(
2019
+ external.metrics.dependencyDepth,
2020
+ config.externalDimension.dependencyDepthHalfLife
2021
+ );
2022
+ const repositoryExternalPressure = clamp01(
2023
+ highDependencyRisk * 0.5 + averageDependencyRisk * 0.3 + depthRisk * 0.2
2024
+ );
2025
+ return {
2026
+ dependencyScores,
2027
+ repositoryExternalPressure: round44(repositoryExternalPressure)
2028
+ };
2029
+ };
2030
+ var mapEvolutionByFile = (evolution) => {
2031
+ if (!evolution.available) {
2032
+ return /* @__PURE__ */ new Map();
2033
+ }
2034
+ return new Map(
2035
+ evolution.files.map((fileMetrics) => [normalizePath2(fileMetrics.filePath), fileMetrics])
2036
+ );
2037
+ };
2038
+ var computeEvolutionScales = (evolutionByFile, config) => {
2039
+ const evolutionFiles = [...evolutionByFile.values()];
2040
+ return {
2041
+ commitCount: buildQuantileScale(
2042
+ evolutionFiles.map((metrics) => logScale(metrics.commitCount)),
2043
+ config.quantileClamp.lower,
2044
+ config.quantileClamp.upper
2045
+ ),
2046
+ churnTotal: buildQuantileScale(
2047
+ evolutionFiles.map((metrics) => logScale(metrics.churnTotal)),
2048
+ config.quantileClamp.lower,
2049
+ config.quantileClamp.upper
2050
+ ),
2051
+ busFactor: buildQuantileScale(
2052
+ evolutionFiles.map((metrics) => metrics.busFactor),
2053
+ config.quantileClamp.lower,
2054
+ config.quantileClamp.upper
2055
+ )
2056
+ };
2057
+ };
2058
+ var inferModuleName = (filePath, config) => {
2059
+ const normalized = normalizePath2(filePath);
2060
+ const parts = normalized.split("/").filter((part) => part.length > 0);
2061
+ if (parts.length <= 1) {
2062
+ return config.module.rootLabel;
2063
+ }
2064
+ const first = parts[0];
2065
+ if (first === void 0) {
2066
+ return config.module.rootLabel;
2067
+ }
2068
+ if (!config.module.commonSourceRoots.includes(first)) {
2069
+ return first;
2070
+ }
2071
+ if (parts.length <= config.module.maxPrefixSegments) {
2072
+ return first;
2073
+ }
2074
+ return parts.slice(0, config.module.maxPrefixSegments).join("/");
2075
+ };
2076
+ var buildFragileClusters = (structural, evolution, fileScoresByFile, config) => {
2077
+ const clusters = [];
2078
+ let cycleClusterCount = 0;
2079
+ for (const cycle of structural.cycles) {
2080
+ const files = [...new Set(cycle.nodes.map((node) => normalizePath2(node)))].filter(
2081
+ (filePath) => fileScoresByFile.has(filePath)
2082
+ );
2083
+ if (files.length < 2) {
2084
+ continue;
2085
+ }
2086
+ files.sort((a, b) => a.localeCompare(b));
2087
+ const averageRisk = average(
2088
+ files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
2089
+ );
2090
+ const cycleSizeRisk = clamp01((files.length - 1) / 5);
2091
+ const score = round44(clamp01(averageRisk * 0.75 + cycleSizeRisk * 0.25) * 100);
2092
+ cycleClusterCount += 1;
2093
+ clusters.push({
2094
+ id: `cycle:${cycleClusterCount}`,
2095
+ kind: "structural_cycle",
2096
+ files,
2097
+ score
2098
+ });
2099
+ }
2100
+ if (evolution.available && evolution.coupling.pairs.length > 0) {
2101
+ const candidates = evolution.coupling.pairs.filter(
2102
+ (pair) => pair.coChangeCommits >= config.couplingCluster.minCoChangeCommits
2103
+ );
2104
+ const threshold = Math.max(
2105
+ config.couplingCluster.floorScore,
2106
+ percentile(
2107
+ candidates.map((pair) => pair.couplingScore),
2108
+ config.couplingCluster.percentileThreshold
2109
+ )
2110
+ );
2111
+ const selectedPairs = candidates.filter((pair) => pair.couplingScore >= threshold).map((pair) => ({
2112
+ fileA: normalizePath2(pair.fileA),
2113
+ fileB: normalizePath2(pair.fileB),
2114
+ couplingScore: pair.couplingScore
2115
+ })).filter(
2116
+ (pair) => pair.fileA !== pair.fileB && fileScoresByFile.has(pair.fileA) && fileScoresByFile.has(pair.fileB)
2117
+ );
2118
+ const adjacency = /* @__PURE__ */ new Map();
2119
+ for (const pair of selectedPairs) {
2120
+ const aNeighbors = adjacency.get(pair.fileA) ?? /* @__PURE__ */ new Set();
2121
+ aNeighbors.add(pair.fileB);
2122
+ adjacency.set(pair.fileA, aNeighbors);
2123
+ const bNeighbors = adjacency.get(pair.fileB) ?? /* @__PURE__ */ new Set();
2124
+ bNeighbors.add(pair.fileA);
2125
+ adjacency.set(pair.fileB, bNeighbors);
2126
+ }
2127
+ const visited = /* @__PURE__ */ new Set();
2128
+ let couplingClusterCount = 0;
2129
+ const orderedStarts = [...adjacency.keys()].sort((a, b) => a.localeCompare(b));
2130
+ for (const start of orderedStarts) {
2131
+ if (visited.has(start)) {
2132
+ continue;
2133
+ }
2134
+ const stack = [start];
2135
+ const files = [];
2136
+ while (stack.length > 0) {
2137
+ const current = stack.pop();
2138
+ if (current === void 0 || visited.has(current)) {
2139
+ continue;
2140
+ }
2141
+ visited.add(current);
2142
+ files.push(current);
2143
+ const neighbors = adjacency.get(current);
2144
+ if (neighbors === void 0) {
2145
+ continue;
2146
+ }
2147
+ for (const neighbor of neighbors) {
2148
+ if (!visited.has(neighbor)) {
2149
+ stack.push(neighbor);
2150
+ }
2151
+ }
2152
+ }
2153
+ if (files.length < 2) {
2154
+ continue;
2155
+ }
2156
+ files.sort((a, b) => a.localeCompare(b));
2157
+ const fileSet = new Set(files);
2158
+ const componentPairs = selectedPairs.filter(
2159
+ (pair) => fileSet.has(pair.fileA) && fileSet.has(pair.fileB)
2160
+ );
2161
+ const meanFileRisk = average(
2162
+ files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
2163
+ );
2164
+ const meanCoupling = average(componentPairs.map((pair) => pair.couplingScore));
2165
+ const score = round44(clamp01(meanFileRisk * 0.65 + meanCoupling * 0.35) * 100);
2166
+ couplingClusterCount += 1;
2167
+ clusters.push({
2168
+ id: `coupling:${couplingClusterCount}`,
2169
+ kind: "change_coupling",
2170
+ files,
2171
+ score
2172
+ });
2173
+ }
2174
+ }
2175
+ return clusters.sort(
2176
+ (a, b) => b.score - a.score || a.kind.localeCompare(b.kind) || a.id.localeCompare(b.id)
2177
+ );
2178
+ };
2179
+ var computeRiskSummary = (structural, evolution, external, config) => {
2180
+ const dependencyComputation = computeDependencyScores(external, config);
2181
+ const evolutionByFile = mapEvolutionByFile(evolution);
2182
+ const evolutionScales = computeEvolutionScales(evolutionByFile, config);
2183
+ const cycleFileSet = new Set(
2184
+ structural.cycles.flatMap((cycle) => cycle.nodes.map((node) => normalizePath2(node)))
2185
+ );
2186
+ const fanInScale = buildQuantileScale(
2187
+ structural.files.map((file) => logScale(file.fanIn)),
2188
+ config.quantileClamp.lower,
2189
+ config.quantileClamp.upper
2190
+ );
2191
+ const fanOutScale = buildQuantileScale(
2192
+ structural.files.map((file) => logScale(file.fanOut)),
2193
+ config.quantileClamp.lower,
2194
+ config.quantileClamp.upper
2195
+ );
2196
+ const depthScale = buildQuantileScale(
2197
+ structural.files.map((file) => file.depth),
2198
+ config.quantileClamp.lower,
2199
+ config.quantileClamp.upper
2200
+ );
2201
+ const dimensionWeights = normalizeWeights(config.dimensionWeights, {
2202
+ structural: true,
2203
+ evolution: evolution.available,
2204
+ external: external.available
2205
+ });
2206
+ const fileRiskContexts = structural.files.map((file) => {
2207
+ const filePath = normalizePath2(file.id);
2208
+ const inCycle = cycleFileSet.has(filePath) ? 1 : 0;
2209
+ const fanInRisk = normalizeWithScale(logScale(file.fanIn), fanInScale);
2210
+ const fanOutRisk = normalizeWithScale(logScale(file.fanOut), fanOutScale);
2211
+ const depthRisk = normalizeWithScale(file.depth, depthScale);
2212
+ const structuralWeights = config.structuralFactorWeights;
2213
+ const structuralFactor = clamp01(
2214
+ fanInRisk * structuralWeights.fanIn + fanOutRisk * structuralWeights.fanOut + depthRisk * structuralWeights.depth + inCycle * structuralWeights.cycleParticipation
2215
+ );
2216
+ const structuralCentrality = clamp01((fanInRisk + fanOutRisk) / 2);
2217
+ let evolutionFactor = 0;
2218
+ const evolutionMetrics = evolutionByFile.get(filePath);
2219
+ if (evolution.available && evolutionMetrics !== void 0) {
2220
+ const frequencyRisk = normalizeWithScale(
2221
+ logScale(evolutionMetrics.commitCount),
2222
+ evolutionScales.commitCount
2223
+ );
2224
+ const churnRisk = normalizeWithScale(
2225
+ logScale(evolutionMetrics.churnTotal),
2226
+ evolutionScales.churnTotal
2227
+ );
2228
+ const volatilityRisk = clamp01(evolutionMetrics.recentVolatility);
2229
+ const ownershipConcentrationRisk = clamp01(evolutionMetrics.topAuthorShare);
2230
+ const busFactorRisk = clamp01(1 - normalizeWithScale(evolutionMetrics.busFactor, evolutionScales.busFactor));
2231
+ const evolutionWeights = config.evolutionFactorWeights;
2232
+ evolutionFactor = clamp01(
2233
+ frequencyRisk * evolutionWeights.frequency + churnRisk * evolutionWeights.churn + volatilityRisk * evolutionWeights.recentVolatility + ownershipConcentrationRisk * evolutionWeights.ownershipConcentration + busFactorRisk * evolutionWeights.busFactorRisk
2234
+ );
2235
+ }
2236
+ const dependencyAffinity = clamp01(structuralCentrality * 0.6 + evolutionFactor * 0.4);
2237
+ const externalFactor = external.available ? clamp01(dependencyComputation.repositoryExternalPressure * dependencyAffinity) : 0;
2238
+ const baseline = structuralFactor * dimensionWeights.structural + evolutionFactor * dimensionWeights.evolution + externalFactor * dimensionWeights.external;
2239
+ const interactions = [
2240
+ structuralFactor * evolutionFactor * config.interactionWeights.structuralEvolution,
2241
+ structuralCentrality * evolutionFactor * config.interactionWeights.centralInstability,
2242
+ externalFactor * Math.max(structuralFactor, evolutionFactor) * config.interactionWeights.dependencyAmplification
2243
+ ];
2244
+ const normalizedScore = saturatingComposite(baseline, interactions);
2245
+ return {
2246
+ file: filePath,
2247
+ score: round44(normalizedScore * 100),
2248
+ normalizedScore: round44(normalizedScore),
2249
+ factors: {
2250
+ structural: round44(structuralFactor),
2251
+ evolution: round44(evolutionFactor),
2252
+ external: round44(externalFactor)
2253
+ },
2254
+ structuralCentrality: round44(structuralCentrality)
2255
+ };
2256
+ }).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file));
2257
+ const fileScores = fileRiskContexts.map((context) => ({
2258
+ file: context.file,
2259
+ score: context.score,
2260
+ normalizedScore: context.normalizedScore,
2261
+ factors: context.factors
2262
+ }));
2263
+ const fileScoresByFile = new Map(fileScores.map((fileScore) => [fileScore.file, fileScore]));
2264
+ const hotspotsCount = Math.min(
2265
+ config.hotspotMaxFiles,
2266
+ Math.max(config.hotspotMinFiles, Math.ceil(fileScores.length * config.hotspotTopPercent))
2267
+ );
2268
+ const hotspots = fileScores.slice(0, hotspotsCount).map((fileScore) => ({
2269
+ file: fileScore.file,
2270
+ score: fileScore.score,
2271
+ factors: fileScore.factors
2272
+ }));
2273
+ const moduleFiles = /* @__PURE__ */ new Map();
2274
+ for (const fileScore of fileScores) {
2275
+ const moduleName = inferModuleName(fileScore.file, config);
2276
+ const values = moduleFiles.get(moduleName) ?? [];
2277
+ values.push(fileScore.normalizedScore);
2278
+ moduleFiles.set(moduleName, values);
2279
+ }
2280
+ const moduleScores = [...moduleFiles.entries()].map(([module, values]) => {
2281
+ const averageScore = average(values);
2282
+ const peakScore = values.reduce((max, value) => Math.max(max, value), 0);
2283
+ const normalizedScore = clamp01(averageScore * 0.65 + peakScore * 0.35);
2284
+ return {
2285
+ module,
2286
+ score: round44(normalizedScore * 100),
2287
+ normalizedScore: round44(normalizedScore),
2288
+ fileCount: values.length
2289
+ };
2290
+ }).sort((a, b) => b.score - a.score || a.module.localeCompare(b.module));
2291
+ const fragileClusters = buildFragileClusters(structural, evolution, fileScoresByFile, config);
2292
+ const externalPressures = fileScores.map((fileScore) => fileScore.factors.external);
2293
+ const pressureThreshold = Math.max(
2294
+ config.amplificationZone.pressureFloor,
2295
+ percentile(externalPressures, config.amplificationZone.percentileThreshold)
2296
+ );
2297
+ const dependencyAmplificationZones = fileScores.map((fileScore) => {
2298
+ const intensity = clamp01(
2299
+ fileScore.factors.external * Math.max(fileScore.factors.structural, fileScore.factors.evolution)
2300
+ );
2301
+ const normalizedZoneScore = clamp01(intensity * 0.7 + fileScore.normalizedScore * 0.3);
2302
+ return {
2303
+ file: fileScore.file,
2304
+ score: round44(normalizedZoneScore * 100),
2305
+ externalPressure: fileScore.factors.external
2306
+ };
2307
+ }).filter((zone) => external.available && zone.externalPressure >= pressureThreshold).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file)).slice(0, config.amplificationZone.maxZones).map((zone) => ({
2308
+ ...zone,
2309
+ externalPressure: round44(zone.externalPressure)
2310
+ }));
2311
+ const structuralDimension = average(fileScores.map((fileScore) => fileScore.factors.structural));
2312
+ const evolutionDimension = average(fileScores.map((fileScore) => fileScore.factors.evolution));
2313
+ const externalDimension = dependencyComputation.repositoryExternalPressure;
2314
+ const topCentralSlice = Math.max(1, Math.ceil(fileRiskContexts.length * 0.1));
2315
+ const criticalInstability = average(
2316
+ [...fileRiskContexts].sort(
2317
+ (a, b) => b.structuralCentrality * b.factors.evolution - a.structuralCentrality * a.factors.evolution || a.file.localeCompare(b.file)
2318
+ ).slice(0, topCentralSlice).map((context) => context.structuralCentrality * context.factors.evolution)
2319
+ );
2320
+ const dependencyAmplification = average(
2321
+ dependencyAmplificationZones.map(
2322
+ (zone) => clamp01(zone.externalPressure * zone.score / 100)
2323
+ )
2324
+ );
2325
+ const repositoryBaseline = structuralDimension * dimensionWeights.structural + evolutionDimension * dimensionWeights.evolution + externalDimension * dimensionWeights.external;
2326
+ const repositoryNormalizedScore = saturatingComposite(repositoryBaseline, [
2327
+ structuralDimension * evolutionDimension * config.interactionWeights.structuralEvolution,
2328
+ criticalInstability * config.interactionWeights.centralInstability,
2329
+ dependencyAmplification * config.interactionWeights.dependencyAmplification
2330
+ ]);
2331
+ return {
2332
+ repositoryScore: round44(repositoryNormalizedScore * 100),
2333
+ normalizedScore: round44(repositoryNormalizedScore),
2334
+ hotspots,
2335
+ fragileClusters,
2336
+ dependencyAmplificationZones,
2337
+ fileScores,
2338
+ moduleScores,
2339
+ dependencyScores: dependencyComputation.dependencyScores
2340
+ };
2341
+ };
2342
// Merge caller-supplied risk-engine overrides onto the defaults.
// Top-level scalar overrides replace the default outright, while each nested
// weight/threshold group is shallow-merged field-by-field so a caller can
// override a single value without restating the whole group.
var mergeConfig = (overrides) => {
  if (overrides === void 0) {
    return DEFAULT_RISK_ENGINE_CONFIG;
  }
  // Every nested group that must be merged per-field rather than replaced.
  const nestedGroups = [
    "dimensionWeights",
    "interactionWeights",
    "structuralFactorWeights",
    "evolutionFactorWeights",
    "dependencyFactorWeights",
    "quantileClamp",
    "couplingCluster",
    "amplificationZone",
    "module",
    "dependencySignals",
    "externalDimension"
  ];
  const merged = { ...DEFAULT_RISK_ENGINE_CONFIG, ...overrides };
  for (const group of nestedGroups) {
    // Spreading an undefined override group is a no-op, so missing groups
    // simply keep all default fields.
    merged[group] = { ...DEFAULT_RISK_ENGINE_CONFIG[group], ...overrides[group] };
  }
  return merged;
};
2395
// Public entry point for repository risk scoring: resolve the effective
// configuration (caller overrides layered onto defaults) and delegate to the
// core scoring routine.
var computeRepositoryRiskSummary = (input) =>
  computeRiskSummary(input.structural, input.evolution, input.external, mergeConfig(input.config));
1601
2399
 
1602
2400
  // src/application/run-analyze-command.ts
1603
2401
// Resolve the analysis target to an absolute path; an omitted path means
// "the invocation directory itself".
var resolveTargetPath = (inputPath, cwd) => {
  const relativeTarget = inputPath ?? ".";
  return resolve2(cwd, relativeTarget);
};
1604
- var runAnalyzeCommand = async (inputPath, authorIdentityMode) => {
2402
// Build a progress callback that translates external-dependency analysis
// events into log lines. Metadata-fetch progress is throttled: only the
// first completion, the final completion, and every 25th package since the
// last logged line produce output.
var createExternalProgressReporter = (logger) => {
  let lastLoggedProgress = 0;
  return (event) => {
    const { stage } = event;
    if (stage === "package_json_loaded") {
      logger.debug("external: package.json loaded");
    } else if (stage === "lockfile_selected") {
      logger.info(`external: lockfile selected (${event.kind})`);
    } else if (stage === "lockfile_parsed") {
      logger.info(
        `external: parsed ${event.dependencyNodes} locked dependencies (${event.directDependencies} direct)`
      );
    } else if (stage === "metadata_fetch_started") {
      logger.info(`external: fetching dependency metadata (${event.total} packages)`);
    } else if (stage === "metadata_fetch_progress") {
      const worthLogging =
        event.completed === event.total ||
        event.completed === 1 ||
        event.completed - lastLoggedProgress >= 25;
      if (worthLogging) {
        lastLoggedProgress = event.completed;
        // Guard against division by zero when there is nothing to fetch.
        const percent = event.total === 0 ? 100 : Math.floor((event.completed / event.total) * 100);
        logger.info(
          `external: metadata progress ${event.completed}/${event.total} (${percent}%)`
        );
        logger.debug(`external: last package processed ${event.packageName}`);
      }
    } else if (stage === "metadata_fetch_completed") {
      logger.info(`external: metadata fetch completed (${event.total} packages)`);
    } else if (stage === "summary_built") {
      logger.info(
        `external: summary built (${event.totalDependencies} total, ${event.directDependencies} direct)`
      );
    }
  };
};
2442
// Build a progress callback for the structural (TypeScript graph) analysis.
// Per-file progress is throttled: only the first file, the last file, and
// every 50th file since the last logged line produce output.
var createStructuralProgressReporter = (logger) => {
  let lastProcessed = 0;
  const handlers = {
    files_discovered: (event) => {
      logger.info(`structural: discovered ${event.totalSourceFiles} source files`);
    },
    program_created: (event) => {
      logger.debug(`structural: TypeScript program created (${event.totalSourceFiles} files)`);
    },
    file_processed: (event) => {
      const worthLogging =
        event.processed === event.total ||
        event.processed === 1 ||
        event.processed - lastProcessed >= 50;
      if (!worthLogging) {
        return;
      }
      lastProcessed = event.processed;
      logger.info(`structural: resolved ${event.processed}/${event.total} files`);
      logger.debug(`structural: last file processed ${event.filePath}`);
    },
    edges_resolved: (event) => {
      logger.info(`structural: resolved ${event.totalEdges} dependency edges`);
    }
  };
  return (event) => {
    const handler = handlers[event.stage];
    if (handler !== undefined) {
      handler(event);
    }
  };
};
2465
// Build a progress callback for the git-evolution analysis. Git-history
// sub-events arrive wrapped in a "history" event and carry their own stage
// under event.event; parse progress is throttled to the first record, the
// final record, and every 500th record since the last logged line.
var createEvolutionProgressReporter = (logger) => {
  let lastParsedRecords = 0;
  // Handle the nested git-history event stream.
  const reportHistoryEvent = (historyEvent) => {
    if (historyEvent.stage === "git_log_received") {
      logger.info(`evolution: git log loaded (${historyEvent.bytes} bytes)`);
      return;
    }
    if (historyEvent.stage === "git_log_parsed") {
      logger.info(`evolution: parsed ${historyEvent.commits} commits`);
      return;
    }
    if (historyEvent.stage !== "git_log_parse_progress") {
      return;
    }
    const worthLogging =
      historyEvent.parsedRecords === historyEvent.totalRecords ||
      historyEvent.parsedRecords === 1 ||
      historyEvent.parsedRecords - lastParsedRecords >= 500;
    if (!worthLogging) {
      return;
    }
    lastParsedRecords = historyEvent.parsedRecords;
    // Guard against division by zero for an empty history.
    const percent = historyEvent.totalRecords === 0
      ? 100
      : Math.floor((historyEvent.parsedRecords / historyEvent.totalRecords) * 100);
    logger.info(
      `evolution: parse progress ${historyEvent.parsedRecords}/${historyEvent.totalRecords} (${percent}%)`
    );
  };
  return (event) => {
    if (event.stage === "checking_git_repository") {
      logger.debug("evolution: checking git repository");
    } else if (event.stage === "not_git_repository") {
      logger.warn("evolution: target path is not a git repository");
    } else if (event.stage === "loading_commit_history") {
      logger.info("evolution: loading git history");
    } else if (event.stage === "history") {
      reportHistoryEvent(event.event);
    } else if (event.stage === "computing_metrics") {
      logger.info("evolution: computing metrics");
    } else if (event.stage === "analysis_completed") {
      logger.debug(`evolution: analysis completed (available=${event.available})`);
    }
  };
};
2504
// Run the full analysis pipeline (structural -> evolution -> external -> risk)
// for the given target path and return the combined summary object.
// The logger defaults to a silent one so programmatic callers stay quiet;
// the CLI passes a stderr logger for progress reporting.
var runAnalyzeCommand = async (inputPath, authorIdentityMode, logger = createSilentLogger()) => {
  // Prefer INIT_CWD (set by package managers) so the target resolves relative
  // to where the user actually invoked the command.
  const cwd = process.env["INIT_CWD"] ?? process.cwd();
  const targetPath = resolveTargetPath(inputPath, cwd);
  logger.info(`analyzing repository: ${targetPath}`);
  logger.info("building structural graph");
  const structural = buildProjectGraphSummary({
    projectPath: targetPath,
    onProgress: createStructuralProgressReporter(logger)
  });
  logger.debug(
    `structural metrics: nodes=${structural.metrics.nodeCount}, edges=${structural.metrics.edgeCount}, cycles=${structural.metrics.cycleCount}`
  );
  logger.info(`analyzing git evolution (author identity: ${authorIdentityMode})`);
  const evolution = analyzeRepositoryEvolutionFromGit(
    { repositoryPath: targetPath, config: { authorIdentityMode } },
    createEvolutionProgressReporter(logger)
  );
  if (evolution.available) {
    logger.debug(
      `evolution metrics: commits=${evolution.metrics.totalCommits}, files=${evolution.metrics.totalFiles}, hotspotThreshold=${evolution.metrics.hotspotThresholdCommitCount}`
    );
  } else {
    logger.warn(`evolution analysis unavailable: ${evolution.reason}`);
  }
  logger.info("analyzing external dependencies");
  const external = await analyzeDependencyExposureFromProject(
    { repositoryPath: targetPath },
    createExternalProgressReporter(logger)
  );
  if (external.available) {
    logger.debug(
      `external metrics: total=${external.metrics.totalDependencies}, direct=${external.metrics.directDependencies}, transitive=${external.metrics.transitiveDependencies}`
    );
  } else {
    logger.warn(`external analysis unavailable: ${external.reason}`);
  }
  logger.info("computing risk summary");
  const risk = computeRepositoryRiskSummary({ structural, evolution, external });
  logger.info(`analysis completed (repositoryScore=${risk.repositoryScore})`);
  return { structural, evolution, external, risk };
};
1620
2555
 
1621
2556
  // src/index.ts
@@ -1628,14 +2563,35 @@ program.command("analyze").argument("[path]", "path to the project to analyze").
1628
2563
  "--author-identity <mode>",
1629
2564
  "author identity mode: likely_merge (heuristic) or strict_email (deterministic)"
1630
2565
  ).choices(["likely_merge", "strict_email"]).default("likely_merge")
1631
- ).action(async (path, options) => {
1632
- const output = await runAnalyzeCommand(path, options.authorIdentity);
1633
- process.stdout.write(`${output}
2566
+ ).addOption(
2567
+ new Option(
2568
+ "--log-level <level>",
2569
+ "log verbosity: silent, error, warn, info, debug (logs are written to stderr)"
2570
+ ).choices(["silent", "error", "warn", "info", "debug"]).default(parseLogLevel(process.env["CODESENTINEL_LOG_LEVEL"]))
2571
+ ).addOption(
2572
+ new Option(
2573
+ "--output <mode>",
2574
+ "output mode: summary (default) or json (full analysis object)"
2575
+ ).choices(["summary", "json"]).default("summary")
2576
+ ).option("--json", "shortcut for --output json").action(
2577
+ async (path, options) => {
2578
+ const logger = createStderrLogger(options.logLevel);
2579
+ const summary = await runAnalyzeCommand(path, options.authorIdentity, logger);
2580
+ const outputMode = options.json === true ? "json" : options.output;
2581
+ process.stdout.write(`${formatAnalyzeOutput(summary, outputMode)}
1634
2582
  `);
1635
- });
2583
+ }
2584
+ );
1636
2585
  if (process.argv.length <= 2) {
1637
2586
  program.outputHelp();
1638
2587
  process.exit(0);
1639
2588
  }
1640
- await program.parseAsync(process.argv);
2589
+ var executablePath = process.argv[0] ?? "";
2590
+ var scriptPath = process.argv[1] ?? "";
2591
+ var argv = process.argv[2] === "--" ? [executablePath, scriptPath, ...process.argv.slice(3)] : process.argv;
2592
+ if (argv.length <= 2) {
2593
+ program.outputHelp();
2594
+ process.exit(0);
2595
+ }
2596
+ await program.parseAsync(argv);
1641
2597
  //# sourceMappingURL=index.js.map