@codesentinel/codesentinel 1.1.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,11 +1,109 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  // src/index.ts
4
- import { Command } from "commander";
5
- import { readFileSync } from "fs";
4
+ import { Command, Option } from "commander";
5
+ import { readFileSync as readFileSync2 } from "fs";
6
6
  import { dirname, resolve as resolve3 } from "path";
7
7
  import { fileURLToPath } from "url";
8
8
 
9
+ // src/application/format-analyze-output.ts
10
+ var createSummaryShape = (summary) => ({
11
+ targetPath: summary.structural.targetPath,
12
+ structural: summary.structural.metrics,
13
+ evolution: summary.evolution.available ? {
14
+ available: true,
15
+ metrics: summary.evolution.metrics,
16
+ hotspotsTop: summary.evolution.hotspots.slice(0, 5).map((hotspot) => hotspot.filePath)
17
+ } : {
18
+ available: false,
19
+ reason: summary.evolution.reason
20
+ },
21
+ external: summary.external.available ? {
22
+ available: true,
23
+ metrics: summary.external.metrics,
24
+ highRiskDependenciesTop: summary.external.highRiskDependencies.slice(0, 10)
25
+ } : {
26
+ available: false,
27
+ reason: summary.external.reason
28
+ },
29
+ risk: {
30
+ repositoryScore: summary.risk.repositoryScore,
31
+ normalizedScore: summary.risk.normalizedScore,
32
+ hotspotsTop: summary.risk.hotspots.slice(0, 5).map((hotspot) => ({
33
+ file: hotspot.file,
34
+ score: hotspot.score
35
+ })),
36
+ fragileClusterCount: summary.risk.fragileClusters.length,
37
+ dependencyAmplificationZoneCount: summary.risk.dependencyAmplificationZones.length
38
+ }
39
+ });
40
+ var formatAnalyzeOutput = (summary, mode) => mode === "json" ? JSON.stringify(summary, null, 2) : JSON.stringify(createSummaryShape(summary), null, 2);
41
+
42
+ // src/application/logger.ts
43
+ var logLevelRank = {
44
+ error: 0,
45
+ warn: 1,
46
+ info: 2,
47
+ debug: 3
48
+ };
49
+ var noop = () => {
50
+ };
51
+ var createSilentLogger = () => ({
52
+ error: noop,
53
+ warn: noop,
54
+ info: noop,
55
+ debug: noop
56
+ });
57
+ var shouldLog = (configuredLevel, messageLevel) => {
58
+ if (configuredLevel === "silent") {
59
+ return false;
60
+ }
61
+ return logLevelRank[messageLevel] <= logLevelRank[configuredLevel];
62
+ };
63
+ var write = (messageLevel, message) => {
64
+ process.stderr.write(`[codesentinel] ${messageLevel.toUpperCase()} ${message}
65
+ `);
66
+ };
67
+ var createStderrLogger = (level) => {
68
+ if (level === "silent") {
69
+ return createSilentLogger();
70
+ }
71
+ return {
72
+ error: (message) => {
73
+ if (shouldLog(level, "error")) {
74
+ write("error", message);
75
+ }
76
+ },
77
+ warn: (message) => {
78
+ if (shouldLog(level, "warn")) {
79
+ write("warn", message);
80
+ }
81
+ },
82
+ info: (message) => {
83
+ if (shouldLog(level, "info")) {
84
+ write("info", message);
85
+ }
86
+ },
87
+ debug: (message) => {
88
+ if (shouldLog(level, "debug")) {
89
+ write("debug", message);
90
+ }
91
+ }
92
+ };
93
+ };
94
+ var parseLogLevel = (value) => {
95
+ switch (value) {
96
+ case "silent":
97
+ case "error":
98
+ case "warn":
99
+ case "info":
100
+ case "debug":
101
+ return value;
102
+ default:
103
+ return "info";
104
+ }
105
+ };
106
+
9
107
  // src/application/run-analyze-command.ts
10
108
  import { resolve as resolve2 } from "path";
11
109
 
@@ -385,16 +483,18 @@ var extractModuleSpecifiers = (sourceFile) => {
385
483
  visit(sourceFile);
386
484
  return [...specifiers];
387
485
  };
388
- var parseTypescriptProject = (projectPath) => {
486
+ var parseTypescriptProject = (projectPath, onProgress) => {
389
487
  const projectRoot = isAbsolute(projectPath) ? projectPath : resolve(projectPath);
390
488
  const { fileNames, options } = parseTsConfig(projectRoot);
391
489
  const sourceFilePaths = fileNames.filter((filePath) => isProjectSourceFile(filePath, projectRoot)).map((filePath) => normalizePath(resolve(filePath)));
392
490
  const uniqueSourceFilePaths = [...new Set(sourceFilePaths)].sort((a, b) => a.localeCompare(b));
393
491
  const sourceFilePathSet = new Set(uniqueSourceFilePaths);
492
+ onProgress?.({ stage: "files_discovered", totalSourceFiles: uniqueSourceFilePaths.length });
394
493
  const program2 = ts.createProgram({
395
494
  rootNames: uniqueSourceFilePaths,
396
495
  options
397
496
  });
497
+ onProgress?.({ stage: "program_created", totalSourceFiles: uniqueSourceFilePaths.length });
398
498
  const nodeByAbsolutePath = /* @__PURE__ */ new Map();
399
499
  for (const sourcePath of uniqueSourceFilePaths) {
400
500
  const relativePath = normalizePath(relative(projectRoot, sourcePath));
@@ -407,7 +507,7 @@ var parseTypescriptProject = (projectPath) => {
407
507
  }
408
508
  const resolverCache = /* @__PURE__ */ new Map();
409
509
  const edges = [];
410
- for (const sourcePath of uniqueSourceFilePaths) {
510
+ for (const [index, sourcePath] of uniqueSourceFilePaths.entries()) {
411
511
  const sourceFile = program2.getSourceFile(sourcePath);
412
512
  if (sourceFile === void 0) {
413
513
  continue;
@@ -436,22 +536,844 @@ var parseTypescriptProject = (projectPath) => {
436
536
  }
437
537
  edges.push({ from: fromNode.id, to: toNode.id });
438
538
  }
539
+ const processed = index + 1;
540
+ if (processed === 1 || processed === uniqueSourceFilePaths.length || processed % 50 === 0) {
541
+ onProgress?.({
542
+ stage: "file_processed",
543
+ processed,
544
+ total: uniqueSourceFilePaths.length,
545
+ filePath: fromNode.id
546
+ });
547
+ }
439
548
  }
549
+ onProgress?.({ stage: "edges_resolved", totalEdges: edges.length });
440
550
  return {
441
551
  nodes: [...nodeByAbsolutePath.values()],
442
552
  edges
443
553
  };
444
554
  };
445
555
  var buildProjectGraphSummary = (input) => {
446
- const parsedProject = parseTypescriptProject(input.projectPath);
556
+ const parsedProject = parseTypescriptProject(input.projectPath, input.onProgress);
447
557
  const graphData = createGraphData(parsedProject.nodes, parsedProject.edges);
448
558
  return createGraphAnalysisSummary(input.projectPath, graphData);
449
559
  };
450
560
 
561
+ // ../dependency-firewall/dist/index.js
562
+ import { existsSync, readFileSync } from "fs";
563
+ import { join } from "path";
564
+ var round4 = (value) => Number(value.toFixed(4));
565
+ var normalizeNodes = (nodes) => {
566
+ const byName = /* @__PURE__ */ new Map();
567
+ for (const node of nodes) {
568
+ const bucket = byName.get(node.name) ?? [];
569
+ bucket.push(node);
570
+ byName.set(node.name, bucket);
571
+ }
572
+ const normalized = [];
573
+ for (const [name, candidates] of byName.entries()) {
574
+ if (candidates.length === 0) {
575
+ continue;
576
+ }
577
+ candidates.sort((a, b) => b.version.localeCompare(a.version));
578
+ const selected = candidates[0];
579
+ if (selected === void 0) {
580
+ continue;
581
+ }
582
+ const deps = selected.dependencies.map((dep) => {
583
+ const at = dep.lastIndexOf("@");
584
+ return at <= 0 ? dep : dep.slice(0, at);
585
+ }).filter((depName) => depName.length > 0).sort((a, b) => a.localeCompare(b));
586
+ normalized.push({
587
+ key: `${name}@${selected.version}`,
588
+ name,
589
+ version: selected.version,
590
+ dependencies: deps
591
+ });
592
+ }
593
+ return normalized.sort((a, b) => a.name.localeCompare(b.name));
594
+ };
595
+ var computeDepths = (nodeByName, directNames) => {
596
+ const visiting = /* @__PURE__ */ new Set();
597
+ const depthByName = /* @__PURE__ */ new Map();
598
+ const compute = (name) => {
599
+ const known = depthByName.get(name);
600
+ if (known !== void 0) {
601
+ return known;
602
+ }
603
+ if (visiting.has(name)) {
604
+ return 0;
605
+ }
606
+ visiting.add(name);
607
+ const node = nodeByName.get(name);
608
+ if (node === void 0) {
609
+ visiting.delete(name);
610
+ depthByName.set(name, 0);
611
+ return 0;
612
+ }
613
+ let maxChildDepth = 0;
614
+ for (const dependencyName of node.dependencies) {
615
+ const childDepth = compute(dependencyName);
616
+ if (childDepth > maxChildDepth) {
617
+ maxChildDepth = childDepth;
618
+ }
619
+ }
620
+ visiting.delete(name);
621
+ const ownDepth = directNames.has(name) ? 0 : maxChildDepth + 1;
622
+ depthByName.set(name, ownDepth);
623
+ return ownDepth;
624
+ };
625
+ for (const name of nodeByName.keys()) {
626
+ compute(name);
627
+ }
628
+ let maxDepth = 0;
629
+ for (const depth of depthByName.values()) {
630
+ if (depth > maxDepth) {
631
+ maxDepth = depth;
632
+ }
633
+ }
634
+ return { depthByName, maxDepth };
635
+ };
636
+ var rankCentrality = (nodes, dependentsByName, directNames, topN) => [...nodes].map((node) => ({
637
+ name: node.name,
638
+ dependents: dependentsByName.get(node.name) ?? 0,
639
+ fanOut: node.dependencies.length,
640
+ direct: directNames.has(node.name)
641
+ })).sort(
642
+ (a, b) => b.dependents - a.dependents || b.fanOut - a.fanOut || a.name.localeCompare(b.name)
643
+ ).slice(0, topN);
644
+ var canPropagateSignal = (signal) => signal === "abandoned" || signal === "high_centrality" || signal === "deep_chain" || signal === "high_fanout";
645
+ var collectTransitiveDependencies = (rootName, nodeByName) => {
646
+ const seen = /* @__PURE__ */ new Set();
647
+ const stack = [...nodeByName.get(rootName)?.dependencies ?? []];
648
+ while (stack.length > 0) {
649
+ const current = stack.pop();
650
+ if (current === void 0 || seen.has(current) || current === rootName) {
651
+ continue;
652
+ }
653
+ seen.add(current);
654
+ const currentNode = nodeByName.get(current);
655
+ if (currentNode === void 0) {
656
+ continue;
657
+ }
658
+ for (const next of currentNode.dependencies) {
659
+ if (!seen.has(next)) {
660
+ stack.push(next);
661
+ }
662
+ }
663
+ }
664
+ return [...seen].sort((a, b) => a.localeCompare(b));
665
+ };
666
+ var buildExternalAnalysisSummary = (targetPath, extraction, metadataByKey, config) => {
667
+ const nodes = normalizeNodes(extraction.nodes);
668
+ const directNames = new Set(extraction.directDependencies.map((dep) => dep.name));
669
+ const directSpecByName = new Map(extraction.directDependencies.map((dep) => [dep.name, dep.requestedRange]));
670
+ const nodeByName = new Map(nodes.map((node) => [node.name, node]));
671
+ const dependentsByName = /* @__PURE__ */ new Map();
672
+ for (const node of nodes) {
673
+ dependentsByName.set(node.name, dependentsByName.get(node.name) ?? 0);
674
+ }
675
+ for (const node of nodes) {
676
+ for (const dependencyName of node.dependencies) {
677
+ if (!nodeByName.has(dependencyName)) {
678
+ continue;
679
+ }
680
+ dependentsByName.set(dependencyName, (dependentsByName.get(dependencyName) ?? 0) + 1);
681
+ }
682
+ }
683
+ const { depthByName, maxDepth } = computeDepths(nodeByName, directNames);
684
+ const centralityRanking = rankCentrality(nodes, dependentsByName, directNames, config.centralityTopN);
685
+ const topCentralNames = new Set(
686
+ centralityRanking.slice(0, Math.max(1, Math.ceil(centralityRanking.length * 0.25))).map((entry) => entry.name)
687
+ );
688
+ const allDependencies = [];
689
+ let metadataAvailableCount = 0;
690
+ for (const node of nodes) {
691
+ const metadata = metadataByKey.get(node.key) ?? null;
692
+ if (metadata !== null) {
693
+ metadataAvailableCount += 1;
694
+ }
695
+ const dependencyDepth = depthByName.get(node.name) ?? 0;
696
+ const dependents = dependentsByName.get(node.name) ?? 0;
697
+ const riskSignals = [];
698
+ if ((metadata?.maintainerCount ?? 0) === 1) {
699
+ riskSignals.push("single_maintainer");
700
+ }
701
+ if ((metadata?.daysSinceLastRelease ?? 0) >= config.abandonedDaysThreshold) {
702
+ riskSignals.push("abandoned");
703
+ }
704
+ if (topCentralNames.has(node.name) && dependents > 0) {
705
+ riskSignals.push("high_centrality");
706
+ }
707
+ if (dependencyDepth >= config.deepChainThreshold) {
708
+ riskSignals.push("deep_chain");
709
+ }
710
+ if (node.dependencies.length >= config.fanOutHighThreshold) {
711
+ riskSignals.push("high_fanout");
712
+ }
713
+ if (metadata === null) {
714
+ riskSignals.push("metadata_unavailable");
715
+ }
716
+ allDependencies.push({
717
+ name: node.name,
718
+ direct: directNames.has(node.name),
719
+ requestedRange: directSpecByName.get(node.name) ?? null,
720
+ resolvedVersion: node.version,
721
+ transitiveDependencies: [],
722
+ dependencyDepth,
723
+ fanOut: node.dependencies.length,
724
+ dependents,
725
+ maintainerCount: metadata?.maintainerCount ?? null,
726
+ releaseFrequencyDays: metadata?.releaseFrequencyDays ?? null,
727
+ daysSinceLastRelease: metadata?.daysSinceLastRelease ?? null,
728
+ repositoryActivity30d: metadata?.repositoryActivity30d ?? null,
729
+ busFactor: metadata?.busFactor ?? null,
730
+ ownRiskSignals: [...riskSignals].sort((a, b) => a.localeCompare(b)),
731
+ inheritedRiskSignals: [],
732
+ riskSignals
733
+ });
734
+ }
735
+ allDependencies.sort((a, b) => a.name.localeCompare(b.name));
736
+ const allByName = new Map(allDependencies.map((dep) => [dep.name, dep]));
737
+ const dependencies = allDependencies.filter((dep) => dep.direct).map((dep) => {
738
+ const transitiveDependencies = collectTransitiveDependencies(dep.name, nodeByName);
739
+ const inheritedSignals = /* @__PURE__ */ new Set();
740
+ const allSignals = new Set(dep.ownRiskSignals);
741
+ for (const transitiveName of transitiveDependencies) {
742
+ const transitive = allByName.get(transitiveName);
743
+ if (transitive === void 0) {
744
+ continue;
745
+ }
746
+ for (const signal of transitive.riskSignals) {
747
+ if (canPropagateSignal(signal)) {
748
+ inheritedSignals.add(signal);
749
+ allSignals.add(signal);
750
+ }
751
+ }
752
+ }
753
+ return {
754
+ ...dep,
755
+ transitiveDependencies,
756
+ inheritedRiskSignals: [...inheritedSignals].sort((a, b) => a.localeCompare(b)),
757
+ riskSignals: [...allSignals].sort((a, b) => a.localeCompare(b))
758
+ };
759
+ }).sort((a, b) => a.name.localeCompare(b.name));
760
+ const highRiskDependencies = dependencies.filter((dep) => dep.riskSignals.length > 1).sort((a, b) => b.riskSignals.length - a.riskSignals.length || a.name.localeCompare(b.name)).slice(0, config.maxHighRiskDependencies).map((dep) => dep.name);
761
+ const singleMaintainerDependencies = dependencies.filter((dep) => dep.ownRiskSignals.includes("single_maintainer")).map((dep) => dep.name).sort((a, b) => a.localeCompare(b));
762
+ const abandonedDependencies = dependencies.filter((dep) => dep.ownRiskSignals.includes("abandoned")).map((dep) => dep.name).sort((a, b) => a.localeCompare(b));
763
+ return {
764
+ targetPath,
765
+ available: true,
766
+ metrics: {
767
+ totalDependencies: allDependencies.length,
768
+ directDependencies: dependencies.length,
769
+ transitiveDependencies: allDependencies.length - dependencies.length,
770
+ dependencyDepth: maxDepth,
771
+ lockfileKind: extraction.kind,
772
+ metadataCoverage: allDependencies.length === 0 ? 0 : round4(metadataAvailableCount / allDependencies.length)
773
+ },
774
+ dependencies,
775
+ highRiskDependencies,
776
+ singleMaintainerDependencies,
777
+ abandonedDependencies,
778
+ centralityRanking
779
+ };
780
+ };
781
+ var DEFAULT_EXTERNAL_ANALYSIS_CONFIG = {
782
+ abandonedDaysThreshold: 540,
783
+ deepChainThreshold: 6,
784
+ fanOutHighThreshold: 25,
785
+ centralityTopN: 20,
786
+ maxHighRiskDependencies: 100,
787
+ metadataRequestConcurrency: 8
788
+ };
789
+ var LOCKFILE_CANDIDATES = [
790
+ { fileName: "pnpm-lock.yaml", kind: "pnpm" },
791
+ { fileName: "package-lock.json", kind: "npm" },
792
+ { fileName: "npm-shrinkwrap.json", kind: "npm-shrinkwrap" },
793
+ { fileName: "yarn.lock", kind: "yarn" },
794
+ { fileName: "bun.lock", kind: "bun" },
795
+ { fileName: "bun.lockb", kind: "bun" }
796
+ ];
797
+ var loadPackageJson = (repositoryPath) => {
798
+ const packageJsonPath2 = join(repositoryPath, "package.json");
799
+ if (!existsSync(packageJsonPath2)) {
800
+ return null;
801
+ }
802
+ return {
803
+ path: packageJsonPath2,
804
+ raw: readFileSync(packageJsonPath2, "utf8")
805
+ };
806
+ };
807
+ var selectLockfile = (repositoryPath) => {
808
+ for (const candidate of LOCKFILE_CANDIDATES) {
809
+ const absolutePath = join(repositoryPath, candidate.fileName);
810
+ if (!existsSync(absolutePath)) {
811
+ continue;
812
+ }
813
+ return {
814
+ path: absolutePath,
815
+ kind: candidate.kind,
816
+ raw: readFileSync(absolutePath, "utf8")
817
+ };
818
+ }
819
+ return null;
820
+ };
821
+ var parsePackageJson = (raw) => {
822
+ const parsed = JSON.parse(raw);
823
+ const merged = /* @__PURE__ */ new Map();
824
+ for (const block of [
825
+ parsed.dependencies,
826
+ parsed.devDependencies,
827
+ parsed.optionalDependencies,
828
+ parsed.peerDependencies
829
+ ]) {
830
+ if (block === void 0) {
831
+ continue;
832
+ }
833
+ for (const [name, versionRange] of Object.entries(block)) {
834
+ merged.set(name, versionRange);
835
+ }
836
+ }
837
+ return [...merged.entries()].map(([name, requestedRange]) => ({ name, requestedRange })).sort((a, b) => a.name.localeCompare(b.name));
838
+ };
839
+ var parsePackageLock = (raw, directSpecs) => {
840
+ const parsed = JSON.parse(raw);
841
+ const nodes = [];
842
+ if (parsed.packages !== void 0) {
843
+ for (const [packagePath, packageData] of Object.entries(parsed.packages)) {
844
+ if (packagePath.length === 0 || packageData.version === void 0) {
845
+ continue;
846
+ }
847
+ const segments = packagePath.split("node_modules/");
848
+ const name = segments[segments.length - 1] ?? "";
849
+ if (name.length === 0) {
850
+ continue;
851
+ }
852
+ const dependencies = Object.entries(packageData.dependencies ?? {}).map(([depName, depRange]) => `${depName}@${String(depRange)}`).sort((a, b) => a.localeCompare(b));
853
+ nodes.push({
854
+ name,
855
+ version: packageData.version,
856
+ dependencies
857
+ });
858
+ }
859
+ } else if (parsed.dependencies !== void 0) {
860
+ for (const [name, dep] of Object.entries(parsed.dependencies)) {
861
+ if (dep.version === void 0) {
862
+ continue;
863
+ }
864
+ const dependencies = Object.entries(dep.dependencies ?? {}).map(([depName, depVersion]) => `${depName}@${String(depVersion)}`).sort((a, b) => a.localeCompare(b));
865
+ nodes.push({
866
+ name,
867
+ version: dep.version,
868
+ dependencies
869
+ });
870
+ }
871
+ }
872
+ nodes.sort((a, b) => a.name.localeCompare(b.name) || a.version.localeCompare(b.version));
873
+ return {
874
+ kind: "npm",
875
+ directDependencies: directSpecs,
876
+ nodes
877
+ };
878
+ };
879
+ var sanitizeValue = (value) => value.replace(/^['"]|['"]$/g, "").trim();
880
+ var parsePackageKey = (rawKey) => {
881
+ const key = sanitizeValue(rawKey.replace(/:$/, ""));
882
+ const withoutSlash = key.startsWith("/") ? key.slice(1) : key;
883
+ const lastAt = withoutSlash.lastIndexOf("@");
884
+ if (lastAt <= 0) {
885
+ return null;
886
+ }
887
+ const name = withoutSlash.slice(0, lastAt);
888
+ const versionWithPeers = withoutSlash.slice(lastAt + 1);
889
+ const version2 = versionWithPeers.split("(")[0] ?? versionWithPeers;
890
+ if (name.length === 0 || version2.length === 0) {
891
+ return null;
892
+ }
893
+ return { name, version: version2 };
894
+ };
895
+ var parsePnpmLockfile = (raw, directSpecs) => {
896
+ const lines = raw.split("\n");
897
+ let state = "root";
898
+ let currentPackage = null;
899
+ let currentDependencyName = null;
900
+ const dependenciesByNode = /* @__PURE__ */ new Map();
901
+ for (const line of lines) {
902
+ if (line.trim().length === 0 || line.trimStart().startsWith("#")) {
903
+ continue;
904
+ }
905
+ if (line.startsWith("importers:")) {
906
+ state = "importers";
907
+ continue;
908
+ }
909
+ if (line.startsWith("packages:")) {
910
+ state = "packages";
911
+ continue;
912
+ }
913
+ if (state === "packages" || state === "packageDeps") {
914
+ const packageMatch = line.match(/^\s{2}([^\s].+):\s*$/);
915
+ if (packageMatch !== null) {
916
+ const parsedKey = parsePackageKey(packageMatch[1] ?? "");
917
+ if (parsedKey !== null) {
918
+ currentPackage = `${parsedKey.name}@${parsedKey.version}`;
919
+ dependenciesByNode.set(currentPackage, /* @__PURE__ */ new Set());
920
+ state = "packageDeps";
921
+ currentDependencyName = null;
922
+ }
923
+ continue;
924
+ }
925
+ }
926
+ if (state === "packageDeps" && currentPackage !== null) {
927
+ const depLine = line.match(/^\s{6}([^:\s]+):\s*(.+)$/);
928
+ if (depLine !== null) {
929
+ const depName = sanitizeValue(depLine[1] ?? "");
930
+ const depRef = sanitizeValue(depLine[2] ?? "");
931
+ const depVersion = depRef.split("(")[0] ?? depRef;
932
+ if (depName.length > 0 && depVersion.length > 0) {
933
+ dependenciesByNode.get(currentPackage)?.add(`${depName}@${depVersion}`);
934
+ }
935
+ currentDependencyName = null;
936
+ continue;
937
+ }
938
+ const depBlockLine = line.match(/^\s{6}([^:\s]+):\s*$/);
939
+ if (depBlockLine !== null) {
940
+ currentDependencyName = sanitizeValue(depBlockLine[1] ?? "");
941
+ continue;
942
+ }
943
+ const depVersionLine = line.match(/^\s{8}version:\s*(.+)$/);
944
+ if (depVersionLine !== null && currentDependencyName !== null) {
945
+ const depRef = sanitizeValue(depVersionLine[1] ?? "");
946
+ const depVersion = depRef.split("(")[0] ?? depRef;
947
+ if (depVersion.length > 0) {
948
+ dependenciesByNode.get(currentPackage)?.add(`${currentDependencyName}@${depVersion}`);
949
+ }
950
+ currentDependencyName = null;
951
+ continue;
952
+ }
953
+ if (line.match(/^\s{4}(dependencies|optionalDependencies):\s*$/) !== null) {
954
+ continue;
955
+ }
956
+ }
957
+ }
958
+ const nodes = [...dependenciesByNode.entries()].map(([nodeId, deps]) => {
959
+ const at = nodeId.lastIndexOf("@");
960
+ return {
961
+ name: nodeId.slice(0, at),
962
+ version: nodeId.slice(at + 1),
963
+ dependencies: [...deps].sort((a, b) => a.localeCompare(b))
964
+ };
965
+ }).sort(
966
+ (a, b) => a.name.localeCompare(b.name) || a.version.localeCompare(b.version)
967
+ );
968
+ return {
969
+ kind: "pnpm",
970
+ directDependencies: directSpecs,
971
+ nodes
972
+ };
973
+ };
974
+ var stripQuotes = (value) => value.replace(/^['"]|['"]$/g, "");
975
+ var parseVersionSelector = (selector) => {
976
+ const npmIndex = selector.lastIndexOf("@npm:");
977
+ if (npmIndex >= 0) {
978
+ return selector.slice(npmIndex + 5);
979
+ }
980
+ const lastAt = selector.lastIndexOf("@");
981
+ if (lastAt <= 0) {
982
+ return null;
983
+ }
984
+ return selector.slice(lastAt + 1);
985
+ };
986
+ var parseYarnLock = (raw, directSpecs) => {
987
+ const lines = raw.split("\n");
988
+ const nodes = [];
989
+ let selectors = [];
990
+ let version2 = null;
991
+ let readingDependencies = false;
992
+ let dependencies = [];
993
+ const flushEntry = () => {
994
+ if (selectors.length === 0 || version2 === null) {
995
+ selectors = [];
996
+ version2 = null;
997
+ dependencies = [];
998
+ readingDependencies = false;
999
+ return;
1000
+ }
1001
+ for (const selector of selectors) {
1002
+ const parsedVersion = parseVersionSelector(selector);
1003
+ const at = selector.lastIndexOf("@");
1004
+ const name = at <= 0 ? selector : selector.slice(0, at);
1005
+ if (name.length === 0) {
1006
+ continue;
1007
+ }
1008
+ nodes.push({
1009
+ name,
1010
+ version: version2,
1011
+ dependencies: [...dependencies].sort((a, b) => a.localeCompare(b))
1012
+ });
1013
+ if (parsedVersion !== null) {
1014
+ nodes.push({
1015
+ name,
1016
+ version: parsedVersion,
1017
+ dependencies: [...dependencies].sort((a, b) => a.localeCompare(b))
1018
+ });
1019
+ }
1020
+ }
1021
+ selectors = [];
1022
+ version2 = null;
1023
+ dependencies = [];
1024
+ readingDependencies = false;
1025
+ };
1026
+ for (const line of lines) {
1027
+ if (line.trim().length === 0) {
1028
+ continue;
1029
+ }
1030
+ if (!line.startsWith(" ") && line.endsWith(":")) {
1031
+ flushEntry();
1032
+ const keyText = line.slice(0, -1);
1033
+ selectors = keyText.split(",").map((part) => stripQuotes(part.trim())).filter((part) => part.length > 0);
1034
+ continue;
1035
+ }
1036
+ if (line.match(/^\s{2}version\s+/) !== null) {
1037
+ const value = line.replace(/^\s{2}version\s+/, "").trim();
1038
+ version2 = stripQuotes(value);
1039
+ readingDependencies = false;
1040
+ continue;
1041
+ }
1042
+ if (line.match(/^\s{2}dependencies:\s*$/) !== null) {
1043
+ readingDependencies = true;
1044
+ continue;
1045
+ }
1046
+ if (readingDependencies && line.match(/^\s{4}[^\s].+$/) !== null) {
1047
+ const depLine = line.trim();
1048
+ const firstSpace = depLine.indexOf(" ");
1049
+ if (firstSpace <= 0) {
1050
+ continue;
1051
+ }
1052
+ const depName = stripQuotes(depLine.slice(0, firstSpace));
1053
+ const depRef = stripQuotes(depLine.slice(firstSpace + 1).trim());
1054
+ const depVersion = parseVersionSelector(depRef) ?? depRef;
1055
+ dependencies.push(`${depName}@${depVersion}`);
1056
+ continue;
1057
+ }
1058
+ readingDependencies = false;
1059
+ }
1060
+ flushEntry();
1061
+ const deduped = /* @__PURE__ */ new Map();
1062
+ for (const node of nodes) {
1063
+ const key = `${node.name}@${node.version}`;
1064
+ if (!deduped.has(key)) {
1065
+ deduped.set(key, node);
1066
+ }
1067
+ }
1068
+ return {
1069
+ kind: "yarn",
1070
+ directDependencies: directSpecs,
1071
+ nodes: [...deduped.values()].sort((a, b) => a.name.localeCompare(b.name) || a.version.localeCompare(b.version))
1072
+ };
1073
+ };
1074
+ var parseBunLock = (_raw, _directSpecs) => {
1075
+ throw new Error("unsupported_lockfile_format");
1076
+ };
1077
+ var withDefaults = (overrides) => ({
1078
+ ...DEFAULT_EXTERNAL_ANALYSIS_CONFIG,
1079
+ ...overrides
1080
+ });
1081
+ var parseExtraction = (lockfileKind, lockfileRaw, directSpecs) => {
1082
+ switch (lockfileKind) {
1083
+ case "pnpm":
1084
+ return parsePnpmLockfile(lockfileRaw, directSpecs);
1085
+ case "npm":
1086
+ case "npm-shrinkwrap":
1087
+ return {
1088
+ ...parsePackageLock(lockfileRaw, directSpecs),
1089
+ kind: lockfileKind
1090
+ };
1091
+ case "yarn":
1092
+ return parseYarnLock(lockfileRaw, directSpecs);
1093
+ case "bun":
1094
+ return parseBunLock(lockfileRaw, directSpecs);
1095
+ default:
1096
+ throw new Error("unsupported_lockfile_format");
1097
+ }
1098
+ };
1099
+ var mapWithConcurrency = async (values, limit, handler) => {
1100
+ const effectiveLimit = Math.max(1, limit);
1101
+ const results = new Array(values.length);
1102
+ let index = 0;
1103
+ const workers = Array.from({ length: Math.min(effectiveLimit, values.length) }, async () => {
1104
+ for (; ; ) {
1105
+ const current = index;
1106
+ index += 1;
1107
+ if (current >= values.length) {
1108
+ return;
1109
+ }
1110
+ const value = values[current];
1111
+ if (value === void 0) {
1112
+ return;
1113
+ }
1114
+ results[current] = await handler(value);
1115
+ }
1116
+ });
1117
+ await Promise.all(workers);
1118
+ return results;
1119
+ };
1120
+ var analyzeDependencyExposure = async (input, metadataProvider, onProgress) => {
1121
+ const packageJson = loadPackageJson(input.repositoryPath);
1122
+ if (packageJson === null) {
1123
+ return {
1124
+ targetPath: input.repositoryPath,
1125
+ available: false,
1126
+ reason: "package_json_not_found"
1127
+ };
1128
+ }
1129
+ onProgress?.({ stage: "package_json_loaded" });
1130
+ const lockfile = selectLockfile(input.repositoryPath);
1131
+ if (lockfile === null) {
1132
+ return {
1133
+ targetPath: input.repositoryPath,
1134
+ available: false,
1135
+ reason: "lockfile_not_found"
1136
+ };
1137
+ }
1138
+ onProgress?.({ stage: "lockfile_selected", kind: lockfile.kind });
1139
+ try {
1140
+ const directSpecs = parsePackageJson(packageJson.raw);
1141
+ const extraction = parseExtraction(lockfile.kind, lockfile.raw, directSpecs);
1142
+ const config = withDefaults(input.config);
1143
+ onProgress?.({
1144
+ stage: "lockfile_parsed",
1145
+ dependencyNodes: extraction.nodes.length,
1146
+ directDependencies: extraction.directDependencies.length
1147
+ });
1148
+ onProgress?.({ stage: "metadata_fetch_started", total: extraction.nodes.length });
1149
+ let completed = 0;
1150
+ const metadataEntries = await mapWithConcurrency(
1151
+ extraction.nodes,
1152
+ config.metadataRequestConcurrency,
1153
+ async (node) => {
1154
+ const result = {
1155
+ key: `${node.name}@${node.version}`,
1156
+ metadata: await metadataProvider.getMetadata(node.name, node.version)
1157
+ };
1158
+ completed += 1;
1159
+ onProgress?.({
1160
+ stage: "metadata_fetch_progress",
1161
+ completed,
1162
+ total: extraction.nodes.length,
1163
+ packageName: node.name
1164
+ });
1165
+ return result;
1166
+ }
1167
+ );
1168
+ onProgress?.({ stage: "metadata_fetch_completed", total: extraction.nodes.length });
1169
+ const metadataByKey = /* @__PURE__ */ new Map();
1170
+ for (const entry of metadataEntries) {
1171
+ metadataByKey.set(entry.key, entry.metadata);
1172
+ }
1173
+ const summary = buildExternalAnalysisSummary(input.repositoryPath, extraction, metadataByKey, config);
1174
+ if (summary.available) {
1175
+ onProgress?.({
1176
+ stage: "summary_built",
1177
+ totalDependencies: summary.metrics.totalDependencies,
1178
+ directDependencies: summary.metrics.directDependencies
1179
+ });
1180
+ }
1181
+ return summary;
1182
+ } catch (error) {
1183
+ const message = error instanceof Error ? error.message : "unknown";
1184
+ if (message.includes("unsupported_lockfile_format")) {
1185
+ return {
1186
+ targetPath: input.repositoryPath,
1187
+ available: false,
1188
+ reason: "unsupported_lockfile_format"
1189
+ };
1190
+ }
1191
+ return {
1192
+ targetPath: input.repositoryPath,
1193
+ available: false,
1194
+ reason: "invalid_lockfile"
1195
+ };
1196
+ }
1197
+ };
1198
+ var ONE_DAY_MS = 24 * 60 * 60 * 1e3;
1199
+ var round42 = (value) => Number(value.toFixed(4));
1200
+ var parseDate = (iso) => {
1201
+ if (iso === void 0) {
1202
+ return null;
1203
+ }
1204
+ const value = Date.parse(iso);
1205
+ return Number.isNaN(value) ? null : value;
1206
+ };
1207
+ var NpmRegistryMetadataProvider = class {
1208
+ cache = /* @__PURE__ */ new Map();
1209
+ async getMetadata(name, version2) {
1210
+ const key = `${name}@${version2}`;
1211
+ if (this.cache.has(key)) {
1212
+ return this.cache.get(key) ?? null;
1213
+ }
1214
+ try {
1215
+ const encodedName = encodeURIComponent(name);
1216
+ const response = await fetch(`https://registry.npmjs.org/${encodedName}`);
1217
+ if (!response.ok) {
1218
+ this.cache.set(key, null);
1219
+ return null;
1220
+ }
1221
+ const payload = await response.json();
1222
+ const timeEntries = payload.time ?? {};
1223
+ const publishDates = Object.entries(timeEntries).filter(([tag]) => tag !== "created" && tag !== "modified").map(([, date]) => parseDate(date)).filter((value) => value !== null).sort((a, b) => a - b);
1224
+ const modifiedAt = parseDate(timeEntries["modified"]);
1225
+ const now = Date.now();
1226
+ const daysSinceLastRelease = modifiedAt === null ? null : Math.max(0, round42((now - modifiedAt) / ONE_DAY_MS));
1227
+ let releaseFrequencyDays = null;
1228
+ if (publishDates.length >= 2) {
1229
+ const totalIntervals = publishDates.length - 1;
1230
+ let sum = 0;
1231
+ for (let i = 1; i < publishDates.length; i += 1) {
1232
+ const current = publishDates[i];
1233
+ const previous = publishDates[i - 1];
1234
+ if (current !== void 0 && previous !== void 0) {
1235
+ sum += current - previous;
1236
+ }
1237
+ }
1238
+ releaseFrequencyDays = round42(sum / totalIntervals / ONE_DAY_MS);
1239
+ }
1240
+ const maintainers = payload.maintainers ?? [];
1241
+ const maintainerCount = maintainers.length > 0 ? maintainers.length : null;
1242
+ const metadata = {
1243
+ name,
1244
+ version: version2,
1245
+ maintainerCount,
1246
+ releaseFrequencyDays,
1247
+ daysSinceLastRelease,
1248
+ repositoryActivity30d: null,
1249
+ busFactor: null
1250
+ };
1251
+ this.cache.set(key, metadata);
1252
+ return metadata;
1253
+ } catch {
1254
+ this.cache.set(key, null);
1255
+ return null;
1256
+ }
1257
+ }
1258
+ };
1259
+ var NoopMetadataProvider = class {
1260
+ async getMetadata(_name, _version) {
1261
+ return null;
1262
+ }
1263
+ };
1264
+ var analyzeDependencyExposureFromProject = async (input, onProgress) => {
1265
+ const metadataProvider = process.env["CODESENTINEL_EXTERNAL_METADATA"] === "none" ? new NoopMetadataProvider() : new NpmRegistryMetadataProvider();
1266
+ return analyzeDependencyExposure(input, metadataProvider, onProgress);
1267
+ };
1268
+
451
1269
  // ../git-analyzer/dist/index.js
452
1270
  import { execFileSync } from "child_process";
453
1271
  var pairKey = (a, b) => `${a}\0${b}`;
454
- var round4 = (value) => Number(value.toFixed(4));
1272
+ var round43 = (value) => Number(value.toFixed(4));
1273
+ var normalizeName = (value) => value.toLowerCase().replace(/[^a-z0-9\s]/g, " ").replace(/\s+/g, " ").trim();
1274
+ var extractEmailStem = (authorId) => {
1275
+ const normalized = authorId.trim().toLowerCase();
1276
+ const githubNoReplyMatch = normalized.match(/^\d+\+([^@]+)@users\.noreply\.github\.com$/);
1277
+ if (githubNoReplyMatch?.[1] !== void 0) {
1278
+ return githubNoReplyMatch[1].replace(/[._+-]/g, "");
1279
+ }
1280
+ const atIndex = normalized.indexOf("@");
1281
+ if (atIndex <= 0) {
1282
+ return null;
1283
+ }
1284
+ return normalized.slice(0, atIndex).replace(/[._+-]/g, "");
1285
+ };
1286
+ var areNamesCompatible = (left, right) => {
1287
+ if (left.length === 0 || right.length === 0) {
1288
+ return false;
1289
+ }
1290
+ if (left === right) {
1291
+ return true;
1292
+ }
1293
+ if (left.startsWith(`${right} `) || right.startsWith(`${left} `)) {
1294
+ return true;
1295
+ }
1296
+ return false;
1297
+ };
1298
+ var chooseCanonicalAuthorId = (profiles) => {
1299
+ const ordered = [...profiles].sort((a, b) => {
1300
+ const aIsNoReply = a.authorId.includes("@users.noreply.github.com");
1301
+ const bIsNoReply = b.authorId.includes("@users.noreply.github.com");
1302
+ if (aIsNoReply !== bIsNoReply) {
1303
+ return aIsNoReply ? 1 : -1;
1304
+ }
1305
+ if (a.commitCount !== b.commitCount) {
1306
+ return b.commitCount - a.commitCount;
1307
+ }
1308
+ return a.authorId.localeCompare(b.authorId);
1309
+ });
1310
+ return ordered[0]?.authorId ?? "";
1311
+ };
1312
+ var buildAuthorAliasMap = (commits) => {
1313
+ const nameCountsByAuthorId = /* @__PURE__ */ new Map();
1314
+ const commitCountByAuthorId = /* @__PURE__ */ new Map();
1315
+ for (const commit of commits) {
1316
+ commitCountByAuthorId.set(commit.authorId, (commitCountByAuthorId.get(commit.authorId) ?? 0) + 1);
1317
+ const normalizedName = normalizeName(commit.authorName);
1318
+ const names = nameCountsByAuthorId.get(commit.authorId) ?? /* @__PURE__ */ new Map();
1319
+ if (normalizedName.length > 0) {
1320
+ names.set(normalizedName, (names.get(normalizedName) ?? 0) + 1);
1321
+ }
1322
+ nameCountsByAuthorId.set(commit.authorId, names);
1323
+ }
1324
+ const profiles = [...commitCountByAuthorId.entries()].map(([authorId, commitCount]) => {
1325
+ const names = nameCountsByAuthorId.get(authorId);
1326
+ const primaryName = names === void 0 ? "" : [...names.entries()].sort((a, b) => b[1] - a[1] || a[0].localeCompare(b[0]))[0]?.[0] ?? "";
1327
+ const normalizedAuthorId = authorId.toLowerCase();
1328
+ const isBot = normalizedAuthorId.includes("[bot]");
1329
+ return {
1330
+ authorId,
1331
+ commitCount,
1332
+ primaryName,
1333
+ emailStem: isBot ? null : extractEmailStem(authorId),
1334
+ isBot
1335
+ };
1336
+ });
1337
+ const groupsByStem = /* @__PURE__ */ new Map();
1338
+ for (const profile of profiles) {
1339
+ if (profile.emailStem === null || profile.emailStem.length < 4) {
1340
+ continue;
1341
+ }
1342
+ const current = groupsByStem.get(profile.emailStem) ?? [];
1343
+ current.push(profile);
1344
+ groupsByStem.set(profile.emailStem, current);
1345
+ }
1346
+ const aliasMap = /* @__PURE__ */ new Map();
1347
+ for (const profile of profiles) {
1348
+ aliasMap.set(profile.authorId, profile.authorId);
1349
+ }
1350
+ for (const group of groupsByStem.values()) {
1351
+ if (group.length < 2) {
1352
+ continue;
1353
+ }
1354
+ const compatible = [];
1355
+ for (const profile of group) {
1356
+ if (profile.isBot || profile.primaryName.length === 0) {
1357
+ continue;
1358
+ }
1359
+ compatible.push(profile);
1360
+ }
1361
+ if (compatible.length < 2) {
1362
+ continue;
1363
+ }
1364
+ const canonical = chooseCanonicalAuthorId(compatible);
1365
+ const canonicalProfile = compatible.find((candidate) => candidate.authorId === canonical);
1366
+ if (canonicalProfile === void 0) {
1367
+ continue;
1368
+ }
1369
+ for (const profile of compatible) {
1370
+ if (areNamesCompatible(profile.primaryName, canonicalProfile.primaryName)) {
1371
+ aliasMap.set(profile.authorId, canonical);
1372
+ }
1373
+ }
1374
+ }
1375
+ return aliasMap;
1376
+ };
455
1377
  var computeBusFactor = (authorDistribution, threshold) => {
456
1378
  if (authorDistribution.length === 0) {
457
1379
  return 0;
@@ -477,7 +1399,7 @@ var finalizeAuthorDistribution = (authorCommits) => {
477
1399
  return [...authorCommits.entries()].map(([authorId, commits]) => ({
478
1400
  authorId,
479
1401
  commits,
480
- share: round4(commits / totalCommits)
1402
+ share: round43(commits / totalCommits)
481
1403
  })).sort((a, b) => b.commits - a.commits || a.authorId.localeCompare(b.authorId));
482
1404
  };
483
1405
  var buildCouplingMatrix = (coChangeByPair, fileCommitCount, consideredCommits, skippedLargeCommits, maxCouplingPairs) => {
@@ -490,7 +1412,7 @@ var buildCouplingMatrix = (coChangeByPair, fileCommitCount, consideredCommits, s
490
1412
  const fileACommits = fileCommitCount.get(fileA) ?? 0;
491
1413
  const fileBCommits = fileCommitCount.get(fileB) ?? 0;
492
1414
  const denominator = fileACommits + fileBCommits - coChangeCommits;
493
- const couplingScore = denominator === 0 ? 0 : round4(coChangeCommits / denominator);
1415
+ const couplingScore = denominator === 0 ? 0 : round43(coChangeCommits / denominator);
494
1416
  allPairs.push({
495
1417
  fileA,
496
1418
  fileB,
@@ -529,6 +1451,7 @@ var selectHotspots = (files, config) => {
529
1451
  return { hotspots, threshold };
530
1452
  };
531
1453
  var computeRepositoryEvolutionSummary = (targetPath, commits, config) => {
1454
+ const authorAliasById = config.authorIdentityMode === "likely_merge" ? buildAuthorAliasMap(commits) : /* @__PURE__ */ new Map();
532
1455
  const fileStats = /* @__PURE__ */ new Map();
533
1456
  const coChangeByPair = /* @__PURE__ */ new Map();
534
1457
  const headCommitTimestamp = commits.length === 0 ? null : commits[commits.length - 1]?.authoredAtUnix ?? null;
@@ -559,7 +1482,8 @@ var computeRepositoryEvolutionSummary = (targetPath, commits, config) => {
559
1482
  if (commit.authoredAtUnix >= recentWindowStart) {
560
1483
  current.recentCommitCount += 1;
561
1484
  }
562
- current.authors.set(commit.authorId, (current.authors.get(commit.authorId) ?? 0) + 1);
1485
+ const effectiveAuthorId = authorAliasById.get(commit.authorId) ?? commit.authorId;
1486
+ current.authors.set(effectiveAuthorId, (current.authors.get(effectiveAuthorId) ?? 0) + 1);
563
1487
  }
564
1488
  const orderedFiles = [...uniqueFiles].sort((a, b) => a.localeCompare(b));
565
1489
  if (orderedFiles.length > 1) {
@@ -587,12 +1511,12 @@ var computeRepositoryEvolutionSummary = (targetPath, commits, config) => {
587
1511
  return {
588
1512
  filePath,
589
1513
  commitCount: stats.commitCount,
590
- frequencyPer100Commits: commits.length === 0 ? 0 : round4(stats.commitCount / commits.length * 100),
1514
+ frequencyPer100Commits: commits.length === 0 ? 0 : round43(stats.commitCount / commits.length * 100),
591
1515
  churnAdded: stats.churnAdded,
592
1516
  churnDeleted: stats.churnDeleted,
593
1517
  churnTotal: stats.churnAdded + stats.churnDeleted,
594
1518
  recentCommitCount: stats.recentCommitCount,
595
- recentVolatility: stats.commitCount === 0 ? 0 : round4(stats.recentCommitCount / stats.commitCount),
1519
+ recentVolatility: stats.commitCount === 0 ? 0 : round43(stats.recentCommitCount / stats.commitCount),
596
1520
  topAuthorShare,
597
1521
  busFactor: computeBusFactor(authorDistribution, config.busFactorCoverageThreshold),
598
1522
  authorDistribution
@@ -624,6 +1548,7 @@ var computeRepositoryEvolutionSummary = (targetPath, commits, config) => {
624
1548
  };
625
1549
  };
626
1550
  var DEFAULT_EVOLUTION_CONFIG = {
1551
+ authorIdentityMode: "likely_merge",
627
1552
  recentWindowDays: 30,
628
1553
  hotspotTopPercent: 0.1,
629
1554
  hotspotMinFiles: 1,
@@ -635,17 +1560,26 @@ var createEffectiveConfig = (overrides) => ({
635
1560
  ...DEFAULT_EVOLUTION_CONFIG,
636
1561
  ...overrides
637
1562
  });
638
- var analyzeRepositoryEvolution = (input, historyProvider) => {
1563
+ var analyzeRepositoryEvolution = (input, historyProvider, onProgress) => {
1564
+ onProgress?.({ stage: "checking_git_repository" });
639
1565
  if (!historyProvider.isGitRepository(input.repositoryPath)) {
1566
+ onProgress?.({ stage: "not_git_repository" });
640
1567
  return {
641
1568
  targetPath: input.repositoryPath,
642
1569
  available: false,
643
1570
  reason: "not_git_repository"
644
1571
  };
645
1572
  }
646
- const commits = historyProvider.getCommitHistory(input.repositoryPath);
1573
+ onProgress?.({ stage: "loading_commit_history" });
1574
+ const commits = historyProvider.getCommitHistory(
1575
+ input.repositoryPath,
1576
+ (event) => onProgress?.({ stage: "history", event })
1577
+ );
647
1578
  const config = createEffectiveConfig(input.config);
648
- return computeRepositoryEvolutionSummary(input.repositoryPath, commits, config);
1579
+ onProgress?.({ stage: "computing_metrics" });
1580
+ const summary = computeRepositoryEvolutionSummary(input.repositoryPath, commits, config);
1581
+ onProgress?.({ stage: "analysis_completed", available: summary.available });
1582
+ return summary;
649
1583
  };
650
1584
  var GitCommandError = class extends Error {
651
1585
  args;
@@ -672,6 +1606,11 @@ var ExecGitCommandClient = class {
672
1606
  var COMMIT_RECORD_SEPARATOR = "";
673
1607
  var COMMIT_FIELD_SEPARATOR = "";
674
1608
  var GIT_LOG_FORMAT = `%x1e%H%x1f%at%x1f%an%x1f%ae`;
1609
+ var mapParseProgressToHistoryProgress = (event) => ({
1610
+ stage: "git_log_parse_progress",
1611
+ parsedRecords: event.parsedRecords,
1612
+ totalRecords: event.totalRecords
1613
+ });
675
1614
  var parseInteger = (value) => {
676
1615
  if (value.length === 0) {
677
1616
  return null;
@@ -682,6 +1621,22 @@ var parseInteger = (value) => {
682
1621
  }
683
1622
  return parsed;
684
1623
  };
1624
+ var normalizeAuthorIdentity = (authorName, authorEmail) => {
1625
+ const normalizedName = authorName.trim().replace(/\s+/g, " ").toLowerCase();
1626
+ const normalizedEmail = authorEmail.trim().toLowerCase();
1627
+ if (/\[bot\]/i.test(normalizedName) || /\[bot\]/i.test(normalizedEmail)) {
1628
+ return normalizedEmail.length > 0 ? normalizedEmail : normalizedName;
1629
+ }
1630
+ const githubNoReplyMatch = normalizedEmail.match(/^\d+\+([^@]+)@users\.noreply\.github\.com$/);
1631
+ const githubHandle = githubNoReplyMatch?.[1]?.trim().toLowerCase();
1632
+ if (githubHandle !== void 0 && githubHandle.length > 0) {
1633
+ return `${githubHandle}@users.noreply.github.com`;
1634
+ }
1635
+ if (normalizedEmail.length > 0) {
1636
+ return normalizedEmail;
1637
+ }
1638
+ return normalizedName;
1639
+ };
685
1640
  var parseRenamedPath = (pathSpec) => {
686
1641
  if (!pathSpec.includes(" => ")) {
687
1642
  return pathSpec;
@@ -718,10 +1673,10 @@ var parseNumstatLine = (line) => {
718
1673
  deletions
719
1674
  };
720
1675
  };
721
- var parseGitLog = (rawLog) => {
1676
+ var parseGitLog = (rawLog, onProgress) => {
722
1677
  const records = rawLog.split(COMMIT_RECORD_SEPARATOR).map((record) => record.trim()).filter((record) => record.length > 0);
723
1678
  const commits = [];
724
- for (const record of records) {
1679
+ for (const [index, record] of records.entries()) {
725
1680
  const lines = record.split("\n").map((line) => line.trimEnd()).filter((line) => line.length > 0);
726
1681
  if (lines.length === 0) {
727
1682
  continue;
@@ -747,11 +1702,15 @@ var parseGitLog = (rawLog) => {
747
1702
  }
748
1703
  commits.push({
749
1704
  hash,
750
- authorId: authorEmail.toLowerCase(),
1705
+ authorId: normalizeAuthorIdentity(authorName, authorEmail),
751
1706
  authorName,
752
1707
  authoredAtUnix,
753
1708
  fileChanges
754
1709
  });
1710
+ const parsedRecords = index + 1;
1711
+ if (parsedRecords === 1 || parsedRecords === records.length || parsedRecords % 500 === 0) {
1712
+ onProgress?.({ parsedRecords, totalRecords: records.length });
1713
+ }
755
1714
  }
756
1715
  commits.sort((a, b) => a.authoredAtUnix - b.authoredAtUnix || a.hash.localeCompare(b.hash));
757
1716
  return commits;
@@ -776,52 +1735,863 @@ var GitCliHistoryProvider = class {
776
1735
  throw error;
777
1736
  }
778
1737
  }
779
- getCommitHistory(repositoryPath) {
1738
+ getCommitHistory(repositoryPath, onProgress) {
780
1739
  const output = this.gitClient.run(repositoryPath, [
781
1740
  "-c",
782
1741
  "core.quotepath=false",
783
1742
  "log",
1743
+ "--use-mailmap",
784
1744
  "--no-merges",
785
1745
  "--date=unix",
786
1746
  `--pretty=format:${GIT_LOG_FORMAT}`,
787
1747
  "--numstat",
788
1748
  "--find-renames"
789
1749
  ]);
790
- return parseGitLog(output);
1750
+ onProgress?.({ stage: "git_log_received", bytes: Buffer.byteLength(output, "utf8") });
1751
+ const commits = parseGitLog(output, (event) => onProgress?.(mapParseProgressToHistoryProgress(event)));
1752
+ onProgress?.({ stage: "git_log_parsed", commits: commits.length });
1753
+ return commits;
791
1754
  }
792
1755
  };
793
- var analyzeRepositoryEvolutionFromGit = (input) => {
1756
+ var analyzeRepositoryEvolutionFromGit = (input, onProgress) => {
794
1757
  const historyProvider = new GitCliHistoryProvider(new ExecGitCommandClient());
795
- return analyzeRepositoryEvolution(input, historyProvider);
1758
+ return analyzeRepositoryEvolution(input, historyProvider, onProgress);
1759
+ };
1760
+
1761
+ // ../risk-engine/dist/index.js
1762
+ var DEFAULT_RISK_ENGINE_CONFIG = {
1763
+ // Base dimensional influence. Risk is never dominated by a single dimension by default.
1764
+ dimensionWeights: {
1765
+ structural: 0.44,
1766
+ evolution: 0.36,
1767
+ external: 0.2
1768
+ },
1769
+ // Interaction terms activate only when both related dimensions are high.
1770
+ interactionWeights: {
1771
+ structuralEvolution: 0.35,
1772
+ centralInstability: 0.25,
1773
+ dependencyAmplification: 0.2
1774
+ },
1775
+ structuralFactorWeights: {
1776
+ fanIn: 0.3,
1777
+ fanOut: 0.25,
1778
+ depth: 0.2,
1779
+ cycleParticipation: 0.25
1780
+ },
1781
+ evolutionFactorWeights: {
1782
+ frequency: 0.26,
1783
+ churn: 0.24,
1784
+ recentVolatility: 0.2,
1785
+ ownershipConcentration: 0.18,
1786
+ busFactorRisk: 0.12
1787
+ },
1788
+ dependencyFactorWeights: {
1789
+ signals: 0.38,
1790
+ staleness: 0.16,
1791
+ maintainerConcentration: 0.16,
1792
+ transitiveBurden: 0.1,
1793
+ centrality: 0.08,
1794
+ chainDepth: 0.06,
1795
+ busFactorRisk: 0.06
1796
+ },
1797
+ quantileClamp: {
1798
+ lower: 0.05,
1799
+ upper: 0.95
1800
+ },
1801
+ hotspotTopPercent: 0.12,
1802
+ hotspotMinFiles: 3,
1803
+ hotspotMaxFiles: 30,
1804
+ couplingCluster: {
1805
+ minCoChangeCommits: 2,
1806
+ percentileThreshold: 0.9,
1807
+ floorScore: 0.35
1808
+ },
1809
+ amplificationZone: {
1810
+ pressureFloor: 0.2,
1811
+ percentileThreshold: 0.85,
1812
+ maxZones: 20
1813
+ },
1814
+ module: {
1815
+ maxPrefixSegments: 2,
1816
+ rootLabel: "(root)",
1817
+ commonSourceRoots: ["src", "lib", "app", "packages"]
1818
+ },
1819
+ dependencySignals: {
1820
+ inheritedSignalMultiplier: 0.45,
1821
+ // At this age, staleness reaches 50% risk.
1822
+ abandonedHalfLifeDays: 540,
1823
+ missingMetadataPenalty: 0.5
1824
+ },
1825
+ externalDimension: {
1826
+ topDependencyPercentile: 0.85,
1827
+ dependencyDepthHalfLife: 6
1828
+ }
1829
+ };
1830
+ var clamp01 = (value) => {
1831
+ if (Number.isNaN(value)) {
1832
+ return 0;
1833
+ }
1834
+ if (value <= 0) {
1835
+ return 0;
1836
+ }
1837
+ if (value >= 1) {
1838
+ return 1;
1839
+ }
1840
+ return value;
1841
+ };
1842
+ var round44 = (value) => Number(value.toFixed(4));
1843
+ var average = (values) => {
1844
+ if (values.length === 0) {
1845
+ return 0;
1846
+ }
1847
+ const total = values.reduce((sum, current) => sum + current, 0);
1848
+ return total / values.length;
1849
+ };
1850
+ var percentile = (values, p) => {
1851
+ if (values.length === 0) {
1852
+ return 0;
1853
+ }
1854
+ if (values.length === 1) {
1855
+ return values[0] ?? 0;
1856
+ }
1857
+ const sorted = [...values].sort((a, b) => a - b);
1858
+ const position = clamp01(p) * (sorted.length - 1);
1859
+ const lowerIndex = Math.floor(position);
1860
+ const upperIndex = Math.ceil(position);
1861
+ const lower = sorted[lowerIndex] ?? 0;
1862
+ const upper = sorted[upperIndex] ?? lower;
1863
+ if (lowerIndex === upperIndex) {
1864
+ return lower;
1865
+ }
1866
+ const ratio = position - lowerIndex;
1867
+ return lower + (upper - lower) * ratio;
1868
+ };
1869
+ var saturatingComposite = (baseline, amplifications) => {
1870
+ let value = clamp01(baseline);
1871
+ for (const amplification of amplifications) {
1872
+ const boundedAmplification = clamp01(amplification);
1873
+ value += (1 - value) * boundedAmplification;
1874
+ }
1875
+ return clamp01(value);
1876
+ };
1877
+ var halfLifeRisk = (value, halfLife) => {
1878
+ if (value <= 0 || halfLife <= 0) {
1879
+ return 0;
1880
+ }
1881
+ return clamp01(value / (value + halfLife));
1882
+ };
1883
+ var normalizeWeights = (weights, enabled) => {
1884
+ let total = 0;
1885
+ const result = { ...weights };
1886
+ for (const key of Object.keys(result)) {
1887
+ const enabledValue = enabled[key];
1888
+ if (!enabledValue) {
1889
+ result[key] = 0;
1890
+ continue;
1891
+ }
1892
+ const value = Math.max(0, result[key]);
1893
+ result[key] = value;
1894
+ total += value;
1895
+ }
1896
+ if (total === 0) {
1897
+ const activeKeys = Object.keys(result).filter((key) => enabled[key]);
1898
+ if (activeKeys.length === 0) {
1899
+ return result;
1900
+ }
1901
+ const uniform = 1 / activeKeys.length;
1902
+ for (const key of activeKeys) {
1903
+ result[key] = uniform;
1904
+ }
1905
+ return result;
1906
+ }
1907
+ for (const key of Object.keys(result)) {
1908
+ if (enabled[key]) {
1909
+ result[key] = result[key] / total;
1910
+ }
1911
+ }
1912
+ return result;
1913
+ };
1914
+ var logScale = (value) => Math.log1p(Math.max(0, value));
1915
+ var buildQuantileScale = (values, lowerPercentile, upperPercentile) => {
1916
+ if (values.length === 0) {
1917
+ return { lower: 0, upper: 0 };
1918
+ }
1919
+ return {
1920
+ lower: percentile(values, lowerPercentile),
1921
+ upper: percentile(values, upperPercentile)
1922
+ };
1923
+ };
1924
+ var normalizeWithScale = (value, scale) => {
1925
+ if (scale.upper <= scale.lower) {
1926
+ return value > 0 ? 1 : 0;
1927
+ }
1928
+ return clamp01((value - scale.lower) / (scale.upper - scale.lower));
1929
+ };
1930
+ var normalizePath2 = (path) => path.replaceAll("\\", "/");
1931
+ var dependencySignalWeights = {
1932
+ single_maintainer: 0.3,
1933
+ abandoned: 0.3,
1934
+ high_centrality: 0.16,
1935
+ deep_chain: 0.14,
1936
+ high_fanout: 0.06,
1937
+ metadata_unavailable: 0.04
1938
+ };
1939
+ var dependencySignalWeightBudget = Object.values(dependencySignalWeights).reduce(
1940
+ (sum, value) => sum + value,
1941
+ 0
1942
+ );
1943
+ var computeDependencySignalScore = (ownSignals, inheritedSignals, inheritedSignalMultiplier) => {
1944
+ const ownWeight = ownSignals.reduce((sum, signal) => sum + (dependencySignalWeights[signal] ?? 0), 0);
1945
+ const inheritedWeight = inheritedSignals.reduce(
1946
+ (sum, signal) => sum + (dependencySignalWeights[signal] ?? 0),
1947
+ 0
1948
+ );
1949
+ const weightedTotal = ownWeight + inheritedWeight * inheritedSignalMultiplier;
1950
+ const maxWeightedTotal = dependencySignalWeightBudget * (1 + inheritedSignalMultiplier);
1951
+ if (maxWeightedTotal <= 0) {
1952
+ return 0;
1953
+ }
1954
+ return clamp01(weightedTotal / maxWeightedTotal);
1955
+ };
1956
+ var computeDependencyScores = (external, config) => {
1957
+ if (!external.available) {
1958
+ return {
1959
+ dependencyScores: [],
1960
+ repositoryExternalPressure: 0
1961
+ };
1962
+ }
1963
+ const transitiveCounts = external.dependencies.map(
1964
+ (dependency) => logScale(dependency.transitiveDependencies.length)
1965
+ );
1966
+ const dependentCounts = external.dependencies.map((dependency) => logScale(dependency.dependents));
1967
+ const chainDepths = external.dependencies.map((dependency) => dependency.dependencyDepth);
1968
+ const transitiveScale = buildQuantileScale(
1969
+ transitiveCounts,
1970
+ config.quantileClamp.lower,
1971
+ config.quantileClamp.upper
1972
+ );
1973
+ const dependentScale = buildQuantileScale(
1974
+ dependentCounts,
1975
+ config.quantileClamp.lower,
1976
+ config.quantileClamp.upper
1977
+ );
1978
+ const chainDepthScale = buildQuantileScale(
1979
+ chainDepths,
1980
+ config.quantileClamp.lower,
1981
+ config.quantileClamp.upper
1982
+ );
1983
+ const dependencyScores = external.dependencies.map((dependency) => {
1984
+ const signalScore = computeDependencySignalScore(
1985
+ dependency.ownRiskSignals,
1986
+ dependency.inheritedRiskSignals,
1987
+ config.dependencySignals.inheritedSignalMultiplier
1988
+ );
1989
+ const maintainerConcentrationRisk = dependency.maintainerCount === null ? config.dependencySignals.missingMetadataPenalty : clamp01(1 / Math.max(1, dependency.maintainerCount));
1990
+ const stalenessRisk = dependency.daysSinceLastRelease === null ? config.dependencySignals.missingMetadataPenalty : halfLifeRisk(
1991
+ dependency.daysSinceLastRelease,
1992
+ config.dependencySignals.abandonedHalfLifeDays
1993
+ );
1994
+ const transitiveBurdenRisk = normalizeWithScale(
1995
+ logScale(dependency.transitiveDependencies.length),
1996
+ transitiveScale
1997
+ );
1998
+ const centralityRisk = normalizeWithScale(logScale(dependency.dependents), dependentScale);
1999
+ const chainDepthRisk = normalizeWithScale(dependency.dependencyDepth, chainDepthScale);
2000
+ const busFactorRisk = dependency.busFactor === null ? config.dependencySignals.missingMetadataPenalty : clamp01(1 / Math.max(1, dependency.busFactor));
2001
+ const weights = config.dependencyFactorWeights;
2002
+ const normalizedScore = clamp01(
2003
+ signalScore * weights.signals + stalenessRisk * weights.staleness + maintainerConcentrationRisk * weights.maintainerConcentration + transitiveBurdenRisk * weights.transitiveBurden + centralityRisk * weights.centrality + chainDepthRisk * weights.chainDepth + busFactorRisk * weights.busFactorRisk
2004
+ );
2005
+ return {
2006
+ dependency: dependency.name,
2007
+ score: round44(normalizedScore * 100),
2008
+ normalizedScore: round44(normalizedScore),
2009
+ ownRiskSignals: dependency.ownRiskSignals,
2010
+ inheritedRiskSignals: dependency.inheritedRiskSignals
2011
+ };
2012
+ }).sort(
2013
+ (a, b) => b.normalizedScore - a.normalizedScore || a.dependency.localeCompare(b.dependency)
2014
+ );
2015
+ const normalizedValues = dependencyScores.map((score) => score.normalizedScore);
2016
+ const highDependencyRisk = dependencyScores.length === 0 ? 0 : percentile(normalizedValues, config.externalDimension.topDependencyPercentile);
2017
+ const averageDependencyRisk = average(normalizedValues);
2018
+ const depthRisk = halfLifeRisk(
2019
+ external.metrics.dependencyDepth,
2020
+ config.externalDimension.dependencyDepthHalfLife
2021
+ );
2022
+ const repositoryExternalPressure = clamp01(
2023
+ highDependencyRisk * 0.5 + averageDependencyRisk * 0.3 + depthRisk * 0.2
2024
+ );
2025
+ return {
2026
+ dependencyScores,
2027
+ repositoryExternalPressure: round44(repositoryExternalPressure)
2028
+ };
2029
+ };
2030
+ var mapEvolutionByFile = (evolution) => {
2031
+ if (!evolution.available) {
2032
+ return /* @__PURE__ */ new Map();
2033
+ }
2034
+ return new Map(
2035
+ evolution.files.map((fileMetrics) => [normalizePath2(fileMetrics.filePath), fileMetrics])
2036
+ );
2037
+ };
2038
+ var computeEvolutionScales = (evolutionByFile, config) => {
2039
+ const evolutionFiles = [...evolutionByFile.values()];
2040
+ return {
2041
+ commitCount: buildQuantileScale(
2042
+ evolutionFiles.map((metrics) => logScale(metrics.commitCount)),
2043
+ config.quantileClamp.lower,
2044
+ config.quantileClamp.upper
2045
+ ),
2046
+ churnTotal: buildQuantileScale(
2047
+ evolutionFiles.map((metrics) => logScale(metrics.churnTotal)),
2048
+ config.quantileClamp.lower,
2049
+ config.quantileClamp.upper
2050
+ ),
2051
+ busFactor: buildQuantileScale(
2052
+ evolutionFiles.map((metrics) => metrics.busFactor),
2053
+ config.quantileClamp.lower,
2054
+ config.quantileClamp.upper
2055
+ )
2056
+ };
2057
+ };
2058
+ var inferModuleName = (filePath, config) => {
2059
+ const normalized = normalizePath2(filePath);
2060
+ const parts = normalized.split("/").filter((part) => part.length > 0);
2061
+ if (parts.length <= 1) {
2062
+ return config.module.rootLabel;
2063
+ }
2064
+ const first = parts[0];
2065
+ if (first === void 0) {
2066
+ return config.module.rootLabel;
2067
+ }
2068
+ if (!config.module.commonSourceRoots.includes(first)) {
2069
+ return first;
2070
+ }
2071
+ if (parts.length <= config.module.maxPrefixSegments) {
2072
+ return first;
2073
+ }
2074
+ return parts.slice(0, config.module.maxPrefixSegments).join("/");
2075
+ };
2076
+ var buildFragileClusters = (structural, evolution, fileScoresByFile, config) => {
2077
+ const clusters = [];
2078
+ let cycleClusterCount = 0;
2079
+ for (const cycle of structural.cycles) {
2080
+ const files = [...new Set(cycle.nodes.map((node) => normalizePath2(node)))].filter(
2081
+ (filePath) => fileScoresByFile.has(filePath)
2082
+ );
2083
+ if (files.length < 2) {
2084
+ continue;
2085
+ }
2086
+ files.sort((a, b) => a.localeCompare(b));
2087
+ const averageRisk = average(
2088
+ files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
2089
+ );
2090
+ const cycleSizeRisk = clamp01((files.length - 1) / 5);
2091
+ const score = round44(clamp01(averageRisk * 0.75 + cycleSizeRisk * 0.25) * 100);
2092
+ cycleClusterCount += 1;
2093
+ clusters.push({
2094
+ id: `cycle:${cycleClusterCount}`,
2095
+ kind: "structural_cycle",
2096
+ files,
2097
+ score
2098
+ });
2099
+ }
2100
+ if (evolution.available && evolution.coupling.pairs.length > 0) {
2101
+ const candidates = evolution.coupling.pairs.filter(
2102
+ (pair) => pair.coChangeCommits >= config.couplingCluster.minCoChangeCommits
2103
+ );
2104
+ const threshold = Math.max(
2105
+ config.couplingCluster.floorScore,
2106
+ percentile(
2107
+ candidates.map((pair) => pair.couplingScore),
2108
+ config.couplingCluster.percentileThreshold
2109
+ )
2110
+ );
2111
+ const selectedPairs = candidates.filter((pair) => pair.couplingScore >= threshold).map((pair) => ({
2112
+ fileA: normalizePath2(pair.fileA),
2113
+ fileB: normalizePath2(pair.fileB),
2114
+ couplingScore: pair.couplingScore
2115
+ })).filter(
2116
+ (pair) => pair.fileA !== pair.fileB && fileScoresByFile.has(pair.fileA) && fileScoresByFile.has(pair.fileB)
2117
+ );
2118
+ const adjacency = /* @__PURE__ */ new Map();
2119
+ for (const pair of selectedPairs) {
2120
+ const aNeighbors = adjacency.get(pair.fileA) ?? /* @__PURE__ */ new Set();
2121
+ aNeighbors.add(pair.fileB);
2122
+ adjacency.set(pair.fileA, aNeighbors);
2123
+ const bNeighbors = adjacency.get(pair.fileB) ?? /* @__PURE__ */ new Set();
2124
+ bNeighbors.add(pair.fileA);
2125
+ adjacency.set(pair.fileB, bNeighbors);
2126
+ }
2127
+ const visited = /* @__PURE__ */ new Set();
2128
+ let couplingClusterCount = 0;
2129
+ const orderedStarts = [...adjacency.keys()].sort((a, b) => a.localeCompare(b));
2130
+ for (const start of orderedStarts) {
2131
+ if (visited.has(start)) {
2132
+ continue;
2133
+ }
2134
+ const stack = [start];
2135
+ const files = [];
2136
+ while (stack.length > 0) {
2137
+ const current = stack.pop();
2138
+ if (current === void 0 || visited.has(current)) {
2139
+ continue;
2140
+ }
2141
+ visited.add(current);
2142
+ files.push(current);
2143
+ const neighbors = adjacency.get(current);
2144
+ if (neighbors === void 0) {
2145
+ continue;
2146
+ }
2147
+ for (const neighbor of neighbors) {
2148
+ if (!visited.has(neighbor)) {
2149
+ stack.push(neighbor);
2150
+ }
2151
+ }
2152
+ }
2153
+ if (files.length < 2) {
2154
+ continue;
2155
+ }
2156
+ files.sort((a, b) => a.localeCompare(b));
2157
+ const fileSet = new Set(files);
2158
+ const componentPairs = selectedPairs.filter(
2159
+ (pair) => fileSet.has(pair.fileA) && fileSet.has(pair.fileB)
2160
+ );
2161
+ const meanFileRisk = average(
2162
+ files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
2163
+ );
2164
+ const meanCoupling = average(componentPairs.map((pair) => pair.couplingScore));
2165
+ const score = round44(clamp01(meanFileRisk * 0.65 + meanCoupling * 0.35) * 100);
2166
+ couplingClusterCount += 1;
2167
+ clusters.push({
2168
+ id: `coupling:${couplingClusterCount}`,
2169
+ kind: "change_coupling",
2170
+ files,
2171
+ score
2172
+ });
2173
+ }
2174
+ }
2175
+ return clusters.sort(
2176
+ (a, b) => b.score - a.score || a.kind.localeCompare(b.kind) || a.id.localeCompare(b.id)
2177
+ );
2178
+ };
2179
+ var computeRiskSummary = (structural, evolution, external, config) => {
2180
+ const dependencyComputation = computeDependencyScores(external, config);
2181
+ const evolutionByFile = mapEvolutionByFile(evolution);
2182
+ const evolutionScales = computeEvolutionScales(evolutionByFile, config);
2183
+ const cycleFileSet = new Set(
2184
+ structural.cycles.flatMap((cycle) => cycle.nodes.map((node) => normalizePath2(node)))
2185
+ );
2186
+ const fanInScale = buildQuantileScale(
2187
+ structural.files.map((file) => logScale(file.fanIn)),
2188
+ config.quantileClamp.lower,
2189
+ config.quantileClamp.upper
2190
+ );
2191
+ const fanOutScale = buildQuantileScale(
2192
+ structural.files.map((file) => logScale(file.fanOut)),
2193
+ config.quantileClamp.lower,
2194
+ config.quantileClamp.upper
2195
+ );
2196
+ const depthScale = buildQuantileScale(
2197
+ structural.files.map((file) => file.depth),
2198
+ config.quantileClamp.lower,
2199
+ config.quantileClamp.upper
2200
+ );
2201
+ const dimensionWeights = normalizeWeights(config.dimensionWeights, {
2202
+ structural: true,
2203
+ evolution: evolution.available,
2204
+ external: external.available
2205
+ });
2206
+ const fileRiskContexts = structural.files.map((file) => {
2207
+ const filePath = normalizePath2(file.id);
2208
+ const inCycle = cycleFileSet.has(filePath) ? 1 : 0;
2209
+ const fanInRisk = normalizeWithScale(logScale(file.fanIn), fanInScale);
2210
+ const fanOutRisk = normalizeWithScale(logScale(file.fanOut), fanOutScale);
2211
+ const depthRisk = normalizeWithScale(file.depth, depthScale);
2212
+ const structuralWeights = config.structuralFactorWeights;
2213
+ const structuralFactor = clamp01(
2214
+ fanInRisk * structuralWeights.fanIn + fanOutRisk * structuralWeights.fanOut + depthRisk * structuralWeights.depth + inCycle * structuralWeights.cycleParticipation
2215
+ );
2216
+ const structuralCentrality = clamp01((fanInRisk + fanOutRisk) / 2);
2217
+ let evolutionFactor = 0;
2218
+ const evolutionMetrics = evolutionByFile.get(filePath);
2219
+ if (evolution.available && evolutionMetrics !== void 0) {
2220
+ const frequencyRisk = normalizeWithScale(
2221
+ logScale(evolutionMetrics.commitCount),
2222
+ evolutionScales.commitCount
2223
+ );
2224
+ const churnRisk = normalizeWithScale(
2225
+ logScale(evolutionMetrics.churnTotal),
2226
+ evolutionScales.churnTotal
2227
+ );
2228
+ const volatilityRisk = clamp01(evolutionMetrics.recentVolatility);
2229
+ const ownershipConcentrationRisk = clamp01(evolutionMetrics.topAuthorShare);
2230
+ const busFactorRisk = clamp01(1 - normalizeWithScale(evolutionMetrics.busFactor, evolutionScales.busFactor));
2231
+ const evolutionWeights = config.evolutionFactorWeights;
2232
+ evolutionFactor = clamp01(
2233
+ frequencyRisk * evolutionWeights.frequency + churnRisk * evolutionWeights.churn + volatilityRisk * evolutionWeights.recentVolatility + ownershipConcentrationRisk * evolutionWeights.ownershipConcentration + busFactorRisk * evolutionWeights.busFactorRisk
2234
+ );
2235
+ }
2236
+ const dependencyAffinity = clamp01(structuralCentrality * 0.6 + evolutionFactor * 0.4);
2237
+ const externalFactor = external.available ? clamp01(dependencyComputation.repositoryExternalPressure * dependencyAffinity) : 0;
2238
+ const baseline = structuralFactor * dimensionWeights.structural + evolutionFactor * dimensionWeights.evolution + externalFactor * dimensionWeights.external;
2239
+ const interactions = [
2240
+ structuralFactor * evolutionFactor * config.interactionWeights.structuralEvolution,
2241
+ structuralCentrality * evolutionFactor * config.interactionWeights.centralInstability,
2242
+ externalFactor * Math.max(structuralFactor, evolutionFactor) * config.interactionWeights.dependencyAmplification
2243
+ ];
2244
+ const normalizedScore = saturatingComposite(baseline, interactions);
2245
+ return {
2246
+ file: filePath,
2247
+ score: round44(normalizedScore * 100),
2248
+ normalizedScore: round44(normalizedScore),
2249
+ factors: {
2250
+ structural: round44(structuralFactor),
2251
+ evolution: round44(evolutionFactor),
2252
+ external: round44(externalFactor)
2253
+ },
2254
+ structuralCentrality: round44(structuralCentrality)
2255
+ };
2256
+ }).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file));
2257
+ const fileScores = fileRiskContexts.map((context) => ({
2258
+ file: context.file,
2259
+ score: context.score,
2260
+ normalizedScore: context.normalizedScore,
2261
+ factors: context.factors
2262
+ }));
2263
+ const fileScoresByFile = new Map(fileScores.map((fileScore) => [fileScore.file, fileScore]));
2264
+ const hotspotsCount = Math.min(
2265
+ config.hotspotMaxFiles,
2266
+ Math.max(config.hotspotMinFiles, Math.ceil(fileScores.length * config.hotspotTopPercent))
2267
+ );
2268
+ const hotspots = fileScores.slice(0, hotspotsCount).map((fileScore) => ({
2269
+ file: fileScore.file,
2270
+ score: fileScore.score,
2271
+ factors: fileScore.factors
2272
+ }));
2273
+ const moduleFiles = /* @__PURE__ */ new Map();
2274
+ for (const fileScore of fileScores) {
2275
+ const moduleName = inferModuleName(fileScore.file, config);
2276
+ const values = moduleFiles.get(moduleName) ?? [];
2277
+ values.push(fileScore.normalizedScore);
2278
+ moduleFiles.set(moduleName, values);
2279
+ }
2280
+ const moduleScores = [...moduleFiles.entries()].map(([module, values]) => {
2281
+ const averageScore = average(values);
2282
+ const peakScore = values.reduce((max, value) => Math.max(max, value), 0);
2283
+ const normalizedScore = clamp01(averageScore * 0.65 + peakScore * 0.35);
2284
+ return {
2285
+ module,
2286
+ score: round44(normalizedScore * 100),
2287
+ normalizedScore: round44(normalizedScore),
2288
+ fileCount: values.length
2289
+ };
2290
+ }).sort((a, b) => b.score - a.score || a.module.localeCompare(b.module));
2291
+ const fragileClusters = buildFragileClusters(structural, evolution, fileScoresByFile, config);
2292
+ const externalPressures = fileScores.map((fileScore) => fileScore.factors.external);
2293
+ const pressureThreshold = Math.max(
2294
+ config.amplificationZone.pressureFloor,
2295
+ percentile(externalPressures, config.amplificationZone.percentileThreshold)
2296
+ );
2297
+ const dependencyAmplificationZones = fileScores.map((fileScore) => {
2298
+ const intensity = clamp01(
2299
+ fileScore.factors.external * Math.max(fileScore.factors.structural, fileScore.factors.evolution)
2300
+ );
2301
+ const normalizedZoneScore = clamp01(intensity * 0.7 + fileScore.normalizedScore * 0.3);
2302
+ return {
2303
+ file: fileScore.file,
2304
+ score: round44(normalizedZoneScore * 100),
2305
+ externalPressure: fileScore.factors.external
2306
+ };
2307
+ }).filter((zone) => external.available && zone.externalPressure >= pressureThreshold).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file)).slice(0, config.amplificationZone.maxZones).map((zone) => ({
2308
+ ...zone,
2309
+ externalPressure: round44(zone.externalPressure)
2310
+ }));
2311
+ const structuralDimension = average(fileScores.map((fileScore) => fileScore.factors.structural));
2312
+ const evolutionDimension = average(fileScores.map((fileScore) => fileScore.factors.evolution));
2313
+ const externalDimension = dependencyComputation.repositoryExternalPressure;
2314
+ const topCentralSlice = Math.max(1, Math.ceil(fileRiskContexts.length * 0.1));
2315
+ const criticalInstability = average(
2316
+ [...fileRiskContexts].sort(
2317
+ (a, b) => b.structuralCentrality * b.factors.evolution - a.structuralCentrality * a.factors.evolution || a.file.localeCompare(b.file)
2318
+ ).slice(0, topCentralSlice).map((context) => context.structuralCentrality * context.factors.evolution)
2319
+ );
2320
+ const dependencyAmplification = average(
2321
+ dependencyAmplificationZones.map(
2322
+ (zone) => clamp01(zone.externalPressure * zone.score / 100)
2323
+ )
2324
+ );
2325
+ const repositoryBaseline = structuralDimension * dimensionWeights.structural + evolutionDimension * dimensionWeights.evolution + externalDimension * dimensionWeights.external;
2326
+ const repositoryNormalizedScore = saturatingComposite(repositoryBaseline, [
2327
+ structuralDimension * evolutionDimension * config.interactionWeights.structuralEvolution,
2328
+ criticalInstability * config.interactionWeights.centralInstability,
2329
+ dependencyAmplification * config.interactionWeights.dependencyAmplification
2330
+ ]);
2331
+ return {
2332
+ repositoryScore: round44(repositoryNormalizedScore * 100),
2333
+ normalizedScore: round44(repositoryNormalizedScore),
2334
+ hotspots,
2335
+ fragileClusters,
2336
+ dependencyAmplificationZones,
2337
+ fileScores,
2338
+ moduleScores,
2339
+ dependencyScores: dependencyComputation.dependencyScores
2340
+ };
2341
+ };
2342
// Merge user-supplied risk-engine overrides onto the built-in defaults.
// Top-level scalar settings are shallow-merged; each nested section is
// merged one level deep so a partial section override keeps the rest of
// that section's defaults.
var mergeConfig = (overrides) => {
  if (overrides === void 0) {
    return DEFAULT_RISK_ENGINE_CONFIG;
  }
  // Nested config sections that need their own one-level-deep merge.
  const sectionKeys = [
    "dimensionWeights",
    "interactionWeights",
    "structuralFactorWeights",
    "evolutionFactorWeights",
    "dependencyFactorWeights",
    "quantileClamp",
    "couplingCluster",
    "amplificationZone",
    "module",
    "dependencySignals",
    "externalDimension"
  ];
  const merged = { ...DEFAULT_RISK_ENGINE_CONFIG, ...overrides };
  for (const key of sectionKeys) {
    // Spreading an undefined override section is a no-op, so absent
    // sections simply fall back to the defaults.
    merged[key] = { ...DEFAULT_RISK_ENGINE_CONFIG[key], ...overrides[key] };
  }
  return merged;
};
2395
// Public entry point for risk scoring: resolve the effective engine
// configuration, then delegate to the core computation over the three
// analysis dimensions (structural, evolution, external).
var computeRepositoryRiskSummary = (input) => {
  const { structural, evolution, external, config } = input;
  return computeRiskSummary(structural, evolution, external, mergeConfig(config));
};
797
2399
 
798
2400
  // src/application/run-analyze-command.ts
799
2401
// Resolve the analysis target against the invocation directory,
// defaulting to "." (the invocation directory itself) when no path was given.
var resolveTargetPath = (inputPath, cwd) => {
  const relativeTarget = inputPath ?? ".";
  return resolve2(cwd, relativeTarget);
};
800
- var runAnalyzeCommand = (inputPath) => {
2402
// Translate external-dependency analysis progress events into log lines.
// Metadata-fetch progress is throttled: only the first step, the final
// step, and every step that is >= 25 completions past the last report
// are logged at info level.
var createExternalProgressReporter = (logger) => {
  let lastReported = 0;
  return (event) => {
    const { stage } = event;
    if (stage === "package_json_loaded") {
      logger.debug("external: package.json loaded");
    } else if (stage === "lockfile_selected") {
      logger.info(`external: lockfile selected (${event.kind})`);
    } else if (stage === "lockfile_parsed") {
      logger.info(
        `external: parsed ${event.dependencyNodes} locked dependencies (${event.directDependencies} direct)`
      );
    } else if (stage === "metadata_fetch_started") {
      logger.info(`external: fetching dependency metadata (${event.total} packages)`);
    } else if (stage === "metadata_fetch_progress") {
      // Guard against division by zero when there is nothing to fetch.
      const percent = event.total === 0 ? 100 : Math.floor(event.completed / event.total * 100);
      const shouldLog = event.completed === event.total || event.completed === 1 || event.completed - lastReported >= 25;
      if (shouldLog) {
        lastReported = event.completed;
        logger.info(
          `external: metadata progress ${event.completed}/${event.total} (${percent}%)`
        );
        logger.debug(`external: last package processed ${event.packageName}`);
      }
    } else if (stage === "metadata_fetch_completed") {
      logger.info(`external: metadata fetch completed (${event.total} packages)`);
    } else if (stage === "summary_built") {
      logger.info(
        `external: summary built (${event.totalDependencies} total, ${event.directDependencies} direct)`
      );
    }
    // Unknown stages are ignored, matching the original switch's behavior.
  };
};
2442
// Translate structural-graph analysis progress events into log lines.
// Per-file progress is throttled: the first file, the last file, and
// every >= 50-file stride past the last report are logged.
var createStructuralProgressReporter = (logger) => {
  let previousCount = 0;
  return (event) => {
    if (event.stage === "files_discovered") {
      logger.info(`structural: discovered ${event.totalSourceFiles} source files`);
      return;
    }
    if (event.stage === "program_created") {
      logger.debug(`structural: TypeScript program created (${event.totalSourceFiles} files)`);
      return;
    }
    if (event.stage === "file_processed") {
      const isLast = event.processed === event.total;
      const isFirst = event.processed === 1;
      if (isLast || isFirst || event.processed - previousCount >= 50) {
        previousCount = event.processed;
        logger.info(`structural: resolved ${event.processed}/${event.total} files`);
        logger.debug(`structural: last file processed ${event.filePath}`);
      }
      return;
    }
    if (event.stage === "edges_resolved") {
      logger.info(`structural: resolved ${event.totalEdges} dependency edges`);
    }
  };
};
2465
// Translate git-evolution analysis progress events into log lines.
// Nested "history" events (git log streaming/parsing) are handled by a
// dedicated helper; parse progress is throttled to the first record, the
// last record, and every >= 500-record stride past the last report.
var createEvolutionProgressReporter = (logger) => {
  let parsedSoFar = 0;
  const onHistoryEvent = (inner) => {
    if (inner.stage === "git_log_received") {
      logger.info(`evolution: git log loaded (${inner.bytes} bytes)`);
      return;
    }
    if (inner.stage === "git_log_parsed") {
      logger.info(`evolution: parsed ${inner.commits} commits`);
      return;
    }
    if (inner.stage === "git_log_parse_progress") {
      const milestone = inner.parsedRecords === inner.totalRecords || inner.parsedRecords === 1 || inner.parsedRecords - parsedSoFar >= 500;
      if (milestone) {
        parsedSoFar = inner.parsedRecords;
        // Guard against division by zero on an empty history.
        const percent = inner.totalRecords === 0 ? 100 : Math.floor(inner.parsedRecords / inner.totalRecords * 100);
        logger.info(
          `evolution: parse progress ${inner.parsedRecords}/${inner.totalRecords} (${percent}%)`
        );
      }
    }
  };
  return (event) => {
    switch (event.stage) {
      case "checking_git_repository":
        logger.debug("evolution: checking git repository");
        break;
      case "not_git_repository":
        logger.warn("evolution: target path is not a git repository");
        break;
      case "loading_commit_history":
        logger.info("evolution: loading git history");
        break;
      case "history":
        onHistoryEvent(event.event);
        break;
      case "computing_metrics":
        logger.info("evolution: computing metrics");
        break;
      case "analysis_completed":
        logger.debug(`evolution: analysis completed (available=${event.available})`);
        break;
    }
  };
};
2504
// Orchestrate a full repository analysis: structural graph, git evolution,
// external dependency exposure, then the combined risk summary.
// Returns the raw summary object { structural, evolution, external, risk };
// all progress output goes through the injected logger (silent by default).
var runAnalyzeCommand = async (inputPath, authorIdentityMode, logger = createSilentLogger()) => {
  // Prefer the directory the user invoked from (npm sets INIT_CWD) over process.cwd().
  const baseDirectory = process.env["INIT_CWD"] ?? process.cwd();
  const targetPath = resolveTargetPath(inputPath, baseDirectory);
  logger.info(`analyzing repository: ${targetPath}`);
  logger.info("building structural graph");
  const structural = buildProjectGraphSummary({
    projectPath: targetPath,
    onProgress: createStructuralProgressReporter(logger)
  });
  logger.debug(
    `structural metrics: nodes=${structural.metrics.nodeCount}, edges=${structural.metrics.edgeCount}, cycles=${structural.metrics.cycleCount}`
  );
  logger.info(`analyzing git evolution (author identity: ${authorIdentityMode})`);
  const evolution = analyzeRepositoryEvolutionFromGit(
    { repositoryPath: targetPath, config: { authorIdentityMode } },
    createEvolutionProgressReporter(logger)
  );
  if (!evolution.available) {
    logger.warn(`evolution analysis unavailable: ${evolution.reason}`);
  } else {
    logger.debug(
      `evolution metrics: commits=${evolution.metrics.totalCommits}, files=${evolution.metrics.totalFiles}, hotspotThreshold=${evolution.metrics.hotspotThresholdCommitCount}`
    );
  }
  logger.info("analyzing external dependencies");
  const external = await analyzeDependencyExposureFromProject(
    { repositoryPath: targetPath },
    createExternalProgressReporter(logger)
  );
  if (!external.available) {
    logger.warn(`external analysis unavailable: ${external.reason}`);
  } else {
    logger.debug(
      `external metrics: total=${external.metrics.totalDependencies}, direct=${external.metrics.directDependencies}, transitive=${external.metrics.transitiveDependencies}`
    );
  }
  logger.info("computing risk summary");
  const risk = computeRepositoryRiskSummary({ structural, evolution, external });
  logger.info(`analysis completed (repositoryScore=${risk.repositoryScore})`);
  return { structural, evolution, external, risk };
};
811
2555
 
812
2556
  // src/index.ts
813
2557
// CLI wiring for the `codesentinel` binary (bundled from src/index.ts).
var program = new Command();
// Read the package version from the adjacent package.json so --version
// always matches the published package.
var packageJsonPath = resolve3(dirname(fileURLToPath(import.meta.url)), "../package.json");
var { version } = JSON.parse(readFileSync2(packageJsonPath, "utf8"));
program.name("codesentinel").description("Structural and evolutionary risk analysis for TypeScript/JavaScript codebases").version(version);
// `analyze` command: optional target path plus three enum-validated options
// (author identity heuristic, stderr log level, stdout output mode) and a
// --json shortcut that forces --output json.
program.command("analyze").argument("[path]", "path to the project to analyze").addOption(
  new Option(
    "--author-identity <mode>",
    "author identity mode: likely_merge (heuristic) or strict_email (deterministic)"
  ).choices(["likely_merge", "strict_email"]).default("likely_merge")
).addOption(
  new Option(
    "--log-level <level>",
    "log verbosity: silent, error, warn, info, debug (logs are written to stderr)"
  ).choices(["silent", "error", "warn", "info", "debug"]).default(parseLogLevel(process.env["CODESENTINEL_LOG_LEVEL"]))
).addOption(
  new Option(
    "--output <mode>",
    "output mode: summary (default) or json (full analysis object)"
  ).choices(["summary", "json"]).default("summary")
).option("--json", "shortcut for --output json").action(
  async (path, options) => {
    // Logs go to stderr so stdout stays machine-parseable JSON.
    const logger = createStderrLogger(options.logLevel);
    const summary = await runAnalyzeCommand(path, options.authorIdentity, logger);
    const outputMode = options.json === true ? "json" : options.output;
    process.stdout.write(`${formatAnalyzeOutput(summary, outputMode)}
`);
  }
);
// No arguments at all: print help and exit successfully.
// NOTE(review): this check is repeated below after the "--" normalization;
// when no "--" is present argv === process.argv, so the second check
// subsumes this one — presumably two source locations emitted both. Confirm
// before removing either.
if (process.argv.length <= 2) {
  program.outputHelp();
  process.exit(0);
}
// Normalize an argv of the form `node script -- <args>` by dropping the
// bare "--" separator so commander sees the real arguments.
var executablePath = process.argv[0] ?? "";
var scriptPath = process.argv[1] ?? "";
var argv = process.argv[2] === "--" ? [executablePath, scriptPath, ...process.argv.slice(3)] : process.argv;
if (argv.length <= 2) {
  program.outputHelp();
  process.exit(0);
}
// parseAsync because the analyze action handler is async (top-level await
// requires the ESM module context this bundle runs in).
await program.parseAsync(argv);
827
2597
  //# sourceMappingURL=index.js.map