@getcodesentinel/codesentinel 1.14.0 → 1.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1656,6 +1656,7 @@ var createReport = (snapshot, diff) => {
1656
1656
  confidence: repositoryConfidence(snapshot),
1657
1657
  dimensionScores: repositoryDimensionScores(snapshot)
1658
1658
  },
1659
+ quality: snapshot.analysis.quality,
1659
1660
  hotspots: hotspotItems(snapshot),
1660
1661
  structural: {
1661
1662
  cycleCount: snapshot.analysis.structural.metrics.cycleCount,
@@ -1727,6 +1728,26 @@ var renderTextReport = (report) => {
1727
1728
  lines.push(` external: ${report.repository.dimensionScores.external ?? "n/a"}`);
1728
1729
  lines.push(` interactions: ${report.repository.dimensionScores.interactions ?? "n/a"}`);
1729
1730
  lines.push("");
1731
+ lines.push("Quality Summary");
1732
+ lines.push(` qualityScore: ${report.quality.qualityScore}`);
1733
+ lines.push(` normalizedScore: ${report.quality.normalizedScore}`);
1734
+ lines.push(` modularity: ${report.quality.dimensions.modularity}`);
1735
+ lines.push(` changeHygiene: ${report.quality.dimensions.changeHygiene}`);
1736
+ lines.push(` staticAnalysis: ${report.quality.dimensions.staticAnalysis}`);
1737
+ lines.push(` complexity: ${report.quality.dimensions.complexity}`);
1738
+ lines.push(` duplication: ${report.quality.dimensions.duplication}`);
1739
+ lines.push(` testHealth: ${report.quality.dimensions.testHealth}`);
1740
+ lines.push(" topIssues:");
1741
+ for (const issue of report.quality.topIssues.slice(0, 5)) {
1742
+ const ruleSuffix = issue.ruleId === void 0 ? "" : ` [rule=${issue.ruleId}]`;
1743
+ lines.push(
1744
+ ` - [${issue.severity}] (${issue.dimension}) ${issue.id}${ruleSuffix} @ ${issue.target}: ${issue.message}`
1745
+ );
1746
+ }
1747
+ if (report.quality.topIssues.length === 0) {
1748
+ lines.push(" - none");
1749
+ }
1750
+ lines.push("");
1730
1751
  lines.push("Top Hotspots");
1731
1752
  for (const hotspot of report.hotspots) {
1732
1753
  lines.push(` - ${hotspot.target} | score=${hotspot.score}`);
@@ -1802,6 +1823,27 @@ var renderMarkdownReport = (report) => {
1802
1823
  lines.push(`- external: \`${report.repository.dimensionScores.external ?? "n/a"}\``);
1803
1824
  lines.push(`- interactions: \`${report.repository.dimensionScores.interactions ?? "n/a"}\``);
1804
1825
  lines.push("");
1826
+ lines.push("## Quality Summary");
1827
+ lines.push(`- qualityScore: \`${report.quality.qualityScore}\``);
1828
+ lines.push(`- normalizedScore: \`${report.quality.normalizedScore}\``);
1829
+ lines.push(`- modularity: \`${report.quality.dimensions.modularity}\``);
1830
+ lines.push(`- changeHygiene: \`${report.quality.dimensions.changeHygiene}\``);
1831
+ lines.push(`- staticAnalysis: \`${report.quality.dimensions.staticAnalysis}\``);
1832
+ lines.push(`- complexity: \`${report.quality.dimensions.complexity}\``);
1833
+ lines.push(`- duplication: \`${report.quality.dimensions.duplication}\``);
1834
+ lines.push(`- testHealth: \`${report.quality.dimensions.testHealth}\``);
1835
+ if (report.quality.topIssues.length === 0) {
1836
+ lines.push("- top issues: none");
1837
+ } else {
1838
+ lines.push("- top issues:");
1839
+ for (const issue of report.quality.topIssues.slice(0, 5)) {
1840
+ const ruleSuffix = issue.ruleId === void 0 ? "" : ` [rule=${issue.ruleId}]`;
1841
+ lines.push(
1842
+ ` - [${issue.severity}] \`${issue.id}\`${ruleSuffix} (\`${issue.dimension}\`) @ \`${issue.target}\`: ${issue.message}`
1843
+ );
1844
+ }
1845
+ }
1846
+ lines.push("");
1805
1847
  lines.push("## Top Hotspots");
1806
1848
  for (const hotspot of report.hotspots) {
1807
1849
  lines.push(`- **${hotspot.target}** (score: \`${hotspot.score}\`)`);
@@ -1947,14 +1989,20 @@ var requireDiff = (input, gateId) => {
1947
1989
  };
1948
1990
  var validateGateConfig = (input) => {
1949
1991
  const config = input.gateConfig;
1950
- if (config.maxRepoDelta !== void 0 && (!Number.isFinite(config.maxRepoDelta) || config.maxRepoDelta < 0)) {
1951
- throw new GovernanceConfigurationError("max-repo-delta must be a finite number >= 0");
1992
+ if (config.maxRiskDelta !== void 0 && (!Number.isFinite(config.maxRiskDelta) || config.maxRiskDelta < 0)) {
1993
+ throw new GovernanceConfigurationError("max-risk-delta must be a finite number >= 0");
1994
+ }
1995
+ if (config.maxQualityDelta !== void 0 && (!Number.isFinite(config.maxQualityDelta) || config.maxQualityDelta < 0)) {
1996
+ throw new GovernanceConfigurationError("max-quality-delta must be a finite number >= 0");
1952
1997
  }
1953
1998
  if (config.maxNewHotspots !== void 0 && (!Number.isInteger(config.maxNewHotspots) || config.maxNewHotspots < 0)) {
1954
1999
  throw new GovernanceConfigurationError("max-new-hotspots must be an integer >= 0");
1955
2000
  }
1956
- if (config.maxRepoScore !== void 0 && (!Number.isFinite(config.maxRepoScore) || config.maxRepoScore < 0 || config.maxRepoScore > 100)) {
1957
- throw new GovernanceConfigurationError("max-repo-score must be a number in [0, 100]");
2001
+ if (config.maxRiskScore !== void 0 && (!Number.isFinite(config.maxRiskScore) || config.maxRiskScore < 0 || config.maxRiskScore > 100)) {
2002
+ throw new GovernanceConfigurationError("max-risk-score must be a number in [0, 100]");
2003
+ }
2004
+ if (config.minQualityScore !== void 0 && (!Number.isFinite(config.minQualityScore) || config.minQualityScore < 0 || config.minQualityScore > 100)) {
2005
+ throw new GovernanceConfigurationError("min-quality-score must be a number in [0, 100]");
1958
2006
  }
1959
2007
  if (config.newHotspotScoreThreshold !== void 0 && (!Number.isFinite(config.newHotspotScoreThreshold) || config.newHotspotScoreThreshold < 0 || config.newHotspotScoreThreshold > 100)) {
1960
2008
  throw new GovernanceConfigurationError(
@@ -1967,41 +2015,76 @@ var evaluateGates = (input) => {
1967
2015
  const config = input.gateConfig;
1968
2016
  const violations = [];
1969
2017
  const evaluatedGates = [];
1970
- if (config.maxRepoScore !== void 0) {
1971
- evaluatedGates.push("max-repo-score");
2018
+ if (config.maxRiskScore !== void 0) {
2019
+ evaluatedGates.push("max-risk-score");
1972
2020
  const current = input.current.analysis.risk.riskScore;
1973
- if (current > config.maxRepoScore) {
2021
+ if (current > config.maxRiskScore) {
1974
2022
  violations.push(
1975
2023
  makeViolation(
1976
- "max-repo-score",
2024
+ "max-risk-score",
1977
2025
  "error",
1978
- `Repository score ${current} exceeds configured max ${config.maxRepoScore}.`,
2026
+ `Risk score ${current} exceeds configured max ${config.maxRiskScore}.`,
1979
2027
  [input.current.analysis.structural.targetPath],
1980
2028
  [{ kind: "repository_metric", metric: "riskScore" }]
1981
2029
  )
1982
2030
  );
1983
2031
  }
1984
2032
  }
1985
- if (config.maxRepoDelta !== void 0) {
1986
- evaluatedGates.push("max-repo-delta");
1987
- requireDiff(input, "max-repo-delta");
2033
+ if (config.minQualityScore !== void 0) {
2034
+ evaluatedGates.push("min-quality-score");
2035
+ const current = input.current.analysis.quality.qualityScore;
2036
+ if (current < config.minQualityScore) {
2037
+ violations.push(
2038
+ makeViolation(
2039
+ "min-quality-score",
2040
+ "error",
2041
+ `Quality score ${current} is below configured minimum ${config.minQualityScore}.`,
2042
+ [input.current.analysis.structural.targetPath],
2043
+ [{ kind: "repository_metric", metric: "qualityScore" }]
2044
+ )
2045
+ );
2046
+ }
2047
+ }
2048
+ if (config.maxRiskDelta !== void 0) {
2049
+ evaluatedGates.push("max-risk-delta");
2050
+ requireDiff(input, "max-risk-delta");
1988
2051
  const baseline = input.baseline;
1989
2052
  if (baseline === void 0) {
1990
- throw new GovernanceConfigurationError("max-repo-delta requires baseline snapshot");
2053
+ throw new GovernanceConfigurationError("max-risk-delta requires baseline snapshot");
1991
2054
  }
1992
2055
  const delta = input.current.analysis.risk.normalizedScore - baseline.analysis.risk.normalizedScore;
1993
- if (delta > config.maxRepoDelta) {
2056
+ if (delta > config.maxRiskDelta) {
1994
2057
  violations.push(
1995
2058
  makeViolation(
1996
- "max-repo-delta",
2059
+ "max-risk-delta",
1997
2060
  "error",
1998
- `Repository normalized score delta ${delta.toFixed(4)} exceeds allowed ${config.maxRepoDelta}.`,
2061
+ `Risk normalized score delta ${delta.toFixed(4)} exceeds allowed ${config.maxRiskDelta}.`,
1999
2062
  [input.current.analysis.structural.targetPath],
2000
2063
  [{ kind: "repository_metric", metric: "normalizedScore" }]
2001
2064
  )
2002
2065
  );
2003
2066
  }
2004
2067
  }
2068
+ if (config.maxQualityDelta !== void 0) {
2069
+ evaluatedGates.push("max-quality-delta");
2070
+ requireDiff(input, "max-quality-delta");
2071
+ const baseline = input.baseline;
2072
+ if (baseline === void 0) {
2073
+ throw new GovernanceConfigurationError("max-quality-delta requires baseline snapshot");
2074
+ }
2075
+ const delta = input.current.analysis.quality.normalizedScore - baseline.analysis.quality.normalizedScore;
2076
+ if (delta < -config.maxQualityDelta) {
2077
+ violations.push(
2078
+ makeViolation(
2079
+ "max-quality-delta",
2080
+ "error",
2081
+ `Quality normalized score delta ${delta.toFixed(4)} is below allowed minimum ${(-config.maxQualityDelta).toFixed(4)}.`,
2082
+ [input.current.analysis.structural.targetPath],
2083
+ [{ kind: "repository_metric", metric: "qualityNormalizedScore" }]
2084
+ )
2085
+ );
2086
+ }
2087
+ }
2005
2088
  if (config.noNewCycles === true) {
2006
2089
  evaluatedGates.push("no-new-cycles");
2007
2090
  requireDiff(input, "no-new-cycles");
@@ -2459,8 +2542,8 @@ var resolveAutoBaselineRef = async (input) => {
2459
2542
 
2460
2543
  // src/index.ts
2461
2544
  import { readFileSync as readFileSync2 } from "fs";
2462
- import { readFile as readFile5, writeFile as writeFile5 } from "fs/promises";
2463
- import { dirname as dirname2, resolve as resolve5 } from "path";
2545
+ import { readFile as readFile6, writeFile as writeFile5 } from "fs/promises";
2546
+ import { dirname as dirname2, resolve as resolve6 } from "path";
2464
2547
  import { fileURLToPath } from "url";
2465
2548
 
2466
2549
  // src/application/format-analyze-output.ts
@@ -2500,6 +2583,12 @@ var createSummaryShape = (summary) => ({
2500
2583
  })),
2501
2584
  fragileClusterCount: summary.risk.fragileClusters.length,
2502
2585
  dependencyAmplificationZoneCount: summary.risk.dependencyAmplificationZones.length
2586
+ },
2587
+ quality: {
2588
+ qualityScore: summary.quality.qualityScore,
2589
+ normalizedScore: summary.quality.normalizedScore,
2590
+ dimensions: summary.quality.dimensions,
2591
+ topIssues: summary.quality.topIssues.slice(0, 5)
2503
2592
  }
2504
2593
  });
2505
2594
  var formatAnalyzeOutput = (summary, mode) => mode === "json" ? JSON.stringify(summary, null, 2) : JSON.stringify(createSummaryShape(summary), null, 2);
@@ -2850,7 +2939,7 @@ import { mkdir, readFile, writeFile } from "fs/promises";
2850
2939
  import { homedir } from "os";
2851
2940
  import { dirname, join as join3 } from "path";
2852
2941
  import { stderr, stdin } from "process";
2853
- import { clearScreenDown, cursorTo, emitKeypressEvents, moveCursor } from "readline";
2942
+ import { clearScreenDown, cursorTo, emitKeypressEvents } from "readline";
2854
2943
  var UPDATE_CHECK_INTERVAL_MS = 24 * 60 * 60 * 1e3;
2855
2944
  var UPDATE_CACHE_PATH = join3(homedir(), ".cache", "codesentinel", "update-check.json");
2856
2945
  var SEMVER_PATTERN = /^(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(?:-(?<prerelease>[0-9A-Za-z.-]+))?(?:\+[0-9A-Za-z.-]+)?$/;
@@ -3042,11 +3131,15 @@ var fetchLatestVersion = async (packageName) => {
3042
3131
  }
3043
3132
  return parseNpmViewVersionOutput(result.stdout);
3044
3133
  };
3045
- var renderUpdatePrompt = (latestVersion, currentVersion, selectedIndex) => {
3046
- const options = ["Install update now", "Not now (continue current command)"];
3134
+ var renderUpdatePrompt = (packageName, latestVersion, currentVersion, selectedIndex) => {
3135
+ const options = [
3136
+ `1. Update now (runs \`npm install -g ${packageName}\`)`,
3137
+ "2. Skip"
3138
+ ];
3047
3139
  const lines = [
3048
- `${ANSI.cyan}${ANSI.bold}CodeSentinel Update Available${ANSI.reset}`,
3049
- `${ANSI.dim}Current: ${currentVersion} Latest: ${latestVersion}${ANSI.reset}`,
3140
+ ` ${ANSI.bold}${ANSI.cyan}\u2728 Update available! ${currentVersion} -> ${latestVersion}${ANSI.reset}`,
3141
+ "",
3142
+ ` ${ANSI.dim}Release notes: https://github.com/getcodesentinel/codesentinel/releases/latest${ANSI.reset}`,
3050
3143
  "",
3051
3144
  ...options.map((option, index) => {
3052
3145
  const selected = index === selectedIndex;
@@ -3055,12 +3148,12 @@ var renderUpdatePrompt = (latestVersion, currentVersion, selectedIndex) => {
3055
3148
  return `${prefix} ${text}`;
3056
3149
  }),
3057
3150
  "",
3058
- `${ANSI.dim}Use \u2191/\u2193 to choose, Enter to confirm.${ANSI.reset}`
3151
+ ` ${ANSI.dim}Use \u2191/\u2193 to choose. Press enter to continue${ANSI.reset}`
3059
3152
  ];
3060
3153
  stderr.write(lines.join("\n"));
3061
3154
  return lines.length;
3062
3155
  };
3063
- var promptInstall = async (latestVersion, currentVersion) => {
3156
+ var promptInstall = async (packageName, latestVersion, currentVersion) => {
3064
3157
  if (!stdin.isTTY || !stderr.isTTY || typeof stdin.setRawMode !== "function") {
3065
3158
  stderr.write(
3066
3159
  `New version ${latestVersion} is available (current ${currentVersion}). Run: npm install -g @getcodesentinel/codesentinel@latest
@@ -3068,21 +3161,17 @@ var promptInstall = async (latestVersion, currentVersion) => {
3068
3161
  );
3069
3162
  return "skip";
3070
3163
  }
3071
- return await new Promise((resolve6) => {
3164
+ return await new Promise((resolve7) => {
3072
3165
  emitKeypressEvents(stdin);
3073
3166
  let selectedIndex = 0;
3074
- let renderedLines = 0;
3075
3167
  const previousRawMode = stdin.isRaw;
3076
3168
  const clearPromptArea = () => {
3077
- if (renderedLines > 0) {
3078
- moveCursor(stderr, 0, -(renderedLines - 1));
3079
- }
3080
- cursorTo(stderr, 0);
3169
+ cursorTo(stderr, 0, 0);
3081
3170
  clearScreenDown(stderr);
3082
3171
  };
3083
3172
  const redraw = () => {
3084
3173
  clearPromptArea();
3085
- renderedLines = renderUpdatePrompt(latestVersion, currentVersion, selectedIndex);
3174
+ renderUpdatePrompt(packageName, latestVersion, currentVersion, selectedIndex);
3086
3175
  };
3087
3176
  const cleanup = (choice) => {
3088
3177
  stdin.off("keypress", onKeypress);
@@ -3094,10 +3183,10 @@ var promptInstall = async (latestVersion, currentVersion) => {
3094
3183
  if (choice === "install") {
3095
3184
  stderr.write(`${ANSI.yellow}Installing latest CodeSentinel...${ANSI.reset}
3096
3185
  `);
3097
- } else if (renderedLines > 0) {
3186
+ } else {
3098
3187
  stderr.write("\n");
3099
3188
  }
3100
- resolve6(choice);
3189
+ resolve7(choice);
3101
3190
  };
3102
3191
  const onKeypress = (_str, key) => {
3103
3192
  if (key.ctrl === true && key.name === "c") {
@@ -3153,7 +3242,7 @@ var checkForCliUpdates = async (input) => {
3153
3242
  if (comparison === null || comparison <= 0) {
3154
3243
  return;
3155
3244
  }
3156
- const choice = await promptInstall(latestVersion, input.currentVersion);
3245
+ const choice = await promptInstall(input.packageName, latestVersion, input.currentVersion);
3157
3246
  if (choice === "interrupt") {
3158
3247
  process.exit(130);
3159
3248
  }
@@ -3176,7 +3265,7 @@ var checkForCliUpdates = async (input) => {
3176
3265
  };
3177
3266
 
3178
3267
  // src/application/run-analyze-command.ts
3179
- import { resolve as resolve3 } from "path";
3268
+ import { resolve as resolve4 } from "path";
3180
3269
 
3181
3270
  // ../code-graph/dist/index.js
3182
3271
  import { extname, isAbsolute, relative, resolve as resolve2 } from "path";
@@ -4224,6 +4313,1108 @@ var analyzeRepositoryEvolutionFromGit = (input, onProgress) => {
4224
4313
  return analyzeRepositoryEvolution(input, historyProvider, onProgress);
4225
4314
  };
4226
4315
 
4316
+ // ../quality-signals/dist/index.js
4317
+ import { readFile as readFile2 } from "fs/promises";
4318
+ import { existsSync as existsSync2 } from "fs";
4319
+ import { join as join4, relative as relative2, resolve as resolve3 } from "path";
4320
+ import { ESLint } from "eslint";
4321
+ import * as ts2 from "typescript";
4322
+ import * as ts3 from "typescript";
4323
+ var markerRegex = /\b(?:TODO|FIXME)\b/gi;
4324
+ var countMarkers = (text) => text.match(markerRegex)?.length ?? 0;
4325
+ var countTodoFixmeInComments = (content) => {
4326
+ const scanner = ts3.createScanner(
4327
+ ts3.ScriptTarget.Latest,
4328
+ false,
4329
+ ts3.LanguageVariant.Standard,
4330
+ content
4331
+ );
4332
+ let total = 0;
4333
+ let token = scanner.scan();
4334
+ while (token !== ts3.SyntaxKind.EndOfFileToken) {
4335
+ if (token === ts3.SyntaxKind.SingleLineCommentTrivia || token === ts3.SyntaxKind.MultiLineCommentTrivia) {
4336
+ total += countMarkers(scanner.getTokenText());
4337
+ }
4338
+ token = scanner.scan();
4339
+ }
4340
+ return total;
4341
+ };
4342
+ var SOURCE_EXTENSIONS2 = /* @__PURE__ */ new Set([".ts", ".tsx", ".mts", ".cts", ".js", ".jsx", ".mjs", ".cjs"]);
4343
+ var normalizePath2 = (value) => value.replaceAll("\\", "/");
4344
+ var isTestPath = (path) => {
4345
+ const normalized = normalizePath2(path);
4346
+ return normalized.includes("/__tests__/") || normalized.includes("\\__tests__\\") || normalized.includes(".test.") || normalized.includes(".spec.");
4347
+ };
4348
+ var collectTodoFixmeCommentCount = async (targetPath, structural) => {
4349
+ const filePaths2 = [...structural.files].map((file) => file.relativePath).sort((a, b) => a.localeCompare(b));
4350
+ let total = 0;
4351
+ for (const relativePath of filePaths2) {
4352
+ try {
4353
+ const content = await readFile2(join4(targetPath, relativePath), "utf8");
4354
+ total += countTodoFixmeInComments(content);
4355
+ } catch {
4356
+ }
4357
+ }
4358
+ return total;
4359
+ };
4360
+ var collectEslintSignals = async (targetPath, structural, logger) => {
4361
+ const absoluteFiles = structural.files.map((file) => join4(targetPath, file.relativePath));
4362
+ if (absoluteFiles.length === 0) {
4363
+ return {
4364
+ errorCount: 0,
4365
+ warningCount: 0,
4366
+ filesWithIssues: 0,
4367
+ ruleCounts: []
4368
+ };
4369
+ }
4370
+ try {
4371
+ const eslint = new ESLint({ cwd: targetPath, errorOnUnmatchedPattern: false });
4372
+ const results = await eslint.lintFiles(absoluteFiles);
4373
+ let errorCount = 0;
4374
+ let warningCount = 0;
4375
+ let filesWithIssues = 0;
4376
+ const ruleCounts = /* @__PURE__ */ new Map();
4377
+ for (const result of results) {
4378
+ if (result.errorCount + result.warningCount > 0) {
4379
+ filesWithIssues += 1;
4380
+ }
4381
+ errorCount += result.errorCount;
4382
+ warningCount += result.warningCount;
4383
+ for (const message of result.messages) {
4384
+ if (message.ruleId === null) {
4385
+ continue;
4386
+ }
4387
+ const severity = message.severity >= 2 ? "error" : "warn";
4388
+ const current = ruleCounts.get(message.ruleId);
4389
+ if (current === void 0) {
4390
+ ruleCounts.set(message.ruleId, {
4391
+ ruleId: message.ruleId,
4392
+ severity,
4393
+ count: 1
4394
+ });
4395
+ } else {
4396
+ ruleCounts.set(message.ruleId, {
4397
+ ruleId: current.ruleId,
4398
+ severity: current.severity === "error" || severity === "error" ? "error" : "warn",
4399
+ count: current.count + 1
4400
+ });
4401
+ }
4402
+ }
4403
+ }
4404
+ return {
4405
+ errorCount,
4406
+ warningCount,
4407
+ filesWithIssues,
4408
+ ruleCounts: [...ruleCounts.values()].sort(
4409
+ (a, b) => b.count - a.count || a.ruleId.localeCompare(b.ruleId)
4410
+ )
4411
+ };
4412
+ } catch (error) {
4413
+ logger.warn(
4414
+ `quality signals: eslint collection unavailable (${error instanceof Error ? error.message : "unknown error"})`
4415
+ );
4416
+ return void 0;
4417
+ }
4418
+ };
4419
+ var collectTypeScriptSignals = (targetPath, logger) => {
4420
+ const tsconfigPath = ts2.findConfigFile(targetPath, ts2.sys.fileExists, "tsconfig.json");
4421
+ if (tsconfigPath === void 0) {
4422
+ return void 0;
4423
+ }
4424
+ try {
4425
+ const parsed = ts2.getParsedCommandLineOfConfigFile(
4426
+ tsconfigPath,
4427
+ {},
4428
+ {
4429
+ ...ts2.sys,
4430
+ onUnRecoverableConfigFileDiagnostic: () => {
4431
+ throw new Error(`failed to parse ${tsconfigPath}`);
4432
+ }
4433
+ }
4434
+ );
4435
+ if (parsed === void 0) {
4436
+ return void 0;
4437
+ }
4438
+ const program2 = ts2.createProgram({ rootNames: parsed.fileNames, options: parsed.options });
4439
+ const diagnostics = [
4440
+ ...program2.getOptionsDiagnostics(),
4441
+ ...program2.getGlobalDiagnostics(),
4442
+ ...program2.getSyntacticDiagnostics(),
4443
+ ...program2.getSemanticDiagnostics()
4444
+ ];
4445
+ let errorCount = 0;
4446
+ let warningCount = 0;
4447
+ const fileSet = /* @__PURE__ */ new Set();
4448
+ for (const diagnostic of diagnostics) {
4449
+ if (diagnostic.category === ts2.DiagnosticCategory.Error) {
4450
+ errorCount += 1;
4451
+ } else if (diagnostic.category === ts2.DiagnosticCategory.Warning) {
4452
+ warningCount += 1;
4453
+ }
4454
+ if (diagnostic.file !== void 0) {
4455
+ const path = normalizePath2(relative2(targetPath, diagnostic.file.fileName));
4456
+ fileSet.add(path);
4457
+ }
4458
+ }
4459
+ return {
4460
+ errorCount,
4461
+ warningCount,
4462
+ filesWithDiagnostics: fileSet.size
4463
+ };
4464
+ } catch (error) {
4465
+ logger.warn(
4466
+ `quality signals: typescript diagnostic collection unavailable (${error instanceof Error ? error.message : "unknown error"})`
4467
+ );
4468
+ return void 0;
4469
+ }
4470
+ };
4471
+ var cyclomaticIncrement = (node) => {
4472
+ if (ts2.isIfStatement(node) || ts2.isForStatement(node) || ts2.isForInStatement(node) || ts2.isForOfStatement(node) || ts2.isWhileStatement(node) || ts2.isDoStatement(node) || ts2.isCatchClause(node) || ts2.isConditionalExpression(node)) {
4473
+ return 1;
4474
+ }
4475
+ if (ts2.isCaseClause(node)) {
4476
+ return 1;
4477
+ }
4478
+ if (ts2.isBinaryExpression(node)) {
4479
+ if (node.operatorToken.kind === ts2.SyntaxKind.AmpersandAmpersandToken || node.operatorToken.kind === ts2.SyntaxKind.BarBarToken || node.operatorToken.kind === ts2.SyntaxKind.QuestionQuestionToken) {
4480
+ return 1;
4481
+ }
4482
+ }
4483
+ return 0;
4484
+ };
4485
+ var computeCyclomaticComplexity = (node) => {
4486
+ let complexity = 1;
4487
+ const visit = (current) => {
4488
+ complexity += cyclomaticIncrement(current);
4489
+ if (current !== node && (ts2.isFunctionLike(current) || ts2.isArrowFunction(current) || ts2.isMethodDeclaration(current) || ts2.isConstructorDeclaration(current))) {
4490
+ return;
4491
+ }
4492
+ ts2.forEachChild(current, visit);
4493
+ };
4494
+ visit(node);
4495
+ return complexity;
4496
+ };
4497
+ var collectFunctionComplexities = (content, fileName) => {
4498
+ const sourceFile = ts2.createSourceFile(fileName, content, ts2.ScriptTarget.Latest, true);
4499
+ const complexities = [];
4500
+ const visit = (node) => {
4501
+ if (ts2.isFunctionDeclaration(node) || ts2.isMethodDeclaration(node) || ts2.isFunctionExpression(node) || ts2.isArrowFunction(node) || ts2.isConstructorDeclaration(node) || ts2.isGetAccessorDeclaration(node) || ts2.isSetAccessorDeclaration(node)) {
4502
+ complexities.push(computeCyclomaticComplexity(node));
4503
+ }
4504
+ ts2.forEachChild(node, visit);
4505
+ };
4506
+ visit(sourceFile);
4507
+ if (complexities.length === 0) {
4508
+ return [computeCyclomaticComplexity(sourceFile)];
4509
+ }
4510
+ return complexities;
4511
+ };
4512
+ var collectComplexitySignals = async (targetPath, structural) => {
4513
+ const complexities = [];
4514
+ for (const file of structural.files) {
4515
+ const extension = file.relativePath.slice(file.relativePath.lastIndexOf("."));
4516
+ if (!SOURCE_EXTENSIONS2.has(extension)) {
4517
+ continue;
4518
+ }
4519
+ try {
4520
+ const content = await readFile2(join4(targetPath, file.relativePath), "utf8");
4521
+ complexities.push(...collectFunctionComplexities(content, file.relativePath));
4522
+ } catch {
4523
+ }
4524
+ }
4525
+ if (complexities.length === 0) {
4526
+ return void 0;
4527
+ }
4528
+ const averageCyclomatic = complexities.reduce((sum, value) => sum + value, 0) / complexities.length;
4529
+ const maxCyclomatic = Math.max(...complexities);
4530
+ const highComplexityFileCount = complexities.filter((value) => value >= 15).length;
4531
+ return {
4532
+ averageCyclomatic,
4533
+ maxCyclomatic,
4534
+ highComplexityFileCount,
4535
+ analyzedFileCount: complexities.length
4536
+ };
4537
+ };
4538
+ var DUPLICATION_MIN_BLOCK_TOKENS = 40;
4539
+ var DUPLICATION_KGRAM_TOKENS = 25;
4540
+ var DUPLICATION_WINDOW_SIZE = 4;
4541
+ var DUPLICATION_MAX_FILES = 5e3;
4542
+ var DUPLICATION_MAX_TOKENS_PER_FILE = 12e3;
4543
+ var DUPLICATION_MAX_FINGERPRINTS_PER_FILE = 1200;
4544
+ var DUPLICATION_EXACT_MAX_WINDOWS = 25e4;
4545
+ var HASH_BASE = 16777619;
4546
+ var hashString32 = (value) => {
4547
+ let hash = 2166136261;
4548
+ for (let index = 0; index < value.length; index += 1) {
4549
+ hash ^= value.charCodeAt(index);
4550
+ hash = Math.imul(hash, 16777619) >>> 0;
4551
+ }
4552
+ return hash >>> 0;
4553
+ };
4554
+ var computeRollingBasePower = (kgramSize) => {
4555
+ let value = 1;
4556
+ for (let index = 1; index < kgramSize; index += 1) {
4557
+ value = Math.imul(value, HASH_BASE) >>> 0;
4558
+ }
4559
+ return value;
4560
+ };
4561
+ var tokenizeForDuplication = (content, filePath) => {
4562
+ const languageVariant = filePath.endsWith(".tsx") || filePath.endsWith(".jsx") ? ts2.LanguageVariant.JSX : ts2.LanguageVariant.Standard;
4563
+ const scanner = ts2.createScanner(ts2.ScriptTarget.Latest, true, languageVariant, content);
4564
+ const tokens = [];
4565
+ let token = scanner.scan();
4566
+ while (token !== ts2.SyntaxKind.EndOfFileToken) {
4567
+ if (token !== ts2.SyntaxKind.WhitespaceTrivia && token !== ts2.SyntaxKind.NewLineTrivia && token !== ts2.SyntaxKind.SingleLineCommentTrivia && token !== ts2.SyntaxKind.MultiLineCommentTrivia) {
4568
+ if (token === ts2.SyntaxKind.Identifier || token === ts2.SyntaxKind.PrivateIdentifier) {
4569
+ tokens.push("id");
4570
+ } else if (token === ts2.SyntaxKind.StringLiteral || token === ts2.SyntaxKind.NoSubstitutionTemplateLiteral || token === ts2.SyntaxKind.TemplateHead || token === ts2.SyntaxKind.TemplateMiddle || token === ts2.SyntaxKind.TemplateTail || token === ts2.SyntaxKind.NumericLiteral || token === ts2.SyntaxKind.BigIntLiteral || token === ts2.SyntaxKind.RegularExpressionLiteral) {
4571
+ tokens.push("lit");
4572
+ } else {
4573
+ const stable = ts2.tokenToString(token) ?? ts2.SyntaxKind[token] ?? `${token}`;
4574
+ tokens.push(stable);
4575
+ }
4576
+ }
4577
+ token = scanner.scan();
4578
+ }
4579
+ return tokens;
4580
+ };
4581
+ var buildKgramHashes = (tokenValues, kgramSize) => {
4582
+ if (tokenValues.length < kgramSize) {
4583
+ return [];
4584
+ }
4585
+ const fingerprints = [];
4586
+ const removePower = computeRollingBasePower(kgramSize);
4587
+ let hash = 0;
4588
+ for (let index = 0; index < kgramSize; index += 1) {
4589
+ hash = Math.imul(hash, HASH_BASE) + (tokenValues[index] ?? 0) >>> 0;
4590
+ }
4591
+ fingerprints.push({ hash, start: 0 });
4592
+ for (let start = 1; start <= tokenValues.length - kgramSize; start += 1) {
4593
+ const removed = tokenValues[start - 1] ?? 0;
4594
+ const added = tokenValues[start + kgramSize - 1] ?? 0;
4595
+ const removedContribution = Math.imul(removed, removePower) >>> 0;
4596
+ const shifted = Math.imul(hash - removedContribution >>> 0, HASH_BASE) >>> 0;
4597
+ hash = shifted + added >>> 0;
4598
+ fingerprints.push({ hash, start });
4599
+ }
4600
+ return fingerprints;
4601
+ };
4602
+ var winnowFingerprints = (kgrams, windowSize) => {
4603
+ if (kgrams.length === 0) {
4604
+ return [];
4605
+ }
4606
+ if (kgrams.length <= windowSize) {
4607
+ const minimum = [...kgrams].sort(
4608
+ (left, right) => left.hash - right.hash || right.start - left.start
4609
+ )[0];
4610
+ return minimum === void 0 ? [] : [minimum];
4611
+ }
4612
+ const selected = /* @__PURE__ */ new Map();
4613
+ for (let start = 0; start <= kgrams.length - windowSize; start += 1) {
4614
+ let best = kgrams[start];
4615
+ if (best === void 0) {
4616
+ continue;
4617
+ }
4618
+ for (let offset = 1; offset < windowSize; offset += 1) {
4619
+ const candidate = kgrams[start + offset];
4620
+ if (candidate === void 0) {
4621
+ continue;
4622
+ }
4623
+ if (candidate.hash < best.hash || candidate.hash === best.hash && candidate.start > best.start) {
4624
+ best = candidate;
4625
+ }
4626
+ }
4627
+ selected.set(`${best.hash}:${best.start}`, best);
4628
+ }
4629
+ return [...selected.values()].sort((left, right) => left.start - right.start);
4630
+ };
4631
+ var capFingerprints = (fingerprints, maxFingerprints) => {
4632
+ if (fingerprints.length <= maxFingerprints) {
4633
+ return fingerprints;
4634
+ }
4635
+ const step = fingerprints.length / maxFingerprints;
4636
+ const capped = [];
4637
+ for (let index = 0; index < maxFingerprints; index += 1) {
4638
+ const selected = fingerprints[Math.floor(index * step)];
4639
+ if (selected !== void 0) {
4640
+ capped.push(selected);
4641
+ }
4642
+ }
4643
+ return capped;
4644
+ };
4645
+ var tokenBlockSignature = (tokens, start, blockLength) => {
4646
+ if (start < 0 || start + blockLength > tokens.length) {
4647
+ return void 0;
4648
+ }
4649
+ return tokens.slice(start, start + blockLength).join(" ");
4650
+ };
4651
+ var mergeTokenRanges = (ranges) => {
4652
+ if (ranges.length === 0) {
4653
+ return [];
4654
+ }
4655
+ const sorted = [...ranges].sort(
4656
+ (left, right) => left.start - right.start || left.end - right.end
4657
+ );
4658
+ const merged = [];
4659
+ for (const range of sorted) {
4660
+ const previous = merged[merged.length - 1];
4661
+ if (previous === void 0 || range.start > previous.end) {
4662
+ merged.push({ ...range });
4663
+ continue;
4664
+ }
4665
+ previous.end = Math.max(previous.end, range.end);
4666
+ }
4667
+ return merged;
4668
+ };
4669
// Aggregates raw duplicate-block observations into repository-level duplication
// metrics. `signatures` maps a block signature to the list of occurrences
// ({ file, start token offset }); `fileByPath` resolves a relative path back to
// its analyzed file record (with tokens). Returns the number of duplicated
// blocks, the merged duplicated token count, and how many files are affected.
var aggregateDuplicationFromSignatures = (signatures, fileByPath) => {
  let duplicatedBlockCount = 0;
  // file path -> token ranges considered duplicated within that file
  const duplicatedRanges = /* @__PURE__ */ new Map();
  for (const entries of signatures.values()) {
    // A signature seen only once is not a duplicate.
    if (entries.length <= 1) {
      continue;
    }
    // Collapse entries that point at the same file offset (the same window can
    // be recorded more than once upstream).
    const uniqueEntries = /* @__PURE__ */ new Map();
    for (const entry of entries) {
      uniqueEntries.set(`${entry.file}:${entry.start}`, entry);
    }
    if (uniqueEntries.size <= 1) {
      continue;
    }
    // N occurrences of one block count as N-1 duplicated blocks.
    duplicatedBlockCount += uniqueEntries.size - 1;
    for (const entry of uniqueEntries.values()) {
      const source = fileByPath.get(entry.file);
      if (source === void 0) {
        continue;
      }
      // Re-derive the signature to confirm a full block still fits at this
      // offset; skip truncated tails near end-of-file.
      const signature = tokenBlockSignature(
        source.tokens,
        entry.start,
        DUPLICATION_MIN_BLOCK_TOKENS
      );
      if (signature === void 0) {
        continue;
      }
      const ranges = duplicatedRanges.get(entry.file) ?? [];
      ranges.push({
        start: entry.start,
        end: Math.min(source.tokens.length, entry.start + DUPLICATION_MIN_BLOCK_TOKENS)
      });
      duplicatedRanges.set(entry.file, ranges);
    }
  }
  // Merge overlapping ranges per file so overlapping windows are not counted twice.
  let duplicatedTokenCount = 0;
  for (const ranges of duplicatedRanges.values()) {
    const mergedRanges = mergeTokenRanges(ranges);
    duplicatedTokenCount += mergedRanges.reduce((sum, range) => sum + (range.end - range.start), 0);
  }
  return {
    duplicatedBlockCount,
    duplicatedTokenCount,
    filesWithDuplication: duplicatedRanges.size
  };
};
4716
// Exhaustive duplication detector: signatures every token window of length
// DUPLICATION_MIN_BLOCK_TOKENS in every file and groups identical signatures
// across files. Precise but proportional to the total window count, so it is
// only used when that count is small (see collectDuplicationSignals).
var collectExactTokenDuplication = (analyzedFiles) => {
  // signature -> occurrences ({ file, start })
  const signatures = /* @__PURE__ */ new Map();
  for (const file of analyzedFiles) {
    const tokenValues = file.tokens.map((token) => hashString32(token));
    const windows = buildKgramHashes(tokenValues, DUPLICATION_MIN_BLOCK_TOKENS);
    for (const window of windows) {
      // undefined means no full block fits at this offset.
      const signature = tokenBlockSignature(
        file.tokens,
        window.start,
        DUPLICATION_MIN_BLOCK_TOKENS
      );
      if (signature === void 0) {
        continue;
      }
      const entries = signatures.get(signature) ?? [];
      entries.push({ file: file.file, start: window.start });
      signatures.set(signature, entries);
    }
  }
  const fileByPath = new Map(analyzedFiles.map((file) => [file.file, file]));
  return aggregateDuplicationFromSignatures(signatures, fileByPath);
};
4738
// Sampled duplication detector: instead of signaturing every window, it
// winnows k-gram hashes down to a capped set of fingerprints per file and
// signatures blocks only at fingerprint positions. Cheaper than the exact
// mode at the cost of possibly missing some duplicates.
var collectWinnowingDuplication = (analyzedFiles) => {
  // signature -> occurrences ({ file, start })
  const signatures = /* @__PURE__ */ new Map();
  for (const file of analyzedFiles) {
    const tokenValues = file.tokens.map((token) => hashString32(token));
    const kgrams = buildKgramHashes(tokenValues, DUPLICATION_KGRAM_TOKENS);
    // Winnow then cap so pathological files cannot dominate the signature map.
    const fingerprints = capFingerprints(
      winnowFingerprints(kgrams, DUPLICATION_WINDOW_SIZE),
      DUPLICATION_MAX_FINGERPRINTS_PER_FILE
    );
    for (const fingerprint of fingerprints) {
      const signature = tokenBlockSignature(
        file.tokens,
        fingerprint.start,
        DUPLICATION_MIN_BLOCK_TOKENS
      );
      if (signature === void 0) {
        continue;
      }
      const entries = signatures.get(signature) ?? [];
      entries.push({ file: file.file, start: fingerprint.start });
      signatures.set(signature, entries);
    }
  }
  const fileByPath = new Map(analyzedFiles.map((file) => [file.file, file]));
  return aggregateDuplicationFromSignatures(signatures, fileByPath);
};
4764
// Computes repository duplication signals. Selects eligible files (sorted for
// determinism, known source extensions only, tests excluded, capped at
// DUPLICATION_MAX_FILES), tokenizes each, then chooses a detection mode:
// exhaustive exact-token matching when the total window count is small enough,
// otherwise sampled winnowing. Returns undefined when no file yields enough
// tokens to analyze.
var collectDuplicationSignals = async (targetPath, structural) => {
  const files = [...structural.files].map((file) => file.relativePath).sort((left, right) => left.localeCompare(right)).filter((filePath) => SOURCE_EXTENSIONS2.has(filePath.slice(filePath.lastIndexOf(".")))).filter((filePath) => isTestPath(filePath) === false).slice(0, DUPLICATION_MAX_FILES);
  const analyzedFiles = [];
  let significantTokenCount = 0;
  let exactWindowCount = 0;
  for (const relativePath of files) {
    try {
      const content = await readFile2(join4(targetPath, relativePath), "utf8");
      // Cap tokens per file to bound memory and hashing cost.
      const tokens = tokenizeForDuplication(content, relativePath).slice(
        0,
        DUPLICATION_MAX_TOKENS_PER_FILE
      );
      significantTokenCount += tokens.length;
      // Files too small to hold even one block still count toward the token
      // denominator but are not analyzed.
      if (tokens.length < DUPLICATION_MIN_BLOCK_TOKENS) {
        continue;
      }
      exactWindowCount += tokens.length - DUPLICATION_MIN_BLOCK_TOKENS + 1;
      analyzedFiles.push({
        file: relativePath,
        tokens
      });
    } catch {
      // Best-effort signal: unreadable files are silently skipped.
    }
  }
  if (analyzedFiles.length === 0) {
    return void 0;
  }
  const mode = exactWindowCount <= DUPLICATION_EXACT_MAX_WINDOWS ? "exact-token" : "winnowing";
  const aggregated = mode === "exact-token" ? collectExactTokenDuplication(analyzedFiles) : collectWinnowingDuplication(analyzedFiles);
  // Duplicated tokens over all significant tokens, clamped to [0, 1].
  const duplicatedLineRatio = significantTokenCount === 0 ? 0 : Math.min(1, aggregated.duplicatedTokenCount / significantTokenCount);
  return {
    mode,
    duplicatedLineRatio,
    duplicatedBlockCount: aggregated.duplicatedBlockCount,
    filesWithDuplication: aggregated.filesWithDuplication
  };
};
4801
// Converts a coverage percentage (0-100) into a ratio clamped to [0, 1].
// Non-numeric or non-finite input yields null so callers can distinguish
// "no data" from a genuine 0% value.
var toRatio = (value) => {
  const isUsableNumber = typeof value === "number" && Number.isFinite(value);
  if (!isUsableNumber) {
    return null;
  }
  const ratio = value / 100;
  if (ratio < 0) {
    return 0;
  }
  return ratio > 1 ? 1 : ratio;
};
4807
// Reads an istanbul-style coverage summary JSON and converts the "total"
// percentages into [0, 1] ratios. The path defaults to
// <target>/coverage/coverage-summary.json and can be overridden via the
// CODESENTINEL_QUALITY_COVERAGE_SUMMARY env var (resolved against targetPath).
// Returns undefined when the file is absent; warns and returns undefined on
// read/parse failure.
var collectCoverageSignals = async (targetPath, logger) => {
  const configuredPath = process.env["CODESENTINEL_QUALITY_COVERAGE_SUMMARY"];
  // A blank override falls back to the default location.
  const summaryPath = configuredPath === void 0 || configuredPath.trim().length === 0 ? join4(targetPath, "coverage", "coverage-summary.json") : resolve3(targetPath, configuredPath);
  if (!existsSync2(summaryPath)) {
    return void 0;
  }
  try {
    const raw = await readFile2(summaryPath, "utf8");
    const parsed = JSON.parse(raw);
    // Missing sections become null via toRatio's optional-chained input.
    return {
      lineCoverage: toRatio(parsed.total?.lines?.pct),
      branchCoverage: toRatio(parsed.total?.branches?.pct),
      functionCoverage: toRatio(parsed.total?.functions?.pct),
      statementCoverage: toRatio(parsed.total?.statements?.pct)
    };
  } catch (error) {
    logger.warn(
      `quality signals: coverage summary parse failed at ${summaryPath} (${error instanceof Error ? error.message : "unknown error"})`
    );
    return void 0;
  }
};
4829
// Gathers all optional quality signal sources concurrently. Each collector
// may return undefined; absent signals are omitted from the result object
// entirely (not set to undefined) so downstream `?.` access stays clean.
var collectQualitySignals = async (targetPath, structural, logger) => {
  const [todoFixmeCommentCount, eslint, complexity, duplication, coverage] = await Promise.all([
    collectTodoFixmeCommentCount(targetPath, structural),
    collectEslintSignals(targetPath, structural, logger),
    collectComplexitySignals(targetPath, structural),
    collectDuplicationSignals(targetPath, structural),
    collectCoverageSignals(targetPath, logger)
  ]);
  // TypeScript diagnostics are collected synchronously, outside Promise.all.
  const typescript = collectTypeScriptSignals(targetPath, logger);
  return {
    todoFixmeCommentCount,
    ...eslint === void 0 ? {} : { eslint },
    ...typescript === void 0 ? {} : { typescript },
    ...complexity === void 0 ? {} : { complexity },
    ...duplication === void 0 ? {} : { duplication },
    ...coverage === void 0 ? {} : { coverage }
  };
};
4847
+
4848
+ // ../quality-engine/dist/index.js
4849
// Clamps a number into the closed interval [0, 1].
// NaN and +/-Infinity collapse to 0.
var clamp01 = (value) => {
  if (!Number.isFinite(value)) {
    return 0;
  }
  return Math.min(1, Math.max(0, value));
};
4861
+ var round45 = (value) => Number(value.toFixed(4));
4862
// Arithmetic mean of a list of numbers; an empty list averages to 0
// instead of NaN.
var average = (values) => {
  if (values.length === 0) {
    return 0;
  }
  let total = 0;
  for (const value of values) {
    total += value;
  }
  return total / values.length;
};
4869
// Measures how concentrated a distribution of positive values is, using a
// normalized Herfindahl-Hirschman index: 0 = mass evenly spread, 1 = all
// mass in a single value. Zero and negative entries are ignored; fewer than
// two positive entries yields 0.
var concentration = (rawValues) => {
  const values = rawValues.filter((value) => value > 0);
  const count = values.length;
  if (count <= 1) {
    return 0;
  }
  let total = 0;
  for (const value of values) {
    total += value;
  }
  if (total <= 0) {
    return 0;
  }
  // HHI: sum of squared shares.
  let hhi = 0;
  for (const value of values) {
    const share = value / total;
    hhi += share * share;
  }
  // Rescale so a perfectly even spread maps to 0 and a single dominant
  // value maps to 1.
  const minHhi = 1 / count;
  return clamp01((hhi - minHhi) / (1 - minHhi));
};
4887
// Relative weight of each quality dimension in the overall score
// (weights sum to 1; see computeRepositoryQualitySummary).
var DIMENSION_WEIGHTS = {
  modularity: 0.2,
  changeHygiene: 0.2,
  staticAnalysis: 0.2,
  complexity: 0.15,
  duplication: 0.1,
  testHealth: 0.15
};
// Schema version stamped onto emitted quality trace payloads.
var QUALITY_TRACE_VERSION = "1";
4896
+ var toPercentage = (normalizedQuality) => round45(clamp01(normalizedQuality) * 100);
4897
// Log-compressed normalization: maps a non-negative value into [0, 1],
// saturating at `scale`. Values below 0 are treated as 0; a non-positive
// scale yields 0.
var logScaled = (value, scale) => {
  if (scale <= 0) {
    return 0;
  }
  const compressed = Math.log1p(Math.max(0, value));
  return clamp01(compressed / Math.log1p(scale));
};
4903
// Shapes a factor spec into the trace record: numeric fields rounded, and
// the contribution expressed on the 0-100 score scale (penalty * weight * 100).
var toFactorTrace = (spec) => {
  const { factorId, penalty, weight, rawMetrics, normalizedMetrics, evidence } = spec;
  return {
    factorId,
    contribution: round45(penalty * weight * 100),
    penalty: round45(penalty),
    rawMetrics,
    normalizedMetrics,
    weight: round45(weight),
    evidence
  };
};
4912
// Builds one per-dimension trace entry: the clamped 0-1 score, its 0-100
// percentage form, and the trace of every contributing factor.
var createDimensionTrace = (dimension, quality, factors) => {
  const factorTraces = [];
  for (const factor of factors) {
    factorTraces.push(toFactorTrace(factor));
  }
  return {
    dimension,
    normalizedScore: round45(clamp01(quality)),
    score: toPercentage(quality),
    factors: factorTraces
  };
};
4918
+ var filePaths = (structural) => structural.files.map((file) => file.relativePath);
4919
// Heuristic test-file detector: a path counts as test code when it lives
// under a __tests__ directory (either path-separator style) or carries a
// ".test." / ".spec." infix. Matching is case-insensitive.
var isTestPath2 = (path) => {
  const normalized = path.toLowerCase();
  const markers = ["/__tests__/", "\\__tests__\\", ".test.", ".spec."];
  return markers.some((marker) => normalized.includes(marker));
};
4923
// A path counts as source when it is neither a TypeScript declaration file
// nor a test file.
var isSourcePath = (path) => {
  const isDeclarationFile = path.endsWith(".d.ts");
  return !isDeclarationFile && !isTestPath2(path);
};
4929
// Appends an issue to the accumulator, defaulting severity to "warn" when
// the caller does not specify one.
var pushIssue = (issues, issue) => {
  const severity = issue.severity ?? "warn";
  issues.push({ ...issue, severity });
};
4935
// Computes the repository-level quality summary. For each of six dimensions
// (modularity, changeHygiene, staticAnalysis, complexity, duplication,
// testHealth) it derives a penalty in [0, 1] from weighted factors, converts
// it to a 0-100 quality score (higher is better), collects human-readable
// issues, and emits a factor-level trace for explainability. The overall
// score is the DIMENSION_WEIGHTS-weighted blend of the dimension qualities.
// Missing optional signals degrade gracefully via `?? 0` / fixed fallback
// penalties rather than failing.
var computeRepositoryQualitySummary = (input) => {
  const issues = [];
  const sourceFileSet = new Set(input.structural.files.map((file) => file.relativePath));
  const signals = input.signals;
  // --- Modularity: structural cycles and fan-in/fan-out concentration ---
  const cycleCount = input.structural.metrics.cycleCount;
  const cycleSizeAverage = input.structural.cycles.length === 0 ? 0 : average(input.structural.cycles.map((cycle) => cycle.nodes.length));
  // Cycle count saturates at 6, cycle size at 10 nodes; blended 70/30.
  const cyclePenalty = clamp01(cycleCount / 6) * 0.7 + clamp01((cycleSizeAverage - 2) / 8) * 0.3;
  const fanInConcentration = concentration(input.structural.files.map((file) => file.fanIn));
  const fanOutConcentration = concentration(input.structural.files.map((file) => file.fanOut));
  const centralityConcentration = average([fanInConcentration, fanOutConcentration]);
  if (cycleCount > 0) {
    pushIssue(issues, {
      id: "quality.modularity.structural_cycles",
      ruleId: "graph.structural_cycles",
      dimension: "modularity",
      // Target the first cycle's nodes, sorted for deterministic output.
      target: input.structural.cycles[0]?.nodes.slice().sort((a, b) => a.localeCompare(b)).join(" -> ") ?? input.structural.targetPath,
      message: `${cycleCount} structural cycle(s) increase coupling and refactor cost.`,
      severity: cycleCount >= 3 ? "error" : "warn",
      impact: round45(cyclePenalty * 0.55)
    });
  }
  if (centralityConcentration >= 0.5) {
    // File with the highest combined fan-in + fan-out pressure.
    const hottest = [...input.structural.files].map((file) => ({
      path: file.relativePath,
      pressure: file.fanIn + file.fanOut
    })).sort((a, b) => b.pressure - a.pressure || a.path.localeCompare(b.path))[0];
    pushIssue(issues, {
      id: "quality.modularity.centrality_concentration",
      ruleId: "graph.centrality_concentration",
      dimension: "modularity",
      target: hottest?.path ?? input.structural.targetPath,
      message: "Fan-in/fan-out pressure is concentrated in a small set of files.",
      impact: round45(centralityConcentration * 0.45)
    });
  }
  const modularityFactors = [
    {
      factorId: "quality.modularity.structural_cycles",
      penalty: cyclePenalty,
      rawMetrics: {
        cycleCount,
        averageCycleSize: round45(cycleSizeAverage)
      },
      normalizedMetrics: {
        cyclePenalty: round45(cyclePenalty)
      },
      weight: 0.55,
      evidence: [{ kind: "repository_metric", metric: "structural.cycles" }]
    },
    {
      factorId: "quality.modularity.centrality_concentration",
      penalty: centralityConcentration,
      rawMetrics: {
        fanInConcentration: round45(fanInConcentration),
        fanOutConcentration: round45(fanOutConcentration)
      },
      normalizedMetrics: {
        centralityConcentration: round45(centralityConcentration)
      },
      weight: 0.45,
      evidence: [{ kind: "repository_metric", metric: "structural.files.fanIn/fanOut" }]
    }
  ];
  const modularityPenalty = clamp01(
    modularityFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
  );
  // --- Change hygiene: churn/volatility concentration, co-change coupling,
  // and TODO/FIXME load. Evolution-derived metrics stay 0 when git history
  // is unavailable. ---
  let churnConcentration = 0;
  let volatilityConcentration = 0;
  let couplingDensity = 0;
  let couplingIntensity = 0;
  if (input.evolution.available) {
    // Only consider evolution entries that map onto known structural files.
    const evolutionSourceFiles = input.evolution.files.filter(
      (file) => sourceFileSet.has(file.filePath)
    );
    churnConcentration = concentration(evolutionSourceFiles.map((file) => file.churnTotal));
    volatilityConcentration = concentration(
      evolutionSourceFiles.map((file) => file.recentVolatility)
    );
    const fileCount = Math.max(1, evolutionSourceFiles.length);
    const maxPairs = fileCount * (fileCount - 1) / 2;
    const sourcePairs = input.evolution.coupling.pairs.filter(
      (pair) => sourceFileSet.has(pair.fileA) && sourceFileSet.has(pair.fileB)
    );
    couplingDensity = maxPairs <= 0 ? 0 : clamp01(sourcePairs.length / maxPairs);
    couplingIntensity = average(sourcePairs.map((pair) => pair.couplingScore));
    if (churnConcentration >= 0.45) {
      const mostChurn = [...evolutionSourceFiles].sort(
        (a, b) => b.churnTotal - a.churnTotal || a.filePath.localeCompare(b.filePath)
      )[0];
      pushIssue(issues, {
        id: "quality.change_hygiene.churn_concentration",
        ruleId: "git.churn_concentration",
        dimension: "changeHygiene",
        target: mostChurn?.filePath ?? input.structural.targetPath,
        message: "Churn is concentrated in a narrow part of the codebase.",
        impact: round45(churnConcentration * 0.4)
      });
    }
    if (volatilityConcentration >= 0.45) {
      const volatileFile = [...evolutionSourceFiles].sort(
        (a, b) => b.recentVolatility - a.recentVolatility || a.filePath.localeCompare(b.filePath)
      )[0];
      pushIssue(issues, {
        id: "quality.change_hygiene.volatility_concentration",
        ruleId: "git.volatility_concentration",
        dimension: "changeHygiene",
        target: volatileFile?.filePath ?? input.structural.targetPath,
        message: "Recent volatility is concentrated in files that change frequently.",
        impact: round45(volatilityConcentration * 0.3)
      });
    }
    if (couplingDensity >= 0.35 || couplingIntensity >= 0.45) {
      const strongestPair = [...sourcePairs].sort(
        (a, b) => b.couplingScore - a.couplingScore || `${a.fileA}|${a.fileB}`.localeCompare(`${b.fileA}|${b.fileB}`)
      )[0];
      pushIssue(issues, {
        id: "quality.change_hygiene.coupling_density",
        ruleId: "git.coupling_density",
        dimension: "changeHygiene",
        target: strongestPair === void 0 ? input.structural.targetPath : `${strongestPair.fileA}<->${strongestPair.fileB}`,
        message: "Co-change relationships are dense, increasing coordination overhead.",
        impact: round45(average([couplingDensity, couplingIntensity]) * 0.3)
      });
    }
  }
  const todoFixmeCommentCount = Math.max(0, signals?.todoFixmeCommentCount ?? 0);
  // Log-scaled so the first markers matter most; capped at an 0.08 penalty.
  const todoFixmePenalty = logScaled(todoFixmeCommentCount, 80) * 0.08;
  if (todoFixmeCommentCount > 0) {
    pushIssue(issues, {
      id: "quality.change_hygiene.todo_fixme_load",
      ruleId: "comments.todo_fixme",
      dimension: "changeHygiene",
      target: input.structural.targetPath,
      message: `Found ${todoFixmeCommentCount} TODO/FIXME comment marker(s); cleanup debt is accumulating.`,
      impact: round45(todoFixmePenalty * 0.4)
    });
  }
  const changeHygieneFactors = [
    {
      factorId: "quality.change_hygiene.churn_concentration",
      penalty: churnConcentration,
      rawMetrics: {
        churnConcentration: round45(churnConcentration)
      },
      normalizedMetrics: {
        churnConcentration: round45(churnConcentration)
      },
      weight: 0.35,
      evidence: [{ kind: "repository_metric", metric: "evolution.churn" }]
    },
    {
      factorId: "quality.change_hygiene.volatility_concentration",
      penalty: volatilityConcentration,
      rawMetrics: {
        volatilityConcentration: round45(volatilityConcentration)
      },
      normalizedMetrics: {
        volatilityConcentration: round45(volatilityConcentration)
      },
      weight: 0.25,
      evidence: [{ kind: "repository_metric", metric: "evolution.recentVolatility" }]
    },
    {
      factorId: "quality.change_hygiene.coupling_density",
      penalty: average([couplingDensity, couplingIntensity]),
      rawMetrics: {
        couplingDensity: round45(couplingDensity),
        couplingIntensity: round45(couplingIntensity)
      },
      normalizedMetrics: {
        couplingPressure: round45(average([couplingDensity, couplingIntensity]))
      },
      weight: 0.3,
      evidence: [{ kind: "repository_metric", metric: "evolution.coupling" }]
    },
    {
      factorId: "quality.change_hygiene.todo_fixme_load",
      penalty: todoFixmePenalty,
      rawMetrics: {
        todoFixmeCommentCount
      },
      normalizedMetrics: {
        todoFixmePenalty: round45(todoFixmePenalty)
      },
      weight: 0.1,
      evidence: [{ kind: "repository_metric", metric: "comments.todo_fixme" }]
    }
  ];
  // Without git history, fall back to a fixed moderate penalty.
  const changeHygienePenalty = input.evolution.available ? clamp01(changeHygieneFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)) : 0.2;
  // --- Static analysis: ESLint + TypeScript diagnostics, rated per file ---
  const eslint = signals?.eslint;
  const tsc = signals?.typescript;
  const sourceCount = Math.max(1, input.structural.files.length);
  const eslintErrorRate = (eslint?.errorCount ?? 0) / sourceCount;
  const eslintWarnRate = (eslint?.warningCount ?? 0) / sourceCount;
  const tsErrorRate = (tsc?.errorCount ?? 0) / sourceCount;
  const tsWarnRate = (tsc?.warningCount ?? 0) / sourceCount;
  const staticAnalysisFactors = [
    {
      factorId: "quality.static_analysis.eslint_errors",
      penalty: clamp01(eslintErrorRate / 0.5),
      rawMetrics: {
        eslintErrorCount: eslint?.errorCount ?? 0,
        eslintFilesWithIssues: eslint?.filesWithIssues ?? 0
      },
      normalizedMetrics: {
        eslintErrorRate: round45(eslintErrorRate)
      },
      weight: 0.5,
      evidence: [{ kind: "repository_metric", metric: "eslint.errorCount" }]
    },
    {
      factorId: "quality.static_analysis.eslint_warnings",
      penalty: clamp01(eslintWarnRate / 1.2),
      rawMetrics: {
        eslintWarningCount: eslint?.warningCount ?? 0
      },
      normalizedMetrics: {
        eslintWarningRate: round45(eslintWarnRate)
      },
      weight: 0.2,
      evidence: [{ kind: "repository_metric", metric: "eslint.warningCount" }]
    },
    {
      factorId: "quality.static_analysis.typescript_errors",
      penalty: clamp01(tsErrorRate / 0.35),
      rawMetrics: {
        typeScriptErrorCount: tsc?.errorCount ?? 0,
        typeScriptFilesWithDiagnostics: tsc?.filesWithDiagnostics ?? 0
      },
      normalizedMetrics: {
        typeScriptErrorRate: round45(tsErrorRate)
      },
      weight: 0.2,
      evidence: [{ kind: "repository_metric", metric: "typescript.errorCount" }]
    },
    {
      factorId: "quality.static_analysis.typescript_warnings",
      penalty: clamp01(tsWarnRate / 0.9),
      rawMetrics: {
        typeScriptWarningCount: tsc?.warningCount ?? 0
      },
      normalizedMetrics: {
        typeScriptWarningRate: round45(tsWarnRate)
      },
      weight: 0.1,
      evidence: [{ kind: "repository_metric", metric: "typescript.warningCount" }]
    }
  ];
  const staticAnalysisPenalty = clamp01(
    staticAnalysisFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
  );
  if ((eslint?.errorCount ?? 0) > 0) {
    // Surface the most frequent rule for actionable messaging.
    const topRule = [...eslint?.ruleCounts ?? []].sort(
      (a, b) => b.count - a.count || a.ruleId.localeCompare(b.ruleId)
    )[0];
    pushIssue(issues, {
      id: "quality.static_analysis.eslint_errors",
      ruleId: topRule?.ruleId ?? "eslint",
      dimension: "staticAnalysis",
      target: input.structural.targetPath,
      message: topRule === void 0 ? `ESLint reported ${eslint?.errorCount ?? 0} error(s).` : `ESLint reported ${eslint?.errorCount ?? 0} error(s); top rule ${topRule.ruleId} (${topRule.count}).`,
      severity: "error",
      impact: round45(staticAnalysisPenalty * 0.5)
    });
  }
  if ((tsc?.errorCount ?? 0) > 0) {
    pushIssue(issues, {
      id: "quality.static_analysis.typescript_errors",
      ruleId: "typescript",
      dimension: "staticAnalysis",
      target: input.structural.targetPath,
      message: `TypeScript reported ${tsc?.errorCount ?? 0} error diagnostic(s).`,
      severity: "error",
      impact: round45(staticAnalysisPenalty * 0.4)
    });
  }
  // --- Complexity: average/max cyclomatic and share of high-complexity files ---
  const complexity = signals?.complexity;
  const avgComplexity = complexity?.averageCyclomatic ?? 0;
  const maxComplexity = complexity?.maxCyclomatic ?? 0;
  const highComplexityRatio = (complexity?.analyzedFileCount ?? 0) === 0 ? 0 : (complexity?.highComplexityFileCount ?? 0) / Math.max(1, complexity?.analyzedFileCount ?? 1);
  const complexityFactors = [
    {
      factorId: "quality.complexity.average_cyclomatic",
      penalty: clamp01(avgComplexity / 16),
      rawMetrics: {
        averageCyclomatic: round45(avgComplexity)
      },
      normalizedMetrics: {
        averageCyclomaticPenalty: round45(clamp01(avgComplexity / 16))
      },
      weight: 0.4,
      evidence: [{ kind: "repository_metric", metric: "complexity.averageCyclomatic" }]
    },
    {
      factorId: "quality.complexity.max_cyclomatic",
      penalty: clamp01(maxComplexity / 35),
      rawMetrics: {
        maxCyclomatic: round45(maxComplexity)
      },
      normalizedMetrics: {
        maxCyclomaticPenalty: round45(clamp01(maxComplexity / 35))
      },
      weight: 0.35,
      evidence: [{ kind: "repository_metric", metric: "complexity.maxCyclomatic" }]
    },
    {
      factorId: "quality.complexity.high_complexity_ratio",
      penalty: clamp01(highComplexityRatio / 0.35),
      rawMetrics: {
        highComplexityFileCount: complexity?.highComplexityFileCount ?? 0,
        analyzedFileCount: complexity?.analyzedFileCount ?? 0
      },
      normalizedMetrics: {
        highComplexityRatio: round45(highComplexityRatio)
      },
      weight: 0.25,
      evidence: [{ kind: "repository_metric", metric: "complexity.highComplexityFileCount" }]
    }
  ];
  const complexityPenalty = clamp01(
    complexityFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
  );
  if (maxComplexity >= 20 || highComplexityRatio >= 0.2) {
    pushIssue(issues, {
      id: "quality.complexity.high_cyclomatic",
      ruleId: "complexity.cyclomatic",
      dimension: "complexity",
      target: input.structural.targetPath,
      message: `Complexity is elevated (avg=${round45(avgComplexity)}, max=${round45(maxComplexity)}).`,
      impact: round45(complexityPenalty * 0.6)
    });
  }
  // --- Duplication: duplicated-line ratio (dominant) plus block count ---
  const duplication = signals?.duplication;
  const duplicatedLineRatio = duplication?.duplicatedLineRatio ?? 0;
  const duplicatedBlockCount = duplication?.duplicatedBlockCount ?? 0;
  const duplicationFactors = [
    {
      factorId: "quality.duplication.line_ratio",
      penalty: clamp01(duplicatedLineRatio / 0.25),
      rawMetrics: {
        duplicatedLineRatio: round45(duplicatedLineRatio)
      },
      normalizedMetrics: {
        duplicatedLineRatioPenalty: round45(clamp01(duplicatedLineRatio / 0.25))
      },
      weight: 0.7,
      evidence: [{ kind: "repository_metric", metric: "duplication.duplicatedLineRatio" }]
    },
    {
      factorId: "quality.duplication.block_count",
      penalty: logScaled(duplicatedBlockCount, 120),
      rawMetrics: {
        duplicatedBlockCount,
        filesWithDuplication: duplication?.filesWithDuplication ?? 0
      },
      normalizedMetrics: {
        duplicatedBlockPenalty: round45(logScaled(duplicatedBlockCount, 120))
      },
      weight: 0.3,
      evidence: [{ kind: "repository_metric", metric: "duplication.duplicatedBlockCount" }]
    }
  ];
  const duplicationPenalty = clamp01(
    duplicationFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
  );
  if (duplicatedLineRatio >= 0.08) {
    pushIssue(issues, {
      id: "quality.duplication.high_duplication",
      ruleId: "duplication.line_ratio",
      dimension: "duplication",
      target: input.structural.targetPath,
      message: `Duplication ratio is high (${toPercentage(duplicatedLineRatio)}%).`,
      impact: round45(duplicationPenalty * 0.6)
    });
  }
  // --- Test health: test-file presence ratio plus coverage summary ---
  const paths = filePaths(input.structural);
  const testFiles = paths.filter((path) => isTestPath2(path)).length;
  const sourceFiles = paths.filter((path) => isSourcePath(path)).length;
  // No source files: treat presence as satisfied (ratio 1, penalty 0).
  const testRatio = sourceFiles <= 0 ? 1 : testFiles / sourceFiles;
  const testPresencePenalty = sourceFiles <= 0 ? 0 : 1 - clamp01(testRatio / 0.35);
  const coverageSignals = signals?.coverage;
  const coverageValues = [
    coverageSignals?.lineCoverage,
    coverageSignals?.branchCoverage,
    coverageSignals?.functionCoverage,
    coverageSignals?.statementCoverage
  ].filter((value) => value !== null && value !== void 0);
  const coverageRatio = coverageValues.length === 0 ? null : average(coverageValues);
  // Unknown coverage gets a fixed moderate penalty instead of the worst case.
  const coveragePenalty = coverageRatio === null ? 0.2 : 1 - clamp01(coverageRatio / 0.8);
  const testHealthFactors = [
    {
      factorId: "quality.test_health.test_presence",
      penalty: testPresencePenalty,
      rawMetrics: {
        sourceFiles,
        testFiles,
        testRatio: round45(testRatio)
      },
      normalizedMetrics: {
        testPresencePenalty: round45(testPresencePenalty)
      },
      weight: 0.55,
      evidence: [{ kind: "repository_metric", metric: "tests.file_ratio" }]
    },
    {
      factorId: "quality.test_health.coverage",
      penalty: coveragePenalty,
      rawMetrics: {
        lineCoverage: coverageSignals?.lineCoverage ?? null,
        branchCoverage: coverageSignals?.branchCoverage ?? null,
        functionCoverage: coverageSignals?.functionCoverage ?? null,
        statementCoverage: coverageSignals?.statementCoverage ?? null
      },
      normalizedMetrics: {
        coverageRatio: coverageRatio === null ? null : round45(coverageRatio),
        coveragePenalty: round45(coveragePenalty)
      },
      weight: 0.45,
      evidence: [{ kind: "repository_metric", metric: "coverage.summary" }]
    }
  ];
  const testHealthPenalty = clamp01(
    testHealthFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
  );
  if (sourceFiles > 0 && testRatio < 0.2) {
    pushIssue(issues, {
      id: "quality.test_health.low_test_presence",
      ruleId: "tests.file_ratio",
      dimension: "testHealth",
      target: input.structural.targetPath,
      message: `Detected ${testFiles} test file(s) for ${sourceFiles} source file(s).`,
      severity: testRatio === 0 ? "error" : "warn",
      impact: round45(testHealthPenalty * 0.4)
    });
  }
  if (coverageRatio !== null && coverageRatio < 0.6) {
    pushIssue(issues, {
      id: "quality.test_health.low_coverage",
      ruleId: "coverage.threshold",
      dimension: "testHealth",
      target: input.structural.targetPath,
      message: `Coverage is below threshold (${toPercentage(coverageRatio)}%).`,
      impact: round45(testHealthPenalty * 0.35)
    });
  }
  // --- Final blend: quality = 1 - penalty per dimension, weighted overall ---
  const modularityQuality = clamp01(1 - modularityPenalty);
  const changeHygieneQuality = clamp01(1 - changeHygienePenalty);
  const staticAnalysisQuality = clamp01(1 - staticAnalysisPenalty);
  const complexityQuality = clamp01(1 - complexityPenalty);
  const duplicationQuality = clamp01(1 - duplicationPenalty);
  const testHealthQuality = clamp01(1 - testHealthPenalty);
  const normalizedScore = clamp01(
    modularityQuality * DIMENSION_WEIGHTS.modularity + changeHygieneQuality * DIMENSION_WEIGHTS.changeHygiene + staticAnalysisQuality * DIMENSION_WEIGHTS.staticAnalysis + complexityQuality * DIMENSION_WEIGHTS.complexity + duplicationQuality * DIMENSION_WEIGHTS.duplication + testHealthQuality * DIMENSION_WEIGHTS.testHealth
  );
  // Rank by impact (ties broken deterministically), cap at 12, and strip the
  // internal `impact` field from the public issue shape.
  const topIssues = [...issues].sort(
    (a, b) => b.impact - a.impact || a.id.localeCompare(b.id) || a.target.localeCompare(b.target)
  ).slice(0, 12).map(({ impact: _impact, ...issue }) => issue);
  return {
    qualityScore: toPercentage(normalizedScore),
    normalizedScore: round45(normalizedScore),
    dimensions: {
      modularity: toPercentage(modularityQuality),
      changeHygiene: toPercentage(changeHygieneQuality),
      staticAnalysis: toPercentage(staticAnalysisQuality),
      complexity: toPercentage(complexityQuality),
      duplication: toPercentage(duplicationQuality),
      testHealth: toPercentage(testHealthQuality)
    },
    topIssues,
    trace: {
      schemaVersion: QUALITY_TRACE_VERSION,
      dimensions: [
        createDimensionTrace("modularity", modularityQuality, modularityFactors),
        createDimensionTrace("changeHygiene", changeHygieneQuality, changeHygieneFactors),
        createDimensionTrace("staticAnalysis", staticAnalysisQuality, staticAnalysisFactors),
        createDimensionTrace("complexity", complexityQuality, complexityFactors),
        createDimensionTrace("duplication", duplicationQuality, duplicationFactors),
        createDimensionTrace("testHealth", testHealthQuality, testHealthFactors)
      ]
    }
  };
};
5417
+
4227
5418
  // ../risk-engine/dist/index.js
4228
5419
  var DEFAULT_RISK_ENGINE_CONFIG = {
4229
5420
  // Base dimensional influence. Risk is never dominated by a single dimension by default.
@@ -4309,8 +5500,8 @@ var DEFAULT_RISK_ENGINE_CONFIG = {
4309
5500
  }
4310
5501
  };
4311
5502
  var toUnitInterval = (value) => Number.isFinite(value) ? Math.min(1, Math.max(0, value)) : 0;
4312
- var round45 = (value) => Number(value.toFixed(4));
4313
- var average = (values) => {
5503
+ var round46 = (value) => Number(value.toFixed(4));
5504
+ var average2 = (values) => {
4314
5505
  if (values.length === 0) {
4315
5506
  return 0;
4316
5507
  }
@@ -4397,7 +5588,7 @@ var normalizeWithScale = (value, scale) => {
4397
5588
  }
4398
5589
  return toUnitInterval((value - scale.lower) / (scale.upper - scale.lower));
4399
5590
  };
4400
- var normalizePath2 = (path) => path.replaceAll("\\", "/");
5591
+ var normalizePath3 = (path) => path.replaceAll("\\", "/");
4401
5592
  var computeAggregatorAttenuation = (input) => {
4402
5593
  const { fanIn, fanOut, inCycle, evolutionMetrics, config } = input;
4403
5594
  if (!config.enabled || inCycle > 0) {
@@ -4418,14 +5609,14 @@ var computeAggregatorAttenuation = (input) => {
4418
5609
  const fanOutSignal = toUnitInterval((fanOut - config.minFanOut) / Math.max(1, config.minFanOut));
4419
5610
  const lowChurnPerCommitSignal = 1 - toUnitInterval(churnPerCommit / config.maxChurnPerCommit);
4420
5611
  const lowChurnPerDependencySignal = 1 - toUnitInterval(churnPerDependency / config.maxChurnPerDependency);
4421
- const attenuationConfidence = average([
5612
+ const attenuationConfidence = average2([
4422
5613
  fanInSignal,
4423
5614
  fanOutSignal,
4424
5615
  lowChurnPerCommitSignal,
4425
5616
  lowChurnPerDependencySignal
4426
5617
  ]);
4427
5618
  const reduction = toUnitInterval(config.maxStructuralReduction) * attenuationConfidence;
4428
- return round45(toUnitInterval(1 - reduction));
5619
+ return round46(toUnitInterval(1 - reduction));
4429
5620
  };
4430
5621
  var dependencySignalWeights = {
4431
5622
  single_maintainer: 0.3,
@@ -4455,12 +5646,12 @@ var computeDependencySignalScore = (ownSignals, inheritedSignals, inheritedSigna
4455
5646
  }
4456
5647
  return toUnitInterval(weightedTotal / maxWeightedTotal);
4457
5648
  };
4458
- var clampConfidence = (value) => round45(toUnitInterval(value));
5649
+ var clampConfidence = (value) => round46(toUnitInterval(value));
4459
5650
  var computeExternalMetadataConfidence = (external) => {
4460
5651
  if (!external.available) {
4461
5652
  return 0;
4462
5653
  }
4463
- return round45(toUnitInterval(0.35 + external.metrics.metadataCoverage * 0.65));
5654
+ return round46(toUnitInterval(0.35 + external.metrics.metadataCoverage * 0.65));
4464
5655
  };
4465
5656
  var computeEvolutionHistoryConfidence = (structural, evolution, evolutionByFile) => {
4466
5657
  if (!evolution.available) {
@@ -4472,12 +5663,12 @@ var computeEvolutionHistoryConfidence = (structural, evolution, evolutionByFile)
4472
5663
  }
4473
5664
  let coveredFiles = 0;
4474
5665
  for (const file of structural.files) {
4475
- if (evolutionByFile.has(normalizePath2(file.id))) {
5666
+ if (evolutionByFile.has(normalizePath3(file.id))) {
4476
5667
  coveredFiles += 1;
4477
5668
  }
4478
5669
  }
4479
5670
  const coverage = coveredFiles / totalFiles;
4480
- return round45(toUnitInterval(0.3 + coverage * 0.7));
5671
+ return round46(toUnitInterval(0.3 + coverage * 0.7));
4481
5672
  };
4482
5673
  var buildFactorTraces = (totalScore, inputs) => {
4483
5674
  const positiveInputs = inputs.filter((input) => input.strength > 0);
@@ -4515,7 +5706,7 @@ var buildFactorTraces = (totalScore, inputs) => {
4515
5706
  continue;
4516
5707
  }
4517
5708
  if (index === scored.length - 1) {
4518
- const remaining = round45(totalScore - distributed);
5709
+ const remaining = round46(totalScore - distributed);
4519
5710
  traces[traceIndex] = {
4520
5711
  ...existing,
4521
5712
  contribution: Math.max(0, remaining)
@@ -4523,7 +5714,7 @@ var buildFactorTraces = (totalScore, inputs) => {
4523
5714
  distributed += Math.max(0, remaining);
4524
5715
  continue;
4525
5716
  }
4526
- const rounded = round45(current.contribution);
5717
+ const rounded = round46(current.contribution);
4527
5718
  traces[traceIndex] = {
4528
5719
  ...existing,
4529
5720
  contribution: rounded
@@ -4534,15 +5725,15 @@ var buildFactorTraces = (totalScore, inputs) => {
4534
5725
  };
4535
5726
  var buildReductionLevers = (factors) => factors.filter((factor) => factor.contribution > 0).sort((a, b) => b.contribution - a.contribution || a.factorId.localeCompare(b.factorId)).slice(0, 3).map((factor) => ({
4536
5727
  factorId: factor.factorId,
4537
- estimatedImpact: round45(factor.contribution)
5728
+ estimatedImpact: round46(factor.contribution)
4538
5729
  }));
4539
5730
  var buildTargetTrace = (targetType, targetId, totalScore, normalizedScore, factors) => {
4540
5731
  const dominantFactors = [...factors].filter((factor) => factor.contribution > 0).sort((a, b) => b.contribution - a.contribution || a.factorId.localeCompare(b.factorId)).slice(0, 3).map((factor) => factor.factorId);
4541
5732
  return {
4542
5733
  targetType,
4543
5734
  targetId,
4544
- totalScore: round45(totalScore),
4545
- normalizedScore: round45(normalizedScore),
5735
+ totalScore: round46(totalScore),
5736
+ normalizedScore: round46(normalizedScore),
4546
5737
  factors,
4547
5738
  dominantFactors,
4548
5739
  reductionLevers: buildReductionLevers(factors)
@@ -4616,14 +5807,14 @@ var computeDependencyScores = (external, config) => {
4616
5807
  ].filter((value) => value !== null).length;
4617
5808
  const confidence = toUnitInterval((0.5 + availableMetricCount * 0.125) * metadataConfidence);
4618
5809
  dependencyContexts.set(dependency.name, {
4619
- signalScore: round45(signalScore),
4620
- stalenessRisk: round45(stalenessRisk),
4621
- maintainerConcentrationRisk: round45(maintainerConcentrationRisk),
4622
- transitiveBurdenRisk: round45(transitiveBurdenRisk),
4623
- centralityRisk: round45(centralityRisk),
4624
- chainDepthRisk: round45(chainDepthRisk),
4625
- busFactorRisk: round45(busFactorRisk),
4626
- popularityDampener: round45(popularityDampener),
5810
+ signalScore: round46(signalScore),
5811
+ stalenessRisk: round46(stalenessRisk),
5812
+ maintainerConcentrationRisk: round46(maintainerConcentrationRisk),
5813
+ transitiveBurdenRisk: round46(transitiveBurdenRisk),
5814
+ centralityRisk: round46(centralityRisk),
5815
+ chainDepthRisk: round46(chainDepthRisk),
5816
+ busFactorRisk: round46(busFactorRisk),
5817
+ popularityDampener: round46(popularityDampener),
4627
5818
  rawMetrics: {
4628
5819
  daysSinceLastRelease: dependency.daysSinceLastRelease,
4629
5820
  maintainerCount: dependency.maintainerCount,
@@ -4633,12 +5824,12 @@ var computeDependencyScores = (external, config) => {
4633
5824
  busFactor: dependency.busFactor,
4634
5825
  weeklyDownloads: dependency.weeklyDownloads
4635
5826
  },
4636
- confidence: round45(confidence)
5827
+ confidence: round46(confidence)
4637
5828
  });
4638
5829
  return {
4639
5830
  dependency: dependency.name,
4640
- score: round45(normalizedScore * 100),
4641
- normalizedScore: round45(normalizedScore),
5831
+ score: round46(normalizedScore * 100),
5832
+ normalizedScore: round46(normalizedScore),
4642
5833
  ownRiskSignals: dependency.ownRiskSignals,
4643
5834
  inheritedRiskSignals: dependency.inheritedRiskSignals
4644
5835
  };
@@ -4647,7 +5838,7 @@ var computeDependencyScores = (external, config) => {
4647
5838
  );
4648
5839
  const normalizedValues = dependencyScores.map((score) => score.normalizedScore);
4649
5840
  const highDependencyRisk = dependencyScores.length === 0 ? 0 : percentile(normalizedValues, config.externalDimension.topDependencyPercentile);
4650
- const averageDependencyRisk = average(normalizedValues);
5841
+ const averageDependencyRisk = average2(normalizedValues);
4651
5842
  const depthRisk = halfLifeRisk(
4652
5843
  external.metrics.dependencyDepth,
4653
5844
  config.externalDimension.dependencyDepthHalfLife
@@ -4657,7 +5848,7 @@ var computeDependencyScores = (external, config) => {
4657
5848
  );
4658
5849
  return {
4659
5850
  dependencyScores,
4660
- repositoryExternalPressure: round45(repositoryExternalPressure),
5851
+ repositoryExternalPressure: round46(repositoryExternalPressure),
4661
5852
  dependencyContexts
4662
5853
  };
4663
5854
  };
@@ -4666,7 +5857,7 @@ var mapEvolutionByFile = (evolution) => {
4666
5857
  return /* @__PURE__ */ new Map();
4667
5858
  }
4668
5859
  return new Map(
4669
- evolution.files.map((fileMetrics) => [normalizePath2(fileMetrics.filePath), fileMetrics])
5860
+ evolution.files.map((fileMetrics) => [normalizePath3(fileMetrics.filePath), fileMetrics])
4670
5861
  );
4671
5862
  };
4672
5863
  var computeEvolutionScales = (evolutionByFile, config) => {
@@ -4690,7 +5881,7 @@ var computeEvolutionScales = (evolutionByFile, config) => {
4690
5881
  };
4691
5882
  };
4692
5883
  var inferModuleName = (filePath, config) => {
4693
- const normalized = normalizePath2(filePath);
5884
+ const normalized = normalizePath3(filePath);
4694
5885
  const parts = normalized.split("/").filter((part) => part.length > 0);
4695
5886
  if (parts.length <= 1) {
4696
5887
  return config.module.rootLabel;
@@ -4711,18 +5902,18 @@ var buildFragileClusters = (structural, evolution, fileScoresByFile, config) =>
4711
5902
  const clusters = [];
4712
5903
  let cycleClusterCount = 0;
4713
5904
  for (const cycle of structural.cycles) {
4714
- const files = [...new Set(cycle.nodes.map((node) => normalizePath2(node)))].filter(
5905
+ const files = [...new Set(cycle.nodes.map((node) => normalizePath3(node)))].filter(
4715
5906
  (filePath) => fileScoresByFile.has(filePath)
4716
5907
  );
4717
5908
  if (files.length < 2) {
4718
5909
  continue;
4719
5910
  }
4720
5911
  files.sort((a, b) => a.localeCompare(b));
4721
- const averageRisk = average(
5912
+ const averageRisk = average2(
4722
5913
  files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
4723
5914
  );
4724
5915
  const cycleSizeRisk = toUnitInterval((files.length - 1) / 5);
4725
- const score = round45(toUnitInterval(averageRisk * 0.75 + cycleSizeRisk * 0.25) * 100);
5916
+ const score = round46(toUnitInterval(averageRisk * 0.75 + cycleSizeRisk * 0.25) * 100);
4726
5917
  cycleClusterCount += 1;
4727
5918
  clusters.push({
4728
5919
  id: `cycle:${cycleClusterCount}`,
@@ -4743,8 +5934,8 @@ var buildFragileClusters = (structural, evolution, fileScoresByFile, config) =>
4743
5934
  )
4744
5935
  );
4745
5936
  const selectedPairs = candidates.filter((pair) => pair.couplingScore >= threshold).map((pair) => ({
4746
- fileA: normalizePath2(pair.fileA),
4747
- fileB: normalizePath2(pair.fileB),
5937
+ fileA: normalizePath3(pair.fileA),
5938
+ fileB: normalizePath3(pair.fileB),
4748
5939
  couplingScore: pair.couplingScore
4749
5940
  })).filter(
4750
5941
  (pair) => pair.fileA !== pair.fileB && fileScoresByFile.has(pair.fileA) && fileScoresByFile.has(pair.fileB)
@@ -4792,11 +5983,11 @@ var buildFragileClusters = (structural, evolution, fileScoresByFile, config) =>
4792
5983
  const componentPairs = selectedPairs.filter(
4793
5984
  (pair) => fileSet.has(pair.fileA) && fileSet.has(pair.fileB)
4794
5985
  );
4795
- const meanFileRisk = average(
5986
+ const meanFileRisk = average2(
4796
5987
  files.map((filePath) => fileScoresByFile.get(filePath)?.normalizedScore ?? 0)
4797
5988
  );
4798
- const meanCoupling = average(componentPairs.map((pair) => pair.couplingScore));
4799
- const score = round45(toUnitInterval(meanFileRisk * 0.65 + meanCoupling * 0.35) * 100);
5989
+ const meanCoupling = average2(componentPairs.map((pair) => pair.couplingScore));
5990
+ const score = round46(toUnitInterval(meanFileRisk * 0.65 + meanCoupling * 0.35) * 100);
4800
5991
  couplingClusterCount += 1;
4801
5992
  clusters.push({
4802
5993
  id: `coupling:${couplingClusterCount}`,
@@ -4822,7 +6013,7 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
4822
6013
  );
4823
6014
  const evolutionScales = computeEvolutionScales(evolutionByFile, config);
4824
6015
  const cycleFileSet = new Set(
4825
- structural.cycles.flatMap((cycle) => cycle.nodes.map((node) => normalizePath2(node)))
6016
+ structural.cycles.flatMap((cycle) => cycle.nodes.map((node) => normalizePath3(node)))
4826
6017
  );
4827
6018
  const fanInScale = buildQuantileScale(
4828
6019
  structural.files.map((file) => logScale(file.fanIn)),
@@ -4845,7 +6036,7 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
4845
6036
  external: external.available
4846
6037
  });
4847
6038
  const fileRiskContexts = structural.files.map((file) => {
4848
- const filePath = normalizePath2(file.id);
6039
+ const filePath = normalizePath3(file.id);
4849
6040
  const inCycle = cycleFileSet.has(filePath) ? 1 : 0;
4850
6041
  const fanInRisk = normalizeWithScale(logScale(file.fanIn), fanInScale);
4851
6042
  const fanOutRisk = normalizeWithScale(logScale(file.fanOut), fanOutScale);
@@ -4907,21 +6098,21 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
4907
6098
  const normalizedScore = saturatingComposite(baseline, interactions);
4908
6099
  return {
4909
6100
  file: filePath,
4910
- score: round45(normalizedScore * 100),
4911
- normalizedScore: round45(normalizedScore),
6101
+ score: round46(normalizedScore * 100),
6102
+ normalizedScore: round46(normalizedScore),
4912
6103
  factors: {
4913
- structural: round45(structuralFactor),
4914
- evolution: round45(evolutionFactor),
4915
- external: round45(externalFactor)
6104
+ structural: round46(structuralFactor),
6105
+ evolution: round46(evolutionFactor),
6106
+ external: round46(externalFactor)
4916
6107
  },
4917
- structuralCentrality: round45(structuralCentrality),
6108
+ structuralCentrality: round46(structuralCentrality),
4918
6109
  traceTerms: {
4919
- structuralBase: round45(structuralBase),
4920
- evolutionBase: round45(evolutionBase),
4921
- externalBase: round45(externalBase),
4922
- interactionStructuralEvolution: round45(interactionStructuralEvolution),
4923
- interactionCentralInstability: round45(interactionCentralInstability),
4924
- interactionDependencyAmplification: round45(interactionDependencyAmplification)
6110
+ structuralBase: round46(structuralBase),
6111
+ evolutionBase: round46(evolutionBase),
6112
+ externalBase: round46(externalBase),
6113
+ interactionStructuralEvolution: round46(interactionStructuralEvolution),
6114
+ interactionCentralInstability: round46(interactionCentralInstability),
6115
+ interactionDependencyAmplification: round46(interactionDependencyAmplification)
4925
6116
  },
4926
6117
  rawMetrics: {
4927
6118
  fanIn: file.fanIn,
@@ -4933,19 +6124,19 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
4933
6124
  recentVolatility: evolutionMetrics?.recentVolatility ?? null,
4934
6125
  topAuthorShare: evolutionMetrics?.topAuthorShare ?? null,
4935
6126
  busFactor: evolutionMetrics?.busFactor ?? null,
4936
- dependencyAffinity: round45(dependencyAffinity),
4937
- repositoryExternalPressure: round45(dependencyComputation.repositoryExternalPressure),
4938
- structuralAttenuation: round45(structuralAttenuation)
6127
+ dependencyAffinity: round46(dependencyAffinity),
6128
+ repositoryExternalPressure: round46(dependencyComputation.repositoryExternalPressure),
6129
+ structuralAttenuation: round46(structuralAttenuation)
4939
6130
  },
4940
6131
  normalizedMetrics: {
4941
- fanInRisk: round45(fanInRisk),
4942
- fanOutRisk: round45(fanOutRisk),
4943
- depthRisk: round45(depthRisk),
4944
- frequencyRisk: round45(frequencyRisk),
4945
- churnRisk: round45(churnRisk),
4946
- volatilityRisk: round45(volatilityRisk),
4947
- ownershipConcentrationRisk: round45(ownershipConcentrationRisk),
4948
- busFactorRisk: round45(busFactorRisk)
6132
+ fanInRisk: round46(fanInRisk),
6133
+ fanOutRisk: round46(fanOutRisk),
6134
+ depthRisk: round46(depthRisk),
6135
+ frequencyRisk: round46(frequencyRisk),
6136
+ churnRisk: round46(churnRisk),
6137
+ volatilityRisk: round46(volatilityRisk),
6138
+ ownershipConcentrationRisk: round46(ownershipConcentrationRisk),
6139
+ busFactorRisk: round46(busFactorRisk)
4949
6140
  }
4950
6141
  };
4951
6142
  }).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file));
@@ -5071,29 +6262,29 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5071
6262
  moduleFiles.set(moduleName, values);
5072
6263
  }
5073
6264
  const moduleScores = [...moduleFiles.entries()].map(([module, values]) => {
5074
- const averageScore = average(values);
6265
+ const averageScore = average2(values);
5075
6266
  const peakScore = values.reduce((max, value) => Math.max(max, value), 0);
5076
6267
  const normalizedScore = toUnitInterval(averageScore * 0.65 + peakScore * 0.35);
5077
6268
  return {
5078
6269
  module,
5079
- score: round45(normalizedScore * 100),
5080
- normalizedScore: round45(normalizedScore),
6270
+ score: round46(normalizedScore * 100),
6271
+ normalizedScore: round46(normalizedScore),
5081
6272
  fileCount: values.length
5082
6273
  };
5083
6274
  }).sort((a, b) => b.score - a.score || a.module.localeCompare(b.module));
5084
6275
  if (collector !== void 0) {
5085
6276
  for (const [module, values] of moduleFiles.entries()) {
5086
- const averageScore = average(values);
6277
+ const averageScore = average2(values);
5087
6278
  const peakScore = values.reduce((max, value) => Math.max(max, value), 0);
5088
6279
  const normalizedScore = toUnitInterval(averageScore * 0.65 + peakScore * 0.35);
5089
- const totalScore = round45(normalizedScore * 100);
6280
+ const totalScore = round46(normalizedScore * 100);
5090
6281
  const factors = buildFactorTraces(totalScore, [
5091
6282
  {
5092
6283
  factorId: "module.average_file_risk",
5093
6284
  family: "composite",
5094
6285
  strength: averageScore * 0.65,
5095
- rawMetrics: { averageFileRisk: round45(averageScore), fileCount: values.length },
5096
- normalizedMetrics: { normalizedModuleRisk: round45(normalizedScore) },
6286
+ rawMetrics: { averageFileRisk: round46(averageScore), fileCount: values.length },
6287
+ normalizedMetrics: { normalizedModuleRisk: round46(normalizedScore) },
5097
6288
  weight: 0.65,
5098
6289
  amplification: null,
5099
6290
  evidence: [{ kind: "repository_metric", metric: "moduleAggregation.average" }],
@@ -5103,8 +6294,8 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5103
6294
  factorId: "module.peak_file_risk",
5104
6295
  family: "composite",
5105
6296
  strength: peakScore * 0.35,
5106
- rawMetrics: { peakFileRisk: round45(peakScore), fileCount: values.length },
5107
- normalizedMetrics: { normalizedModuleRisk: round45(normalizedScore) },
6297
+ rawMetrics: { peakFileRisk: round46(peakScore), fileCount: values.length },
6298
+ normalizedMetrics: { normalizedModuleRisk: round46(normalizedScore) },
5108
6299
  weight: 0.35,
5109
6300
  amplification: null,
5110
6301
  evidence: [{ kind: "repository_metric", metric: "moduleAggregation.peak" }],
@@ -5127,12 +6318,12 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5127
6318
  const normalizedZoneScore = toUnitInterval(intensity * 0.7 + fileScore.normalizedScore * 0.3);
5128
6319
  return {
5129
6320
  file: fileScore.file,
5130
- score: round45(normalizedZoneScore * 100),
6321
+ score: round46(normalizedZoneScore * 100),
5131
6322
  externalPressure: fileScore.factors.external
5132
6323
  };
5133
6324
  }).filter((zone) => external.available && zone.externalPressure >= pressureThreshold).sort((a, b) => b.score - a.score || a.file.localeCompare(b.file)).slice(0, config.amplificationZone.maxZones).map((zone) => ({
5134
6325
  ...zone,
5135
- externalPressure: round45(zone.externalPressure)
6326
+ externalPressure: round46(zone.externalPressure)
5136
6327
  }));
5137
6328
  if (collector !== void 0 && external.available) {
5138
6329
  const dependencyByName = new Map(
@@ -5245,16 +6436,16 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5245
6436
  );
5246
6437
  }
5247
6438
  }
5248
- const structuralDimension = average(fileScores.map((fileScore) => fileScore.factors.structural));
5249
- const evolutionDimension = average(fileScores.map((fileScore) => fileScore.factors.evolution));
6439
+ const structuralDimension = average2(fileScores.map((fileScore) => fileScore.factors.structural));
6440
+ const evolutionDimension = average2(fileScores.map((fileScore) => fileScore.factors.evolution));
5250
6441
  const externalDimension = dependencyComputation.repositoryExternalPressure;
5251
6442
  const topCentralSlice = Math.max(1, Math.ceil(fileRiskContexts.length * 0.1));
5252
- const criticalInstability = average(
6443
+ const criticalInstability = average2(
5253
6444
  [...fileRiskContexts].sort(
5254
6445
  (a, b) => b.structuralCentrality * b.factors.evolution - a.structuralCentrality * a.factors.evolution || a.file.localeCompare(b.file)
5255
6446
  ).slice(0, topCentralSlice).map((context) => context.structuralCentrality * context.factors.evolution)
5256
6447
  );
5257
- const dependencyAmplification = average(
6448
+ const dependencyAmplification = average2(
5258
6449
  dependencyAmplificationZones.map(
5259
6450
  (zone) => toUnitInterval(zone.externalPressure * zone.score / 100)
5260
6451
  )
@@ -5265,15 +6456,15 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5265
6456
  criticalInstability * config.interactionWeights.centralInstability,
5266
6457
  dependencyAmplification * config.interactionWeights.dependencyAmplification
5267
6458
  ]);
5268
- const riskScore = round45(repositoryNormalizedScore * 100);
6459
+ const riskScore = round46(repositoryNormalizedScore * 100);
5269
6460
  if (collector !== void 0) {
5270
6461
  const repositoryFactors = buildFactorTraces(riskScore, [
5271
6462
  {
5272
6463
  factorId: "repository.structural",
5273
6464
  family: "structural",
5274
6465
  strength: structuralDimension * dimensionWeights.structural,
5275
- rawMetrics: { structuralDimension: round45(structuralDimension) },
5276
- normalizedMetrics: { dimensionWeight: round45(dimensionWeights.structural) },
6466
+ rawMetrics: { structuralDimension: round46(structuralDimension) },
6467
+ normalizedMetrics: { dimensionWeight: round46(dimensionWeights.structural) },
5277
6468
  weight: dimensionWeights.structural,
5278
6469
  amplification: null,
5279
6470
  evidence: [{ kind: "repository_metric", metric: "structuralDimension" }],
@@ -5283,8 +6474,8 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5283
6474
  factorId: "repository.evolution",
5284
6475
  family: "evolution",
5285
6476
  strength: evolutionDimension * dimensionWeights.evolution,
5286
- rawMetrics: { evolutionDimension: round45(evolutionDimension) },
5287
- normalizedMetrics: { dimensionWeight: round45(dimensionWeights.evolution) },
6477
+ rawMetrics: { evolutionDimension: round46(evolutionDimension) },
6478
+ normalizedMetrics: { dimensionWeight: round46(dimensionWeights.evolution) },
5288
6479
  weight: dimensionWeights.evolution,
5289
6480
  amplification: null,
5290
6481
  evidence: [{ kind: "repository_metric", metric: "evolutionDimension" }],
@@ -5294,8 +6485,8 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5294
6485
  factorId: "repository.external",
5295
6486
  family: "external",
5296
6487
  strength: externalDimension * dimensionWeights.external,
5297
- rawMetrics: { externalDimension: round45(externalDimension) },
5298
- normalizedMetrics: { dimensionWeight: round45(dimensionWeights.external) },
6488
+ rawMetrics: { externalDimension: round46(externalDimension) },
6489
+ normalizedMetrics: { dimensionWeight: round46(dimensionWeights.external) },
5299
6490
  weight: dimensionWeights.external,
5300
6491
  amplification: null,
5301
6492
  evidence: [{ kind: "repository_metric", metric: "externalDimension" }],
@@ -5306,19 +6497,19 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5306
6497
  family: "composite",
5307
6498
  strength: structuralDimension * evolutionDimension * config.interactionWeights.structuralEvolution + criticalInstability * config.interactionWeights.centralInstability + dependencyAmplification * config.interactionWeights.dependencyAmplification,
5308
6499
  rawMetrics: {
5309
- structuralEvolution: round45(
6500
+ structuralEvolution: round46(
5310
6501
  structuralDimension * evolutionDimension * config.interactionWeights.structuralEvolution
5311
6502
  ),
5312
- centralInstability: round45(
6503
+ centralInstability: round46(
5313
6504
  criticalInstability * config.interactionWeights.centralInstability
5314
6505
  ),
5315
- dependencyAmplification: round45(
6506
+ dependencyAmplification: round46(
5316
6507
  dependencyAmplification * config.interactionWeights.dependencyAmplification
5317
6508
  )
5318
6509
  },
5319
6510
  normalizedMetrics: {
5320
- criticalInstability: round45(criticalInstability),
5321
- dependencyAmplification: round45(dependencyAmplification)
6511
+ criticalInstability: round46(criticalInstability),
6512
+ dependencyAmplification: round46(dependencyAmplification)
5322
6513
  },
5323
6514
  weight: null,
5324
6515
  amplification: config.interactionWeights.structuralEvolution + config.interactionWeights.centralInstability + config.interactionWeights.dependencyAmplification,
@@ -5338,7 +6529,7 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5338
6529
  }
5339
6530
  return {
5340
6531
  riskScore,
5341
- normalizedScore: round45(repositoryNormalizedScore),
6532
+ normalizedScore: round46(repositoryNormalizedScore),
5342
6533
  hotspots,
5343
6534
  fragileClusters,
5344
6535
  dependencyAmplificationZones,
@@ -5462,7 +6653,7 @@ var evaluateRepositoryRisk = (input, options = {}) => {
5462
6653
  };
5463
6654
 
5464
6655
  // src/application/run-analyze-command.ts
5465
- var resolveTargetPath = (inputPath, cwd) => resolve3(cwd, inputPath ?? ".");
6656
+ var resolveTargetPath = (inputPath, cwd) => resolve4(cwd, inputPath ?? ".");
5466
6657
  var riskProfileConfig = {
5467
6658
  default: void 0,
5468
6659
  personal: {
@@ -5631,10 +6822,16 @@ var collectAnalysisInputs = async (inputPath, authorIdentityMode, options = {},
5631
6822
  } else {
5632
6823
  logger.warn(`external analysis unavailable: ${external.reason}`);
5633
6824
  }
6825
+ logger.info("collecting quality signals");
6826
+ const qualitySignals = await collectQualitySignals(targetPath, structural, logger);
6827
+ logger.debug(
6828
+ `quality signals: todoFixmeCommentCount=${qualitySignals.todoFixmeCommentCount ?? 0}, eslintErrors=${qualitySignals.eslint?.errorCount ?? 0}, tscErrors=${qualitySignals.typescript?.errorCount ?? 0}`
6829
+ );
5634
6830
  return {
5635
6831
  structural,
5636
6832
  evolution,
5637
- external
6833
+ external,
6834
+ qualitySignals
5638
6835
  };
5639
6836
  };
5640
6837
  var runAnalyzeCommand = async (inputPath, authorIdentityMode, options = {}, logger = createSilentLogger()) => {
@@ -5647,18 +6844,30 @@ var runAnalyzeCommand = async (inputPath, authorIdentityMode, options = {}, logg
5647
6844
  logger.info("computing risk summary");
5648
6845
  const riskConfig = resolveRiskConfigForProfile(options.riskProfile);
5649
6846
  const risk = computeRepositoryRiskSummary({
5650
- ...analysisInputs,
6847
+ structural: analysisInputs.structural,
6848
+ evolution: analysisInputs.evolution,
6849
+ external: analysisInputs.external,
5651
6850
  ...riskConfig === void 0 ? {} : { config: riskConfig }
5652
6851
  });
5653
- logger.info(`analysis completed (riskScore=${risk.riskScore})`);
6852
+ const quality = computeRepositoryQualitySummary({
6853
+ structural: analysisInputs.structural,
6854
+ evolution: analysisInputs.evolution,
6855
+ signals: analysisInputs.qualitySignals
6856
+ });
6857
+ logger.info(
6858
+ `analysis completed (riskScore=${risk.riskScore}, qualityScore=${quality.qualityScore})`
6859
+ );
5654
6860
  return {
5655
- ...analysisInputs,
5656
- risk
6861
+ structural: analysisInputs.structural,
6862
+ evolution: analysisInputs.evolution,
6863
+ external: analysisInputs.external,
6864
+ risk,
6865
+ quality
5657
6866
  };
5658
6867
  };
5659
6868
 
5660
6869
  // src/application/run-check-command.ts
5661
- import { readFile as readFile2, writeFile as writeFile2 } from "fs/promises";
6870
+ import { readFile as readFile3, writeFile as writeFile2 } from "fs/promises";
5662
6871
 
5663
6872
  // src/application/build-analysis-snapshot.ts
5664
6873
  var buildAnalysisSnapshot = async (inputPath, authorIdentityMode, options, logger) => {
@@ -5673,14 +6882,23 @@ var buildAnalysisSnapshot = async (inputPath, authorIdentityMode, options, logge
5673
6882
  const riskConfig = resolveRiskConfigForProfile(options.riskProfile);
5674
6883
  const evaluation = evaluateRepositoryRisk(
5675
6884
  {
5676
- ...analysisInputs,
6885
+ structural: analysisInputs.structural,
6886
+ evolution: analysisInputs.evolution,
6887
+ external: analysisInputs.external,
5677
6888
  ...riskConfig === void 0 ? {} : { config: riskConfig }
5678
6889
  },
5679
6890
  { explain: options.includeTrace }
5680
6891
  );
5681
6892
  const summary = {
5682
- ...analysisInputs,
5683
- risk: evaluation.summary
6893
+ structural: analysisInputs.structural,
6894
+ evolution: analysisInputs.evolution,
6895
+ external: analysisInputs.external,
6896
+ risk: evaluation.summary,
6897
+ quality: computeRepositoryQualitySummary({
6898
+ structural: analysisInputs.structural,
6899
+ evolution: analysisInputs.evolution,
6900
+ signals: analysisInputs.qualitySignals
6901
+ })
5684
6902
  };
5685
6903
  return createSnapshot({
5686
6904
  analysis: summary,
@@ -5732,7 +6950,7 @@ var runCheckCommand = async (inputPath, authorIdentityMode, options, logger = cr
5732
6950
  let diff;
5733
6951
  if (options.baselinePath !== void 0) {
5734
6952
  logger.info(`loading baseline snapshot: ${options.baselinePath}`);
5735
- const baselineRaw = await readFile2(options.baselinePath, "utf8");
6953
+ const baselineRaw = await readFile3(options.baselinePath, "utf8");
5736
6954
  try {
5737
6955
  baseline = parseSnapshot(baselineRaw);
5738
6956
  } catch (error) {
@@ -5771,8 +6989,8 @@ var runCheckCommand = async (inputPath, authorIdentityMode, options, logger = cr
5771
6989
  };
5772
6990
 
5773
6991
  // src/application/run-ci-command.ts
5774
- import { readFile as readFile3, writeFile as writeFile3 } from "fs/promises";
5775
- import { relative as relative2, resolve as resolve4 } from "path";
6992
+ import { readFile as readFile4, writeFile as writeFile3 } from "fs/promises";
6993
+ import { relative as relative3, resolve as resolve5 } from "path";
5776
6994
  var isPathOutsideBase = (value) => {
5777
6995
  return value === ".." || value.startsWith("../") || value.startsWith("..\\");
5778
6996
  };
@@ -5785,7 +7003,7 @@ var runCiCommand = async (inputPath, authorIdentityMode, options, logger = creat
5785
7003
  if (options.baselineSha !== void 0 && options.baselineRef !== "auto") {
5786
7004
  throw new GovernanceConfigurationError("baseline-sha requires --baseline-ref auto");
5787
7005
  }
5788
- const resolvedTargetPath = resolve4(inputPath ?? process.cwd());
7006
+ const resolvedTargetPath = resolve5(inputPath ?? process.cwd());
5789
7007
  logger.info("building current snapshot");
5790
7008
  const current = await buildAnalysisSnapshot(
5791
7009
  inputPath,
@@ -5839,13 +7057,13 @@ var runCiCommand = async (inputPath, authorIdentityMode, options, logger = creat
5839
7057
  repositoryPath: resolvedTargetPath,
5840
7058
  baselineRef,
5841
7059
  analyzeWorktree: async (worktreePath, repositoryRoot) => {
5842
- const relativeTargetPath = relative2(repositoryRoot, resolvedTargetPath);
7060
+ const relativeTargetPath = relative3(repositoryRoot, resolvedTargetPath);
5843
7061
  if (isPathOutsideBase(relativeTargetPath)) {
5844
7062
  throw new GovernanceConfigurationError(
5845
7063
  `target path is outside git repository root: ${resolvedTargetPath}`
5846
7064
  );
5847
7065
  }
5848
- const baselineTargetPath = relativeTargetPath.length === 0 || relativeTargetPath === "." ? worktreePath : resolve4(worktreePath, relativeTargetPath);
7066
+ const baselineTargetPath = relativeTargetPath.length === 0 || relativeTargetPath === "." ? worktreePath : resolve5(worktreePath, relativeTargetPath);
5849
7067
  return buildAnalysisSnapshot(
5850
7068
  baselineTargetPath,
5851
7069
  authorIdentityMode,
@@ -5871,7 +7089,7 @@ var runCiCommand = async (inputPath, authorIdentityMode, options, logger = creat
5871
7089
  diff = compareSnapshots(current, baseline);
5872
7090
  } else if (options.baselinePath !== void 0) {
5873
7091
  logger.info(`loading baseline snapshot: ${options.baselinePath}`);
5874
- const baselineRaw = await readFile3(options.baselinePath, "utf8");
7092
+ const baselineRaw = await readFile4(options.baselinePath, "utf8");
5875
7093
  try {
5876
7094
  baseline = parseSnapshot(baselineRaw);
5877
7095
  } catch (error) {
@@ -5919,7 +7137,7 @@ ${ciMarkdown}`;
5919
7137
  };
5920
7138
 
5921
7139
  // src/application/run-report-command.ts
5922
- import { readFile as readFile4, writeFile as writeFile4 } from "fs/promises";
7140
+ import { readFile as readFile5, writeFile as writeFile4 } from "fs/promises";
5923
7141
  var runReportCommand = async (inputPath, authorIdentityMode, options, logger = createSilentLogger()) => {
5924
7142
  logger.info("building analysis snapshot");
5925
7143
  const current = await buildAnalysisSnapshot(
@@ -5941,7 +7159,7 @@ var runReportCommand = async (inputPath, authorIdentityMode, options, logger = c
5941
7159
  report = createReport(current);
5942
7160
  } else {
5943
7161
  logger.info(`loading baseline snapshot: ${options.comparePath}`);
5944
- const baselineRaw = await readFile4(options.comparePath, "utf8");
7162
+ const baselineRaw = await readFile5(options.comparePath, "utf8");
5945
7163
  const baseline = parseSnapshot(baselineRaw);
5946
7164
  const diff = compareSnapshots(current, baseline);
5947
7165
  report = createReport(current, diff);
@@ -5987,7 +7205,9 @@ var runExplainCommand = async (inputPath, authorIdentityMode, options, logger =
5987
7205
  const riskConfig = resolveRiskConfigForProfile(options.riskProfile);
5988
7206
  const evaluation = evaluateRepositoryRisk(
5989
7207
  {
5990
- ...analysisInputs,
7208
+ structural: analysisInputs.structural,
7209
+ evolution: analysisInputs.evolution,
7210
+ external: analysisInputs.external,
5991
7211
  ...riskConfig === void 0 ? {} : { config: riskConfig }
5992
7212
  },
5993
7213
  { explain: true }
@@ -5996,10 +7216,19 @@ var runExplainCommand = async (inputPath, authorIdentityMode, options, logger =
5996
7216
  throw new Error("risk trace unavailable");
5997
7217
  }
5998
7218
  const summary = {
5999
- ...analysisInputs,
6000
- risk: evaluation.summary
7219
+ structural: analysisInputs.structural,
7220
+ evolution: analysisInputs.evolution,
7221
+ external: analysisInputs.external,
7222
+ risk: evaluation.summary,
7223
+ quality: computeRepositoryQualitySummary({
7224
+ structural: analysisInputs.structural,
7225
+ evolution: analysisInputs.evolution,
7226
+ signals: analysisInputs.qualitySignals
7227
+ })
6001
7228
  };
6002
- logger.info(`explanation completed (riskScore=${summary.risk.riskScore})`);
7229
+ logger.info(
7230
+ `explanation completed (riskScore=${summary.risk.riskScore}, qualityScore=${summary.quality.qualityScore})`
7231
+ );
6003
7232
  return {
6004
7233
  summary,
6005
7234
  trace: evaluation.trace,
@@ -6009,7 +7238,7 @@ var runExplainCommand = async (inputPath, authorIdentityMode, options, logger =
6009
7238
 
6010
7239
  // src/index.ts
6011
7240
  var program = new Command();
6012
- var packageJsonPath = resolve5(dirname2(fileURLToPath(import.meta.url)), "../package.json");
7241
+ var packageJsonPath = resolve6(dirname2(fileURLToPath(import.meta.url)), "../package.json");
6013
7242
  var { version } = JSON.parse(readFileSync2(packageJsonPath, "utf8"));
6014
7243
  var parseRecentWindowDays = (value) => {
6015
7244
  const parsed = Number.parseInt(value, 10);
@@ -6055,6 +7284,7 @@ var renderReportHighlightsText = (report) => {
6055
7284
  lines.push("Repository Summary");
6056
7285
  lines.push(` target: ${report.repository.targetPath}`);
6057
7286
  lines.push(` riskScore: ${report.repository.riskScore}`);
7287
+ lines.push(` qualityScore: ${report.quality.qualityScore}`);
6058
7288
  lines.push(` normalizedScore: ${report.repository.normalizedScore}`);
6059
7289
  lines.push(` riskTier: ${report.repository.riskTier}`);
6060
7290
  lines.push("");
@@ -6070,6 +7300,7 @@ var renderReportHighlightsMarkdown = (report) => {
6070
7300
  lines.push("## Repository Summary");
6071
7301
  lines.push(`- target: \`${report.repository.targetPath}\``);
6072
7302
  lines.push(`- riskScore: \`${report.repository.riskScore}\``);
7303
+ lines.push(`- qualityScore: \`${report.quality.qualityScore}\``);
6073
7304
  lines.push(`- normalizedScore: \`${report.repository.normalizedScore}\``);
6074
7305
  lines.push(`- riskTier: \`${report.repository.riskTier}\``);
6075
7306
  lines.push("");
@@ -6087,6 +7318,7 @@ var renderCompactText = (report, explainSummary) => {
6087
7318
  lines.push("Repository");
6088
7319
  lines.push(` target: ${report.repository.targetPath}`);
6089
7320
  lines.push(` riskScore: ${report.repository.riskScore}`);
7321
+ lines.push(` qualityScore: ${report.quality.qualityScore}`);
6090
7322
  lines.push(` riskTier: ${report.repository.riskTier}`);
6091
7323
  lines.push(
6092
7324
  ` dimensions: structural=${report.repository.dimensionScores.structural ?? "n/a"}, evolution=${report.repository.dimensionScores.evolution ?? "n/a"}, external=${report.repository.dimensionScores.external ?? "n/a"}, interactions=${report.repository.dimensionScores.interactions ?? "n/a"}`
@@ -6112,6 +7344,7 @@ var renderCompactMarkdown = (report, explainSummary) => {
6112
7344
  lines.push("## Repository");
6113
7345
  lines.push(`- target: \`${report.repository.targetPath}\``);
6114
7346
  lines.push(`- riskScore: \`${report.repository.riskScore}\``);
7347
+ lines.push(`- qualityScore: \`${report.quality.qualityScore}\``);
6115
7348
  lines.push(`- riskTier: \`${report.repository.riskTier}\``);
6116
7349
  lines.push(
6117
7350
  `- dimensions: structural=\`${report.repository.dimensionScores.structural ?? "n/a"}\`, evolution=\`${report.repository.dimensionScores.evolution ?? "n/a"}\`, external=\`${report.repository.dimensionScores.external ?? "n/a"}\`, interactions=\`${report.repository.dimensionScores.interactions ?? "n/a"}\``
@@ -6281,7 +7514,7 @@ program.command("run").argument("[path]", "path to the project to analyze").addO
6281
7514
  "log verbosity: silent, error, warn, info, debug (logs are written to stderr)"
6282
7515
  ).choices(["silent", "error", "warn", "info", "debug"]).default(parseLogLevel(process.env["CODESENTINEL_LOG_LEVEL"]))
6283
7516
  ).addOption(
6284
- new Option("--format <mode>", "combined output format: text, md, json").choices(["text", "md", "json"]).default("text")
7517
+ new Option("--format <mode>", "combined output format: text, md, json").choices(["text", "md", "json"]).default("md")
6285
7518
  ).addOption(
6286
7519
  new Option("--detail <level>", "run detail level: compact (default), standard, full").choices(["compact", "standard", "full"]).default("compact")
6287
7520
  ).option("--file <path>", "explain a specific file target").option("--module <name>", "explain a specific module target").option("--top <count>", "number of top hotspots to explain when no target is selected", "5").option("--compare <baseline>", "compare against a baseline snapshot JSON file").option("--snapshot <path>", "write current snapshot JSON artifact").option("--no-trace", "disable trace embedding in generated snapshot").addOption(
@@ -6316,7 +7549,7 @@ program.command("run").argument("[path]", "path to the project to analyze").addO
6316
7549
  }
6317
7550
  const report = options.compare === void 0 ? createReport(snapshot) : createReport(
6318
7551
  snapshot,
6319
- compareSnapshots(snapshot, parseSnapshot(await readFile5(options.compare, "utf8")))
7552
+ compareSnapshots(snapshot, parseSnapshot(await readFile6(options.compare, "utf8")))
6320
7553
  );
6321
7554
  if (options.format === "json") {
6322
7555
  const analyzeSummaryOutput = formatAnalyzeOutput(explain.summary, "summary");
@@ -6360,6 +7593,7 @@ program.command("run").argument("[path]", "path to the project to analyze").addO
6360
7593
  },
6361
7594
  report: {
6362
7595
  repository: report.repository,
7596
+ quality: report.quality,
6363
7597
  hotspots: report.hotspots.slice(0, 5),
6364
7598
  structural: report.structural,
6365
7599
  external: report.external
@@ -6507,19 +7741,23 @@ var parseMainBranches = (options) => {
6507
7741
  return unique.length > 0 ? unique : void 0;
6508
7742
  };
6509
7743
  var buildGateConfigFromOptions = (options) => {
6510
- const maxRepoDelta = parseGateNumber(options.maxRepoDelta, "--max-repo-delta");
7744
+ const maxRiskDelta = parseGateNumber(options.maxRiskDelta, "--max-risk-delta");
7745
+ const maxQualityDelta = parseGateNumber(options.maxQualityDelta, "--max-quality-delta");
6511
7746
  const maxNewHotspots = parseGateNumber(options.maxNewHotspots, "--max-new-hotspots");
6512
- const maxRepoScore = parseGateNumber(options.maxRepoScore, "--max-repo-score");
7747
+ const maxRiskScore = parseGateNumber(options.maxRiskScore, "--max-risk-score");
7748
+ const minQualityScore = parseGateNumber(options.minQualityScore, "--min-quality-score");
6513
7749
  const newHotspotScoreThreshold = parseGateNumber(
6514
7750
  options.newHotspotScoreThreshold,
6515
7751
  "--new-hotspot-score-threshold"
6516
7752
  );
6517
7753
  return {
6518
- ...maxRepoDelta === void 0 ? {} : { maxRepoDelta },
7754
+ ...maxRiskDelta === void 0 ? {} : { maxRiskDelta },
7755
+ ...maxQualityDelta === void 0 ? {} : { maxQualityDelta },
6519
7756
  ...options.noNewCycles === true ? { noNewCycles: true } : {},
6520
7757
  ...options.noNewHighRiskDeps === true ? { noNewHighRiskDeps: true } : {},
6521
7758
  ...maxNewHotspots === void 0 ? {} : { maxNewHotspots },
6522
- ...maxRepoScore === void 0 ? {} : { maxRepoScore },
7759
+ ...maxRiskScore === void 0 ? {} : { maxRiskScore },
7760
+ ...minQualityScore === void 0 ? {} : { minQualityScore },
6523
7761
  ...newHotspotScoreThreshold === void 0 ? {} : { newHotspotScoreThreshold },
6524
7762
  failOn: options.failOn
6525
7763
  };
@@ -6534,7 +7772,10 @@ program.command("check").argument("[path]", "path to the project to analyze").ad
6534
7772
  "--log-level <level>",
6535
7773
  "log verbosity: silent, error, warn, info, debug (logs are written to stderr)"
6536
7774
  ).choices(["silent", "error", "warn", "info", "debug"]).default(parseLogLevel(process.env["CODESENTINEL_LOG_LEVEL"]))
6537
- ).option("--compare <baseline>", "baseline snapshot path").option("--max-repo-delta <value>", "maximum allowed normalized repository score increase").option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-repo-score <score>", "absolute repository score limit (0..100)").addOption(
7775
+ ).option("--compare <baseline>", "baseline snapshot path").option("--max-risk-delta <value>", "maximum allowed normalized risk score increase").option(
7776
+ "--max-quality-delta <value>",
7777
+ "maximum allowed normalized quality score regression versus baseline (requires --compare)"
7778
+ ).option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-risk-score <score>", "absolute risk score limit (0..100)").option("--min-quality-score <score>", "minimum quality score threshold (0..100)").addOption(
6538
7779
  new Option("--fail-on <level>", "failing severity threshold").choices(["error", "warn"]).default("error")
6539
7780
  ).addOption(
6540
7781
  new Option("--format <mode>", "output format: text, json, md").choices(["text", "json", "md"]).default("text")
@@ -6602,7 +7843,10 @@ program.command("ci").argument("[path]", "path to the project to analyze").addOp
6602
7843
  ).option(
6603
7844
  "--main-branches <names>",
6604
7845
  "comma-separated default branch candidates for auto baseline resolution (for example: main,master)"
6605
- ).option("--snapshot <path>", "write current snapshot JSON to path").option("--report <path>", "write markdown CI summary report").option("--json-output <path>", "write machine-readable CI JSON output").option("--max-repo-delta <value>", "maximum allowed normalized repository score increase").option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-repo-score <score>", "absolute repository score limit (0..100)").addOption(
7846
+ ).option("--snapshot <path>", "write current snapshot JSON to path").option("--report <path>", "write markdown CI summary report").option("--json-output <path>", "write machine-readable CI JSON output").option("--max-risk-delta <value>", "maximum allowed normalized risk score increase").option(
7847
+ "--max-quality-delta <value>",
7848
+ "maximum allowed normalized quality score regression versus baseline"
7849
+ ).option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-risk-score <score>", "absolute risk score limit (0..100)").option("--min-quality-score <score>", "minimum quality score threshold (0..100)").addOption(
6606
7850
  new Option("--fail-on <level>", "failing severity threshold").choices(["error", "warn"]).default("error")
6607
7851
  ).option("--no-trace", "disable trace embedding in generated snapshot").addOption(
6608
7852
  new Option(