@getcodesentinel/codesentinel 1.15.0 → 1.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1733,11 +1733,15 @@ var renderTextReport = (report) => {
1733
1733
  lines.push(` normalizedScore: ${report.quality.normalizedScore}`);
1734
1734
  lines.push(` modularity: ${report.quality.dimensions.modularity}`);
1735
1735
  lines.push(` changeHygiene: ${report.quality.dimensions.changeHygiene}`);
1736
+ lines.push(` staticAnalysis: ${report.quality.dimensions.staticAnalysis}`);
1737
+ lines.push(` complexity: ${report.quality.dimensions.complexity}`);
1738
+ lines.push(` duplication: ${report.quality.dimensions.duplication}`);
1736
1739
  lines.push(` testHealth: ${report.quality.dimensions.testHealth}`);
1737
1740
  lines.push(" topIssues:");
1738
1741
  for (const issue of report.quality.topIssues.slice(0, 5)) {
1742
+ const ruleSuffix = issue.ruleId === void 0 ? "" : ` [rule=${issue.ruleId}]`;
1739
1743
  lines.push(
1740
- ` - [${issue.severity}] (${issue.dimension}) ${issue.id} @ ${issue.target}: ${issue.message}`
1744
+ ` - [${issue.severity}] (${issue.dimension}) ${issue.id}${ruleSuffix} @ ${issue.target}: ${issue.message}`
1741
1745
  );
1742
1746
  }
1743
1747
  if (report.quality.topIssues.length === 0) {
@@ -1824,14 +1828,18 @@ var renderMarkdownReport = (report) => {
1824
1828
  lines.push(`- normalizedScore: \`${report.quality.normalizedScore}\``);
1825
1829
  lines.push(`- modularity: \`${report.quality.dimensions.modularity}\``);
1826
1830
  lines.push(`- changeHygiene: \`${report.quality.dimensions.changeHygiene}\``);
1831
+ lines.push(`- staticAnalysis: \`${report.quality.dimensions.staticAnalysis}\``);
1832
+ lines.push(`- complexity: \`${report.quality.dimensions.complexity}\``);
1833
+ lines.push(`- duplication: \`${report.quality.dimensions.duplication}\``);
1827
1834
  lines.push(`- testHealth: \`${report.quality.dimensions.testHealth}\``);
1828
1835
  if (report.quality.topIssues.length === 0) {
1829
1836
  lines.push("- top issues: none");
1830
1837
  } else {
1831
1838
  lines.push("- top issues:");
1832
1839
  for (const issue of report.quality.topIssues.slice(0, 5)) {
1840
+ const ruleSuffix = issue.ruleId === void 0 ? "" : ` [rule=${issue.ruleId}]`;
1833
1841
  lines.push(
1834
- ` - [${issue.severity}] \`${issue.id}\` (\`${issue.dimension}\`) @ \`${issue.target}\`: ${issue.message}`
1842
+ ` - [${issue.severity}] \`${issue.id}\`${ruleSuffix} (\`${issue.dimension}\`) @ \`${issue.target}\`: ${issue.message}`
1835
1843
  );
1836
1844
  }
1837
1845
  }
@@ -1981,14 +1989,20 @@ var requireDiff = (input, gateId) => {
1981
1989
  };
1982
1990
  var validateGateConfig = (input) => {
1983
1991
  const config = input.gateConfig;
1984
- if (config.maxRepoDelta !== void 0 && (!Number.isFinite(config.maxRepoDelta) || config.maxRepoDelta < 0)) {
1985
- throw new GovernanceConfigurationError("max-repo-delta must be a finite number >= 0");
1992
+ if (config.maxRiskDelta !== void 0 && (!Number.isFinite(config.maxRiskDelta) || config.maxRiskDelta < 0)) {
1993
+ throw new GovernanceConfigurationError("max-risk-delta must be a finite number >= 0");
1994
+ }
1995
+ if (config.maxQualityDelta !== void 0 && (!Number.isFinite(config.maxQualityDelta) || config.maxQualityDelta < 0)) {
1996
+ throw new GovernanceConfigurationError("max-quality-delta must be a finite number >= 0");
1986
1997
  }
1987
1998
  if (config.maxNewHotspots !== void 0 && (!Number.isInteger(config.maxNewHotspots) || config.maxNewHotspots < 0)) {
1988
1999
  throw new GovernanceConfigurationError("max-new-hotspots must be an integer >= 0");
1989
2000
  }
1990
- if (config.maxRepoScore !== void 0 && (!Number.isFinite(config.maxRepoScore) || config.maxRepoScore < 0 || config.maxRepoScore > 100)) {
1991
- throw new GovernanceConfigurationError("max-repo-score must be a number in [0, 100]");
2001
+ if (config.maxRiskScore !== void 0 && (!Number.isFinite(config.maxRiskScore) || config.maxRiskScore < 0 || config.maxRiskScore > 100)) {
2002
+ throw new GovernanceConfigurationError("max-risk-score must be a number in [0, 100]");
2003
+ }
2004
+ if (config.minQualityScore !== void 0 && (!Number.isFinite(config.minQualityScore) || config.minQualityScore < 0 || config.minQualityScore > 100)) {
2005
+ throw new GovernanceConfigurationError("min-quality-score must be a number in [0, 100]");
1992
2006
  }
1993
2007
  if (config.newHotspotScoreThreshold !== void 0 && (!Number.isFinite(config.newHotspotScoreThreshold) || config.newHotspotScoreThreshold < 0 || config.newHotspotScoreThreshold > 100)) {
1994
2008
  throw new GovernanceConfigurationError(
@@ -2001,41 +2015,76 @@ var evaluateGates = (input) => {
2001
2015
  const config = input.gateConfig;
2002
2016
  const violations = [];
2003
2017
  const evaluatedGates = [];
2004
- if (config.maxRepoScore !== void 0) {
2005
- evaluatedGates.push("max-repo-score");
2018
+ if (config.maxRiskScore !== void 0) {
2019
+ evaluatedGates.push("max-risk-score");
2006
2020
  const current = input.current.analysis.risk.riskScore;
2007
- if (current > config.maxRepoScore) {
2021
+ if (current > config.maxRiskScore) {
2008
2022
  violations.push(
2009
2023
  makeViolation(
2010
- "max-repo-score",
2024
+ "max-risk-score",
2011
2025
  "error",
2012
- `Repository score ${current} exceeds configured max ${config.maxRepoScore}.`,
2026
+ `Risk score ${current} exceeds configured max ${config.maxRiskScore}.`,
2013
2027
  [input.current.analysis.structural.targetPath],
2014
2028
  [{ kind: "repository_metric", metric: "riskScore" }]
2015
2029
  )
2016
2030
  );
2017
2031
  }
2018
2032
  }
2019
- if (config.maxRepoDelta !== void 0) {
2020
- evaluatedGates.push("max-repo-delta");
2021
- requireDiff(input, "max-repo-delta");
2033
+ if (config.minQualityScore !== void 0) {
2034
+ evaluatedGates.push("min-quality-score");
2035
+ const current = input.current.analysis.quality.qualityScore;
2036
+ if (current < config.minQualityScore) {
2037
+ violations.push(
2038
+ makeViolation(
2039
+ "min-quality-score",
2040
+ "error",
2041
+ `Quality score ${current} is below configured minimum ${config.minQualityScore}.`,
2042
+ [input.current.analysis.structural.targetPath],
2043
+ [{ kind: "repository_metric", metric: "qualityScore" }]
2044
+ )
2045
+ );
2046
+ }
2047
+ }
2048
+ if (config.maxRiskDelta !== void 0) {
2049
+ evaluatedGates.push("max-risk-delta");
2050
+ requireDiff(input, "max-risk-delta");
2022
2051
  const baseline = input.baseline;
2023
2052
  if (baseline === void 0) {
2024
- throw new GovernanceConfigurationError("max-repo-delta requires baseline snapshot");
2053
+ throw new GovernanceConfigurationError("max-risk-delta requires baseline snapshot");
2025
2054
  }
2026
2055
  const delta = input.current.analysis.risk.normalizedScore - baseline.analysis.risk.normalizedScore;
2027
- if (delta > config.maxRepoDelta) {
2056
+ if (delta > config.maxRiskDelta) {
2028
2057
  violations.push(
2029
2058
  makeViolation(
2030
- "max-repo-delta",
2059
+ "max-risk-delta",
2031
2060
  "error",
2032
- `Repository normalized score delta ${delta.toFixed(4)} exceeds allowed ${config.maxRepoDelta}.`,
2061
+ `Risk normalized score delta ${delta.toFixed(4)} exceeds allowed ${config.maxRiskDelta}.`,
2033
2062
  [input.current.analysis.structural.targetPath],
2034
2063
  [{ kind: "repository_metric", metric: "normalizedScore" }]
2035
2064
  )
2036
2065
  );
2037
2066
  }
2038
2067
  }
2068
+ if (config.maxQualityDelta !== void 0) {
2069
+ evaluatedGates.push("max-quality-delta");
2070
+ requireDiff(input, "max-quality-delta");
2071
+ const baseline = input.baseline;
2072
+ if (baseline === void 0) {
2073
+ throw new GovernanceConfigurationError("max-quality-delta requires baseline snapshot");
2074
+ }
2075
+ const delta = input.current.analysis.quality.normalizedScore - baseline.analysis.quality.normalizedScore;
2076
+ if (delta < -config.maxQualityDelta) {
2077
+ violations.push(
2078
+ makeViolation(
2079
+ "max-quality-delta",
2080
+ "error",
2081
+ `Quality normalized score delta ${delta.toFixed(4)} is below allowed minimum ${(-config.maxQualityDelta).toFixed(4)}.`,
2082
+ [input.current.analysis.structural.targetPath],
2083
+ [{ kind: "repository_metric", metric: "qualityNormalizedScore" }]
2084
+ )
2085
+ );
2086
+ }
2087
+ }
2039
2088
  if (config.noNewCycles === true) {
2040
2089
  evaluatedGates.push("no-new-cycles");
2041
2090
  requireDiff(input, "no-new-cycles");
@@ -2494,7 +2543,7 @@ var resolveAutoBaselineRef = async (input) => {
2494
2543
  // src/index.ts
2495
2544
  import { readFileSync as readFileSync2 } from "fs";
2496
2545
  import { readFile as readFile6, writeFile as writeFile5 } from "fs/promises";
2497
- import { dirname as dirname2, resolve as resolve5 } from "path";
2546
+ import { dirname as dirname2, resolve as resolve6 } from "path";
2498
2547
  import { fileURLToPath } from "url";
2499
2548
 
2500
2549
  // src/application/format-analyze-output.ts
@@ -2890,7 +2939,7 @@ import { mkdir, readFile, writeFile } from "fs/promises";
2890
2939
  import { homedir } from "os";
2891
2940
  import { dirname, join as join3 } from "path";
2892
2941
  import { stderr, stdin } from "process";
2893
- import { clearScreenDown, cursorTo, emitKeypressEvents, moveCursor } from "readline";
2942
+ import { clearScreenDown, cursorTo, emitKeypressEvents } from "readline";
2894
2943
  var UPDATE_CHECK_INTERVAL_MS = 24 * 60 * 60 * 1e3;
2895
2944
  var UPDATE_CACHE_PATH = join3(homedir(), ".cache", "codesentinel", "update-check.json");
2896
2945
  var SEMVER_PATTERN = /^(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(?:-(?<prerelease>[0-9A-Za-z.-]+))?(?:\+[0-9A-Za-z.-]+)?$/;
@@ -3082,11 +3131,15 @@ var fetchLatestVersion = async (packageName) => {
3082
3131
  }
3083
3132
  return parseNpmViewVersionOutput(result.stdout);
3084
3133
  };
3085
- var renderUpdatePrompt = (latestVersion, currentVersion, selectedIndex) => {
3086
- const options = ["Install update now", "Not now (continue current command)"];
3134
+ var renderUpdatePrompt = (packageName, latestVersion, currentVersion, selectedIndex) => {
3135
+ const options = [
3136
+ `1. Update now (runs \`npm install -g ${packageName}\`)`,
3137
+ "2. Skip"
3138
+ ];
3087
3139
  const lines = [
3088
- `${ANSI.cyan}${ANSI.bold}CodeSentinel Update Available${ANSI.reset}`,
3089
- `${ANSI.dim}Current: ${currentVersion} Latest: ${latestVersion}${ANSI.reset}`,
3140
+ ` ${ANSI.bold}${ANSI.cyan}\u2728 Update available! ${currentVersion} -> ${latestVersion}${ANSI.reset}`,
3141
+ "",
3142
+ ` ${ANSI.dim}Release notes: https://github.com/getcodesentinel/codesentinel/releases/latest${ANSI.reset}`,
3090
3143
  "",
3091
3144
  ...options.map((option, index) => {
3092
3145
  const selected = index === selectedIndex;
@@ -3095,12 +3148,12 @@ var renderUpdatePrompt = (latestVersion, currentVersion, selectedIndex) => {
3095
3148
  return `${prefix} ${text}`;
3096
3149
  }),
3097
3150
  "",
3098
- `${ANSI.dim}Use \u2191/\u2193 to choose, Enter to confirm.${ANSI.reset}`
3151
+ ` ${ANSI.dim}Use \u2191/\u2193 to choose. Press enter to continue${ANSI.reset}`
3099
3152
  ];
3100
3153
  stderr.write(lines.join("\n"));
3101
3154
  return lines.length;
3102
3155
  };
3103
- var promptInstall = async (latestVersion, currentVersion) => {
3156
+ var promptInstall = async (packageName, latestVersion, currentVersion) => {
3104
3157
  if (!stdin.isTTY || !stderr.isTTY || typeof stdin.setRawMode !== "function") {
3105
3158
  stderr.write(
3106
3159
  `New version ${latestVersion} is available (current ${currentVersion}). Run: npm install -g @getcodesentinel/codesentinel@latest
@@ -3108,21 +3161,17 @@ var promptInstall = async (latestVersion, currentVersion) => {
3108
3161
  );
3109
3162
  return "skip";
3110
3163
  }
3111
- return await new Promise((resolve6) => {
3164
+ return await new Promise((resolve7) => {
3112
3165
  emitKeypressEvents(stdin);
3113
3166
  let selectedIndex = 0;
3114
- let renderedLines = 0;
3115
3167
  const previousRawMode = stdin.isRaw;
3116
3168
  const clearPromptArea = () => {
3117
- if (renderedLines > 0) {
3118
- moveCursor(stderr, 0, -(renderedLines - 1));
3119
- }
3120
- cursorTo(stderr, 0);
3169
+ cursorTo(stderr, 0, 0);
3121
3170
  clearScreenDown(stderr);
3122
3171
  };
3123
3172
  const redraw = () => {
3124
3173
  clearPromptArea();
3125
- renderedLines = renderUpdatePrompt(latestVersion, currentVersion, selectedIndex);
3174
+ renderUpdatePrompt(packageName, latestVersion, currentVersion, selectedIndex);
3126
3175
  };
3127
3176
  const cleanup = (choice) => {
3128
3177
  stdin.off("keypress", onKeypress);
@@ -3134,10 +3183,10 @@ var promptInstall = async (latestVersion, currentVersion) => {
3134
3183
  if (choice === "install") {
3135
3184
  stderr.write(`${ANSI.yellow}Installing latest CodeSentinel...${ANSI.reset}
3136
3185
  `);
3137
- } else if (renderedLines > 0) {
3186
+ } else {
3138
3187
  stderr.write("\n");
3139
3188
  }
3140
- resolve6(choice);
3189
+ resolve7(choice);
3141
3190
  };
3142
3191
  const onKeypress = (_str, key) => {
3143
3192
  if (key.ctrl === true && key.name === "c") {
@@ -3193,7 +3242,7 @@ var checkForCliUpdates = async (input) => {
3193
3242
  if (comparison === null || comparison <= 0) {
3194
3243
  return;
3195
3244
  }
3196
- const choice = await promptInstall(latestVersion, input.currentVersion);
3245
+ const choice = await promptInstall(input.packageName, latestVersion, input.currentVersion);
3197
3246
  if (choice === "interrupt") {
3198
3247
  process.exit(130);
3199
3248
  }
@@ -3216,8 +3265,7 @@ var checkForCliUpdates = async (input) => {
3216
3265
  };
3217
3266
 
3218
3267
  // src/application/run-analyze-command.ts
3219
- import { readFile as readFile2 } from "fs/promises";
3220
- import { join as join4, resolve as resolve3 } from "path";
3268
+ import { resolve as resolve4 } from "path";
3221
3269
 
3222
3270
  // ../code-graph/dist/index.js
3223
3271
  import { extname, isAbsolute, relative, resolve as resolve2 } from "path";
@@ -4265,6 +4313,538 @@ var analyzeRepositoryEvolutionFromGit = (input, onProgress) => {
4265
4313
  return analyzeRepositoryEvolution(input, historyProvider, onProgress);
4266
4314
  };
4267
4315
 
4316
+ // ../quality-signals/dist/index.js
4317
+ import { readFile as readFile2 } from "fs/promises";
4318
+ import { existsSync as existsSync2 } from "fs";
4319
+ import { join as join4, relative as relative2, resolve as resolve3 } from "path";
4320
+ import { ESLint } from "eslint";
4321
+ import * as ts2 from "typescript";
4322
+ import * as ts3 from "typescript";
4323
+ var markerRegex = /\b(?:TODO|FIXME)\b/gi;
4324
+ var countMarkers = (text) => text.match(markerRegex)?.length ?? 0;
4325
+ var countTodoFixmeInComments = (content) => {
4326
+ const scanner = ts3.createScanner(
4327
+ ts3.ScriptTarget.Latest,
4328
+ false,
4329
+ ts3.LanguageVariant.Standard,
4330
+ content
4331
+ );
4332
+ let total = 0;
4333
+ let token = scanner.scan();
4334
+ while (token !== ts3.SyntaxKind.EndOfFileToken) {
4335
+ if (token === ts3.SyntaxKind.SingleLineCommentTrivia || token === ts3.SyntaxKind.MultiLineCommentTrivia) {
4336
+ total += countMarkers(scanner.getTokenText());
4337
+ }
4338
+ token = scanner.scan();
4339
+ }
4340
+ return total;
4341
+ };
4342
+ var SOURCE_EXTENSIONS2 = /* @__PURE__ */ new Set([".ts", ".tsx", ".mts", ".cts", ".js", ".jsx", ".mjs", ".cjs"]);
4343
+ var normalizePath2 = (value) => value.replaceAll("\\", "/");
4344
+ var isTestPath = (path) => {
4345
+ const normalized = normalizePath2(path);
4346
+ return normalized.includes("/__tests__/") || normalized.includes("\\__tests__\\") || normalized.includes(".test.") || normalized.includes(".spec.");
4347
+ };
4348
+ var collectTodoFixmeCommentCount = async (targetPath, structural) => {
4349
+ const filePaths2 = [...structural.files].map((file) => file.relativePath).sort((a, b) => a.localeCompare(b));
4350
+ let total = 0;
4351
+ for (const relativePath of filePaths2) {
4352
+ try {
4353
+ const content = await readFile2(join4(targetPath, relativePath), "utf8");
4354
+ total += countTodoFixmeInComments(content);
4355
+ } catch {
4356
+ }
4357
+ }
4358
+ return total;
4359
+ };
4360
+ var collectEslintSignals = async (targetPath, structural, logger) => {
4361
+ const absoluteFiles = structural.files.map((file) => join4(targetPath, file.relativePath));
4362
+ if (absoluteFiles.length === 0) {
4363
+ return {
4364
+ errorCount: 0,
4365
+ warningCount: 0,
4366
+ filesWithIssues: 0,
4367
+ ruleCounts: []
4368
+ };
4369
+ }
4370
+ try {
4371
+ const eslint = new ESLint({ cwd: targetPath, errorOnUnmatchedPattern: false });
4372
+ const results = await eslint.lintFiles(absoluteFiles);
4373
+ let errorCount = 0;
4374
+ let warningCount = 0;
4375
+ let filesWithIssues = 0;
4376
+ const ruleCounts = /* @__PURE__ */ new Map();
4377
+ for (const result of results) {
4378
+ if (result.errorCount + result.warningCount > 0) {
4379
+ filesWithIssues += 1;
4380
+ }
4381
+ errorCount += result.errorCount;
4382
+ warningCount += result.warningCount;
4383
+ for (const message of result.messages) {
4384
+ if (message.ruleId === null) {
4385
+ continue;
4386
+ }
4387
+ const severity = message.severity >= 2 ? "error" : "warn";
4388
+ const current = ruleCounts.get(message.ruleId);
4389
+ if (current === void 0) {
4390
+ ruleCounts.set(message.ruleId, {
4391
+ ruleId: message.ruleId,
4392
+ severity,
4393
+ count: 1
4394
+ });
4395
+ } else {
4396
+ ruleCounts.set(message.ruleId, {
4397
+ ruleId: current.ruleId,
4398
+ severity: current.severity === "error" || severity === "error" ? "error" : "warn",
4399
+ count: current.count + 1
4400
+ });
4401
+ }
4402
+ }
4403
+ }
4404
+ return {
4405
+ errorCount,
4406
+ warningCount,
4407
+ filesWithIssues,
4408
+ ruleCounts: [...ruleCounts.values()].sort(
4409
+ (a, b) => b.count - a.count || a.ruleId.localeCompare(b.ruleId)
4410
+ )
4411
+ };
4412
+ } catch (error) {
4413
+ logger.warn(
4414
+ `quality signals: eslint collection unavailable (${error instanceof Error ? error.message : "unknown error"})`
4415
+ );
4416
+ return void 0;
4417
+ }
4418
+ };
4419
+ var collectTypeScriptSignals = (targetPath, logger) => {
4420
+ const tsconfigPath = ts2.findConfigFile(targetPath, ts2.sys.fileExists, "tsconfig.json");
4421
+ if (tsconfigPath === void 0) {
4422
+ return void 0;
4423
+ }
4424
+ try {
4425
+ const parsed = ts2.getParsedCommandLineOfConfigFile(
4426
+ tsconfigPath,
4427
+ {},
4428
+ {
4429
+ ...ts2.sys,
4430
+ onUnRecoverableConfigFileDiagnostic: () => {
4431
+ throw new Error(`failed to parse ${tsconfigPath}`);
4432
+ }
4433
+ }
4434
+ );
4435
+ if (parsed === void 0) {
4436
+ return void 0;
4437
+ }
4438
+ const program2 = ts2.createProgram({ rootNames: parsed.fileNames, options: parsed.options });
4439
+ const diagnostics = [
4440
+ ...program2.getOptionsDiagnostics(),
4441
+ ...program2.getGlobalDiagnostics(),
4442
+ ...program2.getSyntacticDiagnostics(),
4443
+ ...program2.getSemanticDiagnostics()
4444
+ ];
4445
+ let errorCount = 0;
4446
+ let warningCount = 0;
4447
+ const fileSet = /* @__PURE__ */ new Set();
4448
+ for (const diagnostic of diagnostics) {
4449
+ if (diagnostic.category === ts2.DiagnosticCategory.Error) {
4450
+ errorCount += 1;
4451
+ } else if (diagnostic.category === ts2.DiagnosticCategory.Warning) {
4452
+ warningCount += 1;
4453
+ }
4454
+ if (diagnostic.file !== void 0) {
4455
+ const path = normalizePath2(relative2(targetPath, diagnostic.file.fileName));
4456
+ fileSet.add(path);
4457
+ }
4458
+ }
4459
+ return {
4460
+ errorCount,
4461
+ warningCount,
4462
+ filesWithDiagnostics: fileSet.size
4463
+ };
4464
+ } catch (error) {
4465
+ logger.warn(
4466
+ `quality signals: typescript diagnostic collection unavailable (${error instanceof Error ? error.message : "unknown error"})`
4467
+ );
4468
+ return void 0;
4469
+ }
4470
+ };
4471
+ var cyclomaticIncrement = (node) => {
4472
+ if (ts2.isIfStatement(node) || ts2.isForStatement(node) || ts2.isForInStatement(node) || ts2.isForOfStatement(node) || ts2.isWhileStatement(node) || ts2.isDoStatement(node) || ts2.isCatchClause(node) || ts2.isConditionalExpression(node)) {
4473
+ return 1;
4474
+ }
4475
+ if (ts2.isCaseClause(node)) {
4476
+ return 1;
4477
+ }
4478
+ if (ts2.isBinaryExpression(node)) {
4479
+ if (node.operatorToken.kind === ts2.SyntaxKind.AmpersandAmpersandToken || node.operatorToken.kind === ts2.SyntaxKind.BarBarToken || node.operatorToken.kind === ts2.SyntaxKind.QuestionQuestionToken) {
4480
+ return 1;
4481
+ }
4482
+ }
4483
+ return 0;
4484
+ };
4485
+ var computeCyclomaticComplexity = (node) => {
4486
+ let complexity = 1;
4487
+ const visit = (current) => {
4488
+ complexity += cyclomaticIncrement(current);
4489
+ if (current !== node && (ts2.isFunctionLike(current) || ts2.isArrowFunction(current) || ts2.isMethodDeclaration(current) || ts2.isConstructorDeclaration(current))) {
4490
+ return;
4491
+ }
4492
+ ts2.forEachChild(current, visit);
4493
+ };
4494
+ visit(node);
4495
+ return complexity;
4496
+ };
4497
+ var collectFunctionComplexities = (content, fileName) => {
4498
+ const sourceFile = ts2.createSourceFile(fileName, content, ts2.ScriptTarget.Latest, true);
4499
+ const complexities = [];
4500
+ const visit = (node) => {
4501
+ if (ts2.isFunctionDeclaration(node) || ts2.isMethodDeclaration(node) || ts2.isFunctionExpression(node) || ts2.isArrowFunction(node) || ts2.isConstructorDeclaration(node) || ts2.isGetAccessorDeclaration(node) || ts2.isSetAccessorDeclaration(node)) {
4502
+ complexities.push(computeCyclomaticComplexity(node));
4503
+ }
4504
+ ts2.forEachChild(node, visit);
4505
+ };
4506
+ visit(sourceFile);
4507
+ if (complexities.length === 0) {
4508
+ return [computeCyclomaticComplexity(sourceFile)];
4509
+ }
4510
+ return complexities;
4511
+ };
4512
+ var collectComplexitySignals = async (targetPath, structural) => {
4513
+ const complexities = [];
4514
+ for (const file of structural.files) {
4515
+ const extension = file.relativePath.slice(file.relativePath.lastIndexOf("."));
4516
+ if (!SOURCE_EXTENSIONS2.has(extension)) {
4517
+ continue;
4518
+ }
4519
+ try {
4520
+ const content = await readFile2(join4(targetPath, file.relativePath), "utf8");
4521
+ complexities.push(...collectFunctionComplexities(content, file.relativePath));
4522
+ } catch {
4523
+ }
4524
+ }
4525
+ if (complexities.length === 0) {
4526
+ return void 0;
4527
+ }
4528
+ const averageCyclomatic = complexities.reduce((sum, value) => sum + value, 0) / complexities.length;
4529
+ const maxCyclomatic = Math.max(...complexities);
4530
+ const highComplexityFileCount = complexities.filter((value) => value >= 15).length;
4531
+ return {
4532
+ averageCyclomatic,
4533
+ maxCyclomatic,
4534
+ highComplexityFileCount,
4535
+ analyzedFileCount: complexities.length
4536
+ };
4537
+ };
4538
+ var DUPLICATION_MIN_BLOCK_TOKENS = 40;
4539
+ var DUPLICATION_KGRAM_TOKENS = 25;
4540
+ var DUPLICATION_WINDOW_SIZE = 4;
4541
+ var DUPLICATION_MAX_FILES = 5e3;
4542
+ var DUPLICATION_MAX_TOKENS_PER_FILE = 12e3;
4543
+ var DUPLICATION_MAX_FINGERPRINTS_PER_FILE = 1200;
4544
+ var DUPLICATION_EXACT_MAX_WINDOWS = 25e4;
4545
+ var HASH_BASE = 16777619;
4546
+ var hashString32 = (value) => {
4547
+ let hash = 2166136261;
4548
+ for (let index = 0; index < value.length; index += 1) {
4549
+ hash ^= value.charCodeAt(index);
4550
+ hash = Math.imul(hash, 16777619) >>> 0;
4551
+ }
4552
+ return hash >>> 0;
4553
+ };
4554
+ var computeRollingBasePower = (kgramSize) => {
4555
+ let value = 1;
4556
+ for (let index = 1; index < kgramSize; index += 1) {
4557
+ value = Math.imul(value, HASH_BASE) >>> 0;
4558
+ }
4559
+ return value;
4560
+ };
4561
+ var tokenizeForDuplication = (content, filePath) => {
4562
+ const languageVariant = filePath.endsWith(".tsx") || filePath.endsWith(".jsx") ? ts2.LanguageVariant.JSX : ts2.LanguageVariant.Standard;
4563
+ const scanner = ts2.createScanner(ts2.ScriptTarget.Latest, true, languageVariant, content);
4564
+ const tokens = [];
4565
+ let token = scanner.scan();
4566
+ while (token !== ts2.SyntaxKind.EndOfFileToken) {
4567
+ if (token !== ts2.SyntaxKind.WhitespaceTrivia && token !== ts2.SyntaxKind.NewLineTrivia && token !== ts2.SyntaxKind.SingleLineCommentTrivia && token !== ts2.SyntaxKind.MultiLineCommentTrivia) {
4568
+ if (token === ts2.SyntaxKind.Identifier || token === ts2.SyntaxKind.PrivateIdentifier) {
4569
+ tokens.push("id");
4570
+ } else if (token === ts2.SyntaxKind.StringLiteral || token === ts2.SyntaxKind.NoSubstitutionTemplateLiteral || token === ts2.SyntaxKind.TemplateHead || token === ts2.SyntaxKind.TemplateMiddle || token === ts2.SyntaxKind.TemplateTail || token === ts2.SyntaxKind.NumericLiteral || token === ts2.SyntaxKind.BigIntLiteral || token === ts2.SyntaxKind.RegularExpressionLiteral) {
4571
+ tokens.push("lit");
4572
+ } else {
4573
+ const stable = ts2.tokenToString(token) ?? ts2.SyntaxKind[token] ?? `${token}`;
4574
+ tokens.push(stable);
4575
+ }
4576
+ }
4577
+ token = scanner.scan();
4578
+ }
4579
+ return tokens;
4580
+ };
4581
+ var buildKgramHashes = (tokenValues, kgramSize) => {
4582
+ if (tokenValues.length < kgramSize) {
4583
+ return [];
4584
+ }
4585
+ const fingerprints = [];
4586
+ const removePower = computeRollingBasePower(kgramSize);
4587
+ let hash = 0;
4588
+ for (let index = 0; index < kgramSize; index += 1) {
4589
+ hash = Math.imul(hash, HASH_BASE) + (tokenValues[index] ?? 0) >>> 0;
4590
+ }
4591
+ fingerprints.push({ hash, start: 0 });
4592
+ for (let start = 1; start <= tokenValues.length - kgramSize; start += 1) {
4593
+ const removed = tokenValues[start - 1] ?? 0;
4594
+ const added = tokenValues[start + kgramSize - 1] ?? 0;
4595
+ const removedContribution = Math.imul(removed, removePower) >>> 0;
4596
+ const shifted = Math.imul(hash - removedContribution >>> 0, HASH_BASE) >>> 0;
4597
+ hash = shifted + added >>> 0;
4598
+ fingerprints.push({ hash, start });
4599
+ }
4600
+ return fingerprints;
4601
+ };
4602
+ var winnowFingerprints = (kgrams, windowSize) => {
4603
+ if (kgrams.length === 0) {
4604
+ return [];
4605
+ }
4606
+ if (kgrams.length <= windowSize) {
4607
+ const minimum = [...kgrams].sort(
4608
+ (left, right) => left.hash - right.hash || right.start - left.start
4609
+ )[0];
4610
+ return minimum === void 0 ? [] : [minimum];
4611
+ }
4612
+ const selected = /* @__PURE__ */ new Map();
4613
+ for (let start = 0; start <= kgrams.length - windowSize; start += 1) {
4614
+ let best = kgrams[start];
4615
+ if (best === void 0) {
4616
+ continue;
4617
+ }
4618
+ for (let offset = 1; offset < windowSize; offset += 1) {
4619
+ const candidate = kgrams[start + offset];
4620
+ if (candidate === void 0) {
4621
+ continue;
4622
+ }
4623
+ if (candidate.hash < best.hash || candidate.hash === best.hash && candidate.start > best.start) {
4624
+ best = candidate;
4625
+ }
4626
+ }
4627
+ selected.set(`${best.hash}:${best.start}`, best);
4628
+ }
4629
+ return [...selected.values()].sort((left, right) => left.start - right.start);
4630
+ };
4631
+ var capFingerprints = (fingerprints, maxFingerprints) => {
4632
+ if (fingerprints.length <= maxFingerprints) {
4633
+ return fingerprints;
4634
+ }
4635
+ const step = fingerprints.length / maxFingerprints;
4636
+ const capped = [];
4637
+ for (let index = 0; index < maxFingerprints; index += 1) {
4638
+ const selected = fingerprints[Math.floor(index * step)];
4639
+ if (selected !== void 0) {
4640
+ capped.push(selected);
4641
+ }
4642
+ }
4643
+ return capped;
4644
+ };
4645
+ var tokenBlockSignature = (tokens, start, blockLength) => {
4646
+ if (start < 0 || start + blockLength > tokens.length) {
4647
+ return void 0;
4648
+ }
4649
+ return tokens.slice(start, start + blockLength).join(" ");
4650
+ };
4651
+ var mergeTokenRanges = (ranges) => {
4652
+ if (ranges.length === 0) {
4653
+ return [];
4654
+ }
4655
+ const sorted = [...ranges].sort(
4656
+ (left, right) => left.start - right.start || left.end - right.end
4657
+ );
4658
+ const merged = [];
4659
+ for (const range of sorted) {
4660
+ const previous = merged[merged.length - 1];
4661
+ if (previous === void 0 || range.start > previous.end) {
4662
+ merged.push({ ...range });
4663
+ continue;
4664
+ }
4665
+ previous.end = Math.max(previous.end, range.end);
4666
+ }
4667
+ return merged;
4668
+ };
4669
+ var aggregateDuplicationFromSignatures = (signatures, fileByPath) => {
4670
+ let duplicatedBlockCount = 0;
4671
+ const duplicatedRanges = /* @__PURE__ */ new Map();
4672
+ for (const entries of signatures.values()) {
4673
+ if (entries.length <= 1) {
4674
+ continue;
4675
+ }
4676
+ const uniqueEntries = /* @__PURE__ */ new Map();
4677
+ for (const entry of entries) {
4678
+ uniqueEntries.set(`${entry.file}:${entry.start}`, entry);
4679
+ }
4680
+ if (uniqueEntries.size <= 1) {
4681
+ continue;
4682
+ }
4683
+ duplicatedBlockCount += uniqueEntries.size - 1;
4684
+ for (const entry of uniqueEntries.values()) {
4685
+ const source = fileByPath.get(entry.file);
4686
+ if (source === void 0) {
4687
+ continue;
4688
+ }
4689
+ const signature = tokenBlockSignature(
4690
+ source.tokens,
4691
+ entry.start,
4692
+ DUPLICATION_MIN_BLOCK_TOKENS
4693
+ );
4694
+ if (signature === void 0) {
4695
+ continue;
4696
+ }
4697
+ const ranges = duplicatedRanges.get(entry.file) ?? [];
4698
+ ranges.push({
4699
+ start: entry.start,
4700
+ end: Math.min(source.tokens.length, entry.start + DUPLICATION_MIN_BLOCK_TOKENS)
4701
+ });
4702
+ duplicatedRanges.set(entry.file, ranges);
4703
+ }
4704
+ }
4705
+ let duplicatedTokenCount = 0;
4706
+ for (const ranges of duplicatedRanges.values()) {
4707
+ const mergedRanges = mergeTokenRanges(ranges);
4708
+ duplicatedTokenCount += mergedRanges.reduce((sum, range) => sum + (range.end - range.start), 0);
4709
+ }
4710
+ return {
4711
+ duplicatedBlockCount,
4712
+ duplicatedTokenCount,
4713
+ filesWithDuplication: duplicatedRanges.size
4714
+ };
4715
+ };
4716
+ var collectExactTokenDuplication = (analyzedFiles) => {
4717
+ const signatures = /* @__PURE__ */ new Map();
4718
+ for (const file of analyzedFiles) {
4719
+ const tokenValues = file.tokens.map((token) => hashString32(token));
4720
+ const windows = buildKgramHashes(tokenValues, DUPLICATION_MIN_BLOCK_TOKENS);
4721
+ for (const window of windows) {
4722
+ const signature = tokenBlockSignature(
4723
+ file.tokens,
4724
+ window.start,
4725
+ DUPLICATION_MIN_BLOCK_TOKENS
4726
+ );
4727
+ if (signature === void 0) {
4728
+ continue;
4729
+ }
4730
+ const entries = signatures.get(signature) ?? [];
4731
+ entries.push({ file: file.file, start: window.start });
4732
+ signatures.set(signature, entries);
4733
+ }
4734
+ }
4735
+ const fileByPath = new Map(analyzedFiles.map((file) => [file.file, file]));
4736
+ return aggregateDuplicationFromSignatures(signatures, fileByPath);
4737
+ };
4738
// Winnowing-based duplication detection for large inputs: instead of signing
// every window, select representative fingerprints per file (capped), then
// group identical block signatures across locations and aggregate.
var collectWinnowingDuplication = (analyzedFiles) => {
  const bySignature = /* @__PURE__ */ new Map();
  for (const analyzed of analyzedFiles) {
    // Hash tokens, build k-gram hashes, winnow to fingerprints, and cap the
    // fingerprint count so pathological files stay bounded.
    const hashedTokens = analyzed.tokens.map((token) => hashString32(token));
    const kgrams = buildKgramHashes(hashedTokens, DUPLICATION_KGRAM_TOKENS);
    const selected = capFingerprints(
      winnowFingerprints(kgrams, DUPLICATION_WINDOW_SIZE),
      DUPLICATION_MAX_FINGERPRINTS_PER_FILE
    );
    for (const fingerprint of selected) {
      const signature = tokenBlockSignature(
        analyzed.tokens,
        fingerprint.start,
        DUPLICATION_MIN_BLOCK_TOKENS
      );
      if (signature === undefined) {
        continue;
      }
      const bucket = bySignature.get(signature);
      if (bucket === undefined) {
        bySignature.set(signature, [{ file: analyzed.file, start: fingerprint.start }]);
      } else {
        bucket.push({ file: analyzed.file, start: fingerprint.start });
      }
    }
  }
  const fileByPath = new Map(analyzedFiles.map((analyzed) => [analyzed.file, analyzed]));
  return aggregateDuplicationFromSignatures(bySignature, fileByPath);
};
4764
// Tokenize candidate source files under targetPath and measure duplication.
// Uses the exhaustive exact-token strategy while the total window count stays
// within DUPLICATION_EXACT_MAX_WINDOWS, otherwise falls back to winnowing.
// Returns undefined when no file yields enough tokens to analyze.
var collectDuplicationSignals = async (targetPath, structural) => {
  const candidatePaths = [...structural.files]
    .map((file) => file.relativePath)
    .sort((left, right) => left.localeCompare(right))
    .filter((filePath) => SOURCE_EXTENSIONS2.has(filePath.slice(filePath.lastIndexOf("."))))
    .filter((filePath) => isTestPath(filePath) === false)
    .slice(0, DUPLICATION_MAX_FILES);
  const analyzedFiles = [];
  let significantTokenCount = 0;
  let exactWindowCount = 0;
  for (const relativePath of candidatePaths) {
    let tokens;
    try {
      const content = await readFile2(join4(targetPath, relativePath), "utf8");
      // Cap per-file tokens so a single huge file cannot dominate the budget.
      tokens = tokenizeForDuplication(content, relativePath).slice(
        0,
        DUPLICATION_MAX_TOKENS_PER_FILE
      );
    } catch {
      continue; // best-effort: unreadable or untokenizable files are skipped
    }
    significantTokenCount += tokens.length;
    if (tokens.length < DUPLICATION_MIN_BLOCK_TOKENS) {
      continue; // too small to contain even one duplication window
    }
    exactWindowCount += tokens.length - DUPLICATION_MIN_BLOCK_TOKENS + 1;
    analyzedFiles.push({ file: relativePath, tokens });
  }
  if (analyzedFiles.length === 0) {
    return undefined;
  }
  const mode = exactWindowCount <= DUPLICATION_EXACT_MAX_WINDOWS ? "exact-token" : "winnowing";
  const aggregated = mode === "exact-token"
    ? collectExactTokenDuplication(analyzedFiles)
    : collectWinnowingDuplication(analyzedFiles);
  const duplicatedLineRatio = significantTokenCount === 0
    ? 0
    : Math.min(1, aggregated.duplicatedTokenCount / significantTokenCount);
  return {
    mode,
    duplicatedLineRatio,
    duplicatedBlockCount: aggregated.duplicatedBlockCount,
    filesWithDuplication: aggregated.filesWithDuplication
  };
};
4801
// Convert a percentage (0-100) into a ratio clamped to [0, 1].
// Anything that is not a finite number (undefined, NaN, Infinity, strings)
// yields null so missing coverage fields stay distinguishable from 0.
var toRatio = (value) => {
  if (typeof value === "number" && Number.isFinite(value)) {
    return Math.max(0, Math.min(1, value / 100));
  }
  return null;
};
4807
// Read an istanbul-style coverage-summary.json and normalize its totals into
// [0, 1] ratios (null for absent metrics). The summary path defaults to
// <target>/coverage/coverage-summary.json and can be overridden via the
// CODESENTINEL_QUALITY_COVERAGE_SUMMARY env var (resolved against targetPath).
// Returns undefined when the file is missing or cannot be parsed.
var collectCoverageSignals = async (targetPath, logger) => {
  const override = process.env["CODESENTINEL_QUALITY_COVERAGE_SUMMARY"];
  const hasOverride = override !== undefined && override.trim().length > 0;
  const summaryPath = hasOverride
    ? resolve3(targetPath, override)
    : join4(targetPath, "coverage", "coverage-summary.json");
  if (existsSync2(summaryPath) === false) {
    return undefined;
  }
  try {
    const parsed = JSON.parse(await readFile2(summaryPath, "utf8"));
    return {
      lineCoverage: toRatio(parsed.total?.lines?.pct),
      branchCoverage: toRatio(parsed.total?.branches?.pct),
      functionCoverage: toRatio(parsed.total?.functions?.pct),
      statementCoverage: toRatio(parsed.total?.statements?.pct)
    };
  } catch (error) {
    // Malformed summaries degrade to "no coverage signal" with a warning.
    const reason = error instanceof Error ? error.message : "unknown error";
    logger.warn(
      `quality signals: coverage summary parse failed at ${summaryPath} (${reason})`
    );
    return undefined;
  }
};
4829
// Gather every optional quality-signal group. The async collectors run in
// parallel; the TypeScript collector is synchronous. Groups that produced no
// data are omitted from the result object instead of being set to undefined.
var collectQualitySignals = async (targetPath, structural, logger) => {
  const [todoFixmeCommentCount, eslint, complexity, duplication, coverage] =
    await Promise.all([
      collectTodoFixmeCommentCount(targetPath, structural),
      collectEslintSignals(targetPath, structural, logger),
      collectComplexitySignals(targetPath, structural),
      collectDuplicationSignals(targetPath, structural),
      collectCoverageSignals(targetPath, logger)
    ]);
  const typescript = collectTypeScriptSignals(targetPath, logger);
  const signals = { todoFixmeCommentCount };
  if (eslint !== undefined) {
    signals.eslint = eslint;
  }
  if (typescript !== undefined) {
    signals.typescript = typescript;
  }
  if (complexity !== undefined) {
    signals.complexity = complexity;
  }
  if (duplication !== undefined) {
    signals.duplication = duplication;
  }
  if (coverage !== undefined) {
    signals.coverage = coverage;
  }
  return signals;
};
4847
+
4268
4848
  // ../quality-engine/dist/index.js
4269
4849
  var clamp01 = (value) => {
4270
4850
  if (!Number.isFinite(value)) {
@@ -4305,14 +4885,38 @@ var concentration = (rawValues) => {
4305
4885
  return clamp01(normalized);
4306
4886
  };
4307
4887
  var DIMENSION_WEIGHTS = {
4308
- modularity: 0.45,
4309
- changeHygiene: 0.35,
4310
- testHealth: 0.2
4311
- };
4312
- var TODO_FIXME_MAX_IMPACT = 0.08;
4888
+ modularity: 0.2,
4889
+ changeHygiene: 0.2,
4890
+ staticAnalysis: 0.2,
4891
+ complexity: 0.15,
4892
+ duplication: 0.1,
4893
+ testHealth: 0.15
4894
+ };
4895
+ var QUALITY_TRACE_VERSION = "1";
4313
4896
  var toPercentage = (normalizedQuality) => round45(clamp01(normalizedQuality) * 100);
4897
// Map a non-negative count onto [0, 1] with logarithmic compression; `scale`
// is the count that saturates to 1. Negative values are treated as 0, and a
// non-positive scale short-circuits to 0 (avoids division by log1p(0) = 0).
var logScaled = (value, scale) => {
  if (scale > 0) {
    const compressed = Math.log1p(Math.max(0, value));
    return clamp01(compressed / Math.log1p(scale));
  }
  return 0;
};
4903
// Flatten an internal factor spec into the public trace record, rounding the
// numeric fields. `contribution` is this factor's weighted share of the
// 0-100 dimension score (penalty * weight * 100).
var toFactorTrace = (spec) => {
  const { factorId, penalty, weight, rawMetrics, normalizedMetrics, evidence } = spec;
  return {
    factorId,
    contribution: round45(penalty * weight * 100),
    penalty: round45(penalty),
    rawMetrics,
    normalizedMetrics,
    weight: round45(weight),
    evidence
  };
};
4912
// Assemble one per-dimension trace entry: the clamped/rounded normalized
// quality, its 0-100 percentage form, and a trace record for each factor.
var createDimensionTrace = (dimension, quality, factors) => {
  const factorTraces = [];
  for (const factor of factors) {
    factorTraces.push(toFactorTrace(factor));
  }
  return {
    dimension,
    normalizedScore: round45(clamp01(quality)),
    score: toPercentage(quality),
    factors: factorTraces
  };
};
4314
4918
  var filePaths = (structural) => structural.files.map((file) => file.relativePath);
4315
- var isTestPath = (path) => {
4919
+ var isTestPath2 = (path) => {
4316
4920
  const normalized = path.toLowerCase();
4317
4921
  return normalized.includes("/__tests__/") || normalized.includes("\\__tests__\\") || normalized.includes(".test.") || normalized.includes(".spec.");
4318
4922
  };
@@ -4320,7 +4924,7 @@ var isSourcePath = (path) => {
4320
4924
  if (path.endsWith(".d.ts")) {
4321
4925
  return false;
4322
4926
  }
4323
- return !isTestPath(path);
4927
+ return !isTestPath2(path);
4324
4928
  };
4325
4929
  var pushIssue = (issues, issue) => {
4326
4930
  issues.push({
@@ -4331,22 +4935,24 @@ var pushIssue = (issues, issue) => {
4331
4935
  var computeRepositoryQualitySummary = (input) => {
4332
4936
  const issues = [];
4333
4937
  const sourceFileSet = new Set(input.structural.files.map((file) => file.relativePath));
4938
+ const signals = input.signals;
4334
4939
  const cycleCount = input.structural.metrics.cycleCount;
4335
4940
  const cycleSizeAverage = input.structural.cycles.length === 0 ? 0 : average(input.structural.cycles.map((cycle) => cycle.nodes.length));
4336
4941
  const cyclePenalty = clamp01(cycleCount / 6) * 0.7 + clamp01((cycleSizeAverage - 2) / 8) * 0.3;
4942
+ const fanInConcentration = concentration(input.structural.files.map((file) => file.fanIn));
4943
+ const fanOutConcentration = concentration(input.structural.files.map((file) => file.fanOut));
4944
+ const centralityConcentration = average([fanInConcentration, fanOutConcentration]);
4337
4945
  if (cycleCount > 0) {
4338
4946
  pushIssue(issues, {
4339
4947
  id: "quality.modularity.structural_cycles",
4948
+ ruleId: "graph.structural_cycles",
4340
4949
  dimension: "modularity",
4341
4950
  target: input.structural.cycles[0]?.nodes.slice().sort((a, b) => a.localeCompare(b)).join(" -> ") ?? input.structural.targetPath,
4342
4951
  message: `${cycleCount} structural cycle(s) increase coupling and refactor cost.`,
4343
4952
  severity: cycleCount >= 3 ? "error" : "warn",
4344
- impact: round45(cyclePenalty * 0.6)
4953
+ impact: round45(cyclePenalty * 0.55)
4345
4954
  });
4346
4955
  }
4347
- const fanInConcentration = concentration(input.structural.files.map((file) => file.fanIn));
4348
- const fanOutConcentration = concentration(input.structural.files.map((file) => file.fanOut));
4349
- const centralityConcentration = average([fanInConcentration, fanOutConcentration]);
4350
4956
  if (centralityConcentration >= 0.5) {
4351
4957
  const hottest = [...input.structural.files].map((file) => ({
4352
4958
  path: file.relativePath,
@@ -4354,12 +4960,44 @@ var computeRepositoryQualitySummary = (input) => {
4354
4960
  })).sort((a, b) => b.pressure - a.pressure || a.path.localeCompare(b.path))[0];
4355
4961
  pushIssue(issues, {
4356
4962
  id: "quality.modularity.centrality_concentration",
4963
+ ruleId: "graph.centrality_concentration",
4357
4964
  dimension: "modularity",
4358
4965
  target: hottest?.path ?? input.structural.targetPath,
4359
4966
  message: "Fan-in/fan-out pressure is concentrated in a small set of files.",
4360
- impact: round45(centralityConcentration * 0.5)
4967
+ impact: round45(centralityConcentration * 0.45)
4361
4968
  });
4362
4969
  }
4970
+ const modularityFactors = [
4971
+ {
4972
+ factorId: "quality.modularity.structural_cycles",
4973
+ penalty: cyclePenalty,
4974
+ rawMetrics: {
4975
+ cycleCount,
4976
+ averageCycleSize: round45(cycleSizeAverage)
4977
+ },
4978
+ normalizedMetrics: {
4979
+ cyclePenalty: round45(cyclePenalty)
4980
+ },
4981
+ weight: 0.55,
4982
+ evidence: [{ kind: "repository_metric", metric: "structural.cycles" }]
4983
+ },
4984
+ {
4985
+ factorId: "quality.modularity.centrality_concentration",
4986
+ penalty: centralityConcentration,
4987
+ rawMetrics: {
4988
+ fanInConcentration: round45(fanInConcentration),
4989
+ fanOutConcentration: round45(fanOutConcentration)
4990
+ },
4991
+ normalizedMetrics: {
4992
+ centralityConcentration: round45(centralityConcentration)
4993
+ },
4994
+ weight: 0.45,
4995
+ evidence: [{ kind: "repository_metric", metric: "structural.files.fanIn/fanOut" }]
4996
+ }
4997
+ ];
4998
+ const modularityPenalty = clamp01(
4999
+ modularityFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
5000
+ );
4363
5001
  let churnConcentration = 0;
4364
5002
  let volatilityConcentration = 0;
4365
5003
  let couplingDensity = 0;
@@ -4385,10 +5023,11 @@ var computeRepositoryQualitySummary = (input) => {
4385
5023
  )[0];
4386
5024
  pushIssue(issues, {
4387
5025
  id: "quality.change_hygiene.churn_concentration",
5026
+ ruleId: "git.churn_concentration",
4388
5027
  dimension: "changeHygiene",
4389
5028
  target: mostChurn?.filePath ?? input.structural.targetPath,
4390
5029
  message: "Churn is concentrated in a narrow part of the codebase.",
4391
- impact: round45(churnConcentration * 0.45)
5030
+ impact: round45(churnConcentration * 0.4)
4392
5031
  });
4393
5032
  }
4394
5033
  if (volatilityConcentration >= 0.45) {
@@ -4397,10 +5036,11 @@ var computeRepositoryQualitySummary = (input) => {
4397
5036
  )[0];
4398
5037
  pushIssue(issues, {
4399
5038
  id: "quality.change_hygiene.volatility_concentration",
5039
+ ruleId: "git.volatility_concentration",
4400
5040
  dimension: "changeHygiene",
4401
5041
  target: volatileFile?.filePath ?? input.structural.targetPath,
4402
5042
  message: "Recent volatility is concentrated in files that change frequently.",
4403
- impact: round45(volatilityConcentration * 0.4)
5043
+ impact: round45(volatilityConcentration * 0.3)
4404
5044
  });
4405
5045
  }
4406
5046
  if (couplingDensity >= 0.35 || couplingIntensity >= 0.45) {
@@ -4409,61 +5049,369 @@ var computeRepositoryQualitySummary = (input) => {
4409
5049
  )[0];
4410
5050
  pushIssue(issues, {
4411
5051
  id: "quality.change_hygiene.coupling_density",
5052
+ ruleId: "git.coupling_density",
4412
5053
  dimension: "changeHygiene",
4413
5054
  target: strongestPair === void 0 ? input.structural.targetPath : `${strongestPair.fileA}<->${strongestPair.fileB}`,
4414
5055
  message: "Co-change relationships are dense, increasing coordination overhead.",
4415
- impact: round45(average([couplingDensity, couplingIntensity]) * 0.35)
5056
+ impact: round45(average([couplingDensity, couplingIntensity]) * 0.3)
4416
5057
  });
4417
5058
  }
4418
5059
  }
4419
- const modularityPenalty = clamp01(cyclePenalty * 0.55 + centralityConcentration * 0.45);
4420
- const changeHygienePenalty = input.evolution.available ? clamp01(
4421
- churnConcentration * 0.4 + volatilityConcentration * 0.35 + couplingDensity * 0.15 + couplingIntensity * 0.1
4422
- ) : 0.25;
5060
+ const todoFixmeCommentCount = Math.max(0, signals?.todoFixmeCommentCount ?? 0);
5061
+ const todoFixmePenalty = logScaled(todoFixmeCommentCount, 80) * 0.08;
5062
+ if (todoFixmeCommentCount > 0) {
5063
+ pushIssue(issues, {
5064
+ id: "quality.change_hygiene.todo_fixme_load",
5065
+ ruleId: "comments.todo_fixme",
5066
+ dimension: "changeHygiene",
5067
+ target: input.structural.targetPath,
5068
+ message: `Found ${todoFixmeCommentCount} TODO/FIXME comment marker(s); cleanup debt is accumulating.`,
5069
+ impact: round45(todoFixmePenalty * 0.4)
5070
+ });
5071
+ }
5072
+ const changeHygieneFactors = [
5073
+ {
5074
+ factorId: "quality.change_hygiene.churn_concentration",
5075
+ penalty: churnConcentration,
5076
+ rawMetrics: {
5077
+ churnConcentration: round45(churnConcentration)
5078
+ },
5079
+ normalizedMetrics: {
5080
+ churnConcentration: round45(churnConcentration)
5081
+ },
5082
+ weight: 0.35,
5083
+ evidence: [{ kind: "repository_metric", metric: "evolution.churn" }]
5084
+ },
5085
+ {
5086
+ factorId: "quality.change_hygiene.volatility_concentration",
5087
+ penalty: volatilityConcentration,
5088
+ rawMetrics: {
5089
+ volatilityConcentration: round45(volatilityConcentration)
5090
+ },
5091
+ normalizedMetrics: {
5092
+ volatilityConcentration: round45(volatilityConcentration)
5093
+ },
5094
+ weight: 0.25,
5095
+ evidence: [{ kind: "repository_metric", metric: "evolution.recentVolatility" }]
5096
+ },
5097
+ {
5098
+ factorId: "quality.change_hygiene.coupling_density",
5099
+ penalty: average([couplingDensity, couplingIntensity]),
5100
+ rawMetrics: {
5101
+ couplingDensity: round45(couplingDensity),
5102
+ couplingIntensity: round45(couplingIntensity)
5103
+ },
5104
+ normalizedMetrics: {
5105
+ couplingPressure: round45(average([couplingDensity, couplingIntensity]))
5106
+ },
5107
+ weight: 0.3,
5108
+ evidence: [{ kind: "repository_metric", metric: "evolution.coupling" }]
5109
+ },
5110
+ {
5111
+ factorId: "quality.change_hygiene.todo_fixme_load",
5112
+ penalty: todoFixmePenalty,
5113
+ rawMetrics: {
5114
+ todoFixmeCommentCount
5115
+ },
5116
+ normalizedMetrics: {
5117
+ todoFixmePenalty: round45(todoFixmePenalty)
5118
+ },
5119
+ weight: 0.1,
5120
+ evidence: [{ kind: "repository_metric", metric: "comments.todo_fixme" }]
5121
+ }
5122
+ ];
5123
+ const changeHygienePenalty = input.evolution.available ? clamp01(changeHygieneFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)) : 0.2;
5124
+ const eslint = signals?.eslint;
5125
+ const tsc = signals?.typescript;
5126
+ const sourceCount = Math.max(1, input.structural.files.length);
5127
+ const eslintErrorRate = (eslint?.errorCount ?? 0) / sourceCount;
5128
+ const eslintWarnRate = (eslint?.warningCount ?? 0) / sourceCount;
5129
+ const tsErrorRate = (tsc?.errorCount ?? 0) / sourceCount;
5130
+ const tsWarnRate = (tsc?.warningCount ?? 0) / sourceCount;
5131
+ const staticAnalysisFactors = [
5132
+ {
5133
+ factorId: "quality.static_analysis.eslint_errors",
5134
+ penalty: clamp01(eslintErrorRate / 0.5),
5135
+ rawMetrics: {
5136
+ eslintErrorCount: eslint?.errorCount ?? 0,
5137
+ eslintFilesWithIssues: eslint?.filesWithIssues ?? 0
5138
+ },
5139
+ normalizedMetrics: {
5140
+ eslintErrorRate: round45(eslintErrorRate)
5141
+ },
5142
+ weight: 0.5,
5143
+ evidence: [{ kind: "repository_metric", metric: "eslint.errorCount" }]
5144
+ },
5145
+ {
5146
+ factorId: "quality.static_analysis.eslint_warnings",
5147
+ penalty: clamp01(eslintWarnRate / 1.2),
5148
+ rawMetrics: {
5149
+ eslintWarningCount: eslint?.warningCount ?? 0
5150
+ },
5151
+ normalizedMetrics: {
5152
+ eslintWarningRate: round45(eslintWarnRate)
5153
+ },
5154
+ weight: 0.2,
5155
+ evidence: [{ kind: "repository_metric", metric: "eslint.warningCount" }]
5156
+ },
5157
+ {
5158
+ factorId: "quality.static_analysis.typescript_errors",
5159
+ penalty: clamp01(tsErrorRate / 0.35),
5160
+ rawMetrics: {
5161
+ typeScriptErrorCount: tsc?.errorCount ?? 0,
5162
+ typeScriptFilesWithDiagnostics: tsc?.filesWithDiagnostics ?? 0
5163
+ },
5164
+ normalizedMetrics: {
5165
+ typeScriptErrorRate: round45(tsErrorRate)
5166
+ },
5167
+ weight: 0.2,
5168
+ evidence: [{ kind: "repository_metric", metric: "typescript.errorCount" }]
5169
+ },
5170
+ {
5171
+ factorId: "quality.static_analysis.typescript_warnings",
5172
+ penalty: clamp01(tsWarnRate / 0.9),
5173
+ rawMetrics: {
5174
+ typeScriptWarningCount: tsc?.warningCount ?? 0
5175
+ },
5176
+ normalizedMetrics: {
5177
+ typeScriptWarningRate: round45(tsWarnRate)
5178
+ },
5179
+ weight: 0.1,
5180
+ evidence: [{ kind: "repository_metric", metric: "typescript.warningCount" }]
5181
+ }
5182
+ ];
5183
+ const staticAnalysisPenalty = clamp01(
5184
+ staticAnalysisFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
5185
+ );
5186
+ if ((eslint?.errorCount ?? 0) > 0) {
5187
+ const topRule = [...eslint?.ruleCounts ?? []].sort(
5188
+ (a, b) => b.count - a.count || a.ruleId.localeCompare(b.ruleId)
5189
+ )[0];
5190
+ pushIssue(issues, {
5191
+ id: "quality.static_analysis.eslint_errors",
5192
+ ruleId: topRule?.ruleId ?? "eslint",
5193
+ dimension: "staticAnalysis",
5194
+ target: input.structural.targetPath,
5195
+ message: topRule === void 0 ? `ESLint reported ${eslint?.errorCount ?? 0} error(s).` : `ESLint reported ${eslint?.errorCount ?? 0} error(s); top rule ${topRule.ruleId} (${topRule.count}).`,
5196
+ severity: "error",
5197
+ impact: round45(staticAnalysisPenalty * 0.5)
5198
+ });
5199
+ }
5200
+ if ((tsc?.errorCount ?? 0) > 0) {
5201
+ pushIssue(issues, {
5202
+ id: "quality.static_analysis.typescript_errors",
5203
+ ruleId: "typescript",
5204
+ dimension: "staticAnalysis",
5205
+ target: input.structural.targetPath,
5206
+ message: `TypeScript reported ${tsc?.errorCount ?? 0} error diagnostic(s).`,
5207
+ severity: "error",
5208
+ impact: round45(staticAnalysisPenalty * 0.4)
5209
+ });
5210
+ }
5211
+ const complexity = signals?.complexity;
5212
+ const avgComplexity = complexity?.averageCyclomatic ?? 0;
5213
+ const maxComplexity = complexity?.maxCyclomatic ?? 0;
5214
+ const highComplexityRatio = (complexity?.analyzedFileCount ?? 0) === 0 ? 0 : (complexity?.highComplexityFileCount ?? 0) / Math.max(1, complexity?.analyzedFileCount ?? 1);
5215
+ const complexityFactors = [
5216
+ {
5217
+ factorId: "quality.complexity.average_cyclomatic",
5218
+ penalty: clamp01(avgComplexity / 16),
5219
+ rawMetrics: {
5220
+ averageCyclomatic: round45(avgComplexity)
5221
+ },
5222
+ normalizedMetrics: {
5223
+ averageCyclomaticPenalty: round45(clamp01(avgComplexity / 16))
5224
+ },
5225
+ weight: 0.4,
5226
+ evidence: [{ kind: "repository_metric", metric: "complexity.averageCyclomatic" }]
5227
+ },
5228
+ {
5229
+ factorId: "quality.complexity.max_cyclomatic",
5230
+ penalty: clamp01(maxComplexity / 35),
5231
+ rawMetrics: {
5232
+ maxCyclomatic: round45(maxComplexity)
5233
+ },
5234
+ normalizedMetrics: {
5235
+ maxCyclomaticPenalty: round45(clamp01(maxComplexity / 35))
5236
+ },
5237
+ weight: 0.35,
5238
+ evidence: [{ kind: "repository_metric", metric: "complexity.maxCyclomatic" }]
5239
+ },
5240
+ {
5241
+ factorId: "quality.complexity.high_complexity_ratio",
5242
+ penalty: clamp01(highComplexityRatio / 0.35),
5243
+ rawMetrics: {
5244
+ highComplexityFileCount: complexity?.highComplexityFileCount ?? 0,
5245
+ analyzedFileCount: complexity?.analyzedFileCount ?? 0
5246
+ },
5247
+ normalizedMetrics: {
5248
+ highComplexityRatio: round45(highComplexityRatio)
5249
+ },
5250
+ weight: 0.25,
5251
+ evidence: [{ kind: "repository_metric", metric: "complexity.highComplexityFileCount" }]
5252
+ }
5253
+ ];
5254
+ const complexityPenalty = clamp01(
5255
+ complexityFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
5256
+ );
5257
+ if (maxComplexity >= 20 || highComplexityRatio >= 0.2) {
5258
+ pushIssue(issues, {
5259
+ id: "quality.complexity.high_cyclomatic",
5260
+ ruleId: "complexity.cyclomatic",
5261
+ dimension: "complexity",
5262
+ target: input.structural.targetPath,
5263
+ message: `Complexity is elevated (avg=${round45(avgComplexity)}, max=${round45(maxComplexity)}).`,
5264
+ impact: round45(complexityPenalty * 0.6)
5265
+ });
5266
+ }
5267
+ const duplication = signals?.duplication;
5268
+ const duplicatedLineRatio = duplication?.duplicatedLineRatio ?? 0;
5269
+ const duplicatedBlockCount = duplication?.duplicatedBlockCount ?? 0;
5270
+ const duplicationFactors = [
5271
+ {
5272
+ factorId: "quality.duplication.line_ratio",
5273
+ penalty: clamp01(duplicatedLineRatio / 0.25),
5274
+ rawMetrics: {
5275
+ duplicatedLineRatio: round45(duplicatedLineRatio)
5276
+ },
5277
+ normalizedMetrics: {
5278
+ duplicatedLineRatioPenalty: round45(clamp01(duplicatedLineRatio / 0.25))
5279
+ },
5280
+ weight: 0.7,
5281
+ evidence: [{ kind: "repository_metric", metric: "duplication.duplicatedLineRatio" }]
5282
+ },
5283
+ {
5284
+ factorId: "quality.duplication.block_count",
5285
+ penalty: logScaled(duplicatedBlockCount, 120),
5286
+ rawMetrics: {
5287
+ duplicatedBlockCount,
5288
+ filesWithDuplication: duplication?.filesWithDuplication ?? 0
5289
+ },
5290
+ normalizedMetrics: {
5291
+ duplicatedBlockPenalty: round45(logScaled(duplicatedBlockCount, 120))
5292
+ },
5293
+ weight: 0.3,
5294
+ evidence: [{ kind: "repository_metric", metric: "duplication.duplicatedBlockCount" }]
5295
+ }
5296
+ ];
5297
+ const duplicationPenalty = clamp01(
5298
+ duplicationFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
5299
+ );
5300
+ if (duplicatedLineRatio >= 0.08) {
5301
+ pushIssue(issues, {
5302
+ id: "quality.duplication.high_duplication",
5303
+ ruleId: "duplication.line_ratio",
5304
+ dimension: "duplication",
5305
+ target: input.structural.targetPath,
5306
+ message: `Duplication ratio is high (${toPercentage(duplicatedLineRatio)}%).`,
5307
+ impact: round45(duplicationPenalty * 0.6)
5308
+ });
5309
+ }
4423
5310
  const paths = filePaths(input.structural);
4424
- const testFiles = paths.filter((path) => isTestPath(path)).length;
5311
+ const testFiles = paths.filter((path) => isTestPath2(path)).length;
4425
5312
  const sourceFiles = paths.filter((path) => isSourcePath(path)).length;
4426
5313
  const testRatio = sourceFiles <= 0 ? 1 : testFiles / sourceFiles;
4427
- const testPresencePenalty = sourceFiles <= 0 ? 0 : 1 - clamp01(testRatio / 0.3);
5314
+ const testPresencePenalty = sourceFiles <= 0 ? 0 : 1 - clamp01(testRatio / 0.35);
5315
+ const coverageSignals = signals?.coverage;
5316
+ const coverageValues = [
5317
+ coverageSignals?.lineCoverage,
5318
+ coverageSignals?.branchCoverage,
5319
+ coverageSignals?.functionCoverage,
5320
+ coverageSignals?.statementCoverage
5321
+ ].filter((value) => value !== null && value !== void 0);
5322
+ const coverageRatio = coverageValues.length === 0 ? null : average(coverageValues);
5323
+ const coveragePenalty = coverageRatio === null ? 0.2 : 1 - clamp01(coverageRatio / 0.8);
5324
+ const testHealthFactors = [
5325
+ {
5326
+ factorId: "quality.test_health.test_presence",
5327
+ penalty: testPresencePenalty,
5328
+ rawMetrics: {
5329
+ sourceFiles,
5330
+ testFiles,
5331
+ testRatio: round45(testRatio)
5332
+ },
5333
+ normalizedMetrics: {
5334
+ testPresencePenalty: round45(testPresencePenalty)
5335
+ },
5336
+ weight: 0.55,
5337
+ evidence: [{ kind: "repository_metric", metric: "tests.file_ratio" }]
5338
+ },
5339
+ {
5340
+ factorId: "quality.test_health.coverage",
5341
+ penalty: coveragePenalty,
5342
+ rawMetrics: {
5343
+ lineCoverage: coverageSignals?.lineCoverage ?? null,
5344
+ branchCoverage: coverageSignals?.branchCoverage ?? null,
5345
+ functionCoverage: coverageSignals?.functionCoverage ?? null,
5346
+ statementCoverage: coverageSignals?.statementCoverage ?? null
5347
+ },
5348
+ normalizedMetrics: {
5349
+ coverageRatio: coverageRatio === null ? null : round45(coverageRatio),
5350
+ coveragePenalty: round45(coveragePenalty)
5351
+ },
5352
+ weight: 0.45,
5353
+ evidence: [{ kind: "repository_metric", metric: "coverage.summary" }]
5354
+ }
5355
+ ];
5356
+ const testHealthPenalty = clamp01(
5357
+ testHealthFactors.reduce((sum, factor) => sum + factor.penalty * factor.weight, 0)
5358
+ );
4428
5359
  if (sourceFiles > 0 && testRatio < 0.2) {
4429
5360
  pushIssue(issues, {
4430
5361
  id: "quality.test_health.low_test_presence",
5362
+ ruleId: "tests.file_ratio",
4431
5363
  dimension: "testHealth",
4432
5364
  target: input.structural.targetPath,
4433
5365
  message: `Detected ${testFiles} test file(s) for ${sourceFiles} source file(s).`,
4434
5366
  severity: testRatio === 0 ? "error" : "warn",
4435
- impact: round45(testPresencePenalty * 0.5)
5367
+ impact: round45(testHealthPenalty * 0.4)
4436
5368
  });
4437
5369
  }
4438
- const todoFixmeCount = Math.max(0, input.todoFixmeCount ?? 0);
4439
- const todoFixmePenalty = clamp01(todoFixmeCount / 120) * TODO_FIXME_MAX_IMPACT;
4440
- if (todoFixmeCount > 0) {
5370
+ if (coverageRatio !== null && coverageRatio < 0.6) {
4441
5371
  pushIssue(issues, {
4442
- id: "quality.change_hygiene.todo_fixme_load",
4443
- dimension: "changeHygiene",
5372
+ id: "quality.test_health.low_coverage",
5373
+ ruleId: "coverage.threshold",
5374
+ dimension: "testHealth",
4444
5375
  target: input.structural.targetPath,
4445
- message: `Found ${todoFixmeCount} TODO/FIXME marker(s); cleanup debt is accumulating.`,
4446
- impact: round45(todoFixmePenalty * 0.2)
5376
+ message: `Coverage is below threshold (${toPercentage(coverageRatio)}%).`,
5377
+ impact: round45(testHealthPenalty * 0.35)
4447
5378
  });
4448
5379
  }
4449
5380
  const modularityQuality = clamp01(1 - modularityPenalty);
4450
- const changeHygieneQuality = clamp01(1 - clamp01(changeHygienePenalty + todoFixmePenalty));
4451
- const testHealthQuality = clamp01(1 - testPresencePenalty);
5381
+ const changeHygieneQuality = clamp01(1 - changeHygienePenalty);
5382
+ const staticAnalysisQuality = clamp01(1 - staticAnalysisPenalty);
5383
+ const complexityQuality = clamp01(1 - complexityPenalty);
5384
+ const duplicationQuality = clamp01(1 - duplicationPenalty);
5385
+ const testHealthQuality = clamp01(1 - testHealthPenalty);
4452
5386
  const normalizedScore = clamp01(
4453
- modularityQuality * DIMENSION_WEIGHTS.modularity + changeHygieneQuality * DIMENSION_WEIGHTS.changeHygiene + testHealthQuality * DIMENSION_WEIGHTS.testHealth
5387
+ modularityQuality * DIMENSION_WEIGHTS.modularity + changeHygieneQuality * DIMENSION_WEIGHTS.changeHygiene + staticAnalysisQuality * DIMENSION_WEIGHTS.staticAnalysis + complexityQuality * DIMENSION_WEIGHTS.complexity + duplicationQuality * DIMENSION_WEIGHTS.duplication + testHealthQuality * DIMENSION_WEIGHTS.testHealth
4454
5388
  );
4455
5389
  const topIssues = [...issues].sort(
4456
5390
  (a, b) => b.impact - a.impact || a.id.localeCompare(b.id) || a.target.localeCompare(b.target)
4457
- ).slice(0, 8).map(({ impact: _impact, ...issue }) => issue);
5391
+ ).slice(0, 12).map(({ impact: _impact, ...issue }) => issue);
4458
5392
  return {
4459
5393
  qualityScore: toPercentage(normalizedScore),
4460
5394
  normalizedScore: round45(normalizedScore),
4461
5395
  dimensions: {
4462
5396
  modularity: toPercentage(modularityQuality),
4463
5397
  changeHygiene: toPercentage(changeHygieneQuality),
5398
+ staticAnalysis: toPercentage(staticAnalysisQuality),
5399
+ complexity: toPercentage(complexityQuality),
5400
+ duplication: toPercentage(duplicationQuality),
4464
5401
  testHealth: toPercentage(testHealthQuality)
4465
5402
  },
4466
- topIssues
5403
+ topIssues,
5404
+ trace: {
5405
+ schemaVersion: QUALITY_TRACE_VERSION,
5406
+ dimensions: [
5407
+ createDimensionTrace("modularity", modularityQuality, modularityFactors),
5408
+ createDimensionTrace("changeHygiene", changeHygieneQuality, changeHygieneFactors),
5409
+ createDimensionTrace("staticAnalysis", staticAnalysisQuality, staticAnalysisFactors),
5410
+ createDimensionTrace("complexity", complexityQuality, complexityFactors),
5411
+ createDimensionTrace("duplication", duplicationQuality, duplicationFactors),
5412
+ createDimensionTrace("testHealth", testHealthQuality, testHealthFactors)
5413
+ ]
5414
+ }
4467
5415
  };
4468
5416
  };
4469
5417
 
@@ -4640,7 +5588,7 @@ var normalizeWithScale = (value, scale) => {
4640
5588
  }
4641
5589
  return toUnitInterval((value - scale.lower) / (scale.upper - scale.lower));
4642
5590
  };
4643
- var normalizePath2 = (path) => path.replaceAll("\\", "/");
5591
+ var normalizePath3 = (path) => path.replaceAll("\\", "/");
4644
5592
  var computeAggregatorAttenuation = (input) => {
4645
5593
  const { fanIn, fanOut, inCycle, evolutionMetrics, config } = input;
4646
5594
  if (!config.enabled || inCycle > 0) {
@@ -4715,7 +5663,7 @@ var computeEvolutionHistoryConfidence = (structural, evolution, evolutionByFile)
4715
5663
  }
4716
5664
  let coveredFiles = 0;
4717
5665
  for (const file of structural.files) {
4718
- if (evolutionByFile.has(normalizePath2(file.id))) {
5666
+ if (evolutionByFile.has(normalizePath3(file.id))) {
4719
5667
  coveredFiles += 1;
4720
5668
  }
4721
5669
  }
@@ -4909,7 +5857,7 @@ var mapEvolutionByFile = (evolution) => {
4909
5857
  return /* @__PURE__ */ new Map();
4910
5858
  }
4911
5859
  return new Map(
4912
- evolution.files.map((fileMetrics) => [normalizePath2(fileMetrics.filePath), fileMetrics])
5860
+ evolution.files.map((fileMetrics) => [normalizePath3(fileMetrics.filePath), fileMetrics])
4913
5861
  );
4914
5862
  };
4915
5863
  var computeEvolutionScales = (evolutionByFile, config) => {
@@ -4933,7 +5881,7 @@ var computeEvolutionScales = (evolutionByFile, config) => {
4933
5881
  };
4934
5882
  };
4935
5883
  var inferModuleName = (filePath, config) => {
4936
- const normalized = normalizePath2(filePath);
5884
+ const normalized = normalizePath3(filePath);
4937
5885
  const parts = normalized.split("/").filter((part) => part.length > 0);
4938
5886
  if (parts.length <= 1) {
4939
5887
  return config.module.rootLabel;
@@ -4954,7 +5902,7 @@ var buildFragileClusters = (structural, evolution, fileScoresByFile, config) =>
4954
5902
  const clusters = [];
4955
5903
  let cycleClusterCount = 0;
4956
5904
  for (const cycle of structural.cycles) {
4957
- const files = [...new Set(cycle.nodes.map((node) => normalizePath2(node)))].filter(
5905
+ const files = [...new Set(cycle.nodes.map((node) => normalizePath3(node)))].filter(
4958
5906
  (filePath) => fileScoresByFile.has(filePath)
4959
5907
  );
4960
5908
  if (files.length < 2) {
@@ -4986,8 +5934,8 @@ var buildFragileClusters = (structural, evolution, fileScoresByFile, config) =>
4986
5934
  )
4987
5935
  );
4988
5936
  const selectedPairs = candidates.filter((pair) => pair.couplingScore >= threshold).map((pair) => ({
4989
- fileA: normalizePath2(pair.fileA),
4990
- fileB: normalizePath2(pair.fileB),
5937
+ fileA: normalizePath3(pair.fileA),
5938
+ fileB: normalizePath3(pair.fileB),
4991
5939
  couplingScore: pair.couplingScore
4992
5940
  })).filter(
4993
5941
  (pair) => pair.fileA !== pair.fileB && fileScoresByFile.has(pair.fileA) && fileScoresByFile.has(pair.fileB)
@@ -5065,7 +6013,7 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5065
6013
  );
5066
6014
  const evolutionScales = computeEvolutionScales(evolutionByFile, config);
5067
6015
  const cycleFileSet = new Set(
5068
- structural.cycles.flatMap((cycle) => cycle.nodes.map((node) => normalizePath2(node)))
6016
+ structural.cycles.flatMap((cycle) => cycle.nodes.map((node) => normalizePath3(node)))
5069
6017
  );
5070
6018
  const fanInScale = buildQuantileScale(
5071
6019
  structural.files.map((file) => logScale(file.fanIn)),
@@ -5088,7 +6036,7 @@ var computeRiskSummary = (structural, evolution, external, config, traceCollecto
5088
6036
  external: external.available
5089
6037
  });
5090
6038
  const fileRiskContexts = structural.files.map((file) => {
5091
- const filePath = normalizePath2(file.id);
6039
+ const filePath = normalizePath3(file.id);
5092
6040
  const inCycle = cycleFileSet.has(filePath) ? 1 : 0;
5093
6041
  const fanInRisk = normalizeWithScale(logScale(file.fanIn), fanInScale);
5094
6042
  const fanOutRisk = normalizeWithScale(logScale(file.fanOut), fanOutScale);
@@ -5704,30 +6652,8 @@ var evaluateRepositoryRisk = (input, options = {}) => {
5704
6652
  };
5705
6653
  };
5706
6654
 
5707
- // src/application/todo-fixme-counter.ts
5708
- import * as ts2 from "typescript";
5709
- var markerRegex = /\b(?:TODO|FIXME)\b/gi;
5710
- var countMarkers = (text) => text.match(markerRegex)?.length ?? 0;
5711
- var countTodoFixmeInComments = (content) => {
5712
- const scanner = ts2.createScanner(
5713
- ts2.ScriptTarget.Latest,
5714
- false,
5715
- ts2.LanguageVariant.Standard,
5716
- content
5717
- );
5718
- let total = 0;
5719
- let token = scanner.scan();
5720
- while (token !== ts2.SyntaxKind.EndOfFileToken) {
5721
- if (token === ts2.SyntaxKind.SingleLineCommentTrivia || token === ts2.SyntaxKind.MultiLineCommentTrivia) {
5722
- total += countMarkers(scanner.getTokenText());
5723
- }
5724
- token = scanner.scan();
5725
- }
5726
- return total;
5727
- };
5728
-
5729
6655
  // src/application/run-analyze-command.ts
5730
- var resolveTargetPath = (inputPath, cwd) => resolve3(cwd, inputPath ?? ".");
6656
+ var resolveTargetPath = (inputPath, cwd) => resolve4(cwd, inputPath ?? ".");
5731
6657
  var riskProfileConfig = {
5732
6658
  default: void 0,
5733
6659
  personal: {
@@ -5854,18 +6780,6 @@ var createEvolutionProgressReporter = (logger) => {
5854
6780
  }
5855
6781
  };
5856
6782
  };
5857
- var collectTodoFixmeCount = async (targetPath, structural) => {
5858
- const filePaths2 = [...structural.files].map((file) => file.relativePath).sort((a, b) => a.localeCompare(b));
5859
- let total = 0;
5860
- for (const relativePath of filePaths2) {
5861
- try {
5862
- const content = await readFile2(join4(targetPath, relativePath), "utf8");
5863
- total += countTodoFixmeInComments(content);
5864
- } catch {
5865
- }
5866
- }
5867
- return total;
5868
- };
5869
6783
  var collectAnalysisInputs = async (inputPath, authorIdentityMode, options = {}, logger = createSilentLogger()) => {
5870
6784
  const invocationCwd = process.env["INIT_CWD"] ?? process.cwd();
5871
6785
  const targetPath = resolveTargetPath(inputPath, invocationCwd);
@@ -5908,14 +6822,16 @@ var collectAnalysisInputs = async (inputPath, authorIdentityMode, options = {},
5908
6822
  } else {
5909
6823
  logger.warn(`external analysis unavailable: ${external.reason}`);
5910
6824
  }
5911
- logger.info("collecting quality text signals");
5912
- const todoFixmeCount = await collectTodoFixmeCount(targetPath, structural);
5913
- logger.debug(`quality text signals: todoFixmeCount=${todoFixmeCount}`);
6825
+ logger.info("collecting quality signals");
6826
+ const qualitySignals = await collectQualitySignals(targetPath, structural, logger);
6827
+ logger.debug(
6828
+ `quality signals: todoFixmeCommentCount=${qualitySignals.todoFixmeCommentCount ?? 0}, eslintErrors=${qualitySignals.eslint?.errorCount ?? 0}, tscErrors=${qualitySignals.typescript?.errorCount ?? 0}`
6829
+ );
5914
6830
  return {
5915
6831
  structural,
5916
6832
  evolution,
5917
6833
  external,
5918
- todoFixmeCount
6834
+ qualitySignals
5919
6835
  };
5920
6836
  };
5921
6837
  var runAnalyzeCommand = async (inputPath, authorIdentityMode, options = {}, logger = createSilentLogger()) => {
@@ -5936,7 +6852,7 @@ var runAnalyzeCommand = async (inputPath, authorIdentityMode, options = {}, logg
5936
6852
  const quality = computeRepositoryQualitySummary({
5937
6853
  structural: analysisInputs.structural,
5938
6854
  evolution: analysisInputs.evolution,
5939
- todoFixmeCount: analysisInputs.todoFixmeCount
6855
+ signals: analysisInputs.qualitySignals
5940
6856
  });
5941
6857
  logger.info(
5942
6858
  `analysis completed (riskScore=${risk.riskScore}, qualityScore=${quality.qualityScore})`
@@ -5981,7 +6897,7 @@ var buildAnalysisSnapshot = async (inputPath, authorIdentityMode, options, logge
5981
6897
  quality: computeRepositoryQualitySummary({
5982
6898
  structural: analysisInputs.structural,
5983
6899
  evolution: analysisInputs.evolution,
5984
- todoFixmeCount: analysisInputs.todoFixmeCount
6900
+ signals: analysisInputs.qualitySignals
5985
6901
  })
5986
6902
  };
5987
6903
  return createSnapshot({
@@ -6074,7 +6990,7 @@ var runCheckCommand = async (inputPath, authorIdentityMode, options, logger = cr
6074
6990
 
6075
6991
  // src/application/run-ci-command.ts
6076
6992
  import { readFile as readFile4, writeFile as writeFile3 } from "fs/promises";
6077
- import { relative as relative2, resolve as resolve4 } from "path";
6993
+ import { relative as relative3, resolve as resolve5 } from "path";
6078
6994
  var isPathOutsideBase = (value) => {
6079
6995
  return value === ".." || value.startsWith("../") || value.startsWith("..\\");
6080
6996
  };
@@ -6087,7 +7003,7 @@ var runCiCommand = async (inputPath, authorIdentityMode, options, logger = creat
6087
7003
  if (options.baselineSha !== void 0 && options.baselineRef !== "auto") {
6088
7004
  throw new GovernanceConfigurationError("baseline-sha requires --baseline-ref auto");
6089
7005
  }
6090
- const resolvedTargetPath = resolve4(inputPath ?? process.cwd());
7006
+ const resolvedTargetPath = resolve5(inputPath ?? process.cwd());
6091
7007
  logger.info("building current snapshot");
6092
7008
  const current = await buildAnalysisSnapshot(
6093
7009
  inputPath,
@@ -6141,13 +7057,13 @@ var runCiCommand = async (inputPath, authorIdentityMode, options, logger = creat
6141
7057
  repositoryPath: resolvedTargetPath,
6142
7058
  baselineRef,
6143
7059
  analyzeWorktree: async (worktreePath, repositoryRoot) => {
6144
- const relativeTargetPath = relative2(repositoryRoot, resolvedTargetPath);
7060
+ const relativeTargetPath = relative3(repositoryRoot, resolvedTargetPath);
6145
7061
  if (isPathOutsideBase(relativeTargetPath)) {
6146
7062
  throw new GovernanceConfigurationError(
6147
7063
  `target path is outside git repository root: ${resolvedTargetPath}`
6148
7064
  );
6149
7065
  }
6150
- const baselineTargetPath = relativeTargetPath.length === 0 || relativeTargetPath === "." ? worktreePath : resolve4(worktreePath, relativeTargetPath);
7066
+ const baselineTargetPath = relativeTargetPath.length === 0 || relativeTargetPath === "." ? worktreePath : resolve5(worktreePath, relativeTargetPath);
6151
7067
  return buildAnalysisSnapshot(
6152
7068
  baselineTargetPath,
6153
7069
  authorIdentityMode,
@@ -6307,7 +7223,7 @@ var runExplainCommand = async (inputPath, authorIdentityMode, options, logger =
6307
7223
  quality: computeRepositoryQualitySummary({
6308
7224
  structural: analysisInputs.structural,
6309
7225
  evolution: analysisInputs.evolution,
6310
- todoFixmeCount: analysisInputs.todoFixmeCount
7226
+ signals: analysisInputs.qualitySignals
6311
7227
  })
6312
7228
  };
6313
7229
  logger.info(
@@ -6322,7 +7238,7 @@ var runExplainCommand = async (inputPath, authorIdentityMode, options, logger =
6322
7238
 
6323
7239
  // src/index.ts
6324
7240
  var program = new Command();
6325
- var packageJsonPath = resolve5(dirname2(fileURLToPath(import.meta.url)), "../package.json");
7241
+ var packageJsonPath = resolve6(dirname2(fileURLToPath(import.meta.url)), "../package.json");
6326
7242
  var { version } = JSON.parse(readFileSync2(packageJsonPath, "utf8"));
6327
7243
  var parseRecentWindowDays = (value) => {
6328
7244
  const parsed = Number.parseInt(value, 10);
@@ -6598,7 +7514,7 @@ program.command("run").argument("[path]", "path to the project to analyze").addO
6598
7514
  "log verbosity: silent, error, warn, info, debug (logs are written to stderr)"
6599
7515
  ).choices(["silent", "error", "warn", "info", "debug"]).default(parseLogLevel(process.env["CODESENTINEL_LOG_LEVEL"]))
6600
7516
  ).addOption(
6601
- new Option("--format <mode>", "combined output format: text, md, json").choices(["text", "md", "json"]).default("text")
7517
+ new Option("--format <mode>", "combined output format: text, md, json").choices(["text", "md", "json"]).default("md")
6602
7518
  ).addOption(
6603
7519
  new Option("--detail <level>", "run detail level: compact (default), standard, full").choices(["compact", "standard", "full"]).default("compact")
6604
7520
  ).option("--file <path>", "explain a specific file target").option("--module <name>", "explain a specific module target").option("--top <count>", "number of top hotspots to explain when no target is selected", "5").option("--compare <baseline>", "compare against a baseline snapshot JSON file").option("--snapshot <path>", "write current snapshot JSON artifact").option("--no-trace", "disable trace embedding in generated snapshot").addOption(
@@ -6825,19 +7741,23 @@ var parseMainBranches = (options) => {
6825
7741
  return unique.length > 0 ? unique : void 0;
6826
7742
  };
6827
7743
  var buildGateConfigFromOptions = (options) => {
6828
- const maxRepoDelta = parseGateNumber(options.maxRepoDelta, "--max-repo-delta");
7744
+ const maxRiskDelta = parseGateNumber(options.maxRiskDelta, "--max-risk-delta");
7745
+ const maxQualityDelta = parseGateNumber(options.maxQualityDelta, "--max-quality-delta");
6829
7746
  const maxNewHotspots = parseGateNumber(options.maxNewHotspots, "--max-new-hotspots");
6830
- const maxRepoScore = parseGateNumber(options.maxRepoScore, "--max-repo-score");
7747
+ const maxRiskScore = parseGateNumber(options.maxRiskScore, "--max-risk-score");
7748
+ const minQualityScore = parseGateNumber(options.minQualityScore, "--min-quality-score");
6831
7749
  const newHotspotScoreThreshold = parseGateNumber(
6832
7750
  options.newHotspotScoreThreshold,
6833
7751
  "--new-hotspot-score-threshold"
6834
7752
  );
6835
7753
  return {
6836
- ...maxRepoDelta === void 0 ? {} : { maxRepoDelta },
7754
+ ...maxRiskDelta === void 0 ? {} : { maxRiskDelta },
7755
+ ...maxQualityDelta === void 0 ? {} : { maxQualityDelta },
6837
7756
  ...options.noNewCycles === true ? { noNewCycles: true } : {},
6838
7757
  ...options.noNewHighRiskDeps === true ? { noNewHighRiskDeps: true } : {},
6839
7758
  ...maxNewHotspots === void 0 ? {} : { maxNewHotspots },
6840
- ...maxRepoScore === void 0 ? {} : { maxRepoScore },
7759
+ ...maxRiskScore === void 0 ? {} : { maxRiskScore },
7760
+ ...minQualityScore === void 0 ? {} : { minQualityScore },
6841
7761
  ...newHotspotScoreThreshold === void 0 ? {} : { newHotspotScoreThreshold },
6842
7762
  failOn: options.failOn
6843
7763
  };
@@ -6852,7 +7772,10 @@ program.command("check").argument("[path]", "path to the project to analyze").ad
6852
7772
  "--log-level <level>",
6853
7773
  "log verbosity: silent, error, warn, info, debug (logs are written to stderr)"
6854
7774
  ).choices(["silent", "error", "warn", "info", "debug"]).default(parseLogLevel(process.env["CODESENTINEL_LOG_LEVEL"]))
6855
- ).option("--compare <baseline>", "baseline snapshot path").option("--max-repo-delta <value>", "maximum allowed normalized repository score increase").option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-repo-score <score>", "absolute repository score limit (0..100)").addOption(
7775
+ ).option("--compare <baseline>", "baseline snapshot path").option("--max-risk-delta <value>", "maximum allowed normalized risk score increase").option(
7776
+ "--max-quality-delta <value>",
7777
+ "maximum allowed normalized quality score regression versus baseline (requires --compare)"
7778
+ ).option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-risk-score <score>", "absolute risk score limit (0..100)").option("--min-quality-score <score>", "minimum quality score threshold (0..100)").addOption(
6856
7779
  new Option("--fail-on <level>", "failing severity threshold").choices(["error", "warn"]).default("error")
6857
7780
  ).addOption(
6858
7781
  new Option("--format <mode>", "output format: text, json, md").choices(["text", "json", "md"]).default("text")
@@ -6920,7 +7843,10 @@ program.command("ci").argument("[path]", "path to the project to analyze").addOp
6920
7843
  ).option(
6921
7844
  "--main-branches <names>",
6922
7845
  "comma-separated default branch candidates for auto baseline resolution (for example: main,master)"
6923
- ).option("--snapshot <path>", "write current snapshot JSON to path").option("--report <path>", "write markdown CI summary report").option("--json-output <path>", "write machine-readable CI JSON output").option("--max-repo-delta <value>", "maximum allowed normalized repository score increase").option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-repo-score <score>", "absolute repository score limit (0..100)").addOption(
7846
+ ).option("--snapshot <path>", "write current snapshot JSON to path").option("--report <path>", "write markdown CI summary report").option("--json-output <path>", "write machine-readable CI JSON output").option("--max-risk-delta <value>", "maximum allowed normalized risk score increase").option(
7847
+ "--max-quality-delta <value>",
7848
+ "maximum allowed normalized quality score regression versus baseline"
7849
+ ).option("--no-new-cycles", "fail if new structural cycles are introduced").option("--no-new-high-risk-deps", "fail if new high-risk direct dependencies are introduced").option("--max-new-hotspots <count>", "maximum allowed number of new hotspots").option("--new-hotspot-score-threshold <score>", "minimum hotspot score to count as new hotspot").option("--max-risk-score <score>", "absolute risk score limit (0..100)").option("--min-quality-score <score>", "minimum quality score threshold (0..100)").addOption(
6924
7850
  new Option("--fail-on <level>", "failing severity threshold").choices(["error", "warn"]).default("error")
6925
7851
  ).option("--no-trace", "disable trace embedding in generated snapshot").addOption(
6926
7852
  new Option(