@harness-engineering/core 0.13.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -36,11 +36,12 @@ import {
36
36
  fileExists,
37
37
  findFiles,
38
38
  readFileContent,
39
+ relativePosix,
39
40
  resolveFileToLayer,
40
41
  runAll,
41
42
  validateDependencies,
42
43
  violationId
43
- } from "./chunk-ZHGBWFYD.mjs";
44
+ } from "./chunk-BQUWXBGR.mjs";
44
45
 
45
46
  // src/index.ts
46
47
  export * from "@harness-engineering/types";
@@ -83,15 +84,15 @@ function validateConfig(data, schema) {
83
84
  let message = "Configuration validation failed";
84
85
  const suggestions = [];
85
86
  if (firstError) {
86
- const path20 = firstError.path.join(".");
87
- const pathDisplay = path20 ? ` at "${path20}"` : "";
87
+ const path22 = firstError.path.join(".");
88
+ const pathDisplay = path22 ? ` at "${path22}"` : "";
88
89
  if (firstError.code === "invalid_type") {
89
90
  const received = firstError.received;
90
91
  const expected = firstError.expected;
91
92
  if (received === "undefined") {
92
93
  code = "MISSING_FIELD";
93
94
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
94
- suggestions.push(`Field "${path20}" is required and must be of type "${expected}"`);
95
+ suggestions.push(`Field "${path22}" is required and must be of type "${expected}"`);
95
96
  } else {
96
97
  code = "INVALID_TYPE";
97
98
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -245,6 +246,43 @@ function extractMarkdownLinks(content) {
245
246
  }
246
247
  return links;
247
248
  }
249
+ function isDescriptionTerminator(trimmed) {
250
+ return trimmed.startsWith("#") || trimmed.startsWith("-") || trimmed.startsWith("*") || trimmed.startsWith("```");
251
+ }
252
+ function extractDescription(sectionLines) {
253
+ const descriptionLines = [];
254
+ for (const line of sectionLines) {
255
+ const trimmed = line.trim();
256
+ if (trimmed === "") {
257
+ if (descriptionLines.length > 0) break;
258
+ continue;
259
+ }
260
+ if (isDescriptionTerminator(trimmed)) break;
261
+ descriptionLines.push(trimmed);
262
+ }
263
+ return descriptionLines.length > 0 ? descriptionLines.join(" ") : void 0;
264
+ }
265
+ function buildAgentMapSection(section, lines) {
266
+ const endIndex = section.endIndex ?? lines.length;
267
+ const sectionLines = lines.slice(section.startIndex + 1, endIndex);
268
+ const sectionContent = sectionLines.join("\n");
269
+ const links = extractMarkdownLinks(sectionContent).map((link) => ({
270
+ ...link,
271
+ line: link.line + section.startIndex + 1,
272
+ exists: false
273
+ }));
274
+ const result = {
275
+ title: section.title,
276
+ level: section.level,
277
+ line: section.line,
278
+ links
279
+ };
280
+ const description = extractDescription(sectionLines);
281
+ if (description) {
282
+ result.description = description;
283
+ }
284
+ return result;
285
+ }
248
286
  function extractSections(content) {
249
287
  const lines = content.split("\n");
250
288
  const sections = [];
@@ -257,7 +295,6 @@ function extractSections(content) {
257
295
  title: match[2].trim(),
258
296
  level: match[1].length,
259
297
  line: i + 1,
260
- // 1-indexed
261
298
  startIndex: i
262
299
  });
263
300
  }
@@ -269,62 +306,29 @@ function extractSections(content) {
269
306
  currentSection.endIndex = nextSection ? nextSection.startIndex : lines.length;
270
307
  }
271
308
  }
272
- return sections.map((section) => {
273
- const endIndex = section.endIndex ?? lines.length;
274
- const sectionLines = lines.slice(section.startIndex + 1, endIndex);
275
- const sectionContent = sectionLines.join("\n");
276
- const links = extractMarkdownLinks(sectionContent).map((link) => ({
277
- ...link,
278
- line: link.line + section.startIndex + 1,
279
- // Adjust line number
280
- exists: false
281
- // Will be set later by validateAgentsMap
282
- }));
283
- const descriptionLines = [];
284
- for (const line of sectionLines) {
285
- const trimmed = line.trim();
286
- if (trimmed === "") {
287
- if (descriptionLines.length > 0) break;
288
- continue;
289
- }
290
- if (trimmed.startsWith("#")) break;
291
- if (trimmed.startsWith("-") || trimmed.startsWith("*")) break;
292
- if (trimmed.startsWith("```")) break;
293
- descriptionLines.push(trimmed);
294
- }
295
- const result = {
296
- title: section.title,
297
- level: section.level,
298
- line: section.line,
299
- links
300
- };
301
- if (descriptionLines.length > 0) {
302
- result.description = descriptionLines.join(" ");
303
- }
304
- return result;
305
- });
309
+ return sections.map((section) => buildAgentMapSection(section, lines));
306
310
  }
307
- function isExternalLink(path20) {
308
- return path20.startsWith("http://") || path20.startsWith("https://") || path20.startsWith("#") || path20.startsWith("mailto:");
311
+ function isExternalLink(path22) {
312
+ return path22.startsWith("http://") || path22.startsWith("https://") || path22.startsWith("#") || path22.startsWith("mailto:");
309
313
  }
310
314
  function resolveLinkPath(linkPath, baseDir) {
311
315
  return linkPath.startsWith(".") ? join(baseDir, linkPath) : linkPath;
312
316
  }
313
- async function validateAgentsMap(path20 = "./AGENTS.md") {
314
- const contentResult = await readFileContent(path20);
317
+ async function validateAgentsMap(path22 = "./AGENTS.md") {
318
+ const contentResult = await readFileContent(path22);
315
319
  if (!contentResult.ok) {
316
320
  return Err(
317
321
  createError(
318
322
  "PARSE_ERROR",
319
323
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
320
- { path: path20 },
324
+ { path: path22 },
321
325
  ["Ensure the file exists", "Check file permissions"]
322
326
  )
323
327
  );
324
328
  }
325
329
  const content = contentResult.value;
326
330
  const sections = extractSections(content);
327
- const baseDir = dirname(path20);
331
+ const baseDir = dirname(path22);
328
332
  const sectionTitles = sections.map((s) => s.title);
329
333
  const missingSections = REQUIRED_SECTIONS.filter(
330
334
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -365,7 +369,7 @@ async function validateAgentsMap(path20 = "./AGENTS.md") {
365
369
 
366
370
  // src/context/doc-coverage.ts
367
371
  import { minimatch } from "minimatch";
368
- import { basename, relative } from "path";
372
+ import { basename } from "path";
369
373
  function determineImportance(filePath) {
370
374
  const name = basename(filePath).toLowerCase();
371
375
  if (name === "index.ts" || name === "index.js" || name === "main.ts") {
@@ -405,7 +409,7 @@ async function checkDocCoverage(domain, options = {}) {
405
409
  try {
406
410
  const sourceFiles = await findFiles("**/*.{ts,js,tsx,jsx}", sourceDir);
407
411
  const filteredSourceFiles = sourceFiles.filter((file) => {
408
- const relativePath = relative(sourceDir, file);
412
+ const relativePath = relativePosix(sourceDir, file);
409
413
  return !excludePatterns.some((pattern) => {
410
414
  return minimatch(relativePath, pattern, { dot: true }) || minimatch(file, pattern, { dot: true });
411
415
  });
@@ -428,7 +432,7 @@ async function checkDocCoverage(domain, options = {}) {
428
432
  const undocumented = [];
429
433
  const gaps = [];
430
434
  for (const sourceFile of filteredSourceFiles) {
431
- const relativePath = relative(sourceDir, sourceFile);
435
+ const relativePath = relativePosix(sourceDir, sourceFile);
432
436
  const fileName = basename(sourceFile);
433
437
  const isDocumented = documentedPaths.has(relativePath) || documentedPaths.has(fileName) || documentedPaths.has(`src/${relativePath}`);
434
438
  if (isDocumented) {
@@ -464,9 +468,9 @@ async function checkDocCoverage(domain, options = {}) {
464
468
  }
465
469
 
466
470
  // src/context/knowledge-map.ts
467
- import { join as join2, basename as basename2, relative as relative2 } from "path";
468
- function suggestFix(path20, existingFiles) {
469
- const targetName = basename2(path20).toLowerCase();
471
+ import { join as join2, basename as basename2 } from "path";
472
+ function suggestFix(path22, existingFiles) {
473
+ const targetName = basename2(path22).toLowerCase();
470
474
  const similar = existingFiles.find((file) => {
471
475
  const fileName = basename2(file).toLowerCase();
472
476
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -474,7 +478,7 @@ function suggestFix(path20, existingFiles) {
474
478
  if (similar) {
475
479
  return `Did you mean "${similar}"?`;
476
480
  }
477
- return `Create the file "${path20}" or remove the link`;
481
+ return `Create the file "${path22}" or remove the link`;
478
482
  }
479
483
  async function validateKnowledgeMap(rootDir = process.cwd()) {
480
484
  const agentsPath = join2(rootDir, "AGENTS.md");
@@ -488,7 +492,7 @@ async function validateKnowledgeMap(rootDir = process.cwd()) {
488
492
  totalLinks: agentsTotalLinks
489
493
  } = agentsResult.value;
490
494
  const existingFiles = await findFiles("**/*", rootDir);
491
- const relativeExistingFiles = existingFiles.map((f) => relative2(rootDir, f));
495
+ const relativeExistingFiles = existingFiles.map((f) => relativePosix(rootDir, f));
492
496
  const brokenLinks = agentsBrokenLinks.map((link) => {
493
497
  const section = sections.find(
494
498
  (s) => s.links.some((l) => l.path === link.path && l.line === link.line)
@@ -513,7 +517,7 @@ async function validateKnowledgeMap(rootDir = process.cwd()) {
513
517
  }
514
518
 
515
519
  // src/context/generate.ts
516
- import { relative as relative3, basename as basename3, dirname as dirname2 } from "path";
520
+ import { basename as basename3, dirname as dirname2 } from "path";
517
521
  var DEFAULT_SECTIONS = [
518
522
  {
519
523
  name: "Documentation",
@@ -529,7 +533,7 @@ var DEFAULT_SECTIONS = [
529
533
  function groupByDirectory(files, rootDir) {
530
534
  const groups = /* @__PURE__ */ new Map();
531
535
  for (const file of files) {
532
- const relativePath = relative3(rootDir, file);
536
+ const relativePath = relativePosix(rootDir, file);
533
537
  const dir = dirname2(relativePath);
534
538
  if (!groups.has(dir)) {
535
539
  groups.set(dir, []);
@@ -585,7 +589,7 @@ async function generateAgentsMap(config, graphSections) {
585
589
  allFiles.push(...files);
586
590
  }
587
591
  const filteredFiles = allFiles.filter((file) => {
588
- const relativePath = relative3(rootDir, file);
592
+ const relativePath = relativePosix(rootDir, file);
589
593
  return !matchesExcludePattern(relativePath, excludePaths);
590
594
  });
591
595
  lines.push("## Repository Structure");
@@ -613,11 +617,11 @@ async function generateAgentsMap(config, graphSections) {
613
617
  }
614
618
  const sectionFiles = await findFiles(section.pattern, rootDir);
615
619
  const filteredSectionFiles = sectionFiles.filter((file) => {
616
- const relativePath = relative3(rootDir, file);
620
+ const relativePath = relativePosix(rootDir, file);
617
621
  return !matchesExcludePattern(relativePath, excludePaths);
618
622
  });
619
623
  for (const file of filteredSectionFiles.slice(0, 20)) {
620
- lines.push(formatFileLink(relative3(rootDir, file)));
624
+ lines.push(formatFileLink(relativePosix(rootDir, file)));
621
625
  }
622
626
  if (filteredSectionFiles.length > 20) {
623
627
  lines.push(`- _... and ${filteredSectionFiles.length - 20} more files_`);
@@ -826,8 +830,8 @@ function createBoundaryValidator(schema, name) {
826
830
  return Ok(result.data);
827
831
  }
828
832
  const suggestions = result.error.issues.map((issue) => {
829
- const path20 = issue.path.join(".");
830
- return path20 ? `${path20}: ${issue.message}` : issue.message;
833
+ const path22 = issue.path.join(".");
834
+ return path22 ? `${path22}: ${issue.message}` : issue.message;
831
835
  });
832
836
  return Err(
833
837
  createError(
@@ -1049,175 +1053,183 @@ function stringArraysEqual(a, b) {
1049
1053
  const sortedB = [...b].sort();
1050
1054
  return sortedA.every((val, i) => val === sortedB[i]);
1051
1055
  }
1052
- function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1053
- const config = { ...localConfig };
1054
- const contributions = {};
1055
- const conflicts = [];
1056
- if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1057
- const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1058
- const mergedLayers = [...localLayers];
1059
- const contributedLayerNames = [];
1060
- for (const bundleLayer of bundleConstraints.layers) {
1061
- const existing = localLayers.find((l) => l.name === bundleLayer.name);
1062
- if (!existing) {
1063
- mergedLayers.push(bundleLayer);
1064
- contributedLayerNames.push(bundleLayer.name);
1065
- } else {
1066
- const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1067
- if (!same) {
1068
- conflicts.push({
1069
- section: "layers",
1070
- key: bundleLayer.name,
1071
- localValue: existing,
1072
- packageValue: bundleLayer,
1073
- description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1074
- });
1075
- }
1056
+ function mergeLayers(localConfig, bundleLayers, config, contributions, conflicts) {
1057
+ const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1058
+ const mergedLayers = [...localLayers];
1059
+ const contributedLayerNames = [];
1060
+ for (const bundleLayer of bundleLayers) {
1061
+ const existing = localLayers.find((l) => l.name === bundleLayer.name);
1062
+ if (!existing) {
1063
+ mergedLayers.push(bundleLayer);
1064
+ contributedLayerNames.push(bundleLayer.name);
1065
+ } else {
1066
+ const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1067
+ if (!same) {
1068
+ conflicts.push({
1069
+ section: "layers",
1070
+ key: bundleLayer.name,
1071
+ localValue: existing,
1072
+ packageValue: bundleLayer,
1073
+ description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1074
+ });
1075
+ }
1076
+ }
1077
+ }
1078
+ config.layers = mergedLayers;
1079
+ if (contributedLayerNames.length > 0) contributions.layers = contributedLayerNames;
1080
+ }
1081
+ function mergeForbiddenImports(localConfig, bundleRules, config, contributions, conflicts) {
1082
+ const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1083
+ const mergedFI = [...localFI];
1084
+ const contributedFromKeys = [];
1085
+ for (const bundleRule of bundleRules) {
1086
+ const existing = localFI.find((r) => r.from === bundleRule.from);
1087
+ if (!existing) {
1088
+ const entry = { from: bundleRule.from, disallow: bundleRule.disallow };
1089
+ if (bundleRule.message !== void 0) entry.message = bundleRule.message;
1090
+ mergedFI.push(entry);
1091
+ contributedFromKeys.push(bundleRule.from);
1092
+ } else {
1093
+ if (!stringArraysEqual(existing.disallow, bundleRule.disallow)) {
1094
+ conflicts.push({
1095
+ section: "forbiddenImports",
1096
+ key: bundleRule.from,
1097
+ localValue: existing,
1098
+ packageValue: bundleRule,
1099
+ description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1100
+ });
1076
1101
  }
1077
1102
  }
1078
- config.layers = mergedLayers;
1079
- if (contributedLayerNames.length > 0) {
1080
- contributions.layers = contributedLayerNames;
1103
+ }
1104
+ config.forbiddenImports = mergedFI;
1105
+ if (contributedFromKeys.length > 0) contributions.forbiddenImports = contributedFromKeys;
1106
+ }
1107
+ function mergeBoundaries(localConfig, bundleBoundaries, config, contributions) {
1108
+ const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1109
+ const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1110
+ const newSchemas = [];
1111
+ for (const schema of bundleBoundaries.requireSchema ?? []) {
1112
+ if (!localSchemas.has(schema)) {
1113
+ newSchemas.push(schema);
1114
+ localSchemas.add(schema);
1115
+ }
1116
+ }
1117
+ config.boundaries = { requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas] };
1118
+ if (newSchemas.length > 0) contributions.boundaries = newSchemas;
1119
+ }
1120
+ function mergeArchitecture(localConfig, bundleArch, config, contributions, conflicts) {
1121
+ const localArch = localConfig.architecture ?? { thresholds: {}, modules: {} };
1122
+ const mergedThresholds = { ...localArch.thresholds };
1123
+ const contributedThresholdKeys = [];
1124
+ for (const [category, value] of Object.entries(bundleArch.thresholds ?? {})) {
1125
+ if (!(category in mergedThresholds)) {
1126
+ mergedThresholds[category] = value;
1127
+ contributedThresholdKeys.push(category);
1128
+ } else if (!deepEqual(mergedThresholds[category], value)) {
1129
+ conflicts.push({
1130
+ section: "architecture.thresholds",
1131
+ key: category,
1132
+ localValue: mergedThresholds[category],
1133
+ packageValue: value,
1134
+ description: `Architecture threshold '${category}' already exists locally with a different value`
1135
+ });
1081
1136
  }
1082
1137
  }
1083
- if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1084
- const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1085
- const mergedFI = [...localFI];
1086
- const contributedFromKeys = [];
1087
- for (const bundleRule of bundleConstraints.forbiddenImports) {
1088
- const existing = localFI.find((r) => r.from === bundleRule.from);
1089
- if (!existing) {
1090
- const entry = {
1091
- from: bundleRule.from,
1092
- disallow: bundleRule.disallow
1093
- };
1094
- if (bundleRule.message !== void 0) {
1095
- entry.message = bundleRule.message;
1096
- }
1097
- mergedFI.push(entry);
1098
- contributedFromKeys.push(bundleRule.from);
1099
- } else {
1100
- const same = stringArraysEqual(existing.disallow, bundleRule.disallow);
1101
- if (!same) {
1138
+ const mergedModules = { ...localArch.modules };
1139
+ const contributedModuleKeys = [];
1140
+ for (const [modulePath, bundleCategoryMap] of Object.entries(bundleArch.modules ?? {})) {
1141
+ if (!(modulePath in mergedModules)) {
1142
+ mergedModules[modulePath] = bundleCategoryMap;
1143
+ for (const cat of Object.keys(bundleCategoryMap))
1144
+ contributedModuleKeys.push(`${modulePath}:${cat}`);
1145
+ } else {
1146
+ const mergedCategoryMap = { ...mergedModules[modulePath] };
1147
+ for (const [category, value] of Object.entries(bundleCategoryMap)) {
1148
+ if (!(category in mergedCategoryMap)) {
1149
+ mergedCategoryMap[category] = value;
1150
+ contributedModuleKeys.push(`${modulePath}:${category}`);
1151
+ } else if (!deepEqual(mergedCategoryMap[category], value)) {
1102
1152
  conflicts.push({
1103
- section: "forbiddenImports",
1104
- key: bundleRule.from,
1105
- localValue: existing,
1106
- packageValue: bundleRule,
1107
- description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1153
+ section: "architecture.modules",
1154
+ key: `${modulePath}:${category}`,
1155
+ localValue: mergedCategoryMap[category],
1156
+ packageValue: value,
1157
+ description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1108
1158
  });
1109
1159
  }
1110
1160
  }
1161
+ mergedModules[modulePath] = mergedCategoryMap;
1162
+ }
1163
+ }
1164
+ config.architecture = { ...localArch, thresholds: mergedThresholds, modules: mergedModules };
1165
+ if (contributedThresholdKeys.length > 0)
1166
+ contributions["architecture.thresholds"] = contributedThresholdKeys;
1167
+ if (contributedModuleKeys.length > 0)
1168
+ contributions["architecture.modules"] = contributedModuleKeys;
1169
+ }
1170
+ function mergeSecurityRules(localConfig, bundleRules, config, contributions, conflicts) {
1171
+ const localSecurity = localConfig.security ?? { rules: {} };
1172
+ const localRules = localSecurity.rules ?? {};
1173
+ const mergedRules = { ...localRules };
1174
+ const contributedRuleIds = [];
1175
+ for (const [ruleId, severity] of Object.entries(bundleRules)) {
1176
+ if (!(ruleId in mergedRules)) {
1177
+ mergedRules[ruleId] = severity;
1178
+ contributedRuleIds.push(ruleId);
1179
+ } else if (mergedRules[ruleId] !== severity) {
1180
+ conflicts.push({
1181
+ section: "security.rules",
1182
+ key: ruleId,
1183
+ localValue: mergedRules[ruleId],
1184
+ packageValue: severity,
1185
+ description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1186
+ });
1111
1187
  }
1112
- config.forbiddenImports = mergedFI;
1113
- if (contributedFromKeys.length > 0) {
1114
- contributions.forbiddenImports = contributedFromKeys;
1115
- }
1188
+ }
1189
+ config.security = { ...localSecurity, rules: mergedRules };
1190
+ if (contributedRuleIds.length > 0) contributions["security.rules"] = contributedRuleIds;
1191
+ }
1192
+ function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1193
+ const config = { ...localConfig };
1194
+ const contributions = {};
1195
+ const conflicts = [];
1196
+ if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1197
+ mergeLayers(localConfig, bundleConstraints.layers, config, contributions, conflicts);
1198
+ }
1199
+ if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1200
+ mergeForbiddenImports(
1201
+ localConfig,
1202
+ bundleConstraints.forbiddenImports,
1203
+ config,
1204
+ contributions,
1205
+ conflicts
1206
+ );
1116
1207
  }
1117
1208
  if (bundleConstraints.boundaries) {
1118
- const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1119
- const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1120
- const bundleSchemas = bundleConstraints.boundaries.requireSchema ?? [];
1121
- const newSchemas = [];
1122
- for (const schema of bundleSchemas) {
1123
- if (!localSchemas.has(schema)) {
1124
- newSchemas.push(schema);
1125
- localSchemas.add(schema);
1126
- }
1127
- }
1128
- config.boundaries = {
1129
- requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas]
1130
- };
1131
- if (newSchemas.length > 0) {
1132
- contributions.boundaries = newSchemas;
1133
- }
1209
+ mergeBoundaries(
1210
+ localConfig,
1211
+ bundleConstraints.boundaries,
1212
+ config,
1213
+ contributions
1214
+ );
1134
1215
  }
1135
1216
  if (bundleConstraints.architecture) {
1136
- const localArch = localConfig.architecture ?? {
1137
- thresholds: {},
1138
- modules: {}
1139
- };
1140
- const mergedThresholds = { ...localArch.thresholds };
1141
- const contributedThresholdKeys = [];
1142
- const bundleThresholds = bundleConstraints.architecture.thresholds ?? {};
1143
- for (const [category, value] of Object.entries(bundleThresholds)) {
1144
- if (!(category in mergedThresholds)) {
1145
- mergedThresholds[category] = value;
1146
- contributedThresholdKeys.push(category);
1147
- } else if (!deepEqual(mergedThresholds[category], value)) {
1148
- conflicts.push({
1149
- section: "architecture.thresholds",
1150
- key: category,
1151
- localValue: mergedThresholds[category],
1152
- packageValue: value,
1153
- description: `Architecture threshold '${category}' already exists locally with a different value`
1154
- });
1155
- }
1156
- }
1157
- const mergedModules = { ...localArch.modules };
1158
- const contributedModuleKeys = [];
1159
- const bundleModules = bundleConstraints.architecture.modules ?? {};
1160
- for (const [modulePath, bundleCategoryMap] of Object.entries(bundleModules)) {
1161
- if (!(modulePath in mergedModules)) {
1162
- mergedModules[modulePath] = bundleCategoryMap;
1163
- for (const cat of Object.keys(bundleCategoryMap)) {
1164
- contributedModuleKeys.push(`${modulePath}:${cat}`);
1165
- }
1166
- } else {
1167
- const localCategoryMap = mergedModules[modulePath];
1168
- const mergedCategoryMap = { ...localCategoryMap };
1169
- for (const [category, value] of Object.entries(bundleCategoryMap)) {
1170
- if (!(category in mergedCategoryMap)) {
1171
- mergedCategoryMap[category] = value;
1172
- contributedModuleKeys.push(`${modulePath}:${category}`);
1173
- } else if (!deepEqual(mergedCategoryMap[category], value)) {
1174
- conflicts.push({
1175
- section: "architecture.modules",
1176
- key: `${modulePath}:${category}`,
1177
- localValue: mergedCategoryMap[category],
1178
- packageValue: value,
1179
- description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1180
- });
1181
- }
1182
- }
1183
- mergedModules[modulePath] = mergedCategoryMap;
1184
- }
1185
- }
1186
- config.architecture = {
1187
- ...localArch,
1188
- thresholds: mergedThresholds,
1189
- modules: mergedModules
1190
- };
1191
- if (contributedThresholdKeys.length > 0) {
1192
- contributions["architecture.thresholds"] = contributedThresholdKeys;
1193
- }
1194
- if (contributedModuleKeys.length > 0) {
1195
- contributions["architecture.modules"] = contributedModuleKeys;
1196
- }
1217
+ mergeArchitecture(
1218
+ localConfig,
1219
+ bundleConstraints.architecture,
1220
+ config,
1221
+ contributions,
1222
+ conflicts
1223
+ );
1197
1224
  }
1198
1225
  if (bundleConstraints.security?.rules) {
1199
- const localSecurity = localConfig.security ?? { rules: {} };
1200
- const localRules = localSecurity.rules ?? {};
1201
- const mergedRules = { ...localRules };
1202
- const contributedRuleIds = [];
1203
- for (const [ruleId, severity] of Object.entries(bundleConstraints.security.rules)) {
1204
- if (!(ruleId in mergedRules)) {
1205
- mergedRules[ruleId] = severity;
1206
- contributedRuleIds.push(ruleId);
1207
- } else if (mergedRules[ruleId] !== severity) {
1208
- conflicts.push({
1209
- section: "security.rules",
1210
- key: ruleId,
1211
- localValue: mergedRules[ruleId],
1212
- packageValue: severity,
1213
- description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1214
- });
1215
- }
1216
- }
1217
- config.security = { ...localSecurity, rules: mergedRules };
1218
- if (contributedRuleIds.length > 0) {
1219
- contributions["security.rules"] = contributedRuleIds;
1220
- }
1226
+ mergeSecurityRules(
1227
+ localConfig,
1228
+ bundleConstraints.security.rules,
1229
+ config,
1230
+ contributions,
1231
+ conflicts
1232
+ );
1221
1233
  }
1222
1234
  return { config, contributions, conflicts };
1223
1235
  }
@@ -1378,14 +1390,84 @@ function walk(node, visitor) {
1378
1390
  }
1379
1391
  }
1380
1392
  }
1393
+ function makeLocation(node) {
1394
+ return {
1395
+ file: "",
1396
+ line: node.loc?.start.line ?? 0,
1397
+ column: node.loc?.start.column ?? 0
1398
+ };
1399
+ }
1400
+ function processImportSpecifiers(importDecl, imp) {
1401
+ for (const spec of importDecl.specifiers) {
1402
+ if (spec.type === "ImportDefaultSpecifier") {
1403
+ imp.default = spec.local.name;
1404
+ } else if (spec.type === "ImportNamespaceSpecifier") {
1405
+ imp.namespace = spec.local.name;
1406
+ } else if (spec.type === "ImportSpecifier") {
1407
+ imp.specifiers.push(spec.local.name);
1408
+ if (spec.importKind === "type") {
1409
+ imp.kind = "type";
1410
+ }
1411
+ }
1412
+ }
1413
+ }
1414
+ function getExportedName(exported) {
1415
+ return exported.type === "Identifier" ? exported.name : String(exported.value);
1416
+ }
1417
+ function processReExportSpecifiers(exportDecl, exports) {
1418
+ for (const spec of exportDecl.specifiers) {
1419
+ if (spec.type !== "ExportSpecifier") continue;
1420
+ exports.push({
1421
+ name: getExportedName(spec.exported),
1422
+ type: "named",
1423
+ location: makeLocation(exportDecl),
1424
+ isReExport: true,
1425
+ source: exportDecl.source.value
1426
+ });
1427
+ }
1428
+ }
1429
+ function processExportDeclaration(exportDecl, exports) {
1430
+ const decl = exportDecl.declaration;
1431
+ if (!decl) return;
1432
+ if (decl.type === "VariableDeclaration") {
1433
+ for (const declarator of decl.declarations) {
1434
+ if (declarator.id.type === "Identifier") {
1435
+ exports.push({
1436
+ name: declarator.id.name,
1437
+ type: "named",
1438
+ location: makeLocation(decl),
1439
+ isReExport: false
1440
+ });
1441
+ }
1442
+ }
1443
+ } else if ((decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") && decl.id) {
1444
+ exports.push({
1445
+ name: decl.id.name,
1446
+ type: "named",
1447
+ location: makeLocation(decl),
1448
+ isReExport: false
1449
+ });
1450
+ }
1451
+ }
1452
+ function processExportListSpecifiers(exportDecl, exports) {
1453
+ for (const spec of exportDecl.specifiers) {
1454
+ if (spec.type !== "ExportSpecifier") continue;
1455
+ exports.push({
1456
+ name: getExportedName(spec.exported),
1457
+ type: "named",
1458
+ location: makeLocation(exportDecl),
1459
+ isReExport: false
1460
+ });
1461
+ }
1462
+ }
1381
1463
  var TypeScriptParser = class {
1382
1464
  name = "typescript";
1383
1465
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1384
- async parseFile(path20) {
1385
- const contentResult = await readFileContent(path20);
1466
+ async parseFile(path22) {
1467
+ const contentResult = await readFileContent(path22);
1386
1468
  if (!contentResult.ok) {
1387
1469
  return Err(
1388
- createParseError("NOT_FOUND", `File not found: ${path20}`, { path: path20 }, [
1470
+ createParseError("NOT_FOUND", `File not found: ${path22}`, { path: path22 }, [
1389
1471
  "Check that the file exists",
1390
1472
  "Verify the path is correct"
1391
1473
  ])
@@ -1395,7 +1477,7 @@ var TypeScriptParser = class {
1395
1477
  const ast = parse(contentResult.value, {
1396
1478
  loc: true,
1397
1479
  range: true,
1398
- jsx: path20.endsWith(".tsx"),
1480
+ jsx: path22.endsWith(".tsx"),
1399
1481
  errorOnUnknownASTType: false
1400
1482
  });
1401
1483
  return Ok({
@@ -1406,7 +1488,7 @@ var TypeScriptParser = class {
1406
1488
  } catch (e) {
1407
1489
  const error = e;
1408
1490
  return Err(
1409
- createParseError("SYNTAX_ERROR", `Failed to parse ${path20}: ${error.message}`, { path: path20 }, [
1491
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path22}: ${error.message}`, { path: path22 }, [
1410
1492
  "Check for syntax errors in the file",
1411
1493
  "Ensure valid TypeScript syntax"
1412
1494
  ])
@@ -1422,26 +1504,12 @@ var TypeScriptParser = class {
1422
1504
  const imp = {
1423
1505
  source: importDecl.source.value,
1424
1506
  specifiers: [],
1425
- location: {
1426
- file: "",
1427
- line: importDecl.loc?.start.line ?? 0,
1428
- column: importDecl.loc?.start.column ?? 0
1429
- },
1507
+ location: makeLocation(importDecl),
1430
1508
  kind: importDecl.importKind === "type" ? "type" : "value"
1431
1509
  };
1432
- for (const spec of importDecl.specifiers) {
1433
- if (spec.type === "ImportDefaultSpecifier") {
1434
- imp.default = spec.local.name;
1435
- } else if (spec.type === "ImportNamespaceSpecifier") {
1436
- imp.namespace = spec.local.name;
1437
- } else if (spec.type === "ImportSpecifier") {
1438
- imp.specifiers.push(spec.local.name);
1439
- if (spec.importKind === "type") {
1440
- imp.kind = "type";
1441
- }
1442
- }
1443
- }
1510
+ processImportSpecifiers(importDecl, imp);
1444
1511
  imports.push(imp);
1512
+ return;
1445
1513
  }
1446
1514
  if (node.type === "ImportExpression") {
1447
1515
  const importExpr = node;
@@ -1449,11 +1517,7 @@ var TypeScriptParser = class {
1449
1517
  imports.push({
1450
1518
  source: importExpr.source.value,
1451
1519
  specifiers: [],
1452
- location: {
1453
- file: "",
1454
- line: importExpr.loc?.start.line ?? 0,
1455
- column: importExpr.loc?.start.column ?? 0
1456
- },
1520
+ location: makeLocation(importExpr),
1457
1521
  kind: "value"
1458
1522
  });
1459
1523
  }
@@ -1468,97 +1532,29 @@ var TypeScriptParser = class {
1468
1532
  if (node.type === "ExportNamedDeclaration") {
1469
1533
  const exportDecl = node;
1470
1534
  if (exportDecl.source) {
1471
- for (const spec of exportDecl.specifiers) {
1472
- if (spec.type === "ExportSpecifier") {
1473
- const exported = spec.exported;
1474
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
1475
- exports.push({
1476
- name,
1477
- type: "named",
1478
- location: {
1479
- file: "",
1480
- line: exportDecl.loc?.start.line ?? 0,
1481
- column: exportDecl.loc?.start.column ?? 0
1482
- },
1483
- isReExport: true,
1484
- source: exportDecl.source.value
1485
- });
1486
- }
1487
- }
1535
+ processReExportSpecifiers(exportDecl, exports);
1488
1536
  return;
1489
1537
  }
1490
- if (exportDecl.declaration) {
1491
- const decl = exportDecl.declaration;
1492
- if (decl.type === "VariableDeclaration") {
1493
- for (const declarator of decl.declarations) {
1494
- if (declarator.id.type === "Identifier") {
1495
- exports.push({
1496
- name: declarator.id.name,
1497
- type: "named",
1498
- location: {
1499
- file: "",
1500
- line: decl.loc?.start.line ?? 0,
1501
- column: decl.loc?.start.column ?? 0
1502
- },
1503
- isReExport: false
1504
- });
1505
- }
1506
- }
1507
- } else if (decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") {
1508
- if (decl.id) {
1509
- exports.push({
1510
- name: decl.id.name,
1511
- type: "named",
1512
- location: {
1513
- file: "",
1514
- line: decl.loc?.start.line ?? 0,
1515
- column: decl.loc?.start.column ?? 0
1516
- },
1517
- isReExport: false
1518
- });
1519
- }
1520
- }
1521
- }
1522
- for (const spec of exportDecl.specifiers) {
1523
- if (spec.type === "ExportSpecifier") {
1524
- const exported = spec.exported;
1525
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
1526
- exports.push({
1527
- name,
1528
- type: "named",
1529
- location: {
1530
- file: "",
1531
- line: exportDecl.loc?.start.line ?? 0,
1532
- column: exportDecl.loc?.start.column ?? 0
1533
- },
1534
- isReExport: false
1535
- });
1536
- }
1537
- }
1538
+ processExportDeclaration(exportDecl, exports);
1539
+ processExportListSpecifiers(exportDecl, exports);
1540
+ return;
1538
1541
  }
1539
1542
  if (node.type === "ExportDefaultDeclaration") {
1540
1543
  const exportDecl = node;
1541
1544
  exports.push({
1542
1545
  name: "default",
1543
1546
  type: "default",
1544
- location: {
1545
- file: "",
1546
- line: exportDecl.loc?.start.line ?? 0,
1547
- column: exportDecl.loc?.start.column ?? 0
1548
- },
1547
+ location: makeLocation(exportDecl),
1549
1548
  isReExport: false
1550
1549
  });
1550
+ return;
1551
1551
  }
1552
1552
  if (node.type === "ExportAllDeclaration") {
1553
1553
  const exportDecl = node;
1554
1554
  exports.push({
1555
1555
  name: exportDecl.exported?.name ?? "*",
1556
1556
  type: "namespace",
1557
- location: {
1558
- file: "",
1559
- line: exportDecl.loc?.start.line ?? 0,
1560
- column: exportDecl.loc?.start.column ?? 0
1561
- },
1557
+ location: makeLocation(exportDecl),
1562
1558
  isReExport: true,
1563
1559
  source: exportDecl.source.value
1564
1560
  });
@@ -1572,12 +1568,29 @@ var TypeScriptParser = class {
1572
1568
  };
1573
1569
 
1574
1570
  // src/entropy/snapshot.ts
1575
- import { join as join3, resolve, relative as relative4 } from "path";
1571
+ import { join as join3, resolve } from "path";
1576
1572
  import { minimatch as minimatch2 } from "minimatch";
1573
+ function collectFieldEntries(rootDir, field) {
1574
+ if (typeof field === "string") return [resolve(rootDir, field)];
1575
+ if (typeof field === "object" && field !== null) {
1576
+ return Object.values(field).filter((v) => typeof v === "string").map((v) => resolve(rootDir, v));
1577
+ }
1578
+ return [];
1579
+ }
1580
+ function extractPackageEntries(rootDir, pkg) {
1581
+ const entries = [];
1582
+ entries.push(...collectFieldEntries(rootDir, pkg["exports"]));
1583
+ if (entries.length === 0 && typeof pkg["main"] === "string") {
1584
+ entries.push(resolve(rootDir, pkg["main"]));
1585
+ }
1586
+ if (pkg["bin"]) {
1587
+ entries.push(...collectFieldEntries(rootDir, pkg["bin"]));
1588
+ }
1589
+ return entries;
1590
+ }
1577
1591
  async function resolveEntryPoints(rootDir, explicitEntries) {
1578
1592
  if (explicitEntries && explicitEntries.length > 0) {
1579
- const resolved = explicitEntries.map((e) => resolve(rootDir, e));
1580
- return Ok(resolved);
1593
+ return Ok(explicitEntries.map((e) => resolve(rootDir, e)));
1581
1594
  }
1582
1595
  const pkgPath = join3(rootDir, "package.json");
1583
1596
  if (await fileExists(pkgPath)) {
@@ -1585,38 +1598,8 @@ async function resolveEntryPoints(rootDir, explicitEntries) {
1585
1598
  if (pkgContent.ok) {
1586
1599
  try {
1587
1600
  const pkg = JSON.parse(pkgContent.value);
1588
- const entries = [];
1589
- if (pkg["exports"]) {
1590
- const exports = pkg["exports"];
1591
- if (typeof exports === "string") {
1592
- entries.push(resolve(rootDir, exports));
1593
- } else if (typeof exports === "object" && exports !== null) {
1594
- for (const value of Object.values(exports)) {
1595
- if (typeof value === "string") {
1596
- entries.push(resolve(rootDir, value));
1597
- }
1598
- }
1599
- }
1600
- }
1601
- const main = pkg["main"];
1602
- if (typeof main === "string" && entries.length === 0) {
1603
- entries.push(resolve(rootDir, main));
1604
- }
1605
- const bin = pkg["bin"];
1606
- if (bin) {
1607
- if (typeof bin === "string") {
1608
- entries.push(resolve(rootDir, bin));
1609
- } else if (typeof bin === "object") {
1610
- for (const value of Object.values(bin)) {
1611
- if (typeof value === "string") {
1612
- entries.push(resolve(rootDir, value));
1613
- }
1614
- }
1615
- }
1616
- }
1617
- if (entries.length > 0) {
1618
- return Ok(entries);
1619
- }
1601
+ const entries = extractPackageEntries(rootDir, pkg);
1602
+ if (entries.length > 0) return Ok(entries);
1620
1603
  } catch {
1621
1604
  }
1622
1605
  }
@@ -1690,66 +1673,49 @@ function extractInlineRefs(content) {
1690
1673
  }
1691
1674
  return refs;
1692
1675
  }
1693
- async function parseDocumentationFile(path20) {
1694
- const contentResult = await readFileContent(path20);
1676
+ async function parseDocumentationFile(path22) {
1677
+ const contentResult = await readFileContent(path22);
1695
1678
  if (!contentResult.ok) {
1696
1679
  return Err(
1697
1680
  createEntropyError(
1698
1681
  "PARSE_ERROR",
1699
- `Failed to read documentation file: ${path20}`,
1700
- { file: path20 },
1682
+ `Failed to read documentation file: ${path22}`,
1683
+ { file: path22 },
1701
1684
  ["Check that the file exists"]
1702
1685
  )
1703
1686
  );
1704
1687
  }
1705
1688
  const content = contentResult.value;
1706
- const type = path20.endsWith(".md") ? "markdown" : "text";
1689
+ const type = path22.endsWith(".md") ? "markdown" : "text";
1707
1690
  return Ok({
1708
- path: path20,
1691
+ path: path22,
1709
1692
  type,
1710
1693
  content,
1711
1694
  codeBlocks: extractCodeBlocks(content),
1712
1695
  inlineRefs: extractInlineRefs(content)
1713
1696
  });
1714
1697
  }
1698
+ function makeInternalSymbol(name, type, line) {
1699
+ return { name, type, line, references: 0, calledBy: [] };
1700
+ }
1701
+ function extractSymbolsFromNode(node) {
1702
+ const line = node.loc?.start?.line || 0;
1703
+ if (node.type === "FunctionDeclaration" && node.id?.name) {
1704
+ return [makeInternalSymbol(node.id.name, "function", line)];
1705
+ }
1706
+ if (node.type === "VariableDeclaration") {
1707
+ return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
1708
+ }
1709
+ if (node.type === "ClassDeclaration" && node.id?.name) {
1710
+ return [makeInternalSymbol(node.id.name, "class", line)];
1711
+ }
1712
+ return [];
1713
+ }
1715
1714
  function extractInternalSymbols(ast) {
1716
- const symbols = [];
1717
1715
  const body = ast.body;
1718
- if (!body?.body) return symbols;
1719
- for (const node of body.body) {
1720
- if (node.type === "FunctionDeclaration" && node.id?.name) {
1721
- symbols.push({
1722
- name: node.id.name,
1723
- type: "function",
1724
- line: node.loc?.start?.line || 0,
1725
- references: 0,
1726
- calledBy: []
1727
- });
1728
- }
1729
- if (node.type === "VariableDeclaration") {
1730
- for (const decl of node.declarations || []) {
1731
- if (decl.id?.name) {
1732
- symbols.push({
1733
- name: decl.id.name,
1734
- type: "variable",
1735
- line: node.loc?.start?.line || 0,
1736
- references: 0,
1737
- calledBy: []
1738
- });
1739
- }
1740
- }
1741
- }
1742
- if (node.type === "ClassDeclaration" && node.id?.name) {
1743
- symbols.push({
1744
- name: node.id.name,
1745
- type: "class",
1746
- line: node.loc?.start?.line || 0,
1747
- references: 0,
1748
- calledBy: []
1749
- });
1750
- }
1751
- }
1752
- return symbols;
1716
+ if (!body?.body) return [];
1717
+ const nodes = body.body;
1718
+ return nodes.flatMap(extractSymbolsFromNode);
1753
1719
  }
1754
1720
  function extractJSDocComments(ast) {
1755
1721
  const comments = [];
@@ -1836,7 +1802,7 @@ async function buildSnapshot(config) {
1836
1802
  sourceFilePaths.push(...files2);
1837
1803
  }
1838
1804
  sourceFilePaths = sourceFilePaths.filter((f) => {
1839
- const rel = relative4(rootDir, f);
1805
+ const rel = relativePosix(rootDir, f);
1840
1806
  return !excludePatterns.some((p) => minimatch2(rel, p));
1841
1807
  });
1842
1808
  const files = [];
@@ -1890,27 +1856,34 @@ async function buildSnapshot(config) {
1890
1856
 
1891
1857
  // src/entropy/detectors/drift.ts
1892
1858
  import { dirname as dirname3, resolve as resolve2 } from "path";
1893
- function levenshteinDistance(a, b) {
1859
+ function initLevenshteinMatrix(aLen, bLen) {
1894
1860
  const matrix = [];
1895
- for (let i = 0; i <= b.length; i++) {
1861
+ for (let i = 0; i <= bLen; i++) {
1896
1862
  matrix[i] = [i];
1897
1863
  }
1898
- for (let j = 0; j <= a.length; j++) {
1899
- const row = matrix[0];
1900
- if (row) {
1901
- row[j] = j;
1864
+ const firstRow = matrix[0];
1865
+ if (firstRow) {
1866
+ for (let j = 0; j <= aLen; j++) {
1867
+ firstRow[j] = j;
1902
1868
  }
1903
1869
  }
1870
+ return matrix;
1871
+ }
1872
+ function computeLevenshteinCell(row, prevRow, j, charsMatch) {
1873
+ if (charsMatch) {
1874
+ row[j] = prevRow[j - 1] ?? 0;
1875
+ } else {
1876
+ row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
1877
+ }
1878
+ }
1879
+ function levenshteinDistance(a, b) {
1880
+ const matrix = initLevenshteinMatrix(a.length, b.length);
1904
1881
  for (let i = 1; i <= b.length; i++) {
1905
1882
  for (let j = 1; j <= a.length; j++) {
1906
1883
  const row = matrix[i];
1907
1884
  const prevRow = matrix[i - 1];
1908
1885
  if (!row || !prevRow) continue;
1909
- if (b.charAt(i - 1) === a.charAt(j - 1)) {
1910
- row[j] = prevRow[j - 1] ?? 0;
1911
- } else {
1912
- row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
1913
- }
1886
+ computeLevenshteinCell(row, prevRow, j, b.charAt(i - 1) === a.charAt(j - 1));
1914
1887
  }
1915
1888
  }
1916
1889
  const lastRow = matrix[b.length];
@@ -2196,32 +2169,27 @@ function findDeadExports(snapshot, usageMap, reachability) {
2196
2169
  }
2197
2170
  return deadExports;
2198
2171
  }
2199
- function countLinesFromAST(ast) {
2200
- if (ast.body && Array.isArray(ast.body)) {
2201
- let maxLine = 0;
2202
- const traverse = (node) => {
2203
- if (node && typeof node === "object") {
2204
- const n = node;
2205
- if (n.loc?.end?.line && n.loc.end.line > maxLine) {
2206
- maxLine = n.loc.end.line;
2207
- }
2208
- for (const key of Object.keys(node)) {
2209
- const value = node[key];
2210
- if (Array.isArray(value)) {
2211
- for (const item of value) {
2212
- traverse(item);
2213
- }
2214
- } else if (value && typeof value === "object") {
2215
- traverse(value);
2216
- }
2217
- }
2172
+ function findMaxLineInNode(node) {
2173
+ if (!node || typeof node !== "object") return 0;
2174
+ const n = node;
2175
+ let maxLine = n.loc?.end?.line ?? 0;
2176
+ for (const key of Object.keys(node)) {
2177
+ const value = node[key];
2178
+ if (Array.isArray(value)) {
2179
+ for (const item of value) {
2180
+ maxLine = Math.max(maxLine, findMaxLineInNode(item));
2218
2181
  }
2219
- };
2220
- traverse(ast);
2221
- if (maxLine > 0) return maxLine;
2222
- return Math.max(ast.body.length * 3, 1);
2182
+ } else if (value && typeof value === "object") {
2183
+ maxLine = Math.max(maxLine, findMaxLineInNode(value));
2184
+ }
2223
2185
  }
2224
- return 1;
2186
+ return maxLine;
2187
+ }
2188
+ function countLinesFromAST(ast) {
2189
+ if (!ast.body || !Array.isArray(ast.body)) return 1;
2190
+ const maxLine = findMaxLineInNode(ast);
2191
+ if (maxLine > 0) return maxLine;
2192
+ return Math.max(ast.body.length * 3, 1);
2225
2193
  }
2226
2194
  function findDeadFiles(snapshot, reachability) {
2227
2195
  const deadFiles = [];
@@ -2368,135 +2336,150 @@ async function detectDeadCode(snapshot, graphDeadCodeData) {
2368
2336
 
2369
2337
  // src/entropy/detectors/patterns.ts
2370
2338
  import { minimatch as minimatch3 } from "minimatch";
2371
- import { relative as relative5 } from "path";
2372
2339
  function fileMatchesPattern(filePath, pattern, rootDir) {
2373
- const relativePath = relative5(rootDir, filePath);
2340
+ const relativePath = relativePosix(rootDir, filePath);
2374
2341
  return minimatch3(relativePath, pattern);
2375
2342
  }
2376
- function checkConfigPattern(pattern, file, rootDir) {
2343
+ var CONVENTION_DESCRIPTIONS = {
2344
+ camelCase: "camelCase (e.g., myFunction)",
2345
+ PascalCase: "PascalCase (e.g., MyClass)",
2346
+ UPPER_SNAKE: "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)",
2347
+ "kebab-case": "kebab-case (e.g., my-component)"
2348
+ };
2349
+ function checkMustExport(rule, file, message) {
2350
+ if (rule.type !== "must-export") return [];
2377
2351
  const matches = [];
2378
- const fileMatches = pattern.files.some((glob) => fileMatchesPattern(file.path, glob, rootDir));
2379
- if (!fileMatches) {
2380
- return matches;
2381
- }
2382
- const rule = pattern.rule;
2383
- switch (rule.type) {
2384
- case "must-export": {
2385
- for (const name of rule.names) {
2386
- const hasExport = file.exports.some((e) => e.name === name);
2387
- if (!hasExport) {
2388
- matches.push({
2389
- line: 1,
2390
- message: pattern.message || `Missing required export: "${name}"`,
2391
- suggestion: `Add export for "${name}"`
2392
- });
2393
- }
2394
- }
2395
- break;
2396
- }
2397
- case "must-export-default": {
2398
- const hasDefault = file.exports.some((e) => e.type === "default");
2399
- if (!hasDefault) {
2400
- matches.push({
2401
- line: 1,
2402
- message: pattern.message || "File must have a default export",
2403
- suggestion: "Add a default export"
2404
- });
2405
- }
2406
- break;
2407
- }
2408
- case "no-export": {
2409
- for (const name of rule.names) {
2410
- const exp = file.exports.find((e) => e.name === name);
2411
- if (exp) {
2412
- matches.push({
2413
- line: exp.location.line,
2414
- message: pattern.message || `Forbidden export: "${name}"`,
2415
- suggestion: `Remove export "${name}"`
2416
- });
2417
- }
2418
- }
2419
- break;
2352
+ for (const name of rule.names) {
2353
+ if (!file.exports.some((e) => e.name === name)) {
2354
+ matches.push({
2355
+ line: 1,
2356
+ message: message || `Missing required export: "${name}"`,
2357
+ suggestion: `Add export for "${name}"`
2358
+ });
2420
2359
  }
2421
- case "must-import": {
2422
- const hasImport = file.imports.some(
2423
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2424
- );
2425
- if (!hasImport) {
2426
- matches.push({
2427
- line: 1,
2428
- message: pattern.message || `Missing required import from "${rule.from}"`,
2429
- suggestion: `Add import from "${rule.from}"`
2430
- });
2360
+ }
2361
+ return matches;
2362
+ }
2363
+ function checkMustExportDefault(_rule, file, message) {
2364
+ if (!file.exports.some((e) => e.type === "default")) {
2365
+ return [
2366
+ {
2367
+ line: 1,
2368
+ message: message || "File must have a default export",
2369
+ suggestion: "Add a default export"
2431
2370
  }
2432
- break;
2371
+ ];
2372
+ }
2373
+ return [];
2374
+ }
2375
+ function checkNoExport(rule, file, message) {
2376
+ if (rule.type !== "no-export") return [];
2377
+ const matches = [];
2378
+ for (const name of rule.names) {
2379
+ const exp = file.exports.find((e) => e.name === name);
2380
+ if (exp) {
2381
+ matches.push({
2382
+ line: exp.location.line,
2383
+ message: message || `Forbidden export: "${name}"`,
2384
+ suggestion: `Remove export "${name}"`
2385
+ });
2433
2386
  }
2434
- case "no-import": {
2435
- const forbiddenImport = file.imports.find(
2436
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2437
- );
2438
- if (forbiddenImport) {
2439
- matches.push({
2440
- line: forbiddenImport.location.line,
2441
- message: pattern.message || `Forbidden import from "${rule.from}"`,
2442
- suggestion: `Remove import from "${rule.from}"`
2443
- });
2387
+ }
2388
+ return matches;
2389
+ }
2390
+ function checkMustImport(rule, file, message) {
2391
+ if (rule.type !== "must-import") return [];
2392
+ const hasImport = file.imports.some(
2393
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2394
+ );
2395
+ if (!hasImport) {
2396
+ return [
2397
+ {
2398
+ line: 1,
2399
+ message: message || `Missing required import from "${rule.from}"`,
2400
+ suggestion: `Add import from "${rule.from}"`
2444
2401
  }
2445
- break;
2446
- }
2447
- case "naming": {
2448
- const regex = new RegExp(rule.match);
2449
- for (const exp of file.exports) {
2450
- if (!regex.test(exp.name)) {
2451
- let expected = "";
2452
- switch (rule.convention) {
2453
- case "camelCase":
2454
- expected = "camelCase (e.g., myFunction)";
2455
- break;
2456
- case "PascalCase":
2457
- expected = "PascalCase (e.g., MyClass)";
2458
- break;
2459
- case "UPPER_SNAKE":
2460
- expected = "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)";
2461
- break;
2462
- case "kebab-case":
2463
- expected = "kebab-case (e.g., my-component)";
2464
- break;
2465
- }
2466
- matches.push({
2467
- line: exp.location.line,
2468
- message: pattern.message || `"${exp.name}" does not follow ${rule.convention} convention`,
2469
- suggestion: `Rename to follow ${expected}`
2470
- });
2471
- }
2402
+ ];
2403
+ }
2404
+ return [];
2405
+ }
2406
+ function checkNoImport(rule, file, message) {
2407
+ if (rule.type !== "no-import") return [];
2408
+ const forbiddenImport = file.imports.find(
2409
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2410
+ );
2411
+ if (forbiddenImport) {
2412
+ return [
2413
+ {
2414
+ line: forbiddenImport.location.line,
2415
+ message: message || `Forbidden import from "${rule.from}"`,
2416
+ suggestion: `Remove import from "${rule.from}"`
2472
2417
  }
2473
- break;
2418
+ ];
2419
+ }
2420
+ return [];
2421
+ }
2422
+ function checkNaming(rule, file, message) {
2423
+ if (rule.type !== "naming") return [];
2424
+ const regex = new RegExp(rule.match);
2425
+ const matches = [];
2426
+ for (const exp of file.exports) {
2427
+ if (!regex.test(exp.name)) {
2428
+ const expected = CONVENTION_DESCRIPTIONS[rule.convention] ?? rule.convention;
2429
+ matches.push({
2430
+ line: exp.location.line,
2431
+ message: message || `"${exp.name}" does not follow ${rule.convention} convention`,
2432
+ suggestion: `Rename to follow ${expected}`
2433
+ });
2474
2434
  }
2475
- case "max-exports": {
2476
- if (file.exports.length > rule.count) {
2477
- matches.push({
2478
- line: 1,
2479
- message: pattern.message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2480
- suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2481
- });
2435
+ }
2436
+ return matches;
2437
+ }
2438
+ function checkMaxExports(rule, file, message) {
2439
+ if (rule.type !== "max-exports") return [];
2440
+ if (file.exports.length > rule.count) {
2441
+ return [
2442
+ {
2443
+ line: 1,
2444
+ message: message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2445
+ suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2482
2446
  }
2483
- break;
2484
- }
2485
- case "max-lines": {
2486
- break;
2487
- }
2488
- case "require-jsdoc": {
2489
- if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2490
- matches.push({
2491
- line: 1,
2492
- message: pattern.message || "Exported symbols require JSDoc documentation",
2493
- suggestion: "Add JSDoc comments to exports"
2494
- });
2447
+ ];
2448
+ }
2449
+ return [];
2450
+ }
2451
+ function checkMaxLines(_rule, _file, _message) {
2452
+ return [];
2453
+ }
2454
+ function checkRequireJsdoc(_rule, file, message) {
2455
+ if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2456
+ return [
2457
+ {
2458
+ line: 1,
2459
+ message: message || "Exported symbols require JSDoc documentation",
2460
+ suggestion: "Add JSDoc comments to exports"
2495
2461
  }
2496
- break;
2497
- }
2462
+ ];
2498
2463
  }
2499
- return matches;
2464
+ return [];
2465
+ }
2466
+ var RULE_CHECKERS = {
2467
+ "must-export": checkMustExport,
2468
+ "must-export-default": checkMustExportDefault,
2469
+ "no-export": checkNoExport,
2470
+ "must-import": checkMustImport,
2471
+ "no-import": checkNoImport,
2472
+ naming: checkNaming,
2473
+ "max-exports": checkMaxExports,
2474
+ "max-lines": checkMaxLines,
2475
+ "require-jsdoc": checkRequireJsdoc
2476
+ };
2477
+ function checkConfigPattern(pattern, file, rootDir) {
2478
+ const fileMatches = pattern.files.some((glob) => fileMatchesPattern(file.path, glob, rootDir));
2479
+ if (!fileMatches) return [];
2480
+ const checker = RULE_CHECKERS[pattern.rule.type];
2481
+ if (!checker) return [];
2482
+ return checker(pattern.rule, file, pattern.message);
2500
2483
  }
2501
2484
  async function detectPatternViolations(snapshot, config) {
2502
2485
  const violations = [];
@@ -3015,19 +2998,37 @@ function createUnusedImportFixes(deadCodeReport) {
3015
2998
  reversible: true
3016
2999
  }));
3017
3000
  }
3018
- function createDeadExportFixes(deadCodeReport) {
3019
- return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
3020
- type: "dead-exports",
3021
- file: exp.file,
3022
- description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3023
- action: "replace",
3024
- oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3025
- newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3026
- safe: true,
3027
- reversible: true
3028
- }));
3001
+ var EXPORT_TYPE_KEYWORD = {
3002
+ class: "class",
3003
+ function: "function",
3004
+ variable: "const",
3005
+ type: "type",
3006
+ interface: "interface",
3007
+ enum: "enum"
3008
+ };
3009
+ function getExportKeyword(exportType) {
3010
+ return EXPORT_TYPE_KEYWORD[exportType] ?? "enum";
3029
3011
  }
3030
- function createCommentedCodeFixes(blocks) {
3012
+ function getDefaultExportKeyword(exportType) {
3013
+ if (exportType === "class" || exportType === "function") return exportType;
3014
+ return "";
3015
+ }
3016
+ function createDeadExportFixes(deadCodeReport) {
3017
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => {
3018
+ const keyword = exp.isDefault ? getDefaultExportKeyword(exp.type) : getExportKeyword(exp.type);
3019
+ return {
3020
+ type: "dead-exports",
3021
+ file: exp.file,
3022
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3023
+ action: "replace",
3024
+ oldContent: exp.isDefault ? `export default ${keyword} ${exp.name}` : `export ${keyword} ${exp.name}`,
3025
+ newContent: `${keyword} ${exp.name}`,
3026
+ safe: true,
3027
+ reversible: true
3028
+ };
3029
+ });
3030
+ }
3031
+ function createCommentedCodeFixes(blocks) {
3031
3032
  return blocks.map((block) => ({
3032
3033
  type: "commented-code",
3033
3034
  file: block.file,
@@ -3204,53 +3205,80 @@ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
3204
3205
  "dead-internal"
3205
3206
  ]);
3206
3207
  var idCounter = 0;
3208
+ var DEAD_CODE_FIX_ACTIONS = {
3209
+ "dead-export": "Remove export keyword",
3210
+ "dead-file": "Delete file",
3211
+ "commented-code": "Delete commented block",
3212
+ "unused-import": "Remove import"
3213
+ };
3214
+ function classifyDeadCode(input) {
3215
+ if (input.isPublicApi) {
3216
+ return {
3217
+ safety: "unsafe",
3218
+ safetyReason: "Public API export may have external consumers",
3219
+ suggestion: "Deprecate before removing"
3220
+ };
3221
+ }
3222
+ const fixAction = DEAD_CODE_FIX_ACTIONS[input.type];
3223
+ if (fixAction) {
3224
+ return {
3225
+ safety: "safe",
3226
+ safetyReason: "zero importers, non-public",
3227
+ fixAction,
3228
+ suggestion: fixAction
3229
+ };
3230
+ }
3231
+ if (input.type === "orphaned-dep") {
3232
+ return {
3233
+ safety: "probably-safe",
3234
+ safetyReason: "No imports found, but needs install+test verification",
3235
+ fixAction: "Remove from package.json",
3236
+ suggestion: "Remove from package.json"
3237
+ };
3238
+ }
3239
+ return {
3240
+ safety: "unsafe",
3241
+ safetyReason: "Unknown dead code type",
3242
+ suggestion: "Manual review required"
3243
+ };
3244
+ }
3245
+ function classifyArchitecture(input) {
3246
+ if (input.type === "import-ordering") {
3247
+ return {
3248
+ safety: "safe",
3249
+ safetyReason: "Mechanical reorder, no semantic change",
3250
+ fixAction: "Reorder imports",
3251
+ suggestion: "Reorder imports"
3252
+ };
3253
+ }
3254
+ if (input.type === "forbidden-import" && input.hasAlternative) {
3255
+ return {
3256
+ safety: "probably-safe",
3257
+ safetyReason: "Alternative configured, needs typecheck+test",
3258
+ fixAction: "Replace with configured alternative",
3259
+ suggestion: "Replace with configured alternative"
3260
+ };
3261
+ }
3262
+ return {
3263
+ safety: "unsafe",
3264
+ safetyReason: `${input.type} requires structural changes`,
3265
+ suggestion: "Restructure code to fix violation"
3266
+ };
3267
+ }
3207
3268
  function classifyFinding(input) {
3208
3269
  idCounter++;
3209
3270
  const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
3210
- let safety;
3211
- let safetyReason;
3212
- let fixAction;
3213
- let suggestion;
3271
+ let classification;
3214
3272
  if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
3215
- safety = "unsafe";
3216
- safetyReason = `${input.type} requires human judgment`;
3217
- suggestion = "Review and refactor manually";
3273
+ classification = {
3274
+ safety: "unsafe",
3275
+ safetyReason: `${input.type} requires human judgment`,
3276
+ suggestion: "Review and refactor manually"
3277
+ };
3218
3278
  } else if (input.concern === "dead-code") {
3219
- if (input.isPublicApi) {
3220
- safety = "unsafe";
3221
- safetyReason = "Public API export may have external consumers";
3222
- suggestion = "Deprecate before removing";
3223
- } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
3224
- safety = "safe";
3225
- safetyReason = "zero importers, non-public";
3226
- fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
3227
- suggestion = fixAction;
3228
- } else if (input.type === "orphaned-dep") {
3229
- safety = "probably-safe";
3230
- safetyReason = "No imports found, but needs install+test verification";
3231
- fixAction = "Remove from package.json";
3232
- suggestion = fixAction;
3233
- } else {
3234
- safety = "unsafe";
3235
- safetyReason = "Unknown dead code type";
3236
- suggestion = "Manual review required";
3237
- }
3279
+ classification = classifyDeadCode(input);
3238
3280
  } else {
3239
- if (input.type === "import-ordering") {
3240
- safety = "safe";
3241
- safetyReason = "Mechanical reorder, no semantic change";
3242
- fixAction = "Reorder imports";
3243
- suggestion = fixAction;
3244
- } else if (input.type === "forbidden-import" && input.hasAlternative) {
3245
- safety = "probably-safe";
3246
- safetyReason = "Alternative configured, needs typecheck+test";
3247
- fixAction = "Replace with configured alternative";
3248
- suggestion = fixAction;
3249
- } else {
3250
- safety = "unsafe";
3251
- safetyReason = `${input.type} requires structural changes`;
3252
- suggestion = "Restructure code to fix violation";
3253
- }
3281
+ classification = classifyArchitecture(input);
3254
3282
  }
3255
3283
  return {
3256
3284
  id,
@@ -3259,11 +3287,11 @@ function classifyFinding(input) {
3259
3287
  ...input.line !== void 0 ? { line: input.line } : {},
3260
3288
  type: input.type,
3261
3289
  description: input.description,
3262
- safety,
3263
- safetyReason,
3290
+ safety: classification.safety,
3291
+ safetyReason: classification.safetyReason,
3264
3292
  hotspotDowngraded: false,
3265
- ...fixAction !== void 0 ? { fixAction } : {},
3266
- suggestion
3293
+ ...classification.fixAction !== void 0 ? { fixAction: classification.fixAction } : {},
3294
+ suggestion: classification.suggestion
3267
3295
  };
3268
3296
  }
3269
3297
  function applyHotspotDowngrade(finding, hotspot) {
@@ -3557,43 +3585,57 @@ var BenchmarkRunner = class {
3557
3585
  };
3558
3586
  }
3559
3587
  }
3588
+ /**
3589
+ * Extract a BenchmarkResult from a single assertion with benchmark data.
3590
+ */
3591
+ parseBenchAssertion(assertion, file) {
3592
+ if (!assertion.benchmark) return null;
3593
+ const bench = assertion.benchmark;
3594
+ return {
3595
+ name: assertion.fullName || assertion.title || "unknown",
3596
+ file: file.replace(process.cwd() + "/", ""),
3597
+ opsPerSec: Math.round(bench.hz || 0),
3598
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
3599
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
3600
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
3601
+ };
3602
+ }
3603
+ /**
3604
+ * Extract JSON from output that may contain non-JSON preamble.
3605
+ */
3606
+ extractJson(output) {
3607
+ const jsonStart = output.indexOf("{");
3608
+ const jsonEnd = output.lastIndexOf("}");
3609
+ if (jsonStart === -1 || jsonEnd === -1) return null;
3610
+ return JSON.parse(output.slice(jsonStart, jsonEnd + 1));
3611
+ }
3560
3612
  /**
3561
3613
  * Parse vitest bench JSON reporter output into BenchmarkResult[].
3562
3614
  * Vitest bench JSON output contains testResults with benchmark data.
3563
3615
  */
3564
- parseVitestBenchOutput(output) {
3616
+ collectAssertionResults(testResults) {
3565
3617
  const results = [];
3566
- try {
3567
- const jsonStart = output.indexOf("{");
3568
- const jsonEnd = output.lastIndexOf("}");
3569
- if (jsonStart === -1 || jsonEnd === -1) return results;
3570
- const jsonStr = output.slice(jsonStart, jsonEnd + 1);
3571
- const parsed = JSON.parse(jsonStr);
3572
- if (parsed.testResults) {
3573
- for (const testResult of parsed.testResults) {
3574
- const file = testResult.name || testResult.filepath || "";
3575
- if (testResult.assertionResults) {
3576
- for (const assertion of testResult.assertionResults) {
3577
- if (assertion.benchmark) {
3578
- const bench = assertion.benchmark;
3579
- results.push({
3580
- name: assertion.fullName || assertion.title || "unknown",
3581
- file: file.replace(process.cwd() + "/", ""),
3582
- opsPerSec: Math.round(bench.hz || 0),
3583
- meanMs: bench.mean ? bench.mean * 1e3 : 0,
3584
- // p99: use actual p99 if available, otherwise estimate as 1.5× mean
3585
- p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
3586
- marginOfError: bench.rme ? bench.rme / 100 : 0.05
3587
- });
3588
- }
3589
- }
3590
- }
3591
- }
3618
+ for (const testResult of testResults) {
3619
+ const file = testResult.name || testResult.filepath || "";
3620
+ const assertions = testResult.assertionResults ?? [];
3621
+ for (const assertion of assertions) {
3622
+ const result = this.parseBenchAssertion(assertion, file);
3623
+ if (result) results.push(result);
3592
3624
  }
3593
- } catch {
3594
3625
  }
3595
3626
  return results;
3596
3627
  }
3628
+ parseVitestBenchOutput(output) {
3629
+ try {
3630
+ const parsed = this.extractJson(output);
3631
+ if (!parsed) return [];
3632
+ const testResults = parsed.testResults;
3633
+ if (!testResults) return [];
3634
+ return this.collectAssertionResults(testResults);
3635
+ } catch {
3636
+ return [];
3637
+ }
3638
+ }
3597
3639
  };
3598
3640
 
3599
3641
  // src/performance/regression-detector.ts
@@ -3903,39 +3945,31 @@ function resetFeedbackConfig() {
3903
3945
  }
3904
3946
 
3905
3947
  // src/feedback/review/diff-analyzer.ts
3948
+ function detectFileStatus(part) {
3949
+ if (/new file mode/.test(part)) return "added";
3950
+ if (/deleted file mode/.test(part)) return "deleted";
3951
+ if (part.includes("rename from")) return "renamed";
3952
+ return "modified";
3953
+ }
3954
+ function parseDiffPart(part) {
3955
+ if (!part.trim()) return null;
3956
+ const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
3957
+ if (!headerMatch || !headerMatch[2]) return null;
3958
+ const additionRegex = /^\+(?!\+\+)/gm;
3959
+ const deletionRegex = /^-(?!--)/gm;
3960
+ return {
3961
+ path: headerMatch[2],
3962
+ status: detectFileStatus(part),
3963
+ additions: (part.match(additionRegex) || []).length,
3964
+ deletions: (part.match(deletionRegex) || []).length
3965
+ };
3966
+ }
3906
3967
  function parseDiff(diff2) {
3907
3968
  try {
3908
3969
  if (!diff2.trim()) {
3909
3970
  return Ok({ diff: diff2, files: [] });
3910
3971
  }
3911
- const files = [];
3912
- const newFileRegex = /new file mode/;
3913
- const deletedFileRegex = /deleted file mode/;
3914
- const additionRegex = /^\+(?!\+\+)/gm;
3915
- const deletionRegex = /^-(?!--)/gm;
3916
- const diffParts = diff2.split(/(?=diff --git)/);
3917
- for (const part of diffParts) {
3918
- if (!part.trim()) continue;
3919
- const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
3920
- if (!headerMatch || !headerMatch[2]) continue;
3921
- const filePath = headerMatch[2];
3922
- let status = "modified";
3923
- if (newFileRegex.test(part)) {
3924
- status = "added";
3925
- } else if (deletedFileRegex.test(part)) {
3926
- status = "deleted";
3927
- } else if (part.includes("rename from")) {
3928
- status = "renamed";
3929
- }
3930
- const additions = (part.match(additionRegex) || []).length;
3931
- const deletions = (part.match(deletionRegex) || []).length;
3932
- files.push({
3933
- path: filePath,
3934
- status,
3935
- additions,
3936
- deletions
3937
- });
3938
- }
3972
+ const files = diff2.split(/(?=diff --git)/).map(parseDiffPart).filter((f) => f !== null);
3939
3973
  return Ok({ diff: diff2, files });
3940
3974
  } catch (error) {
3941
3975
  return Err({
@@ -4101,107 +4135,123 @@ var ChecklistBuilder = class {
4101
4135
  this.graphImpactData = graphImpactData;
4102
4136
  return this;
4103
4137
  }
4104
- async run(changes) {
4105
- const startTime = Date.now();
4138
+ /**
4139
+ * Build a single harness check item with or without graph data.
4140
+ */
4141
+ buildHarnessCheckItem(id, check, fallbackDetails, graphItemBuilder) {
4142
+ if (this.graphHarnessData && graphItemBuilder) {
4143
+ return graphItemBuilder();
4144
+ }
4145
+ return {
4146
+ id,
4147
+ category: "harness",
4148
+ check,
4149
+ passed: true,
4150
+ severity: "info",
4151
+ details: fallbackDetails
4152
+ };
4153
+ }
4154
+ /**
4155
+ * Build all harness check items based on harnessOptions and graph data.
4156
+ */
4157
+ buildHarnessItems() {
4158
+ if (!this.harnessOptions) return [];
4106
4159
  const items = [];
4107
- if (this.harnessOptions) {
4108
- if (this.harnessOptions.context !== false) {
4109
- if (this.graphHarnessData) {
4110
- items.push({
4111
- id: "harness-context",
4112
- category: "harness",
4113
- check: "Context validation",
4114
- passed: this.graphHarnessData.graphExists && this.graphHarnessData.nodeCount > 0,
4115
- severity: "info",
4116
- details: this.graphHarnessData.graphExists ? `Graph loaded: ${this.graphHarnessData.nodeCount} nodes, ${this.graphHarnessData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
4117
- });
4118
- } else {
4119
- items.push({
4160
+ const graphData = this.graphHarnessData;
4161
+ if (this.harnessOptions.context !== false) {
4162
+ items.push(
4163
+ this.buildHarnessCheckItem(
4164
+ "harness-context",
4165
+ "Context validation",
4166
+ "Harness context validation not yet integrated (run with graph for real checks)",
4167
+ graphData ? () => ({
4120
4168
  id: "harness-context",
4121
4169
  category: "harness",
4122
4170
  check: "Context validation",
4123
- passed: true,
4124
- severity: "info",
4125
- details: "Harness context validation not yet integrated (run with graph for real checks)"
4126
- });
4127
- }
4128
- }
4129
- if (this.harnessOptions.constraints !== false) {
4130
- if (this.graphHarnessData) {
4131
- const violations = this.graphHarnessData.constraintViolations;
4132
- items.push({
4133
- id: "harness-constraints",
4134
- category: "harness",
4135
- check: "Constraint validation",
4136
- passed: violations === 0,
4137
- severity: violations > 0 ? "error" : "info",
4138
- details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
4139
- });
4140
- } else {
4141
- items.push({
4142
- id: "harness-constraints",
4143
- category: "harness",
4144
- check: "Constraint validation",
4145
- passed: true,
4146
- severity: "info",
4147
- details: "Harness constraint validation not yet integrated (run with graph for real checks)"
4148
- });
4149
- }
4150
- }
4151
- if (this.harnessOptions.entropy !== false) {
4152
- if (this.graphHarnessData) {
4153
- const issues = this.graphHarnessData.unreachableNodes + this.graphHarnessData.undocumentedFiles;
4154
- items.push({
4155
- id: "harness-entropy",
4156
- category: "harness",
4157
- check: "Entropy detection",
4158
- passed: issues === 0,
4159
- severity: issues > 0 ? "warning" : "info",
4160
- details: issues === 0 ? "No entropy issues detected" : `${this.graphHarnessData.unreachableNodes} unreachable node(s), ${this.graphHarnessData.undocumentedFiles} undocumented file(s)`
4161
- });
4162
- } else {
4163
- items.push({
4164
- id: "harness-entropy",
4165
- category: "harness",
4166
- check: "Entropy detection",
4167
- passed: true,
4171
+ passed: graphData.graphExists && graphData.nodeCount > 0,
4168
4172
  severity: "info",
4169
- details: "Harness entropy detection not yet integrated (run with graph for real checks)"
4170
- });
4171
- }
4172
- }
4173
+ details: graphData.graphExists ? `Graph loaded: ${graphData.nodeCount} nodes, ${graphData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
4174
+ }) : void 0
4175
+ )
4176
+ );
4177
+ }
4178
+ if (this.harnessOptions.constraints !== false) {
4179
+ items.push(
4180
+ this.buildHarnessCheckItem(
4181
+ "harness-constraints",
4182
+ "Constraint validation",
4183
+ "Harness constraint validation not yet integrated (run with graph for real checks)",
4184
+ graphData ? () => {
4185
+ const violations = graphData.constraintViolations;
4186
+ return {
4187
+ id: "harness-constraints",
4188
+ category: "harness",
4189
+ check: "Constraint validation",
4190
+ passed: violations === 0,
4191
+ severity: violations > 0 ? "error" : "info",
4192
+ details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
4193
+ };
4194
+ } : void 0
4195
+ )
4196
+ );
4197
+ }
4198
+ if (this.harnessOptions.entropy !== false) {
4199
+ items.push(
4200
+ this.buildHarnessCheckItem(
4201
+ "harness-entropy",
4202
+ "Entropy detection",
4203
+ "Harness entropy detection not yet integrated (run with graph for real checks)",
4204
+ graphData ? () => {
4205
+ const issues = graphData.unreachableNodes + graphData.undocumentedFiles;
4206
+ return {
4207
+ id: "harness-entropy",
4208
+ category: "harness",
4209
+ check: "Entropy detection",
4210
+ passed: issues === 0,
4211
+ severity: issues > 0 ? "warning" : "info",
4212
+ details: issues === 0 ? "No entropy issues detected" : `${graphData.unreachableNodes} unreachable node(s), ${graphData.undocumentedFiles} undocumented file(s)`
4213
+ };
4214
+ } : void 0
4215
+ )
4216
+ );
4217
+ }
4218
+ return items;
4219
+ }
4220
+ /**
4221
+ * Execute a single custom rule and return a ReviewItem.
4222
+ */
4223
+ async executeCustomRule(rule, changes) {
4224
+ try {
4225
+ const result = await rule.check(changes, this.rootDir);
4226
+ const item = {
4227
+ id: rule.id,
4228
+ category: "custom",
4229
+ check: rule.name,
4230
+ passed: result.passed,
4231
+ severity: rule.severity,
4232
+ details: result.details
4233
+ };
4234
+ if (result.suggestion !== void 0) item.suggestion = result.suggestion;
4235
+ if (result.file !== void 0) item.file = result.file;
4236
+ if (result.line !== void 0) item.line = result.line;
4237
+ return item;
4238
+ } catch (error) {
4239
+ return {
4240
+ id: rule.id,
4241
+ category: "custom",
4242
+ check: rule.name,
4243
+ passed: false,
4244
+ severity: "error",
4245
+ details: `Rule execution failed: ${String(error)}`
4246
+ };
4173
4247
  }
4248
+ }
4249
+ async run(changes) {
4250
+ const startTime = Date.now();
4251
+ const items = [];
4252
+ items.push(...this.buildHarnessItems());
4174
4253
  for (const rule of this.customRules) {
4175
- try {
4176
- const result = await rule.check(changes, this.rootDir);
4177
- const item = {
4178
- id: rule.id,
4179
- category: "custom",
4180
- check: rule.name,
4181
- passed: result.passed,
4182
- severity: rule.severity,
4183
- details: result.details
4184
- };
4185
- if (result.suggestion !== void 0) {
4186
- item.suggestion = result.suggestion;
4187
- }
4188
- if (result.file !== void 0) {
4189
- item.file = result.file;
4190
- }
4191
- if (result.line !== void 0) {
4192
- item.line = result.line;
4193
- }
4194
- items.push(item);
4195
- } catch (error) {
4196
- items.push({
4197
- id: rule.id,
4198
- category: "custom",
4199
- check: rule.name,
4200
- passed: false,
4201
- severity: "error",
4202
- details: `Rule execution failed: ${String(error)}`
4203
- });
4204
- }
4254
+ items.push(await this.executeCustomRule(rule, changes));
4205
4255
  }
4206
4256
  if (this.diffOptions) {
4207
4257
  const diffResult = await analyzeDiff(changes, this.diffOptions, this.graphImpactData);
@@ -4216,7 +4266,6 @@ var ChecklistBuilder = class {
4216
4266
  const checklist = {
4217
4267
  items,
4218
4268
  passed: failed === 0,
4219
- // Pass if no failed items
4220
4269
  summary: {
4221
4270
  total: items.length,
4222
4271
  passed,
@@ -4769,6 +4818,8 @@ var INDEX_FILE = "index.json";
4769
4818
  var SESSIONS_DIR = "sessions";
4770
4819
  var SESSION_INDEX_FILE = "index.md";
4771
4820
  var SUMMARY_FILE = "summary.md";
4821
+ var SESSION_STATE_FILE = "session-state.json";
4822
+ var ARCHIVE_DIR = "archive";
4772
4823
 
4773
4824
  // src/state/stream-resolver.ts
4774
4825
  var STREAMS_DIR = "streams";
@@ -5677,6 +5728,143 @@ function listActiveSessions(projectPath) {
5677
5728
  }
5678
5729
  }
5679
5730
 
5731
+ // src/state/session-sections.ts
5732
+ import * as fs14 from "fs";
5733
+ import * as path11 from "path";
5734
+ import { SESSION_SECTION_NAMES } from "@harness-engineering/types";
5735
+ function emptySections() {
5736
+ const sections = {};
5737
+ for (const name of SESSION_SECTION_NAMES) {
5738
+ sections[name] = [];
5739
+ }
5740
+ return sections;
5741
+ }
5742
+ async function loadSessionState(projectPath, sessionSlug) {
5743
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
5744
+ if (!dirResult.ok) return dirResult;
5745
+ const sessionDir = dirResult.value;
5746
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
5747
+ if (!fs14.existsSync(filePath)) {
5748
+ return Ok(emptySections());
5749
+ }
5750
+ try {
5751
+ const raw = fs14.readFileSync(filePath, "utf-8");
5752
+ const parsed = JSON.parse(raw);
5753
+ const sections = emptySections();
5754
+ for (const name of SESSION_SECTION_NAMES) {
5755
+ if (Array.isArray(parsed[name])) {
5756
+ sections[name] = parsed[name];
5757
+ }
5758
+ }
5759
+ return Ok(sections);
5760
+ } catch (error) {
5761
+ return Err(
5762
+ new Error(
5763
+ `Failed to load session state: ${error instanceof Error ? error.message : String(error)}`
5764
+ )
5765
+ );
5766
+ }
5767
+ }
5768
+ async function saveSessionState(projectPath, sessionSlug, sections) {
5769
+ const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
5770
+ if (!dirResult.ok) return dirResult;
5771
+ const sessionDir = dirResult.value;
5772
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
5773
+ try {
5774
+ fs14.writeFileSync(filePath, JSON.stringify(sections, null, 2));
5775
+ return Ok(void 0);
5776
+ } catch (error) {
5777
+ return Err(
5778
+ new Error(
5779
+ `Failed to save session state: ${error instanceof Error ? error.message : String(error)}`
5780
+ )
5781
+ );
5782
+ }
5783
+ }
5784
+ async function readSessionSections(projectPath, sessionSlug) {
5785
+ return loadSessionState(projectPath, sessionSlug);
5786
+ }
5787
+ async function readSessionSection(projectPath, sessionSlug, section) {
5788
+ const result = await loadSessionState(projectPath, sessionSlug);
5789
+ if (!result.ok) return result;
5790
+ return Ok(result.value[section]);
5791
+ }
5792
+ async function appendSessionEntry(projectPath, sessionSlug, section, authorSkill, content) {
5793
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
5794
+ if (!loadResult.ok) return loadResult;
5795
+ const sections = loadResult.value;
5796
+ const entry = {
5797
+ id: generateEntryId(),
5798
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
5799
+ authorSkill,
5800
+ content,
5801
+ status: "active"
5802
+ };
5803
+ sections[section].push(entry);
5804
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
5805
+ if (!saveResult.ok) return saveResult;
5806
+ return Ok(entry);
5807
+ }
5808
+ async function updateSessionEntryStatus(projectPath, sessionSlug, section, entryId, newStatus) {
5809
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
5810
+ if (!loadResult.ok) return loadResult;
5811
+ const sections = loadResult.value;
5812
+ const entry = sections[section].find((e) => e.id === entryId);
5813
+ if (!entry) {
5814
+ return Err(new Error(`Entry '${entryId}' not found in section '${section}'`));
5815
+ }
5816
+ entry.status = newStatus;
5817
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
5818
+ if (!saveResult.ok) return saveResult;
5819
+ return Ok(entry);
5820
+ }
5821
+ function generateEntryId() {
5822
+ const timestamp = Date.now().toString(36);
5823
+ const random = Math.random().toString(36).substring(2, 8);
5824
+ return `${timestamp}-${random}`;
5825
+ }
5826
+
5827
+ // src/state/session-archive.ts
5828
+ import * as fs15 from "fs";
5829
+ import * as path12 from "path";
5830
+ async function archiveSession(projectPath, sessionSlug) {
5831
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
5832
+ if (!dirResult.ok) return dirResult;
5833
+ const sessionDir = dirResult.value;
5834
+ if (!fs15.existsSync(sessionDir)) {
5835
+ return Err(new Error(`Session '${sessionSlug}' not found at ${sessionDir}`));
5836
+ }
5837
+ const archiveBase = path12.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
5838
+ try {
5839
+ fs15.mkdirSync(archiveBase, { recursive: true });
5840
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
5841
+ let archiveName = `${sessionSlug}-${date}`;
5842
+ let counter = 1;
5843
+ while (fs15.existsSync(path12.join(archiveBase, archiveName))) {
5844
+ archiveName = `${sessionSlug}-${date}-${counter}`;
5845
+ counter++;
5846
+ }
5847
+ const dest = path12.join(archiveBase, archiveName);
5848
+ try {
5849
+ fs15.renameSync(sessionDir, dest);
5850
+ } catch (renameErr) {
5851
+ if (renameErr instanceof Error && "code" in renameErr && renameErr.code === "EXDEV") {
5852
+ fs15.cpSync(sessionDir, dest, { recursive: true });
5853
+ fs15.rmSync(sessionDir, { recursive: true });
5854
+ } else {
5855
+ throw renameErr;
5856
+ }
5857
+ }
5858
+ return Ok(void 0);
5859
+ } catch (error) {
5860
+ return Err(
5861
+ new Error(
5862
+ `Failed to archive session: ${error instanceof Error ? error.message : String(error)}`
5863
+ )
5864
+ );
5865
+ }
5866
+ }
5867
+
5680
5868
  // src/workflow/runner.ts
5681
5869
  async function executeWorkflow(workflow, executor) {
5682
5870
  const stepResults = [];
@@ -5826,7 +6014,7 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
5826
6014
  }
5827
6015
 
5828
6016
  // src/security/scanner.ts
5829
- import * as fs15 from "fs/promises";
6017
+ import * as fs17 from "fs/promises";
5830
6018
 
5831
6019
  // src/security/rules/registry.ts
5832
6020
  var RuleRegistry = class {
@@ -5913,15 +6101,15 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
5913
6101
  }
5914
6102
 
5915
6103
  // src/security/stack-detector.ts
5916
- import * as fs14 from "fs";
5917
- import * as path11 from "path";
6104
+ import * as fs16 from "fs";
6105
+ import * as path13 from "path";
5918
6106
  function detectStack(projectRoot) {
5919
6107
  const stacks = [];
5920
- const pkgJsonPath = path11.join(projectRoot, "package.json");
5921
- if (fs14.existsSync(pkgJsonPath)) {
6108
+ const pkgJsonPath = path13.join(projectRoot, "package.json");
6109
+ if (fs16.existsSync(pkgJsonPath)) {
5922
6110
  stacks.push("node");
5923
6111
  try {
5924
- const pkgJson = JSON.parse(fs14.readFileSync(pkgJsonPath, "utf-8"));
6112
+ const pkgJson = JSON.parse(fs16.readFileSync(pkgJsonPath, "utf-8"));
5925
6113
  const allDeps = {
5926
6114
  ...pkgJson.dependencies,
5927
6115
  ...pkgJson.devDependencies
@@ -5936,13 +6124,13 @@ function detectStack(projectRoot) {
5936
6124
  } catch {
5937
6125
  }
5938
6126
  }
5939
- const goModPath = path11.join(projectRoot, "go.mod");
5940
- if (fs14.existsSync(goModPath)) {
6127
+ const goModPath = path13.join(projectRoot, "go.mod");
6128
+ if (fs16.existsSync(goModPath)) {
5941
6129
  stacks.push("go");
5942
6130
  }
5943
- const requirementsPath = path11.join(projectRoot, "requirements.txt");
5944
- const pyprojectPath = path11.join(projectRoot, "pyproject.toml");
5945
- if (fs14.existsSync(requirementsPath) || fs14.existsSync(pyprojectPath)) {
6131
+ const requirementsPath = path13.join(projectRoot, "requirements.txt");
6132
+ const pyprojectPath = path13.join(projectRoot, "pyproject.toml");
6133
+ if (fs16.existsSync(requirementsPath) || fs16.existsSync(pyprojectPath)) {
5946
6134
  stacks.push("python");
5947
6135
  }
5948
6136
  return stacks;
@@ -6369,7 +6557,7 @@ var SecurityScanner = class {
6369
6557
  }
6370
6558
  async scanFile(filePath) {
6371
6559
  if (!this.config.enabled) return [];
6372
- const content = await fs15.readFile(filePath, "utf-8");
6560
+ const content = await fs17.readFile(filePath, "utf-8");
6373
6561
  return this.scanContent(content, filePath, 1);
6374
6562
  }
6375
6563
  async scanFiles(filePaths) {
@@ -6394,7 +6582,7 @@ var SecurityScanner = class {
6394
6582
  };
6395
6583
 
6396
6584
  // src/ci/check-orchestrator.ts
6397
- import * as path12 from "path";
6585
+ import * as path14 from "path";
6398
6586
  var ALL_CHECKS = [
6399
6587
  "validate",
6400
6588
  "deps",
@@ -6405,238 +6593,276 @@ var ALL_CHECKS = [
6405
6593
  "phase-gate",
6406
6594
  "arch"
6407
6595
  ];
6408
- async function runSingleCheck(name, projectRoot, config) {
6409
- const start = Date.now();
6596
+ async function runValidateCheck(projectRoot, config) {
6410
6597
  const issues = [];
6411
- try {
6412
- switch (name) {
6413
- case "validate": {
6414
- const agentsPath = path12.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6415
- const result = await validateAgentsMap(agentsPath);
6416
- if (!result.ok) {
6417
- issues.push({ severity: "error", message: result.error.message });
6418
- } else if (!result.value.valid) {
6419
- if (result.value.errors) {
6420
- for (const err of result.value.errors) {
6421
- issues.push({ severity: "error", message: err.message });
6422
- }
6423
- }
6424
- for (const section of result.value.missingSections) {
6425
- issues.push({ severity: "warning", message: `Missing section: ${section}` });
6426
- }
6427
- for (const link of result.value.brokenLinks) {
6428
- issues.push({
6429
- severity: "warning",
6430
- message: `Broken link: ${link.text} \u2192 ${link.path}`,
6431
- file: link.path
6432
- });
6433
- }
6434
- }
6435
- break;
6436
- }
6437
- case "deps": {
6438
- const rawLayers = config.layers;
6439
- if (rawLayers && rawLayers.length > 0) {
6440
- const parser = new TypeScriptParser();
6441
- const layers = rawLayers.map(
6442
- (l) => defineLayer(
6443
- l.name,
6444
- Array.isArray(l.patterns) ? l.patterns : [l.pattern],
6445
- l.allowedDependencies
6446
- )
6447
- );
6448
- const result = await validateDependencies({
6449
- layers,
6450
- rootDir: projectRoot,
6451
- parser
6452
- });
6453
- if (!result.ok) {
6454
- issues.push({ severity: "error", message: result.error.message });
6455
- } else if (result.value.violations.length > 0) {
6456
- for (const v of result.value.violations) {
6457
- issues.push({
6458
- severity: "error",
6459
- message: `${v.reason}: ${v.file} imports ${v.imports} (${v.fromLayer} \u2192 ${v.toLayer})`,
6460
- file: v.file,
6461
- line: v.line
6462
- });
6463
- }
6464
- }
6465
- }
6466
- break;
6598
+ const agentsPath = path14.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6599
+ const result = await validateAgentsMap(agentsPath);
6600
+ if (!result.ok) {
6601
+ issues.push({ severity: "error", message: result.error.message });
6602
+ } else if (!result.value.valid) {
6603
+ if (result.value.errors) {
6604
+ for (const err of result.value.errors) {
6605
+ issues.push({ severity: "error", message: err.message });
6606
+ }
6607
+ }
6608
+ for (const section of result.value.missingSections) {
6609
+ issues.push({ severity: "warning", message: `Missing section: ${section}` });
6610
+ }
6611
+ for (const link of result.value.brokenLinks) {
6612
+ issues.push({
6613
+ severity: "warning",
6614
+ message: `Broken link: ${link.text} \u2192 ${link.path}`,
6615
+ file: link.path
6616
+ });
6617
+ }
6618
+ }
6619
+ return issues;
6620
+ }
6621
+ async function runDepsCheck(projectRoot, config) {
6622
+ const issues = [];
6623
+ const rawLayers = config.layers;
6624
+ if (rawLayers && rawLayers.length > 0) {
6625
+ const parser = new TypeScriptParser();
6626
+ const layers = rawLayers.map(
6627
+ (l) => defineLayer(
6628
+ l.name,
6629
+ Array.isArray(l.patterns) ? l.patterns : [l.pattern],
6630
+ l.allowedDependencies
6631
+ )
6632
+ );
6633
+ const result = await validateDependencies({
6634
+ layers,
6635
+ rootDir: projectRoot,
6636
+ parser
6637
+ });
6638
+ if (!result.ok) {
6639
+ issues.push({ severity: "error", message: result.error.message });
6640
+ } else if (result.value.violations.length > 0) {
6641
+ for (const v of result.value.violations) {
6642
+ issues.push({
6643
+ severity: "error",
6644
+ message: `${v.reason}: ${v.file} imports ${v.imports} (${v.fromLayer} \u2192 ${v.toLayer})`,
6645
+ file: v.file,
6646
+ line: v.line
6647
+ });
6467
6648
  }
6468
- case "docs": {
6469
- const docsDir = path12.join(projectRoot, config.docsDir ?? "docs");
6470
- const entropyConfig = config.entropy || {};
6471
- const result = await checkDocCoverage("project", {
6472
- docsDir,
6473
- sourceDir: projectRoot,
6474
- excludePatterns: entropyConfig.excludePatterns || [
6475
- "**/node_modules/**",
6476
- "**/dist/**",
6477
- "**/*.test.ts",
6478
- "**/fixtures/**"
6479
- ]
6649
+ }
6650
+ }
6651
+ return issues;
6652
+ }
6653
+ async function runDocsCheck(projectRoot, config) {
6654
+ const issues = [];
6655
+ const docsDir = path14.join(projectRoot, config.docsDir ?? "docs");
6656
+ const entropyConfig = config.entropy || {};
6657
+ const result = await checkDocCoverage("project", {
6658
+ docsDir,
6659
+ sourceDir: projectRoot,
6660
+ excludePatterns: entropyConfig.excludePatterns || [
6661
+ "**/node_modules/**",
6662
+ "**/dist/**",
6663
+ "**/*.test.ts",
6664
+ "**/fixtures/**"
6665
+ ]
6666
+ });
6667
+ if (!result.ok) {
6668
+ issues.push({ severity: "warning", message: result.error.message });
6669
+ } else if (result.value.gaps.length > 0) {
6670
+ for (const gap of result.value.gaps) {
6671
+ issues.push({
6672
+ severity: "warning",
6673
+ message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6674
+ file: gap.file
6675
+ });
6676
+ }
6677
+ }
6678
+ return issues;
6679
+ }
6680
+ async function runEntropyCheck(projectRoot, config) {
6681
+ const issues = [];
6682
+ const entropyConfig = config.entropy || {};
6683
+ const perfConfig = config.performance || {};
6684
+ const entryPoints = entropyConfig.entryPoints ?? perfConfig.entryPoints;
6685
+ const analyzer = new EntropyAnalyzer({
6686
+ rootDir: projectRoot,
6687
+ ...entryPoints ? { entryPoints } : {},
6688
+ analyze: { drift: true, deadCode: true, patterns: false }
6689
+ });
6690
+ const result = await analyzer.analyze();
6691
+ if (!result.ok) {
6692
+ issues.push({ severity: "warning", message: result.error.message });
6693
+ } else {
6694
+ const report = result.value;
6695
+ if (report.drift) {
6696
+ for (const drift of report.drift.drifts) {
6697
+ issues.push({
6698
+ severity: "warning",
6699
+ message: `Doc drift (${drift.type}): ${drift.details}`,
6700
+ file: drift.docFile,
6701
+ line: drift.line
6480
6702
  });
6481
- if (!result.ok) {
6482
- issues.push({ severity: "warning", message: result.error.message });
6483
- } else if (result.value.gaps.length > 0) {
6484
- for (const gap of result.value.gaps) {
6485
- issues.push({
6486
- severity: "warning",
6487
- message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6488
- file: gap.file
6489
- });
6490
- }
6491
- }
6492
- break;
6493
6703
  }
6494
- case "entropy": {
6495
- const analyzer = new EntropyAnalyzer({
6496
- rootDir: projectRoot,
6497
- analyze: { drift: true, deadCode: true, patterns: false }
6704
+ }
6705
+ if (report.deadCode) {
6706
+ for (const dead of report.deadCode.deadExports) {
6707
+ issues.push({
6708
+ severity: "warning",
6709
+ message: `Dead export: ${dead.name}`,
6710
+ file: dead.file,
6711
+ line: dead.line
6498
6712
  });
6499
- const result = await analyzer.analyze();
6500
- if (!result.ok) {
6501
- issues.push({ severity: "warning", message: result.error.message });
6502
- } else {
6503
- const report = result.value;
6504
- if (report.drift) {
6505
- for (const drift of report.drift.drifts) {
6506
- issues.push({
6507
- severity: "warning",
6508
- message: `Doc drift (${drift.type}): ${drift.details}`,
6509
- file: drift.docFile,
6510
- line: drift.line
6511
- });
6512
- }
6513
- }
6514
- if (report.deadCode) {
6515
- for (const dead of report.deadCode.deadExports) {
6516
- issues.push({
6517
- severity: "warning",
6518
- message: `Dead export: ${dead.name}`,
6519
- file: dead.file,
6520
- line: dead.line
6521
- });
6522
- }
6523
- }
6524
- }
6525
- break;
6526
6713
  }
6527
- case "security": {
6528
- const securityConfig = parseSecurityConfig(config.security);
6529
- if (!securityConfig.enabled) break;
6530
- const scanner = new SecurityScanner(securityConfig);
6531
- scanner.configureForProject(projectRoot);
6532
- const { glob: globFn } = await import("glob");
6533
- const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
6534
- cwd: projectRoot,
6535
- ignore: securityConfig.exclude ?? [
6536
- "**/node_modules/**",
6537
- "**/dist/**",
6538
- "**/*.test.ts",
6539
- "**/fixtures/**"
6540
- ],
6541
- absolute: true
6714
+ }
6715
+ }
6716
+ return issues;
6717
+ }
6718
+ async function runSecurityCheck(projectRoot, config) {
6719
+ const issues = [];
6720
+ const securityConfig = parseSecurityConfig(config.security);
6721
+ if (!securityConfig.enabled) return issues;
6722
+ const scanner = new SecurityScanner(securityConfig);
6723
+ scanner.configureForProject(projectRoot);
6724
+ const { glob: globFn } = await import("glob");
6725
+ const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
6726
+ cwd: projectRoot,
6727
+ ignore: securityConfig.exclude ?? [
6728
+ "**/node_modules/**",
6729
+ "**/dist/**",
6730
+ "**/*.test.ts",
6731
+ "**/fixtures/**"
6732
+ ],
6733
+ absolute: true
6734
+ });
6735
+ const scanResult = await scanner.scanFiles(sourceFiles);
6736
+ for (const finding of scanResult.findings) {
6737
+ issues.push({
6738
+ severity: finding.severity === "info" ? "warning" : finding.severity,
6739
+ message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
6740
+ file: finding.file,
6741
+ line: finding.line
6742
+ });
6743
+ }
6744
+ return issues;
6745
+ }
6746
+ async function runPerfCheck(projectRoot, config) {
6747
+ const issues = [];
6748
+ const perfConfig = config.performance || {};
6749
+ const entryPoints = perfConfig.entryPoints;
6750
+ const perfAnalyzer = new EntropyAnalyzer({
6751
+ rootDir: projectRoot,
6752
+ ...entryPoints ? { entryPoints } : {},
6753
+ analyze: {
6754
+ complexity: perfConfig.complexity || true,
6755
+ coupling: perfConfig.coupling || true,
6756
+ sizeBudget: perfConfig.sizeBudget || false
6757
+ }
6758
+ });
6759
+ const perfResult = await perfAnalyzer.analyze();
6760
+ if (!perfResult.ok) {
6761
+ issues.push({ severity: "warning", message: perfResult.error.message });
6762
+ } else {
6763
+ const perfReport = perfResult.value;
6764
+ if (perfReport.complexity) {
6765
+ for (const v of perfReport.complexity.violations) {
6766
+ issues.push({
6767
+ severity: v.severity === "info" ? "warning" : v.severity,
6768
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
6769
+ file: v.file,
6770
+ line: v.line
6542
6771
  });
6543
- const scanResult = await scanner.scanFiles(sourceFiles);
6544
- for (const finding of scanResult.findings) {
6545
- issues.push({
6546
- severity: finding.severity === "info" ? "warning" : finding.severity,
6547
- message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
6548
- file: finding.file,
6549
- line: finding.line
6550
- });
6551
- }
6552
- break;
6553
6772
  }
6554
- case "perf": {
6555
- const perfConfig = config.performance || {};
6556
- const perfAnalyzer = new EntropyAnalyzer({
6557
- rootDir: projectRoot,
6558
- analyze: {
6559
- complexity: perfConfig.complexity || true,
6560
- coupling: perfConfig.coupling || true,
6561
- sizeBudget: perfConfig.sizeBudget || false
6562
- }
6773
+ }
6774
+ if (perfReport.coupling) {
6775
+ for (const v of perfReport.coupling.violations) {
6776
+ issues.push({
6777
+ severity: v.severity === "info" ? "warning" : v.severity,
6778
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
6779
+ file: v.file
6563
6780
  });
6564
- const perfResult = await perfAnalyzer.analyze();
6565
- if (!perfResult.ok) {
6566
- issues.push({ severity: "warning", message: perfResult.error.message });
6567
- } else {
6568
- const perfReport = perfResult.value;
6569
- if (perfReport.complexity) {
6570
- for (const v of perfReport.complexity.violations) {
6571
- issues.push({
6572
- severity: v.severity === "info" ? "warning" : v.severity,
6573
- message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
6574
- file: v.file,
6575
- line: v.line
6576
- });
6577
- }
6578
- }
6579
- if (perfReport.coupling) {
6580
- for (const v of perfReport.coupling.violations) {
6581
- issues.push({
6582
- severity: v.severity === "info" ? "warning" : v.severity,
6583
- message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
6584
- file: v.file
6585
- });
6586
- }
6587
- }
6588
- }
6589
- break;
6590
6781
  }
6591
- case "phase-gate": {
6592
- const phaseGates = config.phaseGates;
6593
- if (!phaseGates?.enabled) {
6594
- break;
6595
- }
6782
+ }
6783
+ }
6784
+ return issues;
6785
+ }
6786
+ async function runPhaseGateCheck(_projectRoot, config) {
6787
+ const issues = [];
6788
+ const phaseGates = config.phaseGates;
6789
+ if (!phaseGates?.enabled) {
6790
+ return issues;
6791
+ }
6792
+ issues.push({
6793
+ severity: "warning",
6794
+ message: "Phase gate is enabled but requires CLI context. Run `harness check-phase-gate` separately for full validation."
6795
+ });
6796
+ return issues;
6797
+ }
6798
+ async function runArchCheck(projectRoot, config) {
6799
+ const issues = [];
6800
+ const rawArchConfig = config.architecture;
6801
+ const archConfig = ArchConfigSchema.parse(rawArchConfig ?? {});
6802
+ if (!archConfig.enabled) return issues;
6803
+ const results = await runAll(archConfig, projectRoot);
6804
+ const baselineManager = new ArchBaselineManager(projectRoot, archConfig.baselinePath);
6805
+ const baseline = baselineManager.load();
6806
+ if (baseline) {
6807
+ const diffResult = diff(results, baseline);
6808
+ if (!diffResult.passed) {
6809
+ for (const v of diffResult.newViolations) {
6596
6810
  issues.push({
6597
- severity: "warning",
6598
- message: "Phase gate is enabled but requires CLI context. Run `harness check-phase-gate` separately for full validation."
6811
+ severity: v.severity,
6812
+ message: `[${v.category || "arch"}] NEW: ${v.detail}`,
6813
+ file: v.file
6599
6814
  });
6600
- break;
6601
6815
  }
6602
- case "arch": {
6603
- const rawArchConfig = config.architecture;
6604
- const archConfig = ArchConfigSchema.parse(rawArchConfig ?? {});
6605
- if (!archConfig.enabled) break;
6606
- const results = await runAll(archConfig, projectRoot);
6607
- const baselineManager = new ArchBaselineManager(projectRoot, archConfig.baselinePath);
6608
- const baseline = baselineManager.load();
6609
- if (baseline) {
6610
- const diffResult = diff(results, baseline);
6611
- if (!diffResult.passed) {
6612
- for (const v of diffResult.newViolations) {
6613
- issues.push({
6614
- severity: v.severity,
6615
- message: `[${v.category || "arch"}] NEW: ${v.detail}`,
6616
- file: v.file
6617
- });
6618
- }
6619
- for (const r of diffResult.regressions) {
6620
- issues.push({
6621
- severity: "error",
6622
- message: `[${r.category}] REGRESSION: ${r.currentValue} > ${r.baselineValue} (delta: ${r.delta})`
6623
- });
6624
- }
6625
- }
6626
- } else {
6627
- for (const result of results) {
6628
- for (const v of result.violations) {
6629
- issues.push({
6630
- severity: v.severity,
6631
- message: `[${result.category}] ${v.detail}`,
6632
- file: v.file
6633
- });
6634
- }
6635
- }
6636
- }
6637
- break;
6816
+ for (const r of diffResult.regressions) {
6817
+ issues.push({
6818
+ severity: "error",
6819
+ message: `[${r.category}] REGRESSION: ${r.currentValue} > ${r.baselineValue} (delta: ${r.delta})`
6820
+ });
6638
6821
  }
6639
6822
  }
6823
+ } else {
6824
+ for (const result of results) {
6825
+ for (const v of result.violations) {
6826
+ issues.push({
6827
+ severity: v.severity,
6828
+ message: `[${result.category}] ${v.detail}`,
6829
+ file: v.file
6830
+ });
6831
+ }
6832
+ }
6833
+ }
6834
+ return issues;
6835
+ }
6836
+ async function runSingleCheck(name, projectRoot, config) {
6837
+ const start = Date.now();
6838
+ const issues = [];
6839
+ try {
6840
+ switch (name) {
6841
+ case "validate":
6842
+ issues.push(...await runValidateCheck(projectRoot, config));
6843
+ break;
6844
+ case "deps":
6845
+ issues.push(...await runDepsCheck(projectRoot, config));
6846
+ break;
6847
+ case "docs":
6848
+ issues.push(...await runDocsCheck(projectRoot, config));
6849
+ break;
6850
+ case "entropy":
6851
+ issues.push(...await runEntropyCheck(projectRoot, config));
6852
+ break;
6853
+ case "security":
6854
+ issues.push(...await runSecurityCheck(projectRoot, config));
6855
+ break;
6856
+ case "perf":
6857
+ issues.push(...await runPerfCheck(projectRoot, config));
6858
+ break;
6859
+ case "phase-gate":
6860
+ issues.push(...await runPhaseGateCheck(projectRoot, config));
6861
+ break;
6862
+ case "arch":
6863
+ issues.push(...await runArchCheck(projectRoot, config));
6864
+ break;
6865
+ }
6640
6866
  } catch (error) {
6641
6867
  issues.push({
6642
6868
  severity: "error",
@@ -6704,7 +6930,7 @@ async function runCIChecks(input) {
6704
6930
  }
6705
6931
 
6706
6932
  // src/review/mechanical-checks.ts
6707
- import * as path13 from "path";
6933
+ import * as path15 from "path";
6708
6934
  async function runMechanicalChecks(options) {
6709
6935
  const { projectRoot, config, skip = [], changedFiles } = options;
6710
6936
  const findings = [];
@@ -6716,7 +6942,7 @@ async function runMechanicalChecks(options) {
6716
6942
  };
6717
6943
  if (!skip.includes("validate")) {
6718
6944
  try {
6719
- const agentsPath = path13.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6945
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6720
6946
  const result = await validateAgentsMap(agentsPath);
6721
6947
  if (!result.ok) {
6722
6948
  statuses.validate = "fail";
@@ -6753,7 +6979,7 @@ async function runMechanicalChecks(options) {
6753
6979
  statuses.validate = "fail";
6754
6980
  findings.push({
6755
6981
  tool: "validate",
6756
- file: path13.join(projectRoot, "AGENTS.md"),
6982
+ file: path15.join(projectRoot, "AGENTS.md"),
6757
6983
  message: err instanceof Error ? err.message : String(err),
6758
6984
  severity: "error"
6759
6985
  });
@@ -6817,7 +7043,7 @@ async function runMechanicalChecks(options) {
6817
7043
  (async () => {
6818
7044
  const localFindings = [];
6819
7045
  try {
6820
- const docsDir = path13.join(projectRoot, config.docsDir ?? "docs");
7046
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
6821
7047
  const result = await checkDocCoverage("project", { docsDir });
6822
7048
  if (!result.ok) {
6823
7049
  statuses["check-docs"] = "warn";
@@ -6844,7 +7070,7 @@ async function runMechanicalChecks(options) {
6844
7070
  statuses["check-docs"] = "warn";
6845
7071
  localFindings.push({
6846
7072
  tool: "check-docs",
6847
- file: path13.join(projectRoot, "docs"),
7073
+ file: path15.join(projectRoot, "docs"),
6848
7074
  message: err instanceof Error ? err.message : String(err),
6849
7075
  severity: "warning"
6850
7076
  });
@@ -6992,7 +7218,7 @@ function detectChangeType(commitMessage, diff2) {
6992
7218
  }
6993
7219
 
6994
7220
  // src/review/context-scoper.ts
6995
- import * as path14 from "path";
7221
+ import * as path16 from "path";
6996
7222
  var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
6997
7223
  var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
6998
7224
  function computeContextBudget(diffLines) {
@@ -7000,18 +7226,18 @@ function computeContextBudget(diffLines) {
7000
7226
  return diffLines;
7001
7227
  }
7002
7228
  function isWithinProject(absPath, projectRoot) {
7003
- const resolvedRoot = path14.resolve(projectRoot) + path14.sep;
7004
- const resolvedPath = path14.resolve(absPath);
7005
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path14.resolve(projectRoot);
7229
+ const resolvedRoot = path16.resolve(projectRoot) + path16.sep;
7230
+ const resolvedPath = path16.resolve(absPath);
7231
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path16.resolve(projectRoot);
7006
7232
  }
7007
7233
  async function readContextFile(projectRoot, filePath, reason) {
7008
- const absPath = path14.isAbsolute(filePath) ? filePath : path14.join(projectRoot, filePath);
7234
+ const absPath = path16.isAbsolute(filePath) ? filePath : path16.join(projectRoot, filePath);
7009
7235
  if (!isWithinProject(absPath, projectRoot)) return null;
7010
7236
  const result = await readFileContent(absPath);
7011
7237
  if (!result.ok) return null;
7012
7238
  const content = result.value;
7013
7239
  const lines = content.split("\n").length;
7014
- const relPath = path14.isAbsolute(filePath) ? path14.relative(projectRoot, filePath) : filePath;
7240
+ const relPath = path16.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
7015
7241
  return { path: relPath, content, reason, lines };
7016
7242
  }
7017
7243
  function extractImportSources(content) {
@@ -7026,18 +7252,18 @@ function extractImportSources(content) {
7026
7252
  }
7027
7253
  async function resolveImportPath(projectRoot, fromFile, importSource) {
7028
7254
  if (!importSource.startsWith(".")) return null;
7029
- const fromDir = path14.dirname(path14.join(projectRoot, fromFile));
7030
- const basePath = path14.resolve(fromDir, importSource);
7255
+ const fromDir = path16.dirname(path16.join(projectRoot, fromFile));
7256
+ const basePath = path16.resolve(fromDir, importSource);
7031
7257
  if (!isWithinProject(basePath, projectRoot)) return null;
7032
- const relBase = path14.relative(projectRoot, basePath);
7258
+ const relBase = relativePosix(projectRoot, basePath);
7033
7259
  const candidates = [
7034
7260
  relBase + ".ts",
7035
7261
  relBase + ".tsx",
7036
7262
  relBase + ".mts",
7037
- path14.join(relBase, "index.ts")
7263
+ path16.join(relBase, "index.ts")
7038
7264
  ];
7039
7265
  for (const candidate of candidates) {
7040
- const absCandidate = path14.join(projectRoot, candidate);
7266
+ const absCandidate = path16.join(projectRoot, candidate);
7041
7267
  if (await fileExists(absCandidate)) {
7042
7268
  return candidate;
7043
7269
  }
@@ -7045,10 +7271,10 @@ async function resolveImportPath(projectRoot, fromFile, importSource) {
7045
7271
  return null;
7046
7272
  }
7047
7273
  async function findTestFiles(projectRoot, sourceFile) {
7048
- const baseName = path14.basename(sourceFile, path14.extname(sourceFile));
7274
+ const baseName = path16.basename(sourceFile, path16.extname(sourceFile));
7049
7275
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
7050
7276
  const results = await findFiles(pattern, projectRoot);
7051
- return results.map((f) => path14.relative(projectRoot, f));
7277
+ return results.map((f) => relativePosix(projectRoot, f));
7052
7278
  }
7053
7279
  async function gatherImportContext(projectRoot, changedFiles, budget) {
7054
7280
  const contextFiles = [];
@@ -7334,101 +7560,102 @@ function findMissingJsDoc(bundle) {
7334
7560
  }
7335
7561
  return missing;
7336
7562
  }
7337
- function runComplianceAgent(bundle) {
7563
+ function checkMissingJsDoc(bundle, rules) {
7564
+ const jsDocRule = rules.find((r) => r.text.toLowerCase().includes("jsdoc"));
7565
+ if (!jsDocRule) return [];
7566
+ const missingDocs = findMissingJsDoc(bundle);
7567
+ return missingDocs.map((m) => ({
7568
+ id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
7569
+ file: m.file,
7570
+ lineRange: [m.line, m.line],
7571
+ domain: "compliance",
7572
+ severity: "important",
7573
+ title: `Missing JSDoc on exported \`${m.exportName}\``,
7574
+ rationale: `Convention requires all exports to have JSDoc comments (from ${jsDocRule.source}).`,
7575
+ suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
7576
+ evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${jsDocRule.text}"`],
7577
+ validatedBy: "heuristic"
7578
+ }));
7579
+ }
7580
+ function checkFeatureSpec(bundle) {
7581
+ const hasSpecContext = bundle.contextFiles.some(
7582
+ (f) => f.reason === "spec" || f.reason === "convention"
7583
+ );
7584
+ if (hasSpecContext || bundle.changedFiles.length === 0) return [];
7585
+ const firstFile = bundle.changedFiles[0];
7586
+ return [
7587
+ {
7588
+ id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
7589
+ file: firstFile.path,
7590
+ lineRange: [1, 1],
7591
+ domain: "compliance",
7592
+ severity: "suggestion",
7593
+ title: "No spec/design doc found for feature change",
7594
+ rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
7595
+ evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
7596
+ validatedBy: "heuristic"
7597
+ }
7598
+ ];
7599
+ }
7600
+ function checkBugfixHistory(bundle) {
7601
+ if (bundle.commitHistory.length > 0 || bundle.changedFiles.length === 0) return [];
7602
+ const firstFile = bundle.changedFiles[0];
7603
+ return [
7604
+ {
7605
+ id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
7606
+ file: firstFile.path,
7607
+ lineRange: [1, 1],
7608
+ domain: "compliance",
7609
+ severity: "suggestion",
7610
+ title: "Bugfix without commit history context",
7611
+ rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
7612
+ evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
7613
+ validatedBy: "heuristic"
7614
+ }
7615
+ ];
7616
+ }
7617
+ function checkChangeTypeSpecific(bundle) {
7618
+ switch (bundle.changeType) {
7619
+ case "feature":
7620
+ return checkFeatureSpec(bundle);
7621
+ case "bugfix":
7622
+ return checkBugfixHistory(bundle);
7623
+ default:
7624
+ return [];
7625
+ }
7626
+ }
7627
+ function checkResultTypeConvention(bundle, rules) {
7628
+ const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
7629
+ if (!resultTypeRule) return [];
7338
7630
  const findings = [];
7339
- const rules = extractConventionRules(bundle);
7340
- const jsDocRuleExists = rules.some((r) => r.text.toLowerCase().includes("jsdoc"));
7341
- if (jsDocRuleExists) {
7342
- const missingDocs = findMissingJsDoc(bundle);
7343
- for (const m of missingDocs) {
7631
+ for (const cf of bundle.changedFiles) {
7632
+ const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
7633
+ const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
7634
+ if (hasTryCatch && !usesResult) {
7344
7635
  findings.push({
7345
- id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
7346
- file: m.file,
7347
- lineRange: [m.line, m.line],
7636
+ id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
7637
+ file: cf.path,
7638
+ lineRange: [1, cf.lines],
7348
7639
  domain: "compliance",
7349
- severity: "important",
7350
- title: `Missing JSDoc on exported \`${m.exportName}\``,
7351
- rationale: `Convention requires all exports to have JSDoc comments (from ${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.source ?? "conventions"}).`,
7352
- suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
7353
- evidence: [
7354
- `changeType: ${bundle.changeType}`,
7355
- `Convention rule: "${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.text ?? ""}"`
7356
- ],
7640
+ severity: "suggestion",
7641
+ title: "Fallible operation uses try/catch instead of Result type",
7642
+ rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
7643
+ suggestion: "Refactor error handling to use the Result type pattern.",
7644
+ evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${resultTypeRule.text}"`],
7357
7645
  validatedBy: "heuristic"
7358
7646
  });
7359
7647
  }
7360
7648
  }
7361
- switch (bundle.changeType) {
7362
- case "feature": {
7363
- const hasSpecContext = bundle.contextFiles.some(
7364
- (f) => f.reason === "spec" || f.reason === "convention"
7365
- );
7366
- if (!hasSpecContext && bundle.changedFiles.length > 0) {
7367
- const firstFile = bundle.changedFiles[0];
7368
- findings.push({
7369
- id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
7370
- file: firstFile.path,
7371
- lineRange: [1, 1],
7372
- domain: "compliance",
7373
- severity: "suggestion",
7374
- title: "No spec/design doc found for feature change",
7375
- rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
7376
- evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
7377
- validatedBy: "heuristic"
7378
- });
7379
- }
7380
- break;
7381
- }
7382
- case "bugfix": {
7383
- if (bundle.commitHistory.length === 0 && bundle.changedFiles.length > 0) {
7384
- const firstFile = bundle.changedFiles[0];
7385
- findings.push({
7386
- id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
7387
- file: firstFile.path,
7388
- lineRange: [1, 1],
7389
- domain: "compliance",
7390
- severity: "suggestion",
7391
- title: "Bugfix without commit history context",
7392
- rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
7393
- evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
7394
- validatedBy: "heuristic"
7395
- });
7396
- }
7397
- break;
7398
- }
7399
- case "refactor": {
7400
- break;
7401
- }
7402
- case "docs": {
7403
- break;
7404
- }
7405
- }
7406
- const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
7407
- if (resultTypeRule) {
7408
- for (const cf of bundle.changedFiles) {
7409
- const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
7410
- const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
7411
- if (hasTryCatch && !usesResult) {
7412
- findings.push({
7413
- id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
7414
- file: cf.path,
7415
- lineRange: [1, cf.lines],
7416
- domain: "compliance",
7417
- severity: "suggestion",
7418
- title: "Fallible operation uses try/catch instead of Result type",
7419
- rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
7420
- suggestion: "Refactor error handling to use the Result type pattern.",
7421
- evidence: [
7422
- `changeType: ${bundle.changeType}`,
7423
- `Convention rule: "${resultTypeRule.text}"`
7424
- ],
7425
- validatedBy: "heuristic"
7426
- });
7427
- }
7428
- }
7429
- }
7430
7649
  return findings;
7431
7650
  }
7651
+ function runComplianceAgent(bundle) {
7652
+ const rules = extractConventionRules(bundle);
7653
+ return [
7654
+ ...checkMissingJsDoc(bundle, rules),
7655
+ ...checkChangeTypeSpecific(bundle),
7656
+ ...checkResultTypeConvention(bundle, rules)
7657
+ ];
7658
+ }
7432
7659
 
7433
7660
  // src/review/agents/bug-agent.ts
7434
7661
  var BUG_DETECTION_DESCRIPTOR = {
@@ -7705,31 +7932,32 @@ var ARCHITECTURE_DESCRIPTOR = {
7705
7932
  ]
7706
7933
  };
7707
7934
  var LARGE_FILE_THRESHOLD = 300;
7935
+ function isViolationLine(line) {
7936
+ const lower = line.toLowerCase();
7937
+ return lower.includes("violation") || lower.includes("layer");
7938
+ }
7939
+ function createLayerViolationFinding(line, fallbackPath) {
7940
+ const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
7941
+ const file = fileMatch?.[1] ?? fallbackPath;
7942
+ const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
7943
+ return {
7944
+ id: makeFindingId("arch", file, lineNum, "layer violation"),
7945
+ file,
7946
+ lineRange: [lineNum, lineNum],
7947
+ domain: "architecture",
7948
+ severity: "critical",
7949
+ title: "Layer boundary violation detected by check-deps",
7950
+ rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
7951
+ suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
7952
+ evidence: [line.trim()],
7953
+ validatedBy: "heuristic"
7954
+ };
7955
+ }
7708
7956
  function detectLayerViolations(bundle) {
7709
- const findings = [];
7710
7957
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
7711
- if (!checkDepsFile) return findings;
7712
- const lines = checkDepsFile.content.split("\n");
7713
- for (const line of lines) {
7714
- if (line.toLowerCase().includes("violation") || line.toLowerCase().includes("layer")) {
7715
- const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
7716
- const file = fileMatch?.[1] ?? bundle.changedFiles[0]?.path ?? "unknown";
7717
- const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
7718
- findings.push({
7719
- id: makeFindingId("arch", file, lineNum, "layer violation"),
7720
- file,
7721
- lineRange: [lineNum, lineNum],
7722
- domain: "architecture",
7723
- severity: "critical",
7724
- title: "Layer boundary violation detected by check-deps",
7725
- rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
7726
- suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
7727
- evidence: [line.trim()],
7728
- validatedBy: "heuristic"
7729
- });
7730
- }
7731
- }
7732
- return findings;
7958
+ if (!checkDepsFile) return [];
7959
+ const fallbackPath = bundle.changedFiles[0]?.path ?? "unknown";
7960
+ return checkDepsFile.content.split("\n").filter(isViolationLine).map((line) => createLayerViolationFinding(line, fallbackPath));
7733
7961
  }
7734
7962
  function detectLargeFiles(bundle) {
7735
7963
  const findings = [];
@@ -7751,45 +7979,61 @@ function detectLargeFiles(bundle) {
7751
7979
  }
7752
7980
  return findings;
7753
7981
  }
7982
+ function extractRelativeImports(content) {
7983
+ const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
7984
+ let match;
7985
+ const imports = /* @__PURE__ */ new Set();
7986
+ while ((match = importRegex.exec(content)) !== null) {
7987
+ const source = match[1];
7988
+ if (source.startsWith(".")) {
7989
+ imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
7990
+ }
7991
+ }
7992
+ return imports;
7993
+ }
7994
+ function fileBaseName(filePath) {
7995
+ return filePath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
7996
+ }
7997
+ function findCircularImportInCtxFile(ctxFile, changedFilePath, changedPaths, fileImports) {
7998
+ const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
7999
+ let ctxMatch;
8000
+ while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
8001
+ const ctxSource = ctxMatch[1];
8002
+ if (!ctxSource.startsWith(".")) continue;
8003
+ for (const changedPath of changedPaths) {
8004
+ const baseName = fileBaseName(changedPath);
8005
+ const ctxBaseName = fileBaseName(ctxFile.path);
8006
+ if (ctxSource.includes(baseName) && fileImports.has(ctxBaseName)) {
8007
+ return {
8008
+ id: makeFindingId("arch", changedFilePath, 1, `circular ${ctxFile.path}`),
8009
+ file: changedFilePath,
8010
+ lineRange: [1, 1],
8011
+ domain: "architecture",
8012
+ severity: "important",
8013
+ title: `Potential circular import between ${changedFilePath} and ${ctxFile.path}`,
8014
+ rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
8015
+ suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
8016
+ evidence: [
8017
+ `${changedFilePath} imports from a module that also imports from ${changedFilePath}`
8018
+ ],
8019
+ validatedBy: "heuristic"
8020
+ };
8021
+ }
8022
+ }
8023
+ }
8024
+ return null;
8025
+ }
7754
8026
  function detectCircularImports(bundle) {
7755
8027
  const findings = [];
7756
8028
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
8029
+ const relevantCtxFiles = bundle.contextFiles.filter(
8030
+ (f) => f.reason === "import" || f.reason === "graph-dependency"
8031
+ );
7757
8032
  for (const cf of bundle.changedFiles) {
7758
- const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
7759
- let match;
7760
- const imports = /* @__PURE__ */ new Set();
7761
- while ((match = importRegex.exec(cf.content)) !== null) {
7762
- const source = match[1];
7763
- if (source.startsWith(".")) {
7764
- imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
7765
- }
7766
- }
7767
- for (const ctxFile of bundle.contextFiles) {
7768
- if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
7769
- const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
7770
- let ctxMatch;
7771
- while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
7772
- const ctxSource = ctxMatch[1];
7773
- if (ctxSource.startsWith(".")) {
7774
- for (const changedPath of changedPaths) {
7775
- const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
7776
- if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
7777
- findings.push({
7778
- id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
7779
- file: cf.path,
7780
- lineRange: [1, 1],
7781
- domain: "architecture",
7782
- severity: "important",
7783
- title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
7784
- rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
7785
- suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
7786
- evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
7787
- validatedBy: "heuristic"
7788
- });
7789
- }
7790
- }
7791
- }
7792
- }
8033
+ const imports = extractRelativeImports(cf.content);
8034
+ for (const ctxFile of relevantCtxFiles) {
8035
+ const finding = findCircularImportInCtxFile(ctxFile, cf.path, changedPaths, imports);
8036
+ if (finding) findings.push(finding);
7793
8037
  }
7794
8038
  }
7795
8039
  return findings;
@@ -7836,7 +8080,7 @@ async function fanOutReview(options) {
7836
8080
  }
7837
8081
 
7838
8082
  // src/review/validate-findings.ts
7839
- import * as path15 from "path";
8083
+ import * as path17 from "path";
7840
8084
  var DOWNGRADE_MAP = {
7841
8085
  critical: "important",
7842
8086
  important: "suggestion",
@@ -7857,7 +8101,7 @@ function normalizePath(filePath, projectRoot) {
7857
8101
  let normalized = filePath;
7858
8102
  normalized = normalized.replace(/\\/g, "/");
7859
8103
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
7860
- if (path15.isAbsolute(normalized)) {
8104
+ if (path17.isAbsolute(normalized)) {
7861
8105
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
7862
8106
  if (normalized.startsWith(root)) {
7863
8107
  normalized = normalized.slice(root.length);
@@ -7882,12 +8126,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
7882
8126
  while ((match = importRegex.exec(content)) !== null) {
7883
8127
  const importPath = match[1];
7884
8128
  if (!importPath.startsWith(".")) continue;
7885
- const dir = path15.dirname(current.file);
7886
- let resolved = path15.join(dir, importPath).replace(/\\/g, "/");
8129
+ const dir = path17.dirname(current.file);
8130
+ let resolved = path17.join(dir, importPath).replace(/\\/g, "/");
7887
8131
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
7888
8132
  resolved += ".ts";
7889
8133
  }
7890
- resolved = path15.normalize(resolved).replace(/\\/g, "/");
8134
+ resolved = path17.normalize(resolved).replace(/\\/g, "/");
7891
8135
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
7892
8136
  queue.push({ file: resolved, depth: current.depth + 1 });
7893
8137
  }
@@ -7904,7 +8148,7 @@ async function validateFindings(options) {
7904
8148
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
7905
8149
  continue;
7906
8150
  }
7907
- const absoluteFile = path15.isAbsolute(finding.file) ? finding.file : path15.join(projectRoot, finding.file).replace(/\\/g, "/");
8151
+ const absoluteFile = path17.isAbsolute(finding.file) ? finding.file : path17.join(projectRoot, finding.file).replace(/\\/g, "/");
7908
8152
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
7909
8153
  continue;
7910
8154
  }
@@ -7961,6 +8205,28 @@ async function validateFindings(options) {
7961
8205
  function rangesOverlap(a, b, gap) {
7962
8206
  return a[0] <= b[1] + gap && b[0] <= a[1] + gap;
7963
8207
  }
8208
+ function pickLongest(a, b) {
8209
+ if (a && b) return a.length >= b.length ? a : b;
8210
+ return a ?? b;
8211
+ }
8212
+ function buildMergedTitle(a, b, domains) {
8213
+ const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
8214
+ const domainList = [...domains].sort().join(", ");
8215
+ const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
8216
+ return { title: `[${domainList}] ${cleanTitle}`, primaryFinding };
8217
+ }
8218
+ function mergeSecurityFields(merged, primary, a, b) {
8219
+ const cweId = primary.cweId ?? a.cweId ?? b.cweId;
8220
+ const owaspCategory = primary.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
8221
+ const confidence = primary.confidence ?? a.confidence ?? b.confidence;
8222
+ const remediation = pickLongest(a.remediation, b.remediation);
8223
+ const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
8224
+ if (cweId !== void 0) merged.cweId = cweId;
8225
+ if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
8226
+ if (confidence !== void 0) merged.confidence = confidence;
8227
+ if (remediation !== void 0) merged.remediation = remediation;
8228
+ if (mergedRefs.length > 0) merged.references = mergedRefs;
8229
+ }
7964
8230
  function mergeFindings(a, b) {
7965
8231
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
7966
8232
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
@@ -7970,18 +8236,12 @@ function mergeFindings(a, b) {
7970
8236
  Math.min(a.lineRange[0], b.lineRange[0]),
7971
8237
  Math.max(a.lineRange[1], b.lineRange[1])
7972
8238
  ];
7973
- const domains = /* @__PURE__ */ new Set();
7974
- domains.add(a.domain);
7975
- domains.add(b.domain);
7976
- const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
7977
- const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
7978
- const domainList = [...domains].sort().join(", ");
7979
- const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
7980
- const title = `[${domainList}] ${cleanTitle}`;
8239
+ const domains = /* @__PURE__ */ new Set([a.domain, b.domain]);
8240
+ const suggestion = pickLongest(a.suggestion, b.suggestion);
8241
+ const { title, primaryFinding } = buildMergedTitle(a, b, domains);
7981
8242
  const merged = {
7982
8243
  id: primaryFinding.id,
7983
8244
  file: a.file,
7984
- // same file for all merged findings
7985
8245
  lineRange,
7986
8246
  domain: primaryFinding.domain,
7987
8247
  severity: highestSeverity,
@@ -7993,16 +8253,7 @@ function mergeFindings(a, b) {
7993
8253
  if (suggestion !== void 0) {
7994
8254
  merged.suggestion = suggestion;
7995
8255
  }
7996
- const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
7997
- const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
7998
- const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
7999
- const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
8000
- const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
8001
- if (cweId !== void 0) merged.cweId = cweId;
8002
- if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
8003
- if (confidence !== void 0) merged.confidence = confidence;
8004
- if (remediation !== void 0) merged.remediation = remediation;
8005
- if (mergedRefs.length > 0) merged.references = mergedRefs;
8256
+ mergeSecurityFields(merged, primaryFinding, a, b);
8006
8257
  return merged;
8007
8258
  }
8008
8259
  function deduplicateFindings(options) {
@@ -8174,6 +8425,17 @@ function formatTerminalOutput(options) {
8174
8425
  if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
8175
8426
  sections.push(` Found ${issueCount} issue(s): ${parts.join(", ")}.`);
8176
8427
  }
8428
+ if (options.evidenceCoverage) {
8429
+ const ec = options.evidenceCoverage;
8430
+ sections.push("");
8431
+ sections.push("## Evidence Coverage\n");
8432
+ sections.push(` Evidence entries: ${ec.totalEntries}`);
8433
+ sections.push(
8434
+ ` Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
8435
+ );
8436
+ sections.push(` Uncited findings: ${ec.uncitedCount} (flagged as [UNVERIFIED])`);
8437
+ sections.push(` Coverage: ${ec.coveragePercentage}%`);
8438
+ }
8177
8439
  return sections.join("\n");
8178
8440
  }
8179
8441
 
@@ -8250,9 +8512,108 @@ function formatGitHubSummary(options) {
8250
8512
  const assessment = determineAssessment(findings);
8251
8513
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
8252
8514
  sections.push(`## Assessment: ${assessmentLabel}`);
8515
+ if (options.evidenceCoverage) {
8516
+ const ec = options.evidenceCoverage;
8517
+ sections.push("");
8518
+ sections.push("## Evidence Coverage\n");
8519
+ sections.push(`- Evidence entries: ${ec.totalEntries}`);
8520
+ sections.push(
8521
+ `- Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
8522
+ );
8523
+ sections.push(`- Uncited findings: ${ec.uncitedCount} (flagged as \\[UNVERIFIED\\])`);
8524
+ sections.push(`- Coverage: ${ec.coveragePercentage}%`);
8525
+ }
8253
8526
  return sections.join("\n");
8254
8527
  }
8255
8528
 
8529
+ // src/review/evidence-gate.ts
8530
+ var FILE_LINE_RANGE_PATTERN = /^([\w./@-]+\.\w+):(\d+)-(\d+)/;
8531
+ var FILE_LINE_PATTERN = /^([\w./@-]+\.\w+):(\d+)/;
8532
+ var FILE_ONLY_PATTERN = /^([\w./@-]+\.\w+)\s/;
8533
+ function parseEvidenceRef(content) {
8534
+ const trimmed = content.trim();
8535
+ const rangeMatch = trimmed.match(FILE_LINE_RANGE_PATTERN);
8536
+ if (rangeMatch) {
8537
+ return {
8538
+ file: rangeMatch[1],
8539
+ lineStart: parseInt(rangeMatch[2], 10),
8540
+ lineEnd: parseInt(rangeMatch[3], 10)
8541
+ };
8542
+ }
8543
+ const lineMatch = trimmed.match(FILE_LINE_PATTERN);
8544
+ if (lineMatch) {
8545
+ return {
8546
+ file: lineMatch[1],
8547
+ lineStart: parseInt(lineMatch[2], 10)
8548
+ };
8549
+ }
8550
+ const fileMatch = trimmed.match(FILE_ONLY_PATTERN);
8551
+ if (fileMatch) {
8552
+ return { file: fileMatch[1] };
8553
+ }
8554
+ return null;
8555
+ }
8556
+ function evidenceMatchesFinding(ref, finding) {
8557
+ if (ref.file !== finding.file) return false;
8558
+ if (ref.lineStart === void 0) return true;
8559
+ const [findStart, findEnd] = finding.lineRange;
8560
+ if (ref.lineEnd !== void 0) {
8561
+ return ref.lineStart <= findEnd && ref.lineEnd >= findStart;
8562
+ }
8563
+ return ref.lineStart >= findStart && ref.lineStart <= findEnd;
8564
+ }
8565
+ function checkEvidenceCoverage(findings, evidenceEntries) {
8566
+ if (findings.length === 0) {
8567
+ return {
8568
+ totalEntries: evidenceEntries.filter((e) => e.status === "active").length,
8569
+ findingsWithEvidence: 0,
8570
+ uncitedCount: 0,
8571
+ uncitedFindings: [],
8572
+ coveragePercentage: 100
8573
+ };
8574
+ }
8575
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
8576
+ const evidenceRefs = [];
8577
+ for (const entry of activeEvidence) {
8578
+ const ref = parseEvidenceRef(entry.content);
8579
+ if (ref) evidenceRefs.push(ref);
8580
+ }
8581
+ let findingsWithEvidence = 0;
8582
+ const uncitedFindings = [];
8583
+ for (const finding of findings) {
8584
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
8585
+ if (hasEvidence) {
8586
+ findingsWithEvidence++;
8587
+ } else {
8588
+ uncitedFindings.push(finding.title);
8589
+ }
8590
+ }
8591
+ const uncitedCount = findings.length - findingsWithEvidence;
8592
+ const coveragePercentage = Math.round(findingsWithEvidence / findings.length * 100);
8593
+ return {
8594
+ totalEntries: activeEvidence.length,
8595
+ findingsWithEvidence,
8596
+ uncitedCount,
8597
+ uncitedFindings,
8598
+ coveragePercentage
8599
+ };
8600
+ }
8601
+ function tagUncitedFindings(findings, evidenceEntries) {
8602
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
8603
+ const evidenceRefs = [];
8604
+ for (const entry of activeEvidence) {
8605
+ const ref = parseEvidenceRef(entry.content);
8606
+ if (ref) evidenceRefs.push(ref);
8607
+ }
8608
+ for (const finding of findings) {
8609
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
8610
+ if (!hasEvidence && !finding.title.startsWith("[UNVERIFIED]")) {
8611
+ finding.title = `[UNVERIFIED] ${finding.title}`;
8612
+ }
8613
+ }
8614
+ return findings;
8615
+ }
8616
+
8256
8617
  // src/review/pipeline-orchestrator.ts
8257
8618
  async function runReviewPipeline(options) {
8258
8619
  const {
@@ -8265,7 +8626,8 @@ async function runReviewPipeline(options) {
8265
8626
  conventionFiles,
8266
8627
  checkDepsOutput,
8267
8628
  config = {},
8268
- commitHistory
8629
+ commitHistory,
8630
+ sessionSlug
8269
8631
  } = options;
8270
8632
  if (flags.ci && prMetadata) {
8271
8633
  const eligibility = checkEligibility(prMetadata, true);
@@ -8361,13 +8723,25 @@ async function runReviewPipeline(options) {
8361
8723
  projectRoot,
8362
8724
  fileContents
8363
8725
  });
8726
+ let evidenceCoverage;
8727
+ if (sessionSlug) {
8728
+ try {
8729
+ const evidenceResult = await readSessionSection(projectRoot, sessionSlug, "evidence");
8730
+ if (evidenceResult.ok) {
8731
+ evidenceCoverage = checkEvidenceCoverage(validatedFindings, evidenceResult.value);
8732
+ tagUncitedFindings(validatedFindings, evidenceResult.value);
8733
+ }
8734
+ } catch {
8735
+ }
8736
+ }
8364
8737
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
8365
8738
  const strengths = [];
8366
8739
  const assessment = determineAssessment(dedupedFindings);
8367
8740
  const exitCode = getExitCode(assessment);
8368
8741
  const terminalOutput = formatTerminalOutput({
8369
8742
  findings: dedupedFindings,
8370
- strengths
8743
+ strengths,
8744
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
8371
8745
  });
8372
8746
  let githubComments = [];
8373
8747
  if (flags.comment) {
@@ -8382,7 +8756,8 @@ async function runReviewPipeline(options) {
8382
8756
  terminalOutput,
8383
8757
  githubComments,
8384
8758
  exitCode,
8385
- ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
8759
+ ...mechanicalResult != null ? { mechanicalResult } : {},
8760
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
8386
8761
  };
8387
8762
  }
8388
8763
 
@@ -8485,13 +8860,29 @@ function parseFeatures(sectionBody) {
8485
8860
  }
8486
8861
  return Ok2(features);
8487
8862
  }
8488
- function parseFeatureFields(name, body) {
8863
/**
 * Collect markdown bullet fields of the form "- **Key:** value" from a feature
 * section body into a Map of key -> raw value string.
 *
 * @param {string} body - Markdown body of one feature section.
 * @returns {Map<string, string>} Field name to raw value; later duplicates win.
 */
function extractFieldMap(body) {
  const fields = new Map();
  // One match per line: group 1 is the bolded key, group 2 the remainder of the line.
  for (const entry of body.matchAll(/^- \*\*(.+?):\*\* (.+)$/gm)) {
    fields.set(entry[1], entry[2]);
  }
  return fields;
}
8872
/**
 * Read a comma-separated list field from a parsed field map, trying several
 * alternative key spellings in order (e.g. "Plans" then "Plan").
 *
 * @param {Map<string, string>} fieldMap - Field name to raw value map.
 * @param {...string} keys - Candidate keys, checked in order; first present wins.
 * @returns {string[]} Trimmed list items; empty when the field is absent,
 *   the em-dash placeholder, or the literal "none".
 */
function parseListField(fieldMap, ...keys) {
  // First key with a defined value wins; otherwise fall back to the placeholder.
  const matchKey = keys.find((key) => fieldMap.get(key) !== void 0);
  const raw = matchKey === void 0 ? EM_DASH : fieldMap.get(matchKey);
  // Both the em-dash placeholder and an explicit "none" mean an empty list.
  if (raw === EM_DASH || raw === "none") return [];
  return raw.split(",").map((item) => item.trim());
}
8884
+ function parseFeatureFields(name, body) {
8885
+ const fieldMap = extractFieldMap(body);
8495
8886
  const statusRaw = fieldMap.get("Status");
8496
8887
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
8497
8888
  return Err2(
@@ -8500,15 +8891,17 @@ function parseFeatureFields(name, body) {
8500
8891
  )
8501
8892
  );
8502
8893
  }
8503
- const status = statusRaw;
8504
8894
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
8505
- const spec = specRaw === EM_DASH ? null : specRaw;
8506
- const plansRaw = fieldMap.get("Plans") ?? fieldMap.get("Plan") ?? EM_DASH;
8507
- const plans = plansRaw === EM_DASH || plansRaw === "none" ? [] : plansRaw.split(",").map((p) => p.trim());
8508
- const blockedByRaw = fieldMap.get("Blocked by") ?? fieldMap.get("Blockers") ?? EM_DASH;
8509
- const blockedBy = blockedByRaw === EM_DASH || blockedByRaw === "none" ? [] : blockedByRaw.split(",").map((b) => b.trim());
8510
- const summary = fieldMap.get("Summary") ?? "";
8511
- return Ok2({ name, status, spec, plans, blockedBy, summary });
8895
+ const plans = parseListField(fieldMap, "Plans", "Plan");
8896
+ const blockedBy = parseListField(fieldMap, "Blocked by", "Blockers");
8897
+ return Ok2({
8898
+ name,
8899
+ status: statusRaw,
8900
+ spec: specRaw === EM_DASH ? null : specRaw,
8901
+ plans,
8902
+ blockedBy,
8903
+ summary: fieldMap.get("Summary") ?? ""
8904
+ });
8512
8905
  }
8513
8906
 
8514
8907
  // src/roadmap/serialize.ts
@@ -8559,8 +8952,8 @@ function serializeFeature(feature) {
8559
8952
  }
8560
8953
 
8561
8954
  // src/roadmap/sync.ts
8562
- import * as fs16 from "fs";
8563
- import * as path16 from "path";
8955
+ import * as fs18 from "fs";
8956
+ import * as path18 from "path";
8564
8957
  import { Ok as Ok3 } from "@harness-engineering/types";
8565
8958
  function inferStatus(feature, projectPath, allFeatures) {
8566
8959
  if (feature.blockedBy.length > 0) {
@@ -8575,10 +8968,10 @@ function inferStatus(feature, projectPath, allFeatures) {
8575
8968
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
8576
8969
  const useRootState = featuresWithPlans.length <= 1;
8577
8970
  if (useRootState) {
8578
- const rootStatePath = path16.join(projectPath, ".harness", "state.json");
8579
- if (fs16.existsSync(rootStatePath)) {
8971
+ const rootStatePath = path18.join(projectPath, ".harness", "state.json");
8972
+ if (fs18.existsSync(rootStatePath)) {
8580
8973
  try {
8581
- const raw = fs16.readFileSync(rootStatePath, "utf-8");
8974
+ const raw = fs18.readFileSync(rootStatePath, "utf-8");
8582
8975
  const state = JSON.parse(raw);
8583
8976
  if (state.progress) {
8584
8977
  for (const status of Object.values(state.progress)) {
@@ -8589,16 +8982,16 @@ function inferStatus(feature, projectPath, allFeatures) {
8589
8982
  }
8590
8983
  }
8591
8984
  }
8592
- const sessionsDir = path16.join(projectPath, ".harness", "sessions");
8593
- if (fs16.existsSync(sessionsDir)) {
8985
+ const sessionsDir = path18.join(projectPath, ".harness", "sessions");
8986
+ if (fs18.existsSync(sessionsDir)) {
8594
8987
  try {
8595
- const sessionDirs = fs16.readdirSync(sessionsDir, { withFileTypes: true });
8988
+ const sessionDirs = fs18.readdirSync(sessionsDir, { withFileTypes: true });
8596
8989
  for (const entry of sessionDirs) {
8597
8990
  if (!entry.isDirectory()) continue;
8598
- const autopilotPath = path16.join(sessionsDir, entry.name, "autopilot-state.json");
8599
- if (!fs16.existsSync(autopilotPath)) continue;
8991
+ const autopilotPath = path18.join(sessionsDir, entry.name, "autopilot-state.json");
8992
+ if (!fs18.existsSync(autopilotPath)) continue;
8600
8993
  try {
8601
- const raw = fs16.readFileSync(autopilotPath, "utf-8");
8994
+ const raw = fs18.readFileSync(autopilotPath, "utf-8");
8602
8995
  const autopilot = JSON.parse(raw);
8603
8996
  if (!autopilot.phases) continue;
8604
8997
  const linkedPhases = autopilot.phases.filter(
@@ -8678,17 +9071,17 @@ var EmitInteractionInputSchema = z6.object({
8678
9071
  });
8679
9072
 
8680
9073
  // src/blueprint/scanner.ts
8681
- import * as fs17 from "fs/promises";
8682
- import * as path17 from "path";
9074
+ import * as fs19 from "fs/promises";
9075
+ import * as path19 from "path";
8683
9076
  var ProjectScanner = class {
8684
9077
  constructor(rootDir) {
8685
9078
  this.rootDir = rootDir;
8686
9079
  }
8687
9080
  async scan() {
8688
- let projectName = path17.basename(this.rootDir);
9081
+ let projectName = path19.basename(this.rootDir);
8689
9082
  try {
8690
- const pkgPath = path17.join(this.rootDir, "package.json");
8691
- const pkgRaw = await fs17.readFile(pkgPath, "utf-8");
9083
+ const pkgPath = path19.join(this.rootDir, "package.json");
9084
+ const pkgRaw = await fs19.readFile(pkgPath, "utf-8");
8692
9085
  const pkg = JSON.parse(pkgRaw);
8693
9086
  if (pkg.name) projectName = pkg.name;
8694
9087
  } catch {
@@ -8729,8 +9122,8 @@ var ProjectScanner = class {
8729
9122
  };
8730
9123
 
8731
9124
  // src/blueprint/generator.ts
8732
- import * as fs18 from "fs/promises";
8733
- import * as path18 from "path";
9125
+ import * as fs20 from "fs/promises";
9126
+ import * as path20 from "path";
8734
9127
  import * as ejs from "ejs";
8735
9128
 
8736
9129
  // src/blueprint/templates.ts
@@ -8814,19 +9207,19 @@ var BlueprintGenerator = class {
8814
9207
  styles: STYLES,
8815
9208
  scripts: SCRIPTS
8816
9209
  });
8817
- await fs18.mkdir(options.outputDir, { recursive: true });
8818
- await fs18.writeFile(path18.join(options.outputDir, "index.html"), html);
9210
+ await fs20.mkdir(options.outputDir, { recursive: true });
9211
+ await fs20.writeFile(path20.join(options.outputDir, "index.html"), html);
8819
9212
  }
8820
9213
  };
8821
9214
 
8822
9215
  // src/update-checker.ts
8823
- import * as fs19 from "fs";
8824
- import * as path19 from "path";
9216
+ import * as fs21 from "fs";
9217
+ import * as path21 from "path";
8825
9218
  import * as os from "os";
8826
9219
  import { spawn } from "child_process";
8827
9220
/**
 * Locate the per-user update-check state file (~/.harness/update-check.json).
 *
 * @returns {string} Absolute path to the update-check state file.
 */
function getStatePath() {
  // Prefer an explicit HOME env var; empty/missing falls back to the OS-reported home dir.
  const homeDir = process.env["HOME"] || os.homedir();
  return path21.join(homeDir, ".harness", "update-check.json");
}
8831
9224
  function isUpdateCheckEnabled(configInterval) {
8832
9225
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -8839,7 +9232,7 @@ function shouldRunCheck(state, intervalMs) {
8839
9232
  }
8840
9233
  function readCheckState() {
8841
9234
  try {
8842
- const raw = fs19.readFileSync(getStatePath(), "utf-8");
9235
+ const raw = fs21.readFileSync(getStatePath(), "utf-8");
8843
9236
  const parsed = JSON.parse(raw);
8844
9237
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
8845
9238
  const state = parsed;
@@ -8856,7 +9249,7 @@ function readCheckState() {
8856
9249
  }
8857
9250
  function spawnBackgroundCheck(currentVersion) {
8858
9251
  const statePath = getStatePath();
8859
- const stateDir = path19.dirname(statePath);
9252
+ const stateDir = path21.dirname(statePath);
8860
9253
  const script = `
8861
9254
  const { execSync } = require('child_process');
8862
9255
  const fs = require('fs');
@@ -8910,7 +9303,7 @@ Run "harness update" to upgrade.`;
8910
9303
  }
8911
9304
 
8912
9305
  // src/index.ts
8913
- var VERSION = "0.11.0";
9306
+ var VERSION = "0.14.0";
8914
9307
  export {
8915
9308
  AGENT_DESCRIPTORS,
8916
9309
  ARCHITECTURE_DESCRIPTOR,
@@ -8990,6 +9383,7 @@ export {
8990
9383
  analyzeLearningPatterns,
8991
9384
  appendFailure,
8992
9385
  appendLearning,
9386
+ appendSessionEntry,
8993
9387
  applyFixes,
8994
9388
  applyHotspotDowngrade,
8995
9389
  archMatchers,
@@ -8997,12 +9391,14 @@ export {
8997
9391
  architecture,
8998
9392
  archiveFailures,
8999
9393
  archiveLearnings,
9394
+ archiveSession,
9000
9395
  archiveStream,
9001
9396
  buildDependencyGraph,
9002
9397
  buildExclusionSet,
9003
9398
  buildSnapshot,
9004
9399
  checkDocCoverage,
9005
9400
  checkEligibility,
9401
+ checkEvidenceCoverage,
9006
9402
  classifyFinding,
9007
9403
  clearFailuresCache,
9008
9404
  clearLearningsCache,
@@ -9086,6 +9482,8 @@ export {
9086
9482
  reactRules,
9087
9483
  readCheckState,
9088
9484
  readLockfile,
9485
+ readSessionSection,
9486
+ readSessionSections,
9089
9487
  removeContributions,
9090
9488
  removeProvenance,
9091
9489
  requestMultiplePeerReviews,
@@ -9119,8 +9517,10 @@ export {
9119
9517
  spawnBackgroundCheck,
9120
9518
  syncConstraintNodes,
9121
9519
  syncRoadmap,
9520
+ tagUncitedFindings,
9122
9521
  touchStream,
9123
9522
  trackAction,
9523
+ updateSessionEntryStatus,
9124
9524
  updateSessionIndex,
9125
9525
  validateAgentsMap,
9126
9526
  validateBoundaries,