@harness-engineering/core 0.13.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -109,6 +109,7 @@ __export(index_exports, {
109
109
  analyzeLearningPatterns: () => analyzeLearningPatterns,
110
110
  appendFailure: () => appendFailure,
111
111
  appendLearning: () => appendLearning,
112
+ appendSessionEntry: () => appendSessionEntry,
112
113
  applyFixes: () => applyFixes,
113
114
  applyHotspotDowngrade: () => applyHotspotDowngrade,
114
115
  archMatchers: () => archMatchers,
@@ -116,12 +117,14 @@ __export(index_exports, {
116
117
  architecture: () => architecture,
117
118
  archiveFailures: () => archiveFailures,
118
119
  archiveLearnings: () => archiveLearnings,
120
+ archiveSession: () => archiveSession,
119
121
  archiveStream: () => archiveStream,
120
122
  buildDependencyGraph: () => buildDependencyGraph,
121
123
  buildExclusionSet: () => buildExclusionSet,
122
124
  buildSnapshot: () => buildSnapshot,
123
125
  checkDocCoverage: () => checkDocCoverage,
124
126
  checkEligibility: () => checkEligibility,
127
+ checkEvidenceCoverage: () => checkEvidenceCoverage,
125
128
  classifyFinding: () => classifyFinding,
126
129
  clearFailuresCache: () => clearFailuresCache,
127
130
  clearLearningsCache: () => clearLearningsCache,
@@ -205,6 +208,8 @@ __export(index_exports, {
205
208
  reactRules: () => reactRules,
206
209
  readCheckState: () => readCheckState,
207
210
  readLockfile: () => readLockfile,
211
+ readSessionSection: () => readSessionSection,
212
+ readSessionSections: () => readSessionSections,
208
213
  removeContributions: () => removeContributions,
209
214
  removeProvenance: () => removeProvenance,
210
215
  requestMultiplePeerReviews: () => requestMultiplePeerReviews,
@@ -215,7 +220,7 @@ __export(index_exports, {
215
220
  resolveRuleSeverity: () => resolveRuleSeverity,
216
221
  resolveSessionDir: () => resolveSessionDir,
217
222
  resolveStreamPath: () => resolveStreamPath,
218
- resolveThresholds: () => resolveThresholds,
223
+ resolveThresholds: () => resolveThresholds2,
219
224
  runAll: () => runAll,
220
225
  runArchitectureAgent: () => runArchitectureAgent,
221
226
  runBugDetectionAgent: () => runBugDetectionAgent,
@@ -238,8 +243,10 @@ __export(index_exports, {
238
243
  spawnBackgroundCheck: () => spawnBackgroundCheck,
239
244
  syncConstraintNodes: () => syncConstraintNodes,
240
245
  syncRoadmap: () => syncRoadmap,
246
+ tagUncitedFindings: () => tagUncitedFindings,
241
247
  touchStream: () => touchStream,
242
248
  trackAction: () => trackAction,
249
+ updateSessionEntryStatus: () => updateSessionEntryStatus,
243
250
  updateSessionIndex: () => updateSessionIndex,
244
251
  validateAgentsMap: () => validateAgentsMap,
245
252
  validateBoundaries: () => validateBoundaries,
@@ -273,20 +280,21 @@ var import_types = require("@harness-engineering/types");
273
280
  // src/shared/fs-utils.ts
274
281
  var import_fs = require("fs");
275
282
  var import_util = require("util");
283
+ var import_node_path = require("path");
276
284
  var import_glob = require("glob");
277
285
  var accessAsync = (0, import_util.promisify)(import_fs.access);
278
286
  var readFileAsync = (0, import_util.promisify)(import_fs.readFile);
279
- async function fileExists(path20) {
287
+ async function fileExists(path22) {
280
288
  try {
281
- await accessAsync(path20, import_fs.constants.F_OK);
289
+ await accessAsync(path22, import_fs.constants.F_OK);
282
290
  return true;
283
291
  } catch {
284
292
  return false;
285
293
  }
286
294
  }
287
- async function readFileContent(path20) {
295
+ async function readFileContent(path22) {
288
296
  try {
289
- const content = await readFileAsync(path20, "utf-8");
297
+ const content = await readFileAsync(path22, "utf-8");
290
298
  return (0, import_types.Ok)(content);
291
299
  } catch (error) {
292
300
  return (0, import_types.Err)(error);
@@ -295,6 +303,9 @@ async function readFileContent(path20) {
295
303
  async function findFiles(pattern, cwd = process.cwd()) {
296
304
  return (0, import_glob.glob)(pattern, { cwd, absolute: true });
297
305
  }
306
+ function relativePosix(from, to) {
307
+ return (0, import_node_path.relative)(from, to).replaceAll("\\", "/");
308
+ }
298
309
 
299
310
  // src/validation/file-structure.ts
300
311
  async function validateFileStructure(projectPath, conventions) {
@@ -334,15 +345,15 @@ function validateConfig(data, schema) {
334
345
  let message = "Configuration validation failed";
335
346
  const suggestions = [];
336
347
  if (firstError) {
337
- const path20 = firstError.path.join(".");
338
- const pathDisplay = path20 ? ` at "${path20}"` : "";
348
+ const path22 = firstError.path.join(".");
349
+ const pathDisplay = path22 ? ` at "${path22}"` : "";
339
350
  if (firstError.code === "invalid_type") {
340
351
  const received = firstError.received;
341
352
  const expected = firstError.expected;
342
353
  if (received === "undefined") {
343
354
  code = "MISSING_FIELD";
344
355
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
345
- suggestions.push(`Field "${path20}" is required and must be of type "${expected}"`);
356
+ suggestions.push(`Field "${path22}" is required and must be of type "${expected}"`);
346
357
  } else {
347
358
  code = "INVALID_TYPE";
348
359
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -496,6 +507,43 @@ function extractMarkdownLinks(content) {
496
507
  }
497
508
  return links;
498
509
  }
510
+ function isDescriptionTerminator(trimmed) {
511
+ return trimmed.startsWith("#") || trimmed.startsWith("-") || trimmed.startsWith("*") || trimmed.startsWith("```");
512
+ }
513
+ function extractDescription(sectionLines) {
514
+ const descriptionLines = [];
515
+ for (const line of sectionLines) {
516
+ const trimmed = line.trim();
517
+ if (trimmed === "") {
518
+ if (descriptionLines.length > 0) break;
519
+ continue;
520
+ }
521
+ if (isDescriptionTerminator(trimmed)) break;
522
+ descriptionLines.push(trimmed);
523
+ }
524
+ return descriptionLines.length > 0 ? descriptionLines.join(" ") : void 0;
525
+ }
526
+ function buildAgentMapSection(section, lines) {
527
+ const endIndex = section.endIndex ?? lines.length;
528
+ const sectionLines = lines.slice(section.startIndex + 1, endIndex);
529
+ const sectionContent = sectionLines.join("\n");
530
+ const links = extractMarkdownLinks(sectionContent).map((link) => ({
531
+ ...link,
532
+ line: link.line + section.startIndex + 1,
533
+ exists: false
534
+ }));
535
+ const result = {
536
+ title: section.title,
537
+ level: section.level,
538
+ line: section.line,
539
+ links
540
+ };
541
+ const description = extractDescription(sectionLines);
542
+ if (description) {
543
+ result.description = description;
544
+ }
545
+ return result;
546
+ }
499
547
  function extractSections(content) {
500
548
  const lines = content.split("\n");
501
549
  const sections = [];
@@ -508,7 +556,6 @@ function extractSections(content) {
508
556
  title: match[2].trim(),
509
557
  level: match[1].length,
510
558
  line: i + 1,
511
- // 1-indexed
512
559
  startIndex: i
513
560
  });
514
561
  }
@@ -520,62 +567,29 @@ function extractSections(content) {
520
567
  currentSection.endIndex = nextSection ? nextSection.startIndex : lines.length;
521
568
  }
522
569
  }
523
- return sections.map((section) => {
524
- const endIndex = section.endIndex ?? lines.length;
525
- const sectionLines = lines.slice(section.startIndex + 1, endIndex);
526
- const sectionContent = sectionLines.join("\n");
527
- const links = extractMarkdownLinks(sectionContent).map((link) => ({
528
- ...link,
529
- line: link.line + section.startIndex + 1,
530
- // Adjust line number
531
- exists: false
532
- // Will be set later by validateAgentsMap
533
- }));
534
- const descriptionLines = [];
535
- for (const line of sectionLines) {
536
- const trimmed = line.trim();
537
- if (trimmed === "") {
538
- if (descriptionLines.length > 0) break;
539
- continue;
540
- }
541
- if (trimmed.startsWith("#")) break;
542
- if (trimmed.startsWith("-") || trimmed.startsWith("*")) break;
543
- if (trimmed.startsWith("```")) break;
544
- descriptionLines.push(trimmed);
545
- }
546
- const result = {
547
- title: section.title,
548
- level: section.level,
549
- line: section.line,
550
- links
551
- };
552
- if (descriptionLines.length > 0) {
553
- result.description = descriptionLines.join(" ");
554
- }
555
- return result;
556
- });
570
+ return sections.map((section) => buildAgentMapSection(section, lines));
557
571
  }
558
- function isExternalLink(path20) {
559
- return path20.startsWith("http://") || path20.startsWith("https://") || path20.startsWith("#") || path20.startsWith("mailto:");
572
+ function isExternalLink(path22) {
573
+ return path22.startsWith("http://") || path22.startsWith("https://") || path22.startsWith("#") || path22.startsWith("mailto:");
560
574
  }
561
575
  function resolveLinkPath(linkPath, baseDir) {
562
576
  return linkPath.startsWith(".") ? (0, import_path.join)(baseDir, linkPath) : linkPath;
563
577
  }
564
- async function validateAgentsMap(path20 = "./AGENTS.md") {
565
- const contentResult = await readFileContent(path20);
578
+ async function validateAgentsMap(path22 = "./AGENTS.md") {
579
+ const contentResult = await readFileContent(path22);
566
580
  if (!contentResult.ok) {
567
581
  return (0, import_types.Err)(
568
582
  createError(
569
583
  "PARSE_ERROR",
570
584
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
571
- { path: path20 },
585
+ { path: path22 },
572
586
  ["Ensure the file exists", "Check file permissions"]
573
587
  )
574
588
  );
575
589
  }
576
590
  const content = contentResult.value;
577
591
  const sections = extractSections(content);
578
- const baseDir = (0, import_path.dirname)(path20);
592
+ const baseDir = (0, import_path.dirname)(path22);
579
593
  const sectionTitles = sections.map((s) => s.title);
580
594
  const missingSections = REQUIRED_SECTIONS.filter(
581
595
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -656,7 +670,7 @@ async function checkDocCoverage(domain, options = {}) {
656
670
  try {
657
671
  const sourceFiles = await findFiles("**/*.{ts,js,tsx,jsx}", sourceDir);
658
672
  const filteredSourceFiles = sourceFiles.filter((file) => {
659
- const relativePath = (0, import_path2.relative)(sourceDir, file);
673
+ const relativePath = relativePosix(sourceDir, file);
660
674
  return !excludePatterns.some((pattern) => {
661
675
  return (0, import_minimatch.minimatch)(relativePath, pattern, { dot: true }) || (0, import_minimatch.minimatch)(file, pattern, { dot: true });
662
676
  });
@@ -679,7 +693,7 @@ async function checkDocCoverage(domain, options = {}) {
679
693
  const undocumented = [];
680
694
  const gaps = [];
681
695
  for (const sourceFile of filteredSourceFiles) {
682
- const relativePath = (0, import_path2.relative)(sourceDir, sourceFile);
696
+ const relativePath = relativePosix(sourceDir, sourceFile);
683
697
  const fileName = (0, import_path2.basename)(sourceFile);
684
698
  const isDocumented = documentedPaths.has(relativePath) || documentedPaths.has(fileName) || documentedPaths.has(`src/${relativePath}`);
685
699
  if (isDocumented) {
@@ -716,8 +730,8 @@ async function checkDocCoverage(domain, options = {}) {
716
730
 
717
731
  // src/context/knowledge-map.ts
718
732
  var import_path3 = require("path");
719
- function suggestFix(path20, existingFiles) {
720
- const targetName = (0, import_path3.basename)(path20).toLowerCase();
733
+ function suggestFix(path22, existingFiles) {
734
+ const targetName = (0, import_path3.basename)(path22).toLowerCase();
721
735
  const similar = existingFiles.find((file) => {
722
736
  const fileName = (0, import_path3.basename)(file).toLowerCase();
723
737
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -725,7 +739,7 @@ function suggestFix(path20, existingFiles) {
725
739
  if (similar) {
726
740
  return `Did you mean "${similar}"?`;
727
741
  }
728
- return `Create the file "${path20}" or remove the link`;
742
+ return `Create the file "${path22}" or remove the link`;
729
743
  }
730
744
  async function validateKnowledgeMap(rootDir = process.cwd()) {
731
745
  const agentsPath = (0, import_path3.join)(rootDir, "AGENTS.md");
@@ -739,7 +753,7 @@ async function validateKnowledgeMap(rootDir = process.cwd()) {
739
753
  totalLinks: agentsTotalLinks
740
754
  } = agentsResult.value;
741
755
  const existingFiles = await findFiles("**/*", rootDir);
742
- const relativeExistingFiles = existingFiles.map((f) => (0, import_path3.relative)(rootDir, f));
756
+ const relativeExistingFiles = existingFiles.map((f) => relativePosix(rootDir, f));
743
757
  const brokenLinks = agentsBrokenLinks.map((link) => {
744
758
  const section = sections.find(
745
759
  (s) => s.links.some((l) => l.path === link.path && l.line === link.line)
@@ -780,7 +794,7 @@ var DEFAULT_SECTIONS = [
780
794
  function groupByDirectory(files, rootDir) {
781
795
  const groups = /* @__PURE__ */ new Map();
782
796
  for (const file of files) {
783
- const relativePath = (0, import_path4.relative)(rootDir, file);
797
+ const relativePath = relativePosix(rootDir, file);
784
798
  const dir = (0, import_path4.dirname)(relativePath);
785
799
  if (!groups.has(dir)) {
786
800
  groups.set(dir, []);
@@ -836,7 +850,7 @@ async function generateAgentsMap(config, graphSections) {
836
850
  allFiles.push(...files);
837
851
  }
838
852
  const filteredFiles = allFiles.filter((file) => {
839
- const relativePath = (0, import_path4.relative)(rootDir, file);
853
+ const relativePath = relativePosix(rootDir, file);
840
854
  return !matchesExcludePattern(relativePath, excludePaths);
841
855
  });
842
856
  lines.push("## Repository Structure");
@@ -864,11 +878,11 @@ async function generateAgentsMap(config, graphSections) {
864
878
  }
865
879
  const sectionFiles = await findFiles(section.pattern, rootDir);
866
880
  const filteredSectionFiles = sectionFiles.filter((file) => {
867
- const relativePath = (0, import_path4.relative)(rootDir, file);
881
+ const relativePath = relativePosix(rootDir, file);
868
882
  return !matchesExcludePattern(relativePath, excludePaths);
869
883
  });
870
884
  for (const file of filteredSectionFiles.slice(0, 20)) {
871
- lines.push(formatFileLink((0, import_path4.relative)(rootDir, file)));
885
+ lines.push(formatFileLink(relativePosix(rootDir, file)));
872
886
  }
873
887
  if (filteredSectionFiles.length > 20) {
874
888
  lines.push(`- _... and ${filteredSectionFiles.length - 20} more files_`);
@@ -1139,8 +1153,8 @@ async function buildDependencyGraph(files, parser, graphDependencyData) {
1139
1153
  function checkLayerViolations(graph, layers, rootDir) {
1140
1154
  const violations = [];
1141
1155
  for (const edge of graph.edges) {
1142
- const fromRelative = (0, import_path5.relative)(rootDir, edge.from);
1143
- const toRelative = (0, import_path5.relative)(rootDir, edge.to);
1156
+ const fromRelative = relativePosix(rootDir, edge.from);
1157
+ const toRelative = relativePosix(rootDir, edge.to);
1144
1158
  const fromLayer = resolveFileToLayer(fromRelative, layers);
1145
1159
  const toLayer = resolveFileToLayer(toRelative, layers);
1146
1160
  if (!fromLayer || !toLayer) continue;
@@ -1224,65 +1238,71 @@ async function validateDependencies(config) {
1224
1238
  }
1225
1239
 
1226
1240
  // src/constraints/circular-deps.ts
1227
- function tarjanSCC(graph) {
1228
- const nodeMap = /* @__PURE__ */ new Map();
1229
- const stack = [];
1230
- const sccs = [];
1231
- let index = 0;
1241
+ function buildAdjacencyList(graph) {
1232
1242
  const adjacency = /* @__PURE__ */ new Map();
1243
+ const nodeSet = new Set(graph.nodes);
1233
1244
  for (const node of graph.nodes) {
1234
1245
  adjacency.set(node, []);
1235
1246
  }
1236
1247
  for (const edge of graph.edges) {
1237
1248
  const neighbors = adjacency.get(edge.from);
1238
- if (neighbors && graph.nodes.includes(edge.to)) {
1249
+ if (neighbors && nodeSet.has(edge.to)) {
1239
1250
  neighbors.push(edge.to);
1240
1251
  }
1241
1252
  }
1242
- function strongConnect(node) {
1243
- nodeMap.set(node, {
1244
- index,
1245
- lowlink: index,
1246
- onStack: true
1247
- });
1248
- index++;
1249
- stack.push(node);
1250
- const neighbors = adjacency.get(node) ?? [];
1251
- for (const neighbor of neighbors) {
1252
- const neighborData = nodeMap.get(neighbor);
1253
- if (!neighborData) {
1254
- strongConnect(neighbor);
1255
- const nodeData2 = nodeMap.get(node);
1256
- const updatedNeighborData = nodeMap.get(neighbor);
1257
- nodeData2.lowlink = Math.min(nodeData2.lowlink, updatedNeighborData.lowlink);
1258
- } else if (neighborData.onStack) {
1259
- const nodeData2 = nodeMap.get(node);
1260
- nodeData2.lowlink = Math.min(nodeData2.lowlink, neighborData.index);
1261
- }
1262
- }
1263
- const nodeData = nodeMap.get(node);
1264
- if (nodeData.lowlink === nodeData.index) {
1265
- const scc = [];
1266
- let w;
1267
- do {
1268
- w = stack.pop();
1269
- nodeMap.get(w).onStack = false;
1270
- scc.push(w);
1271
- } while (w !== node);
1272
- if (scc.length > 1) {
1273
- sccs.push(scc);
1274
- } else if (scc.length === 1) {
1275
- const selfNode = scc[0];
1276
- const selfNeighbors = adjacency.get(selfNode) ?? [];
1277
- if (selfNeighbors.includes(selfNode)) {
1278
- sccs.push(scc);
1279
- }
1280
- }
1253
+ return adjacency;
1254
+ }
1255
+ function isCyclicSCC(scc, adjacency) {
1256
+ if (scc.length > 1) return true;
1257
+ if (scc.length === 1) {
1258
+ const selfNode = scc[0];
1259
+ const selfNeighbors = adjacency.get(selfNode) ?? [];
1260
+ return selfNeighbors.includes(selfNode);
1261
+ }
1262
+ return false;
1263
+ }
1264
+ function processNeighbors(node, neighbors, nodeMap, stack, adjacency, sccs, indexRef) {
1265
+ for (const neighbor of neighbors) {
1266
+ const neighborData = nodeMap.get(neighbor);
1267
+ if (!neighborData) {
1268
+ strongConnectImpl(neighbor, nodeMap, stack, adjacency, sccs, indexRef);
1269
+ const nodeData = nodeMap.get(node);
1270
+ const updatedNeighborData = nodeMap.get(neighbor);
1271
+ nodeData.lowlink = Math.min(nodeData.lowlink, updatedNeighborData.lowlink);
1272
+ } else if (neighborData.onStack) {
1273
+ const nodeData = nodeMap.get(node);
1274
+ nodeData.lowlink = Math.min(nodeData.lowlink, neighborData.index);
1281
1275
  }
1282
1276
  }
1277
+ }
1278
+ function strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef) {
1279
+ nodeMap.set(node, { index: indexRef.value, lowlink: indexRef.value, onStack: true });
1280
+ indexRef.value++;
1281
+ stack.push(node);
1282
+ processNeighbors(node, adjacency.get(node) ?? [], nodeMap, stack, adjacency, sccs, indexRef);
1283
+ const nodeData = nodeMap.get(node);
1284
+ if (nodeData.lowlink === nodeData.index) {
1285
+ const scc = [];
1286
+ let w;
1287
+ do {
1288
+ w = stack.pop();
1289
+ nodeMap.get(w).onStack = false;
1290
+ scc.push(w);
1291
+ } while (w !== node);
1292
+ if (isCyclicSCC(scc, adjacency)) {
1293
+ sccs.push(scc);
1294
+ }
1295
+ }
1296
+ }
1297
+ function tarjanSCC(graph) {
1298
+ const nodeMap = /* @__PURE__ */ new Map();
1299
+ const stack = [];
1300
+ const sccs = [];
1301
+ const indexRef = { value: 0 };
1302
+ const adjacency = buildAdjacencyList(graph);
1283
1303
  for (const node of graph.nodes) {
1284
1304
  if (!nodeMap.has(node)) {
1285
- strongConnect(node);
1305
+ strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef);
1286
1306
  }
1287
1307
  }
1288
1308
  return sccs;
@@ -1325,8 +1345,8 @@ function createBoundaryValidator(schema, name) {
1325
1345
  return (0, import_types.Ok)(result.data);
1326
1346
  }
1327
1347
  const suggestions = result.error.issues.map((issue) => {
1328
- const path20 = issue.path.join(".");
1329
- return path20 ? `${path20}: ${issue.message}` : issue.message;
1348
+ const path22 = issue.path.join(".");
1349
+ return path22 ? `${path22}: ${issue.message}` : issue.message;
1330
1350
  });
1331
1351
  return (0, import_types.Err)(
1332
1352
  createError(
@@ -1548,175 +1568,183 @@ function stringArraysEqual(a, b) {
1548
1568
  const sortedB = [...b].sort();
1549
1569
  return sortedA.every((val, i) => val === sortedB[i]);
1550
1570
  }
1551
- function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1552
- const config = { ...localConfig };
1553
- const contributions = {};
1554
- const conflicts = [];
1555
- if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1556
- const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1557
- const mergedLayers = [...localLayers];
1558
- const contributedLayerNames = [];
1559
- for (const bundleLayer of bundleConstraints.layers) {
1560
- const existing = localLayers.find((l) => l.name === bundleLayer.name);
1561
- if (!existing) {
1562
- mergedLayers.push(bundleLayer);
1563
- contributedLayerNames.push(bundleLayer.name);
1564
- } else {
1565
- const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1566
- if (!same) {
1567
- conflicts.push({
1568
- section: "layers",
1569
- key: bundleLayer.name,
1570
- localValue: existing,
1571
- packageValue: bundleLayer,
1572
- description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1573
- });
1574
- }
1571
+ function mergeLayers(localConfig, bundleLayers, config, contributions, conflicts) {
1572
+ const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1573
+ const mergedLayers = [...localLayers];
1574
+ const contributedLayerNames = [];
1575
+ for (const bundleLayer of bundleLayers) {
1576
+ const existing = localLayers.find((l) => l.name === bundleLayer.name);
1577
+ if (!existing) {
1578
+ mergedLayers.push(bundleLayer);
1579
+ contributedLayerNames.push(bundleLayer.name);
1580
+ } else {
1581
+ const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1582
+ if (!same) {
1583
+ conflicts.push({
1584
+ section: "layers",
1585
+ key: bundleLayer.name,
1586
+ localValue: existing,
1587
+ packageValue: bundleLayer,
1588
+ description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1589
+ });
1575
1590
  }
1576
1591
  }
1577
- config.layers = mergedLayers;
1578
- if (contributedLayerNames.length > 0) {
1579
- contributions.layers = contributedLayerNames;
1592
+ }
1593
+ config.layers = mergedLayers;
1594
+ if (contributedLayerNames.length > 0) contributions.layers = contributedLayerNames;
1595
+ }
1596
+ function mergeForbiddenImports(localConfig, bundleRules, config, contributions, conflicts) {
1597
+ const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1598
+ const mergedFI = [...localFI];
1599
+ const contributedFromKeys = [];
1600
+ for (const bundleRule of bundleRules) {
1601
+ const existing = localFI.find((r) => r.from === bundleRule.from);
1602
+ if (!existing) {
1603
+ const entry = { from: bundleRule.from, disallow: bundleRule.disallow };
1604
+ if (bundleRule.message !== void 0) entry.message = bundleRule.message;
1605
+ mergedFI.push(entry);
1606
+ contributedFromKeys.push(bundleRule.from);
1607
+ } else {
1608
+ if (!stringArraysEqual(existing.disallow, bundleRule.disallow)) {
1609
+ conflicts.push({
1610
+ section: "forbiddenImports",
1611
+ key: bundleRule.from,
1612
+ localValue: existing,
1613
+ packageValue: bundleRule,
1614
+ description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1615
+ });
1616
+ }
1580
1617
  }
1581
1618
  }
1582
- if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1583
- const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1584
- const mergedFI = [...localFI];
1585
- const contributedFromKeys = [];
1586
- for (const bundleRule of bundleConstraints.forbiddenImports) {
1587
- const existing = localFI.find((r) => r.from === bundleRule.from);
1588
- if (!existing) {
1589
- const entry = {
1590
- from: bundleRule.from,
1591
- disallow: bundleRule.disallow
1592
- };
1593
- if (bundleRule.message !== void 0) {
1594
- entry.message = bundleRule.message;
1595
- }
1596
- mergedFI.push(entry);
1597
- contributedFromKeys.push(bundleRule.from);
1598
- } else {
1599
- const same = stringArraysEqual(existing.disallow, bundleRule.disallow);
1600
- if (!same) {
1619
+ config.forbiddenImports = mergedFI;
1620
+ if (contributedFromKeys.length > 0) contributions.forbiddenImports = contributedFromKeys;
1621
+ }
1622
+ function mergeBoundaries(localConfig, bundleBoundaries, config, contributions) {
1623
+ const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1624
+ const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1625
+ const newSchemas = [];
1626
+ for (const schema of bundleBoundaries.requireSchema ?? []) {
1627
+ if (!localSchemas.has(schema)) {
1628
+ newSchemas.push(schema);
1629
+ localSchemas.add(schema);
1630
+ }
1631
+ }
1632
+ config.boundaries = { requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas] };
1633
+ if (newSchemas.length > 0) contributions.boundaries = newSchemas;
1634
+ }
1635
+ function mergeArchitecture(localConfig, bundleArch, config, contributions, conflicts) {
1636
+ const localArch = localConfig.architecture ?? { thresholds: {}, modules: {} };
1637
+ const mergedThresholds = { ...localArch.thresholds };
1638
+ const contributedThresholdKeys = [];
1639
+ for (const [category, value] of Object.entries(bundleArch.thresholds ?? {})) {
1640
+ if (!(category in mergedThresholds)) {
1641
+ mergedThresholds[category] = value;
1642
+ contributedThresholdKeys.push(category);
1643
+ } else if (!deepEqual(mergedThresholds[category], value)) {
1644
+ conflicts.push({
1645
+ section: "architecture.thresholds",
1646
+ key: category,
1647
+ localValue: mergedThresholds[category],
1648
+ packageValue: value,
1649
+ description: `Architecture threshold '${category}' already exists locally with a different value`
1650
+ });
1651
+ }
1652
+ }
1653
+ const mergedModules = { ...localArch.modules };
1654
+ const contributedModuleKeys = [];
1655
+ for (const [modulePath, bundleCategoryMap] of Object.entries(bundleArch.modules ?? {})) {
1656
+ if (!(modulePath in mergedModules)) {
1657
+ mergedModules[modulePath] = bundleCategoryMap;
1658
+ for (const cat of Object.keys(bundleCategoryMap))
1659
+ contributedModuleKeys.push(`${modulePath}:${cat}`);
1660
+ } else {
1661
+ const mergedCategoryMap = { ...mergedModules[modulePath] };
1662
+ for (const [category, value] of Object.entries(bundleCategoryMap)) {
1663
+ if (!(category in mergedCategoryMap)) {
1664
+ mergedCategoryMap[category] = value;
1665
+ contributedModuleKeys.push(`${modulePath}:${category}`);
1666
+ } else if (!deepEqual(mergedCategoryMap[category], value)) {
1601
1667
  conflicts.push({
1602
- section: "forbiddenImports",
1603
- key: bundleRule.from,
1604
- localValue: existing,
1605
- packageValue: bundleRule,
1606
- description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1668
+ section: "architecture.modules",
1669
+ key: `${modulePath}:${category}`,
1670
+ localValue: mergedCategoryMap[category],
1671
+ packageValue: value,
1672
+ description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1607
1673
  });
1608
1674
  }
1609
1675
  }
1676
+ mergedModules[modulePath] = mergedCategoryMap;
1677
+ }
1678
+ }
1679
+ config.architecture = { ...localArch, thresholds: mergedThresholds, modules: mergedModules };
1680
+ if (contributedThresholdKeys.length > 0)
1681
+ contributions["architecture.thresholds"] = contributedThresholdKeys;
1682
+ if (contributedModuleKeys.length > 0)
1683
+ contributions["architecture.modules"] = contributedModuleKeys;
1684
+ }
1685
+ function mergeSecurityRules(localConfig, bundleRules, config, contributions, conflicts) {
1686
+ const localSecurity = localConfig.security ?? { rules: {} };
1687
+ const localRules = localSecurity.rules ?? {};
1688
+ const mergedRules = { ...localRules };
1689
+ const contributedRuleIds = [];
1690
+ for (const [ruleId, severity] of Object.entries(bundleRules)) {
1691
+ if (!(ruleId in mergedRules)) {
1692
+ mergedRules[ruleId] = severity;
1693
+ contributedRuleIds.push(ruleId);
1694
+ } else if (mergedRules[ruleId] !== severity) {
1695
+ conflicts.push({
1696
+ section: "security.rules",
1697
+ key: ruleId,
1698
+ localValue: mergedRules[ruleId],
1699
+ packageValue: severity,
1700
+ description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1701
+ });
1610
1702
  }
1611
- config.forbiddenImports = mergedFI;
1612
- if (contributedFromKeys.length > 0) {
1613
- contributions.forbiddenImports = contributedFromKeys;
1614
- }
1703
+ }
1704
+ config.security = { ...localSecurity, rules: mergedRules };
1705
+ if (contributedRuleIds.length > 0) contributions["security.rules"] = contributedRuleIds;
1706
+ }
1707
+ function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1708
+ const config = { ...localConfig };
1709
+ const contributions = {};
1710
+ const conflicts = [];
1711
+ if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1712
+ mergeLayers(localConfig, bundleConstraints.layers, config, contributions, conflicts);
1713
+ }
1714
+ if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1715
+ mergeForbiddenImports(
1716
+ localConfig,
1717
+ bundleConstraints.forbiddenImports,
1718
+ config,
1719
+ contributions,
1720
+ conflicts
1721
+ );
1615
1722
  }
1616
1723
  if (bundleConstraints.boundaries) {
1617
- const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1618
- const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1619
- const bundleSchemas = bundleConstraints.boundaries.requireSchema ?? [];
1620
- const newSchemas = [];
1621
- for (const schema of bundleSchemas) {
1622
- if (!localSchemas.has(schema)) {
1623
- newSchemas.push(schema);
1624
- localSchemas.add(schema);
1625
- }
1626
- }
1627
- config.boundaries = {
1628
- requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas]
1629
- };
1630
- if (newSchemas.length > 0) {
1631
- contributions.boundaries = newSchemas;
1632
- }
1724
+ mergeBoundaries(
1725
+ localConfig,
1726
+ bundleConstraints.boundaries,
1727
+ config,
1728
+ contributions
1729
+ );
1633
1730
  }
1634
1731
  if (bundleConstraints.architecture) {
1635
- const localArch = localConfig.architecture ?? {
1636
- thresholds: {},
1637
- modules: {}
1638
- };
1639
- const mergedThresholds = { ...localArch.thresholds };
1640
- const contributedThresholdKeys = [];
1641
- const bundleThresholds = bundleConstraints.architecture.thresholds ?? {};
1642
- for (const [category, value] of Object.entries(bundleThresholds)) {
1643
- if (!(category in mergedThresholds)) {
1644
- mergedThresholds[category] = value;
1645
- contributedThresholdKeys.push(category);
1646
- } else if (!deepEqual(mergedThresholds[category], value)) {
1647
- conflicts.push({
1648
- section: "architecture.thresholds",
1649
- key: category,
1650
- localValue: mergedThresholds[category],
1651
- packageValue: value,
1652
- description: `Architecture threshold '${category}' already exists locally with a different value`
1653
- });
1654
- }
1655
- }
1656
- const mergedModules = { ...localArch.modules };
1657
- const contributedModuleKeys = [];
1658
- const bundleModules = bundleConstraints.architecture.modules ?? {};
1659
- for (const [modulePath, bundleCategoryMap] of Object.entries(bundleModules)) {
1660
- if (!(modulePath in mergedModules)) {
1661
- mergedModules[modulePath] = bundleCategoryMap;
1662
- for (const cat of Object.keys(bundleCategoryMap)) {
1663
- contributedModuleKeys.push(`${modulePath}:${cat}`);
1664
- }
1665
- } else {
1666
- const localCategoryMap = mergedModules[modulePath];
1667
- const mergedCategoryMap = { ...localCategoryMap };
1668
- for (const [category, value] of Object.entries(bundleCategoryMap)) {
1669
- if (!(category in mergedCategoryMap)) {
1670
- mergedCategoryMap[category] = value;
1671
- contributedModuleKeys.push(`${modulePath}:${category}`);
1672
- } else if (!deepEqual(mergedCategoryMap[category], value)) {
1673
- conflicts.push({
1674
- section: "architecture.modules",
1675
- key: `${modulePath}:${category}`,
1676
- localValue: mergedCategoryMap[category],
1677
- packageValue: value,
1678
- description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1679
- });
1680
- }
1681
- }
1682
- mergedModules[modulePath] = mergedCategoryMap;
1683
- }
1684
- }
1685
- config.architecture = {
1686
- ...localArch,
1687
- thresholds: mergedThresholds,
1688
- modules: mergedModules
1689
- };
1690
- if (contributedThresholdKeys.length > 0) {
1691
- contributions["architecture.thresholds"] = contributedThresholdKeys;
1692
- }
1693
- if (contributedModuleKeys.length > 0) {
1694
- contributions["architecture.modules"] = contributedModuleKeys;
1695
- }
1732
+ mergeArchitecture(
1733
+ localConfig,
1734
+ bundleConstraints.architecture,
1735
+ config,
1736
+ contributions,
1737
+ conflicts
1738
+ );
1696
1739
  }
1697
1740
  if (bundleConstraints.security?.rules) {
1698
- const localSecurity = localConfig.security ?? { rules: {} };
1699
- const localRules = localSecurity.rules ?? {};
1700
- const mergedRules = { ...localRules };
1701
- const contributedRuleIds = [];
1702
- for (const [ruleId, severity] of Object.entries(bundleConstraints.security.rules)) {
1703
- if (!(ruleId in mergedRules)) {
1704
- mergedRules[ruleId] = severity;
1705
- contributedRuleIds.push(ruleId);
1706
- } else if (mergedRules[ruleId] !== severity) {
1707
- conflicts.push({
1708
- section: "security.rules",
1709
- key: ruleId,
1710
- localValue: mergedRules[ruleId],
1711
- packageValue: severity,
1712
- description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1713
- });
1714
- }
1715
- }
1716
- config.security = { ...localSecurity, rules: mergedRules };
1717
- if (contributedRuleIds.length > 0) {
1718
- contributions["security.rules"] = contributedRuleIds;
1719
- }
1741
+ mergeSecurityRules(
1742
+ localConfig,
1743
+ bundleConstraints.security.rules,
1744
+ config,
1745
+ contributions,
1746
+ conflicts
1747
+ );
1720
1748
  }
1721
1749
  return { config, contributions, conflicts };
1722
1750
  }
@@ -1877,14 +1905,84 @@ function walk(node, visitor) {
1877
1905
  }
1878
1906
  }
1879
1907
  }
1908
+ function makeLocation(node) {
1909
+ return {
1910
+ file: "",
1911
+ line: node.loc?.start.line ?? 0,
1912
+ column: node.loc?.start.column ?? 0
1913
+ };
1914
+ }
1915
+ function processImportSpecifiers(importDecl, imp) {
1916
+ for (const spec of importDecl.specifiers) {
1917
+ if (spec.type === "ImportDefaultSpecifier") {
1918
+ imp.default = spec.local.name;
1919
+ } else if (spec.type === "ImportNamespaceSpecifier") {
1920
+ imp.namespace = spec.local.name;
1921
+ } else if (spec.type === "ImportSpecifier") {
1922
+ imp.specifiers.push(spec.local.name);
1923
+ if (spec.importKind === "type") {
1924
+ imp.kind = "type";
1925
+ }
1926
+ }
1927
+ }
1928
+ }
1929
+ function getExportedName(exported) {
1930
+ return exported.type === "Identifier" ? exported.name : String(exported.value);
1931
+ }
1932
+ function processReExportSpecifiers(exportDecl, exports2) {
1933
+ for (const spec of exportDecl.specifiers) {
1934
+ if (spec.type !== "ExportSpecifier") continue;
1935
+ exports2.push({
1936
+ name: getExportedName(spec.exported),
1937
+ type: "named",
1938
+ location: makeLocation(exportDecl),
1939
+ isReExport: true,
1940
+ source: exportDecl.source.value
1941
+ });
1942
+ }
1943
+ }
1944
+ function processExportDeclaration(exportDecl, exports2) {
1945
+ const decl = exportDecl.declaration;
1946
+ if (!decl) return;
1947
+ if (decl.type === "VariableDeclaration") {
1948
+ for (const declarator of decl.declarations) {
1949
+ if (declarator.id.type === "Identifier") {
1950
+ exports2.push({
1951
+ name: declarator.id.name,
1952
+ type: "named",
1953
+ location: makeLocation(decl),
1954
+ isReExport: false
1955
+ });
1956
+ }
1957
+ }
1958
+ } else if ((decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") && decl.id) {
1959
+ exports2.push({
1960
+ name: decl.id.name,
1961
+ type: "named",
1962
+ location: makeLocation(decl),
1963
+ isReExport: false
1964
+ });
1965
+ }
1966
+ }
1967
+ function processExportListSpecifiers(exportDecl, exports2) {
1968
+ for (const spec of exportDecl.specifiers) {
1969
+ if (spec.type !== "ExportSpecifier") continue;
1970
+ exports2.push({
1971
+ name: getExportedName(spec.exported),
1972
+ type: "named",
1973
+ location: makeLocation(exportDecl),
1974
+ isReExport: false
1975
+ });
1976
+ }
1977
+ }
1880
1978
  var TypeScriptParser = class {
1881
1979
  name = "typescript";
1882
1980
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1883
- async parseFile(path20) {
1884
- const contentResult = await readFileContent(path20);
1981
+ async parseFile(path22) {
1982
+ const contentResult = await readFileContent(path22);
1885
1983
  if (!contentResult.ok) {
1886
1984
  return (0, import_types.Err)(
1887
- createParseError("NOT_FOUND", `File not found: ${path20}`, { path: path20 }, [
1985
+ createParseError("NOT_FOUND", `File not found: ${path22}`, { path: path22 }, [
1888
1986
  "Check that the file exists",
1889
1987
  "Verify the path is correct"
1890
1988
  ])
@@ -1894,7 +1992,7 @@ var TypeScriptParser = class {
1894
1992
  const ast = (0, import_typescript_estree.parse)(contentResult.value, {
1895
1993
  loc: true,
1896
1994
  range: true,
1897
- jsx: path20.endsWith(".tsx"),
1995
+ jsx: path22.endsWith(".tsx"),
1898
1996
  errorOnUnknownASTType: false
1899
1997
  });
1900
1998
  return (0, import_types.Ok)({
@@ -1905,7 +2003,7 @@ var TypeScriptParser = class {
1905
2003
  } catch (e) {
1906
2004
  const error = e;
1907
2005
  return (0, import_types.Err)(
1908
- createParseError("SYNTAX_ERROR", `Failed to parse ${path20}: ${error.message}`, { path: path20 }, [
2006
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path22}: ${error.message}`, { path: path22 }, [
1909
2007
  "Check for syntax errors in the file",
1910
2008
  "Ensure valid TypeScript syntax"
1911
2009
  ])
@@ -1921,26 +2019,12 @@ var TypeScriptParser = class {
1921
2019
  const imp = {
1922
2020
  source: importDecl.source.value,
1923
2021
  specifiers: [],
1924
- location: {
1925
- file: "",
1926
- line: importDecl.loc?.start.line ?? 0,
1927
- column: importDecl.loc?.start.column ?? 0
1928
- },
2022
+ location: makeLocation(importDecl),
1929
2023
  kind: importDecl.importKind === "type" ? "type" : "value"
1930
2024
  };
1931
- for (const spec of importDecl.specifiers) {
1932
- if (spec.type === "ImportDefaultSpecifier") {
1933
- imp.default = spec.local.name;
1934
- } else if (spec.type === "ImportNamespaceSpecifier") {
1935
- imp.namespace = spec.local.name;
1936
- } else if (spec.type === "ImportSpecifier") {
1937
- imp.specifiers.push(spec.local.name);
1938
- if (spec.importKind === "type") {
1939
- imp.kind = "type";
1940
- }
1941
- }
1942
- }
2025
+ processImportSpecifiers(importDecl, imp);
1943
2026
  imports.push(imp);
2027
+ return;
1944
2028
  }
1945
2029
  if (node.type === "ImportExpression") {
1946
2030
  const importExpr = node;
@@ -1948,11 +2032,7 @@ var TypeScriptParser = class {
1948
2032
  imports.push({
1949
2033
  source: importExpr.source.value,
1950
2034
  specifiers: [],
1951
- location: {
1952
- file: "",
1953
- line: importExpr.loc?.start.line ?? 0,
1954
- column: importExpr.loc?.start.column ?? 0
1955
- },
2035
+ location: makeLocation(importExpr),
1956
2036
  kind: "value"
1957
2037
  });
1958
2038
  }
@@ -1967,97 +2047,29 @@ var TypeScriptParser = class {
1967
2047
  if (node.type === "ExportNamedDeclaration") {
1968
2048
  const exportDecl = node;
1969
2049
  if (exportDecl.source) {
1970
- for (const spec of exportDecl.specifiers) {
1971
- if (spec.type === "ExportSpecifier") {
1972
- const exported = spec.exported;
1973
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
1974
- exports2.push({
1975
- name,
1976
- type: "named",
1977
- location: {
1978
- file: "",
1979
- line: exportDecl.loc?.start.line ?? 0,
1980
- column: exportDecl.loc?.start.column ?? 0
1981
- },
1982
- isReExport: true,
1983
- source: exportDecl.source.value
1984
- });
1985
- }
1986
- }
2050
+ processReExportSpecifiers(exportDecl, exports2);
1987
2051
  return;
1988
2052
  }
1989
- if (exportDecl.declaration) {
1990
- const decl = exportDecl.declaration;
1991
- if (decl.type === "VariableDeclaration") {
1992
- for (const declarator of decl.declarations) {
1993
- if (declarator.id.type === "Identifier") {
1994
- exports2.push({
1995
- name: declarator.id.name,
1996
- type: "named",
1997
- location: {
1998
- file: "",
1999
- line: decl.loc?.start.line ?? 0,
2000
- column: decl.loc?.start.column ?? 0
2001
- },
2002
- isReExport: false
2003
- });
2004
- }
2005
- }
2006
- } else if (decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") {
2007
- if (decl.id) {
2008
- exports2.push({
2009
- name: decl.id.name,
2010
- type: "named",
2011
- location: {
2012
- file: "",
2013
- line: decl.loc?.start.line ?? 0,
2014
- column: decl.loc?.start.column ?? 0
2015
- },
2016
- isReExport: false
2017
- });
2018
- }
2019
- }
2020
- }
2021
- for (const spec of exportDecl.specifiers) {
2022
- if (spec.type === "ExportSpecifier") {
2023
- const exported = spec.exported;
2024
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
2025
- exports2.push({
2026
- name,
2027
- type: "named",
2028
- location: {
2029
- file: "",
2030
- line: exportDecl.loc?.start.line ?? 0,
2031
- column: exportDecl.loc?.start.column ?? 0
2032
- },
2033
- isReExport: false
2034
- });
2035
- }
2036
- }
2053
+ processExportDeclaration(exportDecl, exports2);
2054
+ processExportListSpecifiers(exportDecl, exports2);
2055
+ return;
2037
2056
  }
2038
2057
  if (node.type === "ExportDefaultDeclaration") {
2039
2058
  const exportDecl = node;
2040
2059
  exports2.push({
2041
2060
  name: "default",
2042
2061
  type: "default",
2043
- location: {
2044
- file: "",
2045
- line: exportDecl.loc?.start.line ?? 0,
2046
- column: exportDecl.loc?.start.column ?? 0
2047
- },
2062
+ location: makeLocation(exportDecl),
2048
2063
  isReExport: false
2049
2064
  });
2065
+ return;
2050
2066
  }
2051
2067
  if (node.type === "ExportAllDeclaration") {
2052
2068
  const exportDecl = node;
2053
2069
  exports2.push({
2054
2070
  name: exportDecl.exported?.name ?? "*",
2055
2071
  type: "namespace",
2056
- location: {
2057
- file: "",
2058
- line: exportDecl.loc?.start.line ?? 0,
2059
- column: exportDecl.loc?.start.column ?? 0
2060
- },
2072
+ location: makeLocation(exportDecl),
2061
2073
  isReExport: true,
2062
2074
  source: exportDecl.source.value
2063
2075
  });
@@ -2073,10 +2085,27 @@ var TypeScriptParser = class {
2073
2085
  // src/entropy/snapshot.ts
2074
2086
  var import_path6 = require("path");
2075
2087
  var import_minimatch3 = require("minimatch");
2088
+ function collectFieldEntries(rootDir, field) {
2089
+ if (typeof field === "string") return [(0, import_path6.resolve)(rootDir, field)];
2090
+ if (typeof field === "object" && field !== null) {
2091
+ return Object.values(field).filter((v) => typeof v === "string").map((v) => (0, import_path6.resolve)(rootDir, v));
2092
+ }
2093
+ return [];
2094
+ }
2095
+ function extractPackageEntries(rootDir, pkg) {
2096
+ const entries = [];
2097
+ entries.push(...collectFieldEntries(rootDir, pkg["exports"]));
2098
+ if (entries.length === 0 && typeof pkg["main"] === "string") {
2099
+ entries.push((0, import_path6.resolve)(rootDir, pkg["main"]));
2100
+ }
2101
+ if (pkg["bin"]) {
2102
+ entries.push(...collectFieldEntries(rootDir, pkg["bin"]));
2103
+ }
2104
+ return entries;
2105
+ }
2076
2106
  async function resolveEntryPoints(rootDir, explicitEntries) {
2077
2107
  if (explicitEntries && explicitEntries.length > 0) {
2078
- const resolved = explicitEntries.map((e) => (0, import_path6.resolve)(rootDir, e));
2079
- return (0, import_types.Ok)(resolved);
2108
+ return (0, import_types.Ok)(explicitEntries.map((e) => (0, import_path6.resolve)(rootDir, e)));
2080
2109
  }
2081
2110
  const pkgPath = (0, import_path6.join)(rootDir, "package.json");
2082
2111
  if (await fileExists(pkgPath)) {
@@ -2084,38 +2113,8 @@ async function resolveEntryPoints(rootDir, explicitEntries) {
2084
2113
  if (pkgContent.ok) {
2085
2114
  try {
2086
2115
  const pkg = JSON.parse(pkgContent.value);
2087
- const entries = [];
2088
- if (pkg["exports"]) {
2089
- const exports2 = pkg["exports"];
2090
- if (typeof exports2 === "string") {
2091
- entries.push((0, import_path6.resolve)(rootDir, exports2));
2092
- } else if (typeof exports2 === "object" && exports2 !== null) {
2093
- for (const value of Object.values(exports2)) {
2094
- if (typeof value === "string") {
2095
- entries.push((0, import_path6.resolve)(rootDir, value));
2096
- }
2097
- }
2098
- }
2099
- }
2100
- const main = pkg["main"];
2101
- if (typeof main === "string" && entries.length === 0) {
2102
- entries.push((0, import_path6.resolve)(rootDir, main));
2103
- }
2104
- const bin = pkg["bin"];
2105
- if (bin) {
2106
- if (typeof bin === "string") {
2107
- entries.push((0, import_path6.resolve)(rootDir, bin));
2108
- } else if (typeof bin === "object") {
2109
- for (const value of Object.values(bin)) {
2110
- if (typeof value === "string") {
2111
- entries.push((0, import_path6.resolve)(rootDir, value));
2112
- }
2113
- }
2114
- }
2115
- }
2116
- if (entries.length > 0) {
2117
- return (0, import_types.Ok)(entries);
2118
- }
2116
+ const entries = extractPackageEntries(rootDir, pkg);
2117
+ if (entries.length > 0) return (0, import_types.Ok)(entries);
2119
2118
  } catch {
2120
2119
  }
2121
2120
  }
@@ -2189,66 +2188,49 @@ function extractInlineRefs(content) {
2189
2188
  }
2190
2189
  return refs;
2191
2190
  }
2192
- async function parseDocumentationFile(path20) {
2193
- const contentResult = await readFileContent(path20);
2191
+ async function parseDocumentationFile(path22) {
2192
+ const contentResult = await readFileContent(path22);
2194
2193
  if (!contentResult.ok) {
2195
2194
  return (0, import_types.Err)(
2196
2195
  createEntropyError(
2197
2196
  "PARSE_ERROR",
2198
- `Failed to read documentation file: ${path20}`,
2199
- { file: path20 },
2197
+ `Failed to read documentation file: ${path22}`,
2198
+ { file: path22 },
2200
2199
  ["Check that the file exists"]
2201
2200
  )
2202
2201
  );
2203
2202
  }
2204
2203
  const content = contentResult.value;
2205
- const type = path20.endsWith(".md") ? "markdown" : "text";
2204
+ const type = path22.endsWith(".md") ? "markdown" : "text";
2206
2205
  return (0, import_types.Ok)({
2207
- path: path20,
2206
+ path: path22,
2208
2207
  type,
2209
2208
  content,
2210
2209
  codeBlocks: extractCodeBlocks(content),
2211
2210
  inlineRefs: extractInlineRefs(content)
2212
2211
  });
2213
2212
  }
2213
+ function makeInternalSymbol(name, type, line) {
2214
+ return { name, type, line, references: 0, calledBy: [] };
2215
+ }
2216
+ function extractSymbolsFromNode(node) {
2217
+ const line = node.loc?.start?.line || 0;
2218
+ if (node.type === "FunctionDeclaration" && node.id?.name) {
2219
+ return [makeInternalSymbol(node.id.name, "function", line)];
2220
+ }
2221
+ if (node.type === "VariableDeclaration") {
2222
+ return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
2223
+ }
2224
+ if (node.type === "ClassDeclaration" && node.id?.name) {
2225
+ return [makeInternalSymbol(node.id.name, "class", line)];
2226
+ }
2227
+ return [];
2228
+ }
2214
2229
  function extractInternalSymbols(ast) {
2215
- const symbols = [];
2216
2230
  const body = ast.body;
2217
- if (!body?.body) return symbols;
2218
- for (const node of body.body) {
2219
- if (node.type === "FunctionDeclaration" && node.id?.name) {
2220
- symbols.push({
2221
- name: node.id.name,
2222
- type: "function",
2223
- line: node.loc?.start?.line || 0,
2224
- references: 0,
2225
- calledBy: []
2226
- });
2227
- }
2228
- if (node.type === "VariableDeclaration") {
2229
- for (const decl of node.declarations || []) {
2230
- if (decl.id?.name) {
2231
- symbols.push({
2232
- name: decl.id.name,
2233
- type: "variable",
2234
- line: node.loc?.start?.line || 0,
2235
- references: 0,
2236
- calledBy: []
2237
- });
2238
- }
2239
- }
2240
- }
2241
- if (node.type === "ClassDeclaration" && node.id?.name) {
2242
- symbols.push({
2243
- name: node.id.name,
2244
- type: "class",
2245
- line: node.loc?.start?.line || 0,
2246
- references: 0,
2247
- calledBy: []
2248
- });
2249
- }
2250
- }
2251
- return symbols;
2231
+ if (!body?.body) return [];
2232
+ const nodes = body.body;
2233
+ return nodes.flatMap(extractSymbolsFromNode);
2252
2234
  }
2253
2235
  function extractJSDocComments(ast) {
2254
2236
  const comments = [];
@@ -2335,7 +2317,7 @@ async function buildSnapshot(config) {
2335
2317
  sourceFilePaths.push(...files2);
2336
2318
  }
2337
2319
  sourceFilePaths = sourceFilePaths.filter((f) => {
2338
- const rel = (0, import_path6.relative)(rootDir, f);
2320
+ const rel = relativePosix(rootDir, f);
2339
2321
  return !excludePatterns.some((p) => (0, import_minimatch3.minimatch)(rel, p));
2340
2322
  });
2341
2323
  const files = [];
@@ -2389,27 +2371,34 @@ async function buildSnapshot(config) {
2389
2371
 
2390
2372
  // src/entropy/detectors/drift.ts
2391
2373
  var import_path7 = require("path");
2392
- function levenshteinDistance(a, b) {
2374
+ function initLevenshteinMatrix(aLen, bLen) {
2393
2375
  const matrix = [];
2394
- for (let i = 0; i <= b.length; i++) {
2376
+ for (let i = 0; i <= bLen; i++) {
2395
2377
  matrix[i] = [i];
2396
2378
  }
2397
- for (let j = 0; j <= a.length; j++) {
2398
- const row = matrix[0];
2399
- if (row) {
2400
- row[j] = j;
2379
+ const firstRow = matrix[0];
2380
+ if (firstRow) {
2381
+ for (let j = 0; j <= aLen; j++) {
2382
+ firstRow[j] = j;
2401
2383
  }
2402
2384
  }
2385
+ return matrix;
2386
+ }
2387
+ function computeLevenshteinCell(row, prevRow, j, charsMatch) {
2388
+ if (charsMatch) {
2389
+ row[j] = prevRow[j - 1] ?? 0;
2390
+ } else {
2391
+ row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
2392
+ }
2393
+ }
2394
+ function levenshteinDistance(a, b) {
2395
+ const matrix = initLevenshteinMatrix(a.length, b.length);
2403
2396
  for (let i = 1; i <= b.length; i++) {
2404
2397
  for (let j = 1; j <= a.length; j++) {
2405
2398
  const row = matrix[i];
2406
2399
  const prevRow = matrix[i - 1];
2407
2400
  if (!row || !prevRow) continue;
2408
- if (b.charAt(i - 1) === a.charAt(j - 1)) {
2409
- row[j] = prevRow[j - 1] ?? 0;
2410
- } else {
2411
- row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
2412
- }
2401
+ computeLevenshteinCell(row, prevRow, j, b.charAt(i - 1) === a.charAt(j - 1));
2413
2402
  }
2414
2403
  }
2415
2404
  const lastRow = matrix[b.length];
@@ -2695,32 +2684,27 @@ function findDeadExports(snapshot, usageMap, reachability) {
2695
2684
  }
2696
2685
  return deadExports;
2697
2686
  }
2698
- function countLinesFromAST(ast) {
2699
- if (ast.body && Array.isArray(ast.body)) {
2700
- let maxLine = 0;
2701
- const traverse = (node) => {
2702
- if (node && typeof node === "object") {
2703
- const n = node;
2704
- if (n.loc?.end?.line && n.loc.end.line > maxLine) {
2705
- maxLine = n.loc.end.line;
2706
- }
2707
- for (const key of Object.keys(node)) {
2708
- const value = node[key];
2709
- if (Array.isArray(value)) {
2710
- for (const item of value) {
2711
- traverse(item);
2712
- }
2713
- } else if (value && typeof value === "object") {
2714
- traverse(value);
2715
- }
2716
- }
2687
+ function findMaxLineInNode(node) {
2688
+ if (!node || typeof node !== "object") return 0;
2689
+ const n = node;
2690
+ let maxLine = n.loc?.end?.line ?? 0;
2691
+ for (const key of Object.keys(node)) {
2692
+ const value = node[key];
2693
+ if (Array.isArray(value)) {
2694
+ for (const item of value) {
2695
+ maxLine = Math.max(maxLine, findMaxLineInNode(item));
2717
2696
  }
2718
- };
2719
- traverse(ast);
2720
- if (maxLine > 0) return maxLine;
2721
- return Math.max(ast.body.length * 3, 1);
2697
+ } else if (value && typeof value === "object") {
2698
+ maxLine = Math.max(maxLine, findMaxLineInNode(value));
2699
+ }
2722
2700
  }
2723
- return 1;
2701
+ return maxLine;
2702
+ }
2703
+ function countLinesFromAST(ast) {
2704
+ if (!ast.body || !Array.isArray(ast.body)) return 1;
2705
+ const maxLine = findMaxLineInNode(ast);
2706
+ if (maxLine > 0) return maxLine;
2707
+ return Math.max(ast.body.length * 3, 1);
2724
2708
  }
2725
2709
  function findDeadFiles(snapshot, reachability) {
2726
2710
  const deadFiles = [];
@@ -2867,135 +2851,150 @@ async function detectDeadCode(snapshot, graphDeadCodeData) {
2867
2851
 
2868
2852
  // src/entropy/detectors/patterns.ts
2869
2853
  var import_minimatch4 = require("minimatch");
2870
- var import_path9 = require("path");
2871
2854
  function fileMatchesPattern(filePath, pattern, rootDir) {
2872
- const relativePath = (0, import_path9.relative)(rootDir, filePath);
2855
+ const relativePath = relativePosix(rootDir, filePath);
2873
2856
  return (0, import_minimatch4.minimatch)(relativePath, pattern);
2874
2857
  }
2875
- function checkConfigPattern(pattern, file, rootDir) {
2858
+ var CONVENTION_DESCRIPTIONS = {
2859
+ camelCase: "camelCase (e.g., myFunction)",
2860
+ PascalCase: "PascalCase (e.g., MyClass)",
2861
+ UPPER_SNAKE: "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)",
2862
+ "kebab-case": "kebab-case (e.g., my-component)"
2863
+ };
2864
+ function checkMustExport(rule, file, message) {
2865
+ if (rule.type !== "must-export") return [];
2876
2866
  const matches = [];
2877
- const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
2878
- if (!fileMatches) {
2879
- return matches;
2880
- }
2881
- const rule = pattern.rule;
2882
- switch (rule.type) {
2883
- case "must-export": {
2884
- for (const name of rule.names) {
2885
- const hasExport = file.exports.some((e) => e.name === name);
2886
- if (!hasExport) {
2887
- matches.push({
2888
- line: 1,
2889
- message: pattern.message || `Missing required export: "${name}"`,
2890
- suggestion: `Add export for "${name}"`
2891
- });
2892
- }
2893
- }
2894
- break;
2867
+ for (const name of rule.names) {
2868
+ if (!file.exports.some((e) => e.name === name)) {
2869
+ matches.push({
2870
+ line: 1,
2871
+ message: message || `Missing required export: "${name}"`,
2872
+ suggestion: `Add export for "${name}"`
2873
+ });
2895
2874
  }
2896
- case "must-export-default": {
2897
- const hasDefault = file.exports.some((e) => e.type === "default");
2898
- if (!hasDefault) {
2899
- matches.push({
2900
- line: 1,
2901
- message: pattern.message || "File must have a default export",
2902
- suggestion: "Add a default export"
2903
- });
2875
+ }
2876
+ return matches;
2877
+ }
2878
+ function checkMustExportDefault(_rule, file, message) {
2879
+ if (!file.exports.some((e) => e.type === "default")) {
2880
+ return [
2881
+ {
2882
+ line: 1,
2883
+ message: message || "File must have a default export",
2884
+ suggestion: "Add a default export"
2904
2885
  }
2905
- break;
2886
+ ];
2887
+ }
2888
+ return [];
2889
+ }
2890
+ function checkNoExport(rule, file, message) {
2891
+ if (rule.type !== "no-export") return [];
2892
+ const matches = [];
2893
+ for (const name of rule.names) {
2894
+ const exp = file.exports.find((e) => e.name === name);
2895
+ if (exp) {
2896
+ matches.push({
2897
+ line: exp.location.line,
2898
+ message: message || `Forbidden export: "${name}"`,
2899
+ suggestion: `Remove export "${name}"`
2900
+ });
2906
2901
  }
2907
- case "no-export": {
2908
- for (const name of rule.names) {
2909
- const exp = file.exports.find((e) => e.name === name);
2910
- if (exp) {
2911
- matches.push({
2912
- line: exp.location.line,
2913
- message: pattern.message || `Forbidden export: "${name}"`,
2914
- suggestion: `Remove export "${name}"`
2915
- });
2916
- }
2902
+ }
2903
+ return matches;
2904
+ }
2905
+ function checkMustImport(rule, file, message) {
2906
+ if (rule.type !== "must-import") return [];
2907
+ const hasImport = file.imports.some(
2908
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2909
+ );
2910
+ if (!hasImport) {
2911
+ return [
2912
+ {
2913
+ line: 1,
2914
+ message: message || `Missing required import from "${rule.from}"`,
2915
+ suggestion: `Add import from "${rule.from}"`
2917
2916
  }
2918
- break;
2919
- }
2920
- case "must-import": {
2921
- const hasImport = file.imports.some(
2922
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2923
- );
2924
- if (!hasImport) {
2925
- matches.push({
2926
- line: 1,
2927
- message: pattern.message || `Missing required import from "${rule.from}"`,
2928
- suggestion: `Add import from "${rule.from}"`
2929
- });
2917
+ ];
2918
+ }
2919
+ return [];
2920
+ }
2921
+ function checkNoImport(rule, file, message) {
2922
+ if (rule.type !== "no-import") return [];
2923
+ const forbiddenImport = file.imports.find(
2924
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2925
+ );
2926
+ if (forbiddenImport) {
2927
+ return [
2928
+ {
2929
+ line: forbiddenImport.location.line,
2930
+ message: message || `Forbidden import from "${rule.from}"`,
2931
+ suggestion: `Remove import from "${rule.from}"`
2930
2932
  }
2931
- break;
2933
+ ];
2934
+ }
2935
+ return [];
2936
+ }
2937
+ function checkNaming(rule, file, message) {
2938
+ if (rule.type !== "naming") return [];
2939
+ const regex = new RegExp(rule.match);
2940
+ const matches = [];
2941
+ for (const exp of file.exports) {
2942
+ if (!regex.test(exp.name)) {
2943
+ const expected = CONVENTION_DESCRIPTIONS[rule.convention] ?? rule.convention;
2944
+ matches.push({
2945
+ line: exp.location.line,
2946
+ message: message || `"${exp.name}" does not follow ${rule.convention} convention`,
2947
+ suggestion: `Rename to follow ${expected}`
2948
+ });
2932
2949
  }
2933
- case "no-import": {
2934
- const forbiddenImport = file.imports.find(
2935
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2936
- );
2937
- if (forbiddenImport) {
2938
- matches.push({
2939
- line: forbiddenImport.location.line,
2940
- message: pattern.message || `Forbidden import from "${rule.from}"`,
2941
- suggestion: `Remove import from "${rule.from}"`
2942
- });
2950
+ }
2951
+ return matches;
2952
+ }
2953
+ function checkMaxExports(rule, file, message) {
2954
+ if (rule.type !== "max-exports") return [];
2955
+ if (file.exports.length > rule.count) {
2956
+ return [
2957
+ {
2958
+ line: 1,
2959
+ message: message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2960
+ suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2943
2961
  }
2944
- break;
2945
- }
2946
- case "naming": {
2947
- const regex = new RegExp(rule.match);
2948
- for (const exp of file.exports) {
2949
- if (!regex.test(exp.name)) {
2950
- let expected = "";
2951
- switch (rule.convention) {
2952
- case "camelCase":
2953
- expected = "camelCase (e.g., myFunction)";
2954
- break;
2955
- case "PascalCase":
2956
- expected = "PascalCase (e.g., MyClass)";
2957
- break;
2958
- case "UPPER_SNAKE":
2959
- expected = "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)";
2960
- break;
2961
- case "kebab-case":
2962
- expected = "kebab-case (e.g., my-component)";
2963
- break;
2964
- }
2965
- matches.push({
2966
- line: exp.location.line,
2967
- message: pattern.message || `"${exp.name}" does not follow ${rule.convention} convention`,
2968
- suggestion: `Rename to follow ${expected}`
2969
- });
2970
- }
2971
- }
2972
- break;
2973
- }
2974
- case "max-exports": {
2975
- if (file.exports.length > rule.count) {
2976
- matches.push({
2977
- line: 1,
2978
- message: pattern.message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2979
- suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2980
- });
2981
- }
2982
- break;
2983
- }
2984
- case "max-lines": {
2985
- break;
2986
- }
2987
- case "require-jsdoc": {
2988
- if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2989
- matches.push({
2990
- line: 1,
2991
- message: pattern.message || "Exported symbols require JSDoc documentation",
2992
- suggestion: "Add JSDoc comments to exports"
2993
- });
2962
+ ];
2963
+ }
2964
+ return [];
2965
+ }
2966
+ function checkMaxLines(_rule, _file, _message) {
2967
+ return [];
2968
+ }
2969
+ function checkRequireJsdoc(_rule, file, message) {
2970
+ if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2971
+ return [
2972
+ {
2973
+ line: 1,
2974
+ message: message || "Exported symbols require JSDoc documentation",
2975
+ suggestion: "Add JSDoc comments to exports"
2994
2976
  }
2995
- break;
2996
- }
2977
+ ];
2997
2978
  }
2998
- return matches;
2979
+ return [];
2980
+ }
2981
+ var RULE_CHECKERS = {
2982
+ "must-export": checkMustExport,
2983
+ "must-export-default": checkMustExportDefault,
2984
+ "no-export": checkNoExport,
2985
+ "must-import": checkMustImport,
2986
+ "no-import": checkNoImport,
2987
+ naming: checkNaming,
2988
+ "max-exports": checkMaxExports,
2989
+ "max-lines": checkMaxLines,
2990
+ "require-jsdoc": checkRequireJsdoc
2991
+ };
2992
+ function checkConfigPattern(pattern, file, rootDir) {
2993
+ const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
2994
+ if (!fileMatches) return [];
2995
+ const checker = RULE_CHECKERS[pattern.rule.type];
2996
+ if (!checker) return [];
2997
+ return checker(pattern.rule, file, pattern.message);
2999
2998
  }
3000
2999
  async function detectPatternViolations(snapshot, config) {
3001
3000
  const violations = [];
@@ -3061,22 +3060,22 @@ var DEFAULT_THRESHOLDS = {
3061
3060
  fileLength: { info: 300 },
3062
3061
  hotspotPercentile: { error: 95 }
3063
3062
  };
3063
+ var FUNCTION_PATTERNS = [
3064
+ // function declarations: function name(params) {
3065
+ /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
3066
+ // method declarations: name(params) {
3067
+ /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
3068
+ // arrow functions assigned to const/let/var: const name = (params) =>
3069
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
3070
+ // arrow functions assigned to const/let/var with single param: const name = param =>
3071
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
3072
+ ];
3064
3073
  function extractFunctions(content) {
3065
3074
  const functions = [];
3066
3075
  const lines = content.split("\n");
3067
- const patterns = [
3068
- // function declarations: function name(params) {
3069
- /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
3070
- // method declarations: name(params) {
3071
- /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
3072
- // arrow functions assigned to const/let/var: const name = (params) =>
3073
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
3074
- // arrow functions assigned to const/let/var with single param: const name = param =>
3075
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
3076
- ];
3077
3076
  for (let i = 0; i < lines.length; i++) {
3078
3077
  const line = lines[i];
3079
- for (const pattern of patterns) {
3078
+ for (const pattern of FUNCTION_PATTERNS) {
3080
3079
  const match = line.match(pattern);
3081
3080
  if (match) {
3082
3081
  const name = match[1] ?? "anonymous";
@@ -3165,26 +3164,155 @@ function computeNestingDepth(body) {
3165
3164
  }
3166
3165
  return maxDepth;
3167
3166
  }
3168
- async function detectComplexityViolations(snapshot, config, graphData) {
3169
- const violations = [];
3170
- const thresholds = {
3167
+ function resolveThresholds(config) {
3168
+ const userThresholds = config?.thresholds;
3169
+ if (!userThresholds) return { ...DEFAULT_THRESHOLDS };
3170
+ return {
3171
3171
  cyclomaticComplexity: {
3172
- error: config?.thresholds?.cyclomaticComplexity?.error ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.error,
3173
- warn: config?.thresholds?.cyclomaticComplexity?.warn ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.warn
3172
+ ...DEFAULT_THRESHOLDS.cyclomaticComplexity,
3173
+ ...stripUndefined(userThresholds.cyclomaticComplexity)
3174
3174
  },
3175
3175
  nestingDepth: {
3176
- warn: config?.thresholds?.nestingDepth?.warn ?? DEFAULT_THRESHOLDS.nestingDepth.warn
3176
+ ...DEFAULT_THRESHOLDS.nestingDepth,
3177
+ ...stripUndefined(userThresholds.nestingDepth)
3177
3178
  },
3178
3179
  functionLength: {
3179
- warn: config?.thresholds?.functionLength?.warn ?? DEFAULT_THRESHOLDS.functionLength.warn
3180
+ ...DEFAULT_THRESHOLDS.functionLength,
3181
+ ...stripUndefined(userThresholds.functionLength)
3180
3182
  },
3181
3183
  parameterCount: {
3182
- warn: config?.thresholds?.parameterCount?.warn ?? DEFAULT_THRESHOLDS.parameterCount.warn
3184
+ ...DEFAULT_THRESHOLDS.parameterCount,
3185
+ ...stripUndefined(userThresholds.parameterCount)
3183
3186
  },
3184
- fileLength: {
3185
- info: config?.thresholds?.fileLength?.info ?? DEFAULT_THRESHOLDS.fileLength.info
3186
- }
3187
+ fileLength: { ...DEFAULT_THRESHOLDS.fileLength, ...stripUndefined(userThresholds.fileLength) }
3188
+ };
3189
+ }
3190
+ function stripUndefined(obj) {
3191
+ if (!obj) return {};
3192
+ const result = {};
3193
+ for (const [key, val] of Object.entries(obj)) {
3194
+ if (val !== void 0) result[key] = val;
3195
+ }
3196
+ return result;
3197
+ }
3198
+ function checkFileLengthViolation(filePath, lineCount, threshold) {
3199
+ if (lineCount <= threshold) return null;
3200
+ return {
3201
+ file: filePath,
3202
+ function: "<file>",
3203
+ line: 1,
3204
+ metric: "fileLength",
3205
+ value: lineCount,
3206
+ threshold,
3207
+ tier: 3,
3208
+ severity: "info",
3209
+ message: `File has ${lineCount} lines (threshold: ${threshold})`
3210
+ };
3211
+ }
3212
+ function checkCyclomaticComplexity(filePath, fn, thresholds) {
3213
+ const complexity = computeCyclomaticComplexity(fn.body);
3214
+ if (complexity > thresholds.error) {
3215
+ return {
3216
+ file: filePath,
3217
+ function: fn.name,
3218
+ line: fn.line,
3219
+ metric: "cyclomaticComplexity",
3220
+ value: complexity,
3221
+ threshold: thresholds.error,
3222
+ tier: 1,
3223
+ severity: "error",
3224
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.error})`
3225
+ };
3226
+ }
3227
+ if (complexity > thresholds.warn) {
3228
+ return {
3229
+ file: filePath,
3230
+ function: fn.name,
3231
+ line: fn.line,
3232
+ metric: "cyclomaticComplexity",
3233
+ value: complexity,
3234
+ threshold: thresholds.warn,
3235
+ tier: 2,
3236
+ severity: "warning",
3237
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.warn})`
3238
+ };
3239
+ }
3240
+ return null;
3241
+ }
3242
+ function checkNestingDepth(filePath, fn, threshold) {
3243
+ const depth = computeNestingDepth(fn.body);
3244
+ if (depth <= threshold) return null;
3245
+ return {
3246
+ file: filePath,
3247
+ function: fn.name,
3248
+ line: fn.line,
3249
+ metric: "nestingDepth",
3250
+ value: depth,
3251
+ threshold,
3252
+ tier: 2,
3253
+ severity: "warning",
3254
+ message: `Function "${fn.name}" has nesting depth of ${depth} (threshold: ${threshold})`
3255
+ };
3256
+ }
3257
+ function checkFunctionLength(filePath, fn, threshold) {
3258
+ const fnLength = fn.endLine - fn.startLine + 1;
3259
+ if (fnLength <= threshold) return null;
3260
+ return {
3261
+ file: filePath,
3262
+ function: fn.name,
3263
+ line: fn.line,
3264
+ metric: "functionLength",
3265
+ value: fnLength,
3266
+ threshold,
3267
+ tier: 2,
3268
+ severity: "warning",
3269
+ message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${threshold})`
3270
+ };
3271
+ }
3272
+ function checkParameterCount(filePath, fn, threshold) {
3273
+ if (fn.params <= threshold) return null;
3274
+ return {
3275
+ file: filePath,
3276
+ function: fn.name,
3277
+ line: fn.line,
3278
+ metric: "parameterCount",
3279
+ value: fn.params,
3280
+ threshold,
3281
+ tier: 2,
3282
+ severity: "warning",
3283
+ message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${threshold})`
3187
3284
  };
3285
+ }
3286
+ function checkHotspot(filePath, fn, graphData) {
3287
+ const hotspot = graphData.hotspots.find((h) => h.file === filePath && h.function === fn.name);
3288
+ if (!hotspot || hotspot.hotspotScore <= graphData.percentile95Score) return null;
3289
+ return {
3290
+ file: filePath,
3291
+ function: fn.name,
3292
+ line: fn.line,
3293
+ metric: "hotspotScore",
3294
+ value: hotspot.hotspotScore,
3295
+ threshold: graphData.percentile95Score,
3296
+ tier: 1,
3297
+ severity: "error",
3298
+ message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
3299
+ };
3300
+ }
3301
+ function collectFunctionViolations(filePath, fn, thresholds, graphData) {
3302
+ const checks = [
3303
+ checkCyclomaticComplexity(filePath, fn, thresholds.cyclomaticComplexity),
3304
+ checkNestingDepth(filePath, fn, thresholds.nestingDepth.warn),
3305
+ checkFunctionLength(filePath, fn, thresholds.functionLength.warn),
3306
+ checkParameterCount(filePath, fn, thresholds.parameterCount.warn)
3307
+ ];
3308
+ if (graphData) {
3309
+ checks.push(checkHotspot(filePath, fn, graphData));
3310
+ }
3311
+ return checks.filter((v) => v !== null);
3312
+ }
3313
+ async function detectComplexityViolations(snapshot, config, graphData) {
3314
+ const violations = [];
3315
+ const thresholds = resolveThresholds(config);
3188
3316
  let totalFunctions = 0;
3189
3317
  for (const file of snapshot.files) {
3190
3318
  let content;
@@ -3194,107 +3322,16 @@ async function detectComplexityViolations(snapshot, config, graphData) {
3194
3322
  continue;
3195
3323
  }
3196
3324
  const lines = content.split("\n");
3197
- if (lines.length > thresholds.fileLength.info) {
3198
- violations.push({
3199
- file: file.path,
3200
- function: "<file>",
3201
- line: 1,
3202
- metric: "fileLength",
3203
- value: lines.length,
3204
- threshold: thresholds.fileLength.info,
3205
- tier: 3,
3206
- severity: "info",
3207
- message: `File has ${lines.length} lines (threshold: ${thresholds.fileLength.info})`
3208
- });
3209
- }
3325
+ const fileLenViolation = checkFileLengthViolation(
3326
+ file.path,
3327
+ lines.length,
3328
+ thresholds.fileLength.info
3329
+ );
3330
+ if (fileLenViolation) violations.push(fileLenViolation);
3210
3331
  const functions = extractFunctions(content);
3211
3332
  totalFunctions += functions.length;
3212
3333
  for (const fn of functions) {
3213
- const complexity = computeCyclomaticComplexity(fn.body);
3214
- if (complexity > thresholds.cyclomaticComplexity.error) {
3215
- violations.push({
3216
- file: file.path,
3217
- function: fn.name,
3218
- line: fn.line,
3219
- metric: "cyclomaticComplexity",
3220
- value: complexity,
3221
- threshold: thresholds.cyclomaticComplexity.error,
3222
- tier: 1,
3223
- severity: "error",
3224
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.cyclomaticComplexity.error})`
3225
- });
3226
- } else if (complexity > thresholds.cyclomaticComplexity.warn) {
3227
- violations.push({
3228
- file: file.path,
3229
- function: fn.name,
3230
- line: fn.line,
3231
- metric: "cyclomaticComplexity",
3232
- value: complexity,
3233
- threshold: thresholds.cyclomaticComplexity.warn,
3234
- tier: 2,
3235
- severity: "warning",
3236
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.cyclomaticComplexity.warn})`
3237
- });
3238
- }
3239
- const nestingDepth = computeNestingDepth(fn.body);
3240
- if (nestingDepth > thresholds.nestingDepth.warn) {
3241
- violations.push({
3242
- file: file.path,
3243
- function: fn.name,
3244
- line: fn.line,
3245
- metric: "nestingDepth",
3246
- value: nestingDepth,
3247
- threshold: thresholds.nestingDepth.warn,
3248
- tier: 2,
3249
- severity: "warning",
3250
- message: `Function "${fn.name}" has nesting depth of ${nestingDepth} (threshold: ${thresholds.nestingDepth.warn})`
3251
- });
3252
- }
3253
- const fnLength = fn.endLine - fn.startLine + 1;
3254
- if (fnLength > thresholds.functionLength.warn) {
3255
- violations.push({
3256
- file: file.path,
3257
- function: fn.name,
3258
- line: fn.line,
3259
- metric: "functionLength",
3260
- value: fnLength,
3261
- threshold: thresholds.functionLength.warn,
3262
- tier: 2,
3263
- severity: "warning",
3264
- message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${thresholds.functionLength.warn})`
3265
- });
3266
- }
3267
- if (fn.params > thresholds.parameterCount.warn) {
3268
- violations.push({
3269
- file: file.path,
3270
- function: fn.name,
3271
- line: fn.line,
3272
- metric: "parameterCount",
3273
- value: fn.params,
3274
- threshold: thresholds.parameterCount.warn,
3275
- tier: 2,
3276
- severity: "warning",
3277
- message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${thresholds.parameterCount.warn})`
3278
- });
3279
- }
3280
- if (graphData) {
3281
- const hotspot = graphData.hotspots.find(
3282
- (h) => h.file === file.path && h.function === fn.name
3283
- );
3284
- if (hotspot && hotspot.hotspotScore > graphData.percentile95Score) {
3285
- violations.push({
3286
- file: file.path,
3287
- function: fn.name,
3288
- line: fn.line,
3289
- metric: "hotspotScore",
3290
- value: hotspot.hotspotScore,
3291
- threshold: graphData.percentile95Score,
3292
- tier: 1,
3293
- severity: "error",
3294
- message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
3295
- });
3296
- }
3297
- }
3334
+ violations.push(...collectFunctionViolations(file.path, fn, thresholds, graphData));
3298
3335
  }
3299
3336
  }
3300
3337
  const errorCount = violations.filter((v) => v.severity === "error").length;
@@ -3465,7 +3502,7 @@ async function detectCouplingViolations(snapshot, config, graphData) {
3465
3502
 
3466
3503
  // src/entropy/detectors/size-budget.ts
3467
3504
  var import_node_fs = require("fs");
3468
- var import_node_path = require("path");
3505
+ var import_node_path2 = require("path");
3469
3506
  function parseSize(size) {
3470
3507
  const match = size.trim().match(/^(\d+(?:\.\d+)?)\s*(KB|MB|GB|B)?$/i);
3471
3508
  if (!match) return 0;
@@ -3492,7 +3529,7 @@ function dirSize(dirPath) {
3492
3529
  }
3493
3530
  for (const entry of entries) {
3494
3531
  if (entry === "node_modules" || entry === ".git") continue;
3495
- const fullPath = (0, import_node_path.join)(dirPath, entry);
3532
+ const fullPath = (0, import_node_path2.join)(dirPath, entry);
3496
3533
  try {
3497
3534
  const stat = (0, import_node_fs.statSync)(fullPath);
3498
3535
  if (stat.isDirectory()) {
@@ -3512,7 +3549,7 @@ async function detectSizeBudgetViolations(rootDir, config) {
3512
3549
  let packagesChecked = 0;
3513
3550
  for (const [pkgPath, budget] of Object.entries(budgets)) {
3514
3551
  packagesChecked++;
3515
- const distPath = (0, import_node_path.join)(rootDir, pkgPath, "dist");
3552
+ const distPath = (0, import_node_path2.join)(rootDir, pkgPath, "dist");
3516
3553
  const currentSize = dirSize(distPath);
3517
3554
  if (budget.warn) {
3518
3555
  const budgetBytes = parseSize(budget.warn);
@@ -3893,7 +3930,7 @@ var EntropyAnalyzer = class {
3893
3930
  // src/entropy/fixers/safe-fixes.ts
3894
3931
  var fs3 = __toESM(require("fs"));
3895
3932
  var import_util2 = require("util");
3896
- var import_path10 = require("path");
3933
+ var import_path9 = require("path");
3897
3934
  var readFile5 = (0, import_util2.promisify)(fs3.readFile);
3898
3935
  var writeFile3 = (0, import_util2.promisify)(fs3.writeFile);
3899
3936
  var unlink2 = (0, import_util2.promisify)(fs3.unlink);
@@ -3909,7 +3946,7 @@ function createDeadFileFixes(deadCodeReport) {
3909
3946
  return deadCodeReport.deadFiles.map((file) => ({
3910
3947
  type: "dead-files",
3911
3948
  file: file.path,
3912
- description: `Delete dead file (${file.reason}): ${(0, import_path10.basename)(file.path)}`,
3949
+ description: `Delete dead file (${file.reason}): ${(0, import_path9.basename)(file.path)}`,
3913
3950
  action: "delete-file",
3914
3951
  safe: true,
3915
3952
  reversible: true
@@ -3926,17 +3963,35 @@ function createUnusedImportFixes(deadCodeReport) {
3926
3963
  reversible: true
3927
3964
  }));
3928
3965
  }
3966
+ var EXPORT_TYPE_KEYWORD = {
3967
+ class: "class",
3968
+ function: "function",
3969
+ variable: "const",
3970
+ type: "type",
3971
+ interface: "interface",
3972
+ enum: "enum"
3973
+ };
3974
+ function getExportKeyword(exportType) {
3975
+ return EXPORT_TYPE_KEYWORD[exportType] ?? "enum";
3976
+ }
3977
+ function getDefaultExportKeyword(exportType) {
3978
+ if (exportType === "class" || exportType === "function") return exportType;
3979
+ return "";
3980
+ }
3929
3981
  function createDeadExportFixes(deadCodeReport) {
3930
- return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
3931
- type: "dead-exports",
3932
- file: exp.file,
3933
- description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3934
- action: "replace",
3935
- oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3936
- newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3937
- safe: true,
3938
- reversible: true
3939
- }));
3982
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => {
3983
+ const keyword = exp.isDefault ? getDefaultExportKeyword(exp.type) : getExportKeyword(exp.type);
3984
+ return {
3985
+ type: "dead-exports",
3986
+ file: exp.file,
3987
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3988
+ action: "replace",
3989
+ oldContent: exp.isDefault ? `export default ${keyword} ${exp.name}` : `export ${keyword} ${exp.name}`,
3990
+ newContent: `${keyword} ${exp.name}`,
3991
+ safe: true,
3992
+ reversible: true
3993
+ };
3994
+ });
3940
3995
  }
3941
3996
  function createCommentedCodeFixes(blocks) {
3942
3997
  return blocks.map((block) => ({
@@ -3992,9 +4047,9 @@ function previewFix(fix) {
3992
4047
  }
3993
4048
  }
3994
4049
  async function createBackup(filePath, backupDir) {
3995
- const backupPath = (0, import_path10.join)(backupDir, `${Date.now()}-${(0, import_path10.basename)(filePath)}`);
4050
+ const backupPath = (0, import_path9.join)(backupDir, `${Date.now()}-${(0, import_path9.basename)(filePath)}`);
3996
4051
  try {
3997
- await mkdir2((0, import_path10.dirname)(backupPath), { recursive: true });
4052
+ await mkdir2((0, import_path9.dirname)(backupPath), { recursive: true });
3998
4053
  await copyFile2(filePath, backupPath);
3999
4054
  return (0, import_types.Ok)(backupPath);
4000
4055
  } catch (e) {
@@ -4115,53 +4170,80 @@ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
4115
4170
  "dead-internal"
4116
4171
  ]);
4117
4172
  var idCounter = 0;
4173
+ var DEAD_CODE_FIX_ACTIONS = {
4174
+ "dead-export": "Remove export keyword",
4175
+ "dead-file": "Delete file",
4176
+ "commented-code": "Delete commented block",
4177
+ "unused-import": "Remove import"
4178
+ };
4179
+ function classifyDeadCode(input) {
4180
+ if (input.isPublicApi) {
4181
+ return {
4182
+ safety: "unsafe",
4183
+ safetyReason: "Public API export may have external consumers",
4184
+ suggestion: "Deprecate before removing"
4185
+ };
4186
+ }
4187
+ const fixAction = DEAD_CODE_FIX_ACTIONS[input.type];
4188
+ if (fixAction) {
4189
+ return {
4190
+ safety: "safe",
4191
+ safetyReason: "zero importers, non-public",
4192
+ fixAction,
4193
+ suggestion: fixAction
4194
+ };
4195
+ }
4196
+ if (input.type === "orphaned-dep") {
4197
+ return {
4198
+ safety: "probably-safe",
4199
+ safetyReason: "No imports found, but needs install+test verification",
4200
+ fixAction: "Remove from package.json",
4201
+ suggestion: "Remove from package.json"
4202
+ };
4203
+ }
4204
+ return {
4205
+ safety: "unsafe",
4206
+ safetyReason: "Unknown dead code type",
4207
+ suggestion: "Manual review required"
4208
+ };
4209
+ }
4210
+ function classifyArchitecture(input) {
4211
+ if (input.type === "import-ordering") {
4212
+ return {
4213
+ safety: "safe",
4214
+ safetyReason: "Mechanical reorder, no semantic change",
4215
+ fixAction: "Reorder imports",
4216
+ suggestion: "Reorder imports"
4217
+ };
4218
+ }
4219
+ if (input.type === "forbidden-import" && input.hasAlternative) {
4220
+ return {
4221
+ safety: "probably-safe",
4222
+ safetyReason: "Alternative configured, needs typecheck+test",
4223
+ fixAction: "Replace with configured alternative",
4224
+ suggestion: "Replace with configured alternative"
4225
+ };
4226
+ }
4227
+ return {
4228
+ safety: "unsafe",
4229
+ safetyReason: `${input.type} requires structural changes`,
4230
+ suggestion: "Restructure code to fix violation"
4231
+ };
4232
+ }
4118
4233
  function classifyFinding(input) {
4119
4234
  idCounter++;
4120
4235
  const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
4121
- let safety;
4122
- let safetyReason;
4123
- let fixAction;
4124
- let suggestion;
4236
+ let classification;
4125
4237
  if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
4126
- safety = "unsafe";
4127
- safetyReason = `${input.type} requires human judgment`;
4128
- suggestion = "Review and refactor manually";
4238
+ classification = {
4239
+ safety: "unsafe",
4240
+ safetyReason: `${input.type} requires human judgment`,
4241
+ suggestion: "Review and refactor manually"
4242
+ };
4129
4243
  } else if (input.concern === "dead-code") {
4130
- if (input.isPublicApi) {
4131
- safety = "unsafe";
4132
- safetyReason = "Public API export may have external consumers";
4133
- suggestion = "Deprecate before removing";
4134
- } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
4135
- safety = "safe";
4136
- safetyReason = "zero importers, non-public";
4137
- fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
4138
- suggestion = fixAction;
4139
- } else if (input.type === "orphaned-dep") {
4140
- safety = "probably-safe";
4141
- safetyReason = "No imports found, but needs install+test verification";
4142
- fixAction = "Remove from package.json";
4143
- suggestion = fixAction;
4144
- } else {
4145
- safety = "unsafe";
4146
- safetyReason = "Unknown dead code type";
4147
- suggestion = "Manual review required";
4148
- }
4244
+ classification = classifyDeadCode(input);
4149
4245
  } else {
4150
- if (input.type === "import-ordering") {
4151
- safety = "safe";
4152
- safetyReason = "Mechanical reorder, no semantic change";
4153
- fixAction = "Reorder imports";
4154
- suggestion = fixAction;
4155
- } else if (input.type === "forbidden-import" && input.hasAlternative) {
4156
- safety = "probably-safe";
4157
- safetyReason = "Alternative configured, needs typecheck+test";
4158
- fixAction = "Replace with configured alternative";
4159
- suggestion = fixAction;
4160
- } else {
4161
- safety = "unsafe";
4162
- safetyReason = `${input.type} requires structural changes`;
4163
- suggestion = "Restructure code to fix violation";
4164
- }
4246
+ classification = classifyArchitecture(input);
4165
4247
  }
4166
4248
  return {
4167
4249
  id,
@@ -4170,11 +4252,11 @@ function classifyFinding(input) {
4170
4252
  ...input.line !== void 0 ? { line: input.line } : {},
4171
4253
  type: input.type,
4172
4254
  description: input.description,
4173
- safety,
4174
- safetyReason,
4255
+ safety: classification.safety,
4256
+ safetyReason: classification.safetyReason,
4175
4257
  hotspotDowngraded: false,
4176
- ...fixAction !== void 0 ? { fixAction } : {},
4177
- suggestion
4258
+ ...classification.fixAction !== void 0 ? { fixAction: classification.fixAction } : {},
4259
+ suggestion: classification.suggestion
4178
4260
  };
4179
4261
  }
4180
4262
  function applyHotspotDowngrade(finding, hotspot) {
@@ -4330,11 +4412,11 @@ function validatePatternConfig(config) {
4330
4412
 
4331
4413
  // src/performance/baseline-manager.ts
4332
4414
  var import_node_fs2 = require("fs");
4333
- var import_node_path2 = require("path");
4415
+ var import_node_path3 = require("path");
4334
4416
  var BaselineManager = class {
4335
4417
  baselinesPath;
4336
4418
  constructor(projectRoot) {
4337
- this.baselinesPath = (0, import_node_path2.join)(projectRoot, ".harness", "perf", "baselines.json");
4419
+ this.baselinesPath = (0, import_node_path3.join)(projectRoot, ".harness", "perf", "baselines.json");
4338
4420
  }
4339
4421
  /**
4340
4422
  * Load the baselines file from disk.
@@ -4374,7 +4456,7 @@ var BaselineManager = class {
4374
4456
  updatedFrom: commitHash,
4375
4457
  benchmarks
4376
4458
  };
4377
- const dir = (0, import_node_path2.dirname)(this.baselinesPath);
4459
+ const dir = (0, import_node_path3.dirname)(this.baselinesPath);
4378
4460
  if (!(0, import_node_fs2.existsSync)(dir)) {
4379
4461
  (0, import_node_fs2.mkdirSync)(dir, { recursive: true });
4380
4462
  }
@@ -4468,43 +4550,57 @@ var BenchmarkRunner = class {
4468
4550
  };
4469
4551
  }
4470
4552
  }
4553
+ /**
4554
+ * Extract a BenchmarkResult from a single assertion with benchmark data.
4555
+ */
4556
+ parseBenchAssertion(assertion, file) {
4557
+ if (!assertion.benchmark) return null;
4558
+ const bench = assertion.benchmark;
4559
+ return {
4560
+ name: assertion.fullName || assertion.title || "unknown",
4561
+ file: file.replace(process.cwd() + "/", ""),
4562
+ opsPerSec: Math.round(bench.hz || 0),
4563
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
4564
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
4565
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
4566
+ };
4567
+ }
4568
+ /**
4569
+ * Extract JSON from output that may contain non-JSON preamble.
4570
+ */
4571
+ extractJson(output) {
4572
+ const jsonStart = output.indexOf("{");
4573
+ const jsonEnd = output.lastIndexOf("}");
4574
+ if (jsonStart === -1 || jsonEnd === -1) return null;
4575
+ return JSON.parse(output.slice(jsonStart, jsonEnd + 1));
4576
+ }
4471
4577
  /**
4472
4578
  * Parse vitest bench JSON reporter output into BenchmarkResult[].
4473
4579
  * Vitest bench JSON output contains testResults with benchmark data.
4474
4580
  */
4475
- parseVitestBenchOutput(output) {
4581
+ collectAssertionResults(testResults) {
4476
4582
  const results = [];
4477
- try {
4478
- const jsonStart = output.indexOf("{");
4479
- const jsonEnd = output.lastIndexOf("}");
4480
- if (jsonStart === -1 || jsonEnd === -1) return results;
4481
- const jsonStr = output.slice(jsonStart, jsonEnd + 1);
4482
- const parsed = JSON.parse(jsonStr);
4483
- if (parsed.testResults) {
4484
- for (const testResult of parsed.testResults) {
4485
- const file = testResult.name || testResult.filepath || "";
4486
- if (testResult.assertionResults) {
4487
- for (const assertion of testResult.assertionResults) {
4488
- if (assertion.benchmark) {
4489
- const bench = assertion.benchmark;
4490
- results.push({
4491
- name: assertion.fullName || assertion.title || "unknown",
4492
- file: file.replace(process.cwd() + "/", ""),
4493
- opsPerSec: Math.round(bench.hz || 0),
4494
- meanMs: bench.mean ? bench.mean * 1e3 : 0,
4495
- // p99: use actual p99 if available, otherwise estimate as 1.5× mean
4496
- p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
4497
- marginOfError: bench.rme ? bench.rme / 100 : 0.05
4498
- });
4499
- }
4500
- }
4501
- }
4502
- }
4583
+ for (const testResult of testResults) {
4584
+ const file = testResult.name || testResult.filepath || "";
4585
+ const assertions = testResult.assertionResults ?? [];
4586
+ for (const assertion of assertions) {
4587
+ const result = this.parseBenchAssertion(assertion, file);
4588
+ if (result) results.push(result);
4503
4589
  }
4504
- } catch {
4505
4590
  }
4506
4591
  return results;
4507
4592
  }
4593
+ parseVitestBenchOutput(output) {
4594
+ try {
4595
+ const parsed = this.extractJson(output);
4596
+ if (!parsed) return [];
4597
+ const testResults = parsed.testResults;
4598
+ if (!testResults) return [];
4599
+ return this.collectAssertionResults(testResults);
4600
+ } catch {
4601
+ return [];
4602
+ }
4603
+ }
4508
4604
  };
4509
4605
 
4510
4606
  // src/performance/regression-detector.ts
@@ -4814,39 +4910,31 @@ function resetFeedbackConfig() {
4814
4910
  }
4815
4911
 
4816
4912
  // src/feedback/review/diff-analyzer.ts
4913
+ function detectFileStatus(part) {
4914
+ if (/new file mode/.test(part)) return "added";
4915
+ if (/deleted file mode/.test(part)) return "deleted";
4916
+ if (part.includes("rename from")) return "renamed";
4917
+ return "modified";
4918
+ }
4919
+ function parseDiffPart(part) {
4920
+ if (!part.trim()) return null;
4921
+ const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
4922
+ if (!headerMatch || !headerMatch[2]) return null;
4923
+ const additionRegex = /^\+(?!\+\+)/gm;
4924
+ const deletionRegex = /^-(?!--)/gm;
4925
+ return {
4926
+ path: headerMatch[2],
4927
+ status: detectFileStatus(part),
4928
+ additions: (part.match(additionRegex) || []).length,
4929
+ deletions: (part.match(deletionRegex) || []).length
4930
+ };
4931
+ }
4817
4932
  function parseDiff(diff2) {
4818
4933
  try {
4819
4934
  if (!diff2.trim()) {
4820
4935
  return (0, import_types.Ok)({ diff: diff2, files: [] });
4821
4936
  }
4822
- const files = [];
4823
- const newFileRegex = /new file mode/;
4824
- const deletedFileRegex = /deleted file mode/;
4825
- const additionRegex = /^\+(?!\+\+)/gm;
4826
- const deletionRegex = /^-(?!--)/gm;
4827
- const diffParts = diff2.split(/(?=diff --git)/);
4828
- for (const part of diffParts) {
4829
- if (!part.trim()) continue;
4830
- const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
4831
- if (!headerMatch || !headerMatch[2]) continue;
4832
- const filePath = headerMatch[2];
4833
- let status = "modified";
4834
- if (newFileRegex.test(part)) {
4835
- status = "added";
4836
- } else if (deletedFileRegex.test(part)) {
4837
- status = "deleted";
4838
- } else if (part.includes("rename from")) {
4839
- status = "renamed";
4840
- }
4841
- const additions = (part.match(additionRegex) || []).length;
4842
- const deletions = (part.match(deletionRegex) || []).length;
4843
- files.push({
4844
- path: filePath,
4845
- status,
4846
- additions,
4847
- deletions
4848
- });
4849
- }
4937
+ const files = diff2.split(/(?=diff --git)/).map(parseDiffPart).filter((f) => f !== null);
4850
4938
  return (0, import_types.Ok)({ diff: diff2, files });
4851
4939
  } catch (error) {
4852
4940
  return (0, import_types.Err)({
@@ -5012,107 +5100,123 @@ var ChecklistBuilder = class {
5012
5100
  this.graphImpactData = graphImpactData;
5013
5101
  return this;
5014
5102
  }
5015
- async run(changes) {
5016
- const startTime = Date.now();
5103
+ /**
5104
+ * Build a single harness check item with or without graph data.
5105
+ */
5106
+ buildHarnessCheckItem(id, check, fallbackDetails, graphItemBuilder) {
5107
+ if (this.graphHarnessData && graphItemBuilder) {
5108
+ return graphItemBuilder();
5109
+ }
5110
+ return {
5111
+ id,
5112
+ category: "harness",
5113
+ check,
5114
+ passed: true,
5115
+ severity: "info",
5116
+ details: fallbackDetails
5117
+ };
5118
+ }
5119
+ /**
5120
+ * Build all harness check items based on harnessOptions and graph data.
5121
+ */
5122
+ buildHarnessItems() {
5123
+ if (!this.harnessOptions) return [];
5017
5124
  const items = [];
5018
- if (this.harnessOptions) {
5019
- if (this.harnessOptions.context !== false) {
5020
- if (this.graphHarnessData) {
5021
- items.push({
5022
- id: "harness-context",
5023
- category: "harness",
5024
- check: "Context validation",
5025
- passed: this.graphHarnessData.graphExists && this.graphHarnessData.nodeCount > 0,
5026
- severity: "info",
5027
- details: this.graphHarnessData.graphExists ? `Graph loaded: ${this.graphHarnessData.nodeCount} nodes, ${this.graphHarnessData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5028
- });
5029
- } else {
5030
- items.push({
5125
+ const graphData = this.graphHarnessData;
5126
+ if (this.harnessOptions.context !== false) {
5127
+ items.push(
5128
+ this.buildHarnessCheckItem(
5129
+ "harness-context",
5130
+ "Context validation",
5131
+ "Harness context validation not yet integrated (run with graph for real checks)",
5132
+ graphData ? () => ({
5031
5133
  id: "harness-context",
5032
5134
  category: "harness",
5033
5135
  check: "Context validation",
5034
- passed: true,
5035
- severity: "info",
5036
- details: "Harness context validation not yet integrated (run with graph for real checks)"
5037
- });
5038
- }
5039
- }
5040
- if (this.harnessOptions.constraints !== false) {
5041
- if (this.graphHarnessData) {
5042
- const violations = this.graphHarnessData.constraintViolations;
5043
- items.push({
5044
- id: "harness-constraints",
5045
- category: "harness",
5046
- check: "Constraint validation",
5047
- passed: violations === 0,
5048
- severity: violations > 0 ? "error" : "info",
5049
- details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5050
- });
5051
- } else {
5052
- items.push({
5053
- id: "harness-constraints",
5054
- category: "harness",
5055
- check: "Constraint validation",
5056
- passed: true,
5057
- severity: "info",
5058
- details: "Harness constraint validation not yet integrated (run with graph for real checks)"
5059
- });
5060
- }
5061
- }
5062
- if (this.harnessOptions.entropy !== false) {
5063
- if (this.graphHarnessData) {
5064
- const issues = this.graphHarnessData.unreachableNodes + this.graphHarnessData.undocumentedFiles;
5065
- items.push({
5066
- id: "harness-entropy",
5067
- category: "harness",
5068
- check: "Entropy detection",
5069
- passed: issues === 0,
5070
- severity: issues > 0 ? "warning" : "info",
5071
- details: issues === 0 ? "No entropy issues detected" : `${this.graphHarnessData.unreachableNodes} unreachable node(s), ${this.graphHarnessData.undocumentedFiles} undocumented file(s)`
5072
- });
5073
- } else {
5074
- items.push({
5075
- id: "harness-entropy",
5076
- category: "harness",
5077
- check: "Entropy detection",
5078
- passed: true,
5136
+ passed: graphData.graphExists && graphData.nodeCount > 0,
5079
5137
  severity: "info",
5080
- details: "Harness entropy detection not yet integrated (run with graph for real checks)"
5081
- });
5082
- }
5083
- }
5138
+ details: graphData.graphExists ? `Graph loaded: ${graphData.nodeCount} nodes, ${graphData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5139
+ }) : void 0
5140
+ )
5141
+ );
5142
+ }
5143
+ if (this.harnessOptions.constraints !== false) {
5144
+ items.push(
5145
+ this.buildHarnessCheckItem(
5146
+ "harness-constraints",
5147
+ "Constraint validation",
5148
+ "Harness constraint validation not yet integrated (run with graph for real checks)",
5149
+ graphData ? () => {
5150
+ const violations = graphData.constraintViolations;
5151
+ return {
5152
+ id: "harness-constraints",
5153
+ category: "harness",
5154
+ check: "Constraint validation",
5155
+ passed: violations === 0,
5156
+ severity: violations > 0 ? "error" : "info",
5157
+ details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5158
+ };
5159
+ } : void 0
5160
+ )
5161
+ );
5162
+ }
5163
+ if (this.harnessOptions.entropy !== false) {
5164
+ items.push(
5165
+ this.buildHarnessCheckItem(
5166
+ "harness-entropy",
5167
+ "Entropy detection",
5168
+ "Harness entropy detection not yet integrated (run with graph for real checks)",
5169
+ graphData ? () => {
5170
+ const issues = graphData.unreachableNodes + graphData.undocumentedFiles;
5171
+ return {
5172
+ id: "harness-entropy",
5173
+ category: "harness",
5174
+ check: "Entropy detection",
5175
+ passed: issues === 0,
5176
+ severity: issues > 0 ? "warning" : "info",
5177
+ details: issues === 0 ? "No entropy issues detected" : `${graphData.unreachableNodes} unreachable node(s), ${graphData.undocumentedFiles} undocumented file(s)`
5178
+ };
5179
+ } : void 0
5180
+ )
5181
+ );
5182
+ }
5183
+ return items;
5184
+ }
5185
+ /**
5186
+ * Execute a single custom rule and return a ReviewItem.
5187
+ */
5188
+ async executeCustomRule(rule, changes) {
5189
+ try {
5190
+ const result = await rule.check(changes, this.rootDir);
5191
+ const item = {
5192
+ id: rule.id,
5193
+ category: "custom",
5194
+ check: rule.name,
5195
+ passed: result.passed,
5196
+ severity: rule.severity,
5197
+ details: result.details
5198
+ };
5199
+ if (result.suggestion !== void 0) item.suggestion = result.suggestion;
5200
+ if (result.file !== void 0) item.file = result.file;
5201
+ if (result.line !== void 0) item.line = result.line;
5202
+ return item;
5203
+ } catch (error) {
5204
+ return {
5205
+ id: rule.id,
5206
+ category: "custom",
5207
+ check: rule.name,
5208
+ passed: false,
5209
+ severity: "error",
5210
+ details: `Rule execution failed: ${String(error)}`
5211
+ };
5084
5212
  }
5213
+ }
5214
+ async run(changes) {
5215
+ const startTime = Date.now();
5216
+ const items = [];
5217
+ items.push(...this.buildHarnessItems());
5085
5218
  for (const rule of this.customRules) {
5086
- try {
5087
- const result = await rule.check(changes, this.rootDir);
5088
- const item = {
5089
- id: rule.id,
5090
- category: "custom",
5091
- check: rule.name,
5092
- passed: result.passed,
5093
- severity: rule.severity,
5094
- details: result.details
5095
- };
5096
- if (result.suggestion !== void 0) {
5097
- item.suggestion = result.suggestion;
5098
- }
5099
- if (result.file !== void 0) {
5100
- item.file = result.file;
5101
- }
5102
- if (result.line !== void 0) {
5103
- item.line = result.line;
5104
- }
5105
- items.push(item);
5106
- } catch (error) {
5107
- items.push({
5108
- id: rule.id,
5109
- category: "custom",
5110
- check: rule.name,
5111
- passed: false,
5112
- severity: "error",
5113
- details: `Rule execution failed: ${String(error)}`
5114
- });
5115
- }
5219
+ items.push(await this.executeCustomRule(rule, changes));
5116
5220
  }
5117
5221
  if (this.diffOptions) {
5118
5222
  const diffResult = await analyzeDiff(changes, this.diffOptions, this.graphImpactData);
@@ -5127,7 +5231,6 @@ var ChecklistBuilder = class {
5127
5231
  const checklist = {
5128
5232
  items,
5129
5233
  passed: failed === 0,
5130
- // Pass if no failed items
5131
5234
  summary: {
5132
5235
  total: items.length,
5133
5236
  passed,
@@ -5365,7 +5468,7 @@ async function requestMultiplePeerReviews(requests) {
5365
5468
 
5366
5469
  // src/feedback/logging/file-sink.ts
5367
5470
  var import_fs2 = require("fs");
5368
- var import_path11 = require("path");
5471
+ var import_path10 = require("path");
5369
5472
  var FileSink = class {
5370
5473
  name = "file";
5371
5474
  filePath;
@@ -5388,7 +5491,7 @@ var FileSink = class {
5388
5491
  }
5389
5492
  ensureDirectory() {
5390
5493
  if (!this.initialized) {
5391
- const dir = (0, import_path11.dirname)(this.filePath);
5494
+ const dir = (0, import_path10.dirname)(this.filePath);
5392
5495
  if (!(0, import_fs2.existsSync)(dir)) {
5393
5496
  (0, import_fs2.mkdirSync)(dir, { recursive: true });
5394
5497
  }
@@ -5535,14 +5638,10 @@ var ConstraintRuleSchema = import_zod3.z.object({
5535
5638
  // forward-compat for governs edges
5536
5639
  });
5537
5640
 
5538
- // src/architecture/collectors/circular-deps.ts
5539
- var import_node_path3 = require("path");
5540
-
5541
5641
  // src/architecture/collectors/hash.ts
5542
5642
  var import_node_crypto = require("crypto");
5543
5643
  function violationId(relativePath, category, normalizedDetail) {
5544
- const path20 = relativePath.replace(/\\/g, "/");
5545
- const input = `${path20}:${category}:${normalizedDetail}`;
5644
+ const input = `${relativePath}:${category}:${normalizedDetail}`;
5546
5645
  return (0, import_node_crypto.createHash)("sha256").update(input).digest("hex");
5547
5646
  }
5548
5647
  function constraintRuleId(category, scope, description) {
@@ -5606,8 +5705,8 @@ var CircularDepsCollector = class {
5606
5705
  }
5607
5706
  const { cycles, largestCycle } = result.value;
5608
5707
  const violations = cycles.map((cycle) => {
5609
- const cyclePath = cycle.cycle.map((f) => (0, import_node_path3.relative)(rootDir, f)).join(" -> ");
5610
- const firstFile = (0, import_node_path3.relative)(rootDir, cycle.cycle[0]);
5708
+ const cyclePath = cycle.cycle.map((f) => relativePosix(rootDir, f)).join(" -> ");
5709
+ const firstFile = relativePosix(rootDir, cycle.cycle[0]);
5611
5710
  return {
5612
5711
  id: violationId(firstFile, this.category, cyclePath),
5613
5712
  file: firstFile,
@@ -5628,7 +5727,6 @@ var CircularDepsCollector = class {
5628
5727
  };
5629
5728
 
5630
5729
  // src/architecture/collectors/layer-violations.ts
5631
- var import_node_path4 = require("path");
5632
5730
  var LayerViolationCollector = class {
5633
5731
  category = "layer-violations";
5634
5732
  getRules(_config, _rootDir) {
@@ -5672,8 +5770,8 @@ var LayerViolationCollector = class {
5672
5770
  (v) => v.reason === "WRONG_LAYER"
5673
5771
  );
5674
5772
  const violations = layerViolations.map((v) => {
5675
- const relFile = (0, import_node_path4.relative)(rootDir, v.file);
5676
- const relImport = (0, import_node_path4.relative)(rootDir, v.imports);
5773
+ const relFile = relativePosix(rootDir, v.file);
5774
+ const relImport = relativePosix(rootDir, v.imports);
5677
5775
  const detail = `${v.fromLayer} -> ${v.toLayer}: ${relFile} imports ${relImport}`;
5678
5776
  return {
5679
5777
  id: violationId(relFile, this.category, detail),
@@ -5695,7 +5793,6 @@ var LayerViolationCollector = class {
5695
5793
  };
5696
5794
 
5697
5795
  // src/architecture/collectors/complexity.ts
5698
- var import_node_path5 = require("path");
5699
5796
  var ComplexityCollector = class {
5700
5797
  category = "complexity";
5701
5798
  getRules(_config, _rootDir) {
@@ -5756,7 +5853,7 @@ var ComplexityCollector = class {
5756
5853
  (v) => v.severity === "error" || v.severity === "warning"
5757
5854
  );
5758
5855
  const violations = filtered.map((v) => {
5759
- const relFile = (0, import_node_path5.relative)(rootDir, v.file);
5856
+ const relFile = relativePosix(rootDir, v.file);
5760
5857
  const idDetail = `${v.metric}:${v.function}`;
5761
5858
  return {
5762
5859
  id: violationId(relFile, this.category, idDetail),
@@ -5782,7 +5879,6 @@ var ComplexityCollector = class {
5782
5879
  };
5783
5880
 
5784
5881
  // src/architecture/collectors/coupling.ts
5785
- var import_node_path6 = require("path");
5786
5882
  var CouplingCollector = class {
5787
5883
  category = "coupling";
5788
5884
  getRules(_config, _rootDir) {
@@ -5833,7 +5929,7 @@ var CouplingCollector = class {
5833
5929
  (v) => v.severity === "error" || v.severity === "warning"
5834
5930
  );
5835
5931
  const violations = filtered.map((v) => {
5836
- const relFile = (0, import_node_path6.relative)(rootDir, v.file);
5932
+ const relFile = relativePosix(rootDir, v.file);
5837
5933
  const idDetail = `${v.metric}`;
5838
5934
  return {
5839
5935
  id: violationId(relFile, this.category, idDetail),
@@ -5856,7 +5952,6 @@ var CouplingCollector = class {
5856
5952
  };
5857
5953
 
5858
5954
  // src/architecture/collectors/forbidden-imports.ts
5859
- var import_node_path7 = require("path");
5860
5955
  var ForbiddenImportCollector = class {
5861
5956
  category = "forbidden-imports";
5862
5957
  getRules(_config, _rootDir) {
@@ -5900,8 +5995,8 @@ var ForbiddenImportCollector = class {
5900
5995
  (v) => v.reason === "FORBIDDEN_IMPORT"
5901
5996
  );
5902
5997
  const violations = forbidden.map((v) => {
5903
- const relFile = (0, import_node_path7.relative)(rootDir, v.file);
5904
- const relImport = (0, import_node_path7.relative)(rootDir, v.imports);
5998
+ const relFile = relativePosix(rootDir, v.file);
5999
+ const relImport = relativePosix(rootDir, v.imports);
5905
6000
  const detail = `forbidden import: ${relFile} -> ${relImport}`;
5906
6001
  return {
5907
6002
  id: violationId(relFile, this.category, detail),
@@ -5924,7 +6019,7 @@ var ForbiddenImportCollector = class {
5924
6019
 
5925
6020
  // src/architecture/collectors/module-size.ts
5926
6021
  var import_promises2 = require("fs/promises");
5927
- var import_node_path8 = require("path");
6022
+ var import_node_path4 = require("path");
5928
6023
  async function discoverModules(rootDir) {
5929
6024
  const modules = [];
5930
6025
  async function scanDir(dir) {
@@ -5940,7 +6035,7 @@ async function discoverModules(rootDir) {
5940
6035
  if (entry.name.startsWith(".") || entry.name === "node_modules" || entry.name === "dist") {
5941
6036
  continue;
5942
6037
  }
5943
- const fullPath = (0, import_node_path8.join)(dir, entry.name);
6038
+ const fullPath = (0, import_node_path4.join)(dir, entry.name);
5944
6039
  if (entry.isDirectory()) {
5945
6040
  subdirs.push(fullPath);
5946
6041
  } else if (entry.isFile() && (entry.name.endsWith(".ts") || entry.name.endsWith(".tsx")) && !entry.name.endsWith(".test.ts") && !entry.name.endsWith(".test.tsx") && !entry.name.endsWith(".spec.ts")) {
@@ -5957,10 +6052,10 @@ async function discoverModules(rootDir) {
5957
6052
  }
5958
6053
  }
5959
6054
  modules.push({
5960
- modulePath: (0, import_node_path8.relative)(rootDir, dir),
6055
+ modulePath: relativePosix(rootDir, dir),
5961
6056
  fileCount: tsFiles.length,
5962
6057
  totalLoc,
5963
- files: tsFiles.map((f) => (0, import_node_path8.relative)(rootDir, f))
6058
+ files: tsFiles.map((f) => relativePosix(rootDir, f))
5964
6059
  });
5965
6060
  }
5966
6061
  for (const sub of subdirs) {
@@ -6052,16 +6147,16 @@ var ModuleSizeCollector = class {
6052
6147
 
6053
6148
  // src/architecture/collectors/dep-depth.ts
6054
6149
  var import_promises3 = require("fs/promises");
6055
- var import_node_path9 = require("path");
6150
+ var import_node_path5 = require("path");
6056
6151
  function extractImportSources(content, filePath) {
6057
6152
  const importRegex = /(?:import|export)\s+.*?from\s+['"](\.[^'"]+)['"]/g;
6058
6153
  const dynamicRegex = /import\s*\(\s*['"](\.[^'"]+)['"]\s*\)/g;
6059
6154
  const sources = [];
6060
- const dir = (0, import_node_path9.dirname)(filePath);
6155
+ const dir = (0, import_node_path5.dirname)(filePath);
6061
6156
  for (const regex of [importRegex, dynamicRegex]) {
6062
6157
  let match;
6063
6158
  while ((match = regex.exec(content)) !== null) {
6064
- let resolved = (0, import_node_path9.resolve)(dir, match[1]);
6159
+ let resolved = (0, import_node_path5.resolve)(dir, match[1]);
6065
6160
  if (!resolved.endsWith(".ts") && !resolved.endsWith(".tsx")) {
6066
6161
  resolved += ".ts";
6067
6162
  }
@@ -6082,7 +6177,7 @@ async function collectTsFiles(dir) {
6082
6177
  for (const entry of entries) {
6083
6178
  if (entry.name.startsWith(".") || entry.name === "node_modules" || entry.name === "dist")
6084
6179
  continue;
6085
- const fullPath = (0, import_node_path9.join)(d, entry.name);
6180
+ const fullPath = (0, import_node_path5.join)(d, entry.name);
6086
6181
  if (entry.isDirectory()) {
6087
6182
  await scan(fullPath);
6088
6183
  } else if (entry.isFile() && (entry.name.endsWith(".ts") || entry.name.endsWith(".tsx")) && !entry.name.endsWith(".test.ts") && !entry.name.endsWith(".test.tsx") && !entry.name.endsWith(".spec.ts")) {
@@ -6136,7 +6231,7 @@ var DepDepthCollector = class {
6136
6231
  }
6137
6232
  const moduleMap = /* @__PURE__ */ new Map();
6138
6233
  for (const file of allFiles) {
6139
- const relDir = (0, import_node_path9.relative)(rootDir, (0, import_node_path9.dirname)(file));
6234
+ const relDir = relativePosix(rootDir, (0, import_node_path5.dirname)(file));
6140
6235
  if (!moduleMap.has(relDir)) moduleMap.set(relDir, []);
6141
6236
  moduleMap.get(relDir).push(file);
6142
6237
  }
@@ -6280,11 +6375,11 @@ function detectStaleConstraints(store, windowDays = 30, category) {
6280
6375
  // src/architecture/baseline-manager.ts
6281
6376
  var import_node_fs3 = require("fs");
6282
6377
  var import_node_crypto2 = require("crypto");
6283
- var import_node_path10 = require("path");
6378
+ var import_node_path6 = require("path");
6284
6379
  var ArchBaselineManager = class {
6285
6380
  baselinesPath;
6286
6381
  constructor(projectRoot, baselinePath) {
6287
- this.baselinesPath = baselinePath ? (0, import_node_path10.join)(projectRoot, baselinePath) : (0, import_node_path10.join)(projectRoot, ".harness", "arch", "baselines.json");
6382
+ this.baselinesPath = baselinePath ? (0, import_node_path6.join)(projectRoot, baselinePath) : (0, import_node_path6.join)(projectRoot, ".harness", "arch", "baselines.json");
6288
6383
  }
6289
6384
  /**
6290
6385
  * Snapshot the current metric results into an ArchBaseline.
@@ -6345,7 +6440,7 @@ var ArchBaselineManager = class {
6345
6440
  * Uses atomic write (write to temp file, then rename) to prevent corruption.
6346
6441
  */
6347
6442
  save(baseline) {
6348
- const dir = (0, import_node_path10.dirname)(this.baselinesPath);
6443
+ const dir = (0, import_node_path6.dirname)(this.baselinesPath);
6349
6444
  if (!(0, import_node_fs3.existsSync)(dir)) {
6350
6445
  (0, import_node_fs3.mkdirSync)(dir, { recursive: true });
6351
6446
  }
@@ -6372,6 +6467,31 @@ function aggregateByCategory(results) {
6372
6467
  }
6373
6468
  return map;
6374
6469
  }
6470
+ function classifyViolations(violations, baselineViolationIds) {
6471
+ const newViolations = [];
6472
+ const preExisting = [];
6473
+ for (const violation of violations) {
6474
+ if (baselineViolationIds.has(violation.id)) {
6475
+ preExisting.push(violation.id);
6476
+ } else {
6477
+ newViolations.push(violation);
6478
+ }
6479
+ }
6480
+ return { newViolations, preExisting };
6481
+ }
6482
+ function findResolvedViolations(baselineCategory, currentViolationIds) {
6483
+ if (!baselineCategory) return [];
6484
+ return baselineCategory.violationIds.filter((id) => !currentViolationIds.has(id));
6485
+ }
6486
+ function collectOrphanedBaselineViolations(baseline, visitedCategories) {
6487
+ const resolved = [];
6488
+ for (const [category, baselineCategory] of Object.entries(baseline.metrics)) {
6489
+ if (!visitedCategories.has(category) && baselineCategory) {
6490
+ resolved.push(...baselineCategory.violationIds);
6491
+ }
6492
+ }
6493
+ return resolved;
6494
+ }
6375
6495
  function diff(current, baseline) {
6376
6496
  const aggregated = aggregateByCategory(current);
6377
6497
  const newViolations = [];
@@ -6384,21 +6504,11 @@ function diff(current, baseline) {
6384
6504
  const baselineCategory = baseline.metrics[category];
6385
6505
  const baselineViolationIds = new Set(baselineCategory?.violationIds ?? []);
6386
6506
  const baselineValue = baselineCategory?.value ?? 0;
6387
- for (const violation of agg.violations) {
6388
- if (baselineViolationIds.has(violation.id)) {
6389
- preExisting.push(violation.id);
6390
- } else {
6391
- newViolations.push(violation);
6392
- }
6393
- }
6507
+ const classified = classifyViolations(agg.violations, baselineViolationIds);
6508
+ newViolations.push(...classified.newViolations);
6509
+ preExisting.push(...classified.preExisting);
6394
6510
  const currentViolationIds = new Set(agg.violations.map((v) => v.id));
6395
- if (baselineCategory) {
6396
- for (const id of baselineCategory.violationIds) {
6397
- if (!currentViolationIds.has(id)) {
6398
- resolvedViolations.push(id);
6399
- }
6400
- }
6401
- }
6511
+ resolvedViolations.push(...findResolvedViolations(baselineCategory, currentViolationIds));
6402
6512
  if (baselineCategory && agg.value > baselineValue) {
6403
6513
  regressions.push({
6404
6514
  category,
@@ -6408,16 +6518,9 @@ function diff(current, baseline) {
6408
6518
  });
6409
6519
  }
6410
6520
  }
6411
- for (const [category, baselineCategory] of Object.entries(baseline.metrics)) {
6412
- if (!visitedCategories.has(category) && baselineCategory) {
6413
- for (const id of baselineCategory.violationIds) {
6414
- resolvedViolations.push(id);
6415
- }
6416
- }
6417
- }
6418
- const passed = newViolations.length === 0 && regressions.length === 0;
6521
+ resolvedViolations.push(...collectOrphanedBaselineViolations(baseline, visitedCategories));
6419
6522
  return {
6420
- passed,
6523
+ passed: newViolations.length === 0 && regressions.length === 0,
6421
6524
  newViolations,
6422
6525
  resolvedViolations,
6423
6526
  preExisting,
@@ -6426,7 +6529,7 @@ function diff(current, baseline) {
6426
6529
  }
6427
6530
 
6428
6531
  // src/architecture/config.ts
6429
- function resolveThresholds(scope, config) {
6532
+ function resolveThresholds2(scope, config) {
6430
6533
  const projectThresholds = {};
6431
6534
  for (const [key, val] of Object.entries(config.thresholds)) {
6432
6535
  projectThresholds[key] = typeof val === "object" && val !== null && !Array.isArray(val) ? { ...val } : val;
@@ -6763,6 +6866,8 @@ var INDEX_FILE = "index.json";
6763
6866
  var SESSIONS_DIR = "sessions";
6764
6867
  var SESSION_INDEX_FILE = "index.md";
6765
6868
  var SUMMARY_FILE = "summary.md";
6869
+ var SESSION_STATE_FILE = "session-state.json";
6870
+ var ARCHIVE_DIR = "archive";
6766
6871
 
6767
6872
  // src/state/stream-resolver.ts
6768
6873
  var STREAMS_DIR = "streams";
@@ -7671,6 +7776,143 @@ function listActiveSessions(projectPath) {
7671
7776
  }
7672
7777
  }
7673
7778
 
7779
+ // src/state/session-sections.ts
7780
+ var fs14 = __toESM(require("fs"));
7781
+ var path11 = __toESM(require("path"));
7782
+ var import_types14 = require("@harness-engineering/types");
7783
+ function emptySections() {
7784
+ const sections = {};
7785
+ for (const name of import_types14.SESSION_SECTION_NAMES) {
7786
+ sections[name] = [];
7787
+ }
7788
+ return sections;
7789
+ }
7790
+ async function loadSessionState(projectPath, sessionSlug) {
7791
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
7792
+ if (!dirResult.ok) return dirResult;
7793
+ const sessionDir = dirResult.value;
7794
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
7795
+ if (!fs14.existsSync(filePath)) {
7796
+ return (0, import_types.Ok)(emptySections());
7797
+ }
7798
+ try {
7799
+ const raw = fs14.readFileSync(filePath, "utf-8");
7800
+ const parsed = JSON.parse(raw);
7801
+ const sections = emptySections();
7802
+ for (const name of import_types14.SESSION_SECTION_NAMES) {
7803
+ if (Array.isArray(parsed[name])) {
7804
+ sections[name] = parsed[name];
7805
+ }
7806
+ }
7807
+ return (0, import_types.Ok)(sections);
7808
+ } catch (error) {
7809
+ return (0, import_types.Err)(
7810
+ new Error(
7811
+ `Failed to load session state: ${error instanceof Error ? error.message : String(error)}`
7812
+ )
7813
+ );
7814
+ }
7815
+ }
7816
+ async function saveSessionState(projectPath, sessionSlug, sections) {
7817
+ const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
7818
+ if (!dirResult.ok) return dirResult;
7819
+ const sessionDir = dirResult.value;
7820
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
7821
+ try {
7822
+ fs14.writeFileSync(filePath, JSON.stringify(sections, null, 2));
7823
+ return (0, import_types.Ok)(void 0);
7824
+ } catch (error) {
7825
+ return (0, import_types.Err)(
7826
+ new Error(
7827
+ `Failed to save session state: ${error instanceof Error ? error.message : String(error)}`
7828
+ )
7829
+ );
7830
+ }
7831
+ }
7832
+ async function readSessionSections(projectPath, sessionSlug) {
7833
+ return loadSessionState(projectPath, sessionSlug);
7834
+ }
7835
+ async function readSessionSection(projectPath, sessionSlug, section) {
7836
+ const result = await loadSessionState(projectPath, sessionSlug);
7837
+ if (!result.ok) return result;
7838
+ return (0, import_types.Ok)(result.value[section]);
7839
+ }
7840
+ async function appendSessionEntry(projectPath, sessionSlug, section, authorSkill, content) {
7841
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
7842
+ if (!loadResult.ok) return loadResult;
7843
+ const sections = loadResult.value;
7844
+ const entry = {
7845
+ id: generateEntryId(),
7846
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7847
+ authorSkill,
7848
+ content,
7849
+ status: "active"
7850
+ };
7851
+ sections[section].push(entry);
7852
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
7853
+ if (!saveResult.ok) return saveResult;
7854
+ return (0, import_types.Ok)(entry);
7855
+ }
7856
+ async function updateSessionEntryStatus(projectPath, sessionSlug, section, entryId, newStatus) {
7857
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
7858
+ if (!loadResult.ok) return loadResult;
7859
+ const sections = loadResult.value;
7860
+ const entry = sections[section].find((e) => e.id === entryId);
7861
+ if (!entry) {
7862
+ return (0, import_types.Err)(new Error(`Entry '${entryId}' not found in section '${section}'`));
7863
+ }
7864
+ entry.status = newStatus;
7865
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
7866
+ if (!saveResult.ok) return saveResult;
7867
+ return (0, import_types.Ok)(entry);
7868
+ }
7869
+ function generateEntryId() {
7870
+ const timestamp = Date.now().toString(36);
7871
+ const random = Math.random().toString(36).substring(2, 8);
7872
+ return `${timestamp}-${random}`;
7873
+ }
7874
+
7875
+ // src/state/session-archive.ts
7876
+ var fs15 = __toESM(require("fs"));
7877
+ var path12 = __toESM(require("path"));
7878
+ async function archiveSession(projectPath, sessionSlug) {
7879
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
7880
+ if (!dirResult.ok) return dirResult;
7881
+ const sessionDir = dirResult.value;
7882
+ if (!fs15.existsSync(sessionDir)) {
7883
+ return (0, import_types.Err)(new Error(`Session '${sessionSlug}' not found at ${sessionDir}`));
7884
+ }
7885
+ const archiveBase = path12.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
7886
+ try {
7887
+ fs15.mkdirSync(archiveBase, { recursive: true });
7888
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
7889
+ let archiveName = `${sessionSlug}-${date}`;
7890
+ let counter = 1;
7891
+ while (fs15.existsSync(path12.join(archiveBase, archiveName))) {
7892
+ archiveName = `${sessionSlug}-${date}-${counter}`;
7893
+ counter++;
7894
+ }
7895
+ const dest = path12.join(archiveBase, archiveName);
7896
+ try {
7897
+ fs15.renameSync(sessionDir, dest);
7898
+ } catch (renameErr) {
7899
+ if (renameErr instanceof Error && "code" in renameErr && renameErr.code === "EXDEV") {
7900
+ fs15.cpSync(sessionDir, dest, { recursive: true });
7901
+ fs15.rmSync(sessionDir, { recursive: true });
7902
+ } else {
7903
+ throw renameErr;
7904
+ }
7905
+ }
7906
+ return (0, import_types.Ok)(void 0);
7907
+ } catch (error) {
7908
+ return (0, import_types.Err)(
7909
+ new Error(
7910
+ `Failed to archive session: ${error instanceof Error ? error.message : String(error)}`
7911
+ )
7912
+ );
7913
+ }
7914
+ }
7915
+
7674
7916
  // src/workflow/runner.ts
7675
7917
  async function executeWorkflow(workflow, executor) {
7676
7918
  const stepResults = [];
@@ -7820,7 +8062,7 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
7820
8062
  }
7821
8063
 
7822
8064
  // src/security/scanner.ts
7823
- var fs15 = __toESM(require("fs/promises"));
8065
+ var fs17 = __toESM(require("fs/promises"));
7824
8066
 
7825
8067
  // src/security/rules/registry.ts
7826
8068
  var RuleRegistry = class {
@@ -7907,15 +8149,15 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
7907
8149
  }
7908
8150
 
7909
8151
  // src/security/stack-detector.ts
7910
- var fs14 = __toESM(require("fs"));
7911
- var path11 = __toESM(require("path"));
8152
+ var fs16 = __toESM(require("fs"));
8153
+ var path13 = __toESM(require("path"));
7912
8154
  function detectStack(projectRoot) {
7913
8155
  const stacks = [];
7914
- const pkgJsonPath = path11.join(projectRoot, "package.json");
7915
- if (fs14.existsSync(pkgJsonPath)) {
8156
+ const pkgJsonPath = path13.join(projectRoot, "package.json");
8157
+ if (fs16.existsSync(pkgJsonPath)) {
7916
8158
  stacks.push("node");
7917
8159
  try {
7918
- const pkgJson = JSON.parse(fs14.readFileSync(pkgJsonPath, "utf-8"));
8160
+ const pkgJson = JSON.parse(fs16.readFileSync(pkgJsonPath, "utf-8"));
7919
8161
  const allDeps = {
7920
8162
  ...pkgJson.dependencies,
7921
8163
  ...pkgJson.devDependencies
@@ -7930,13 +8172,13 @@ function detectStack(projectRoot) {
7930
8172
  } catch {
7931
8173
  }
7932
8174
  }
7933
- const goModPath = path11.join(projectRoot, "go.mod");
7934
- if (fs14.existsSync(goModPath)) {
8175
+ const goModPath = path13.join(projectRoot, "go.mod");
8176
+ if (fs16.existsSync(goModPath)) {
7935
8177
  stacks.push("go");
7936
8178
  }
7937
- const requirementsPath = path11.join(projectRoot, "requirements.txt");
7938
- const pyprojectPath = path11.join(projectRoot, "pyproject.toml");
7939
- if (fs14.existsSync(requirementsPath) || fs14.existsSync(pyprojectPath)) {
8179
+ const requirementsPath = path13.join(projectRoot, "requirements.txt");
8180
+ const pyprojectPath = path13.join(projectRoot, "pyproject.toml");
8181
+ if (fs16.existsSync(requirementsPath) || fs16.existsSync(pyprojectPath)) {
7940
8182
  stacks.push("python");
7941
8183
  }
7942
8184
  return stacks;
@@ -8363,7 +8605,7 @@ var SecurityScanner = class {
8363
8605
  }
8364
8606
  async scanFile(filePath) {
8365
8607
  if (!this.config.enabled) return [];
8366
- const content = await fs15.readFile(filePath, "utf-8");
8608
+ const content = await fs17.readFile(filePath, "utf-8");
8367
8609
  return this.scanContent(content, filePath, 1);
8368
8610
  }
8369
8611
  async scanFiles(filePaths) {
@@ -8388,7 +8630,7 @@ var SecurityScanner = class {
8388
8630
  };
8389
8631
 
8390
8632
  // src/ci/check-orchestrator.ts
8391
- var path12 = __toESM(require("path"));
8633
+ var path14 = __toESM(require("path"));
8392
8634
  var ALL_CHECKS = [
8393
8635
  "validate",
8394
8636
  "deps",
@@ -8399,237 +8641,275 @@ var ALL_CHECKS = [
8399
8641
  "phase-gate",
8400
8642
  "arch"
8401
8643
  ];
8644
+ async function runValidateCheck(projectRoot, config) {
8645
+ const issues = [];
8646
+ const agentsPath = path14.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8647
+ const result = await validateAgentsMap(agentsPath);
8648
+ if (!result.ok) {
8649
+ issues.push({ severity: "error", message: result.error.message });
8650
+ } else if (!result.value.valid) {
8651
+ if (result.value.errors) {
8652
+ for (const err of result.value.errors) {
8653
+ issues.push({ severity: "error", message: err.message });
8654
+ }
8655
+ }
8656
+ for (const section of result.value.missingSections) {
8657
+ issues.push({ severity: "warning", message: `Missing section: ${section}` });
8658
+ }
8659
+ for (const link of result.value.brokenLinks) {
8660
+ issues.push({
8661
+ severity: "warning",
8662
+ message: `Broken link: ${link.text} \u2192 ${link.path}`,
8663
+ file: link.path
8664
+ });
8665
+ }
8666
+ }
8667
+ return issues;
8668
+ }
8669
+ async function runDepsCheck(projectRoot, config) {
8670
+ const issues = [];
8671
+ const rawLayers = config.layers;
8672
+ if (rawLayers && rawLayers.length > 0) {
8673
+ const parser = new TypeScriptParser();
8674
+ const layers = rawLayers.map(
8675
+ (l) => defineLayer(
8676
+ l.name,
8677
+ Array.isArray(l.patterns) ? l.patterns : [l.pattern],
8678
+ l.allowedDependencies
8679
+ )
8680
+ );
8681
+ const result = await validateDependencies({
8682
+ layers,
8683
+ rootDir: projectRoot,
8684
+ parser
8685
+ });
8686
+ if (!result.ok) {
8687
+ issues.push({ severity: "error", message: result.error.message });
8688
+ } else if (result.value.violations.length > 0) {
8689
+ for (const v of result.value.violations) {
8690
+ issues.push({
8691
+ severity: "error",
8692
+ message: `${v.reason}: ${v.file} imports ${v.imports} (${v.fromLayer} \u2192 ${v.toLayer})`,
8693
+ file: v.file,
8694
+ line: v.line
8695
+ });
8696
+ }
8697
+ }
8698
+ }
8699
+ return issues;
8700
+ }
8701
+ async function runDocsCheck(projectRoot, config) {
8702
+ const issues = [];
8703
+ const docsDir = path14.join(projectRoot, config.docsDir ?? "docs");
8704
+ const entropyConfig = config.entropy || {};
8705
+ const result = await checkDocCoverage("project", {
8706
+ docsDir,
8707
+ sourceDir: projectRoot,
8708
+ excludePatterns: entropyConfig.excludePatterns || [
8709
+ "**/node_modules/**",
8710
+ "**/dist/**",
8711
+ "**/*.test.ts",
8712
+ "**/fixtures/**"
8713
+ ]
8714
+ });
8715
+ if (!result.ok) {
8716
+ issues.push({ severity: "warning", message: result.error.message });
8717
+ } else if (result.value.gaps.length > 0) {
8718
+ for (const gap of result.value.gaps) {
8719
+ issues.push({
8720
+ severity: "warning",
8721
+ message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
8722
+ file: gap.file
8723
+ });
8724
+ }
8725
+ }
8726
+ return issues;
8727
+ }
8728
+ async function runEntropyCheck(projectRoot, config) {
8729
+ const issues = [];
8730
+ const entropyConfig = config.entropy || {};
8731
+ const perfConfig = config.performance || {};
8732
+ const entryPoints = entropyConfig.entryPoints ?? perfConfig.entryPoints;
8733
+ const analyzer = new EntropyAnalyzer({
8734
+ rootDir: projectRoot,
8735
+ ...entryPoints ? { entryPoints } : {},
8736
+ analyze: { drift: true, deadCode: true, patterns: false }
8737
+ });
8738
+ const result = await analyzer.analyze();
8739
+ if (!result.ok) {
8740
+ issues.push({ severity: "warning", message: result.error.message });
8741
+ } else {
8742
+ const report = result.value;
8743
+ if (report.drift) {
8744
+ for (const drift of report.drift.drifts) {
8745
+ issues.push({
8746
+ severity: "warning",
8747
+ message: `Doc drift (${drift.type}): ${drift.details}`,
8748
+ file: drift.docFile,
8749
+ line: drift.line
8750
+ });
8751
+ }
8752
+ }
8753
+ if (report.deadCode) {
8754
+ for (const dead of report.deadCode.deadExports) {
8755
+ issues.push({
8756
+ severity: "warning",
8757
+ message: `Dead export: ${dead.name}`,
8758
+ file: dead.file,
8759
+ line: dead.line
8760
+ });
8761
+ }
8762
+ }
8763
+ }
8764
+ return issues;
8765
+ }
8766
+ async function runSecurityCheck(projectRoot, config) {
8767
+ const issues = [];
8768
+ const securityConfig = parseSecurityConfig(config.security);
8769
+ if (!securityConfig.enabled) return issues;
8770
+ const scanner = new SecurityScanner(securityConfig);
8771
+ scanner.configureForProject(projectRoot);
8772
+ const { glob: globFn } = await import("glob");
8773
+ const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
8774
+ cwd: projectRoot,
8775
+ ignore: securityConfig.exclude ?? [
8776
+ "**/node_modules/**",
8777
+ "**/dist/**",
8778
+ "**/*.test.ts",
8779
+ "**/fixtures/**"
8780
+ ],
8781
+ absolute: true
8782
+ });
8783
+ const scanResult = await scanner.scanFiles(sourceFiles);
8784
+ for (const finding of scanResult.findings) {
8785
+ issues.push({
8786
+ severity: finding.severity === "info" ? "warning" : finding.severity,
8787
+ message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
8788
+ file: finding.file,
8789
+ line: finding.line
8790
+ });
8791
+ }
8792
+ return issues;
8793
+ }
8794
+ async function runPerfCheck(projectRoot, config) {
8795
+ const issues = [];
8796
+ const perfConfig = config.performance || {};
8797
+ const entryPoints = perfConfig.entryPoints;
8798
+ const perfAnalyzer = new EntropyAnalyzer({
8799
+ rootDir: projectRoot,
8800
+ ...entryPoints ? { entryPoints } : {},
8801
+ analyze: {
8802
+ complexity: perfConfig.complexity || true,
8803
+ coupling: perfConfig.coupling || true,
8804
+ sizeBudget: perfConfig.sizeBudget || false
8805
+ }
8806
+ });
8807
+ const perfResult = await perfAnalyzer.analyze();
8808
+ if (!perfResult.ok) {
8809
+ issues.push({ severity: "warning", message: perfResult.error.message });
8810
+ } else {
8811
+ const perfReport = perfResult.value;
8812
+ if (perfReport.complexity) {
8813
+ for (const v of perfReport.complexity.violations) {
8814
+ issues.push({
8815
+ severity: v.severity === "info" ? "warning" : v.severity,
8816
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
8817
+ file: v.file,
8818
+ line: v.line
8819
+ });
8820
+ }
8821
+ }
8822
+ if (perfReport.coupling) {
8823
+ for (const v of perfReport.coupling.violations) {
8824
+ issues.push({
8825
+ severity: v.severity === "info" ? "warning" : v.severity,
8826
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
8827
+ file: v.file
8828
+ });
8829
+ }
8830
+ }
8831
+ }
8832
+ return issues;
8833
+ }
8834
+ async function runPhaseGateCheck(_projectRoot, config) {
8835
+ const issues = [];
8836
+ const phaseGates = config.phaseGates;
8837
+ if (!phaseGates?.enabled) {
8838
+ return issues;
8839
+ }
8840
+ issues.push({
8841
+ severity: "warning",
8842
+ message: "Phase gate is enabled but requires CLI context. Run `harness check-phase-gate` separately for full validation."
8843
+ });
8844
+ return issues;
8845
+ }
8846
+ async function runArchCheck(projectRoot, config) {
8847
+ const issues = [];
8848
+ const rawArchConfig = config.architecture;
8849
+ const archConfig = ArchConfigSchema.parse(rawArchConfig ?? {});
8850
+ if (!archConfig.enabled) return issues;
8851
+ const results = await runAll(archConfig, projectRoot);
8852
+ const baselineManager = new ArchBaselineManager(projectRoot, archConfig.baselinePath);
8853
+ const baseline = baselineManager.load();
8854
+ if (baseline) {
8855
+ const diffResult = diff(results, baseline);
8856
+ if (!diffResult.passed) {
8857
+ for (const v of diffResult.newViolations) {
8858
+ issues.push({
8859
+ severity: v.severity,
8860
+ message: `[${v.category || "arch"}] NEW: ${v.detail}`,
8861
+ file: v.file
8862
+ });
8863
+ }
8864
+ for (const r of diffResult.regressions) {
8865
+ issues.push({
8866
+ severity: "error",
8867
+ message: `[${r.category}] REGRESSION: ${r.currentValue} > ${r.baselineValue} (delta: ${r.delta})`
8868
+ });
8869
+ }
8870
+ }
8871
+ } else {
8872
+ for (const result of results) {
8873
+ for (const v of result.violations) {
8874
+ issues.push({
8875
+ severity: v.severity,
8876
+ message: `[${result.category}] ${v.detail}`,
8877
+ file: v.file
8878
+ });
8879
+ }
8880
+ }
8881
+ }
8882
+ return issues;
8883
+ }
8402
8884
  async function runSingleCheck(name, projectRoot, config) {
8403
8885
  const start = Date.now();
8404
8886
  const issues = [];
8405
8887
  try {
8406
8888
  switch (name) {
8407
- case "validate": {
8408
- const agentsPath = path12.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8409
- const result = await validateAgentsMap(agentsPath);
8410
- if (!result.ok) {
8411
- issues.push({ severity: "error", message: result.error.message });
8412
- } else if (!result.value.valid) {
8413
- if (result.value.errors) {
8414
- for (const err of result.value.errors) {
8415
- issues.push({ severity: "error", message: err.message });
8416
- }
8417
- }
8418
- for (const section of result.value.missingSections) {
8419
- issues.push({ severity: "warning", message: `Missing section: ${section}` });
8420
- }
8421
- for (const link of result.value.brokenLinks) {
8422
- issues.push({
8423
- severity: "warning",
8424
- message: `Broken link: ${link.text} \u2192 ${link.path}`,
8425
- file: link.path
8426
- });
8427
- }
8428
- }
8889
+ case "validate":
8890
+ issues.push(...await runValidateCheck(projectRoot, config));
8429
8891
  break;
8430
- }
8431
- case "deps": {
8432
- const rawLayers = config.layers;
8433
- if (rawLayers && rawLayers.length > 0) {
8434
- const parser = new TypeScriptParser();
8435
- const layers = rawLayers.map(
8436
- (l) => defineLayer(
8437
- l.name,
8438
- Array.isArray(l.patterns) ? l.patterns : [l.pattern],
8439
- l.allowedDependencies
8440
- )
8441
- );
8442
- const result = await validateDependencies({
8443
- layers,
8444
- rootDir: projectRoot,
8445
- parser
8446
- });
8447
- if (!result.ok) {
8448
- issues.push({ severity: "error", message: result.error.message });
8449
- } else if (result.value.violations.length > 0) {
8450
- for (const v of result.value.violations) {
8451
- issues.push({
8452
- severity: "error",
8453
- message: `${v.reason}: ${v.file} imports ${v.imports} (${v.fromLayer} \u2192 ${v.toLayer})`,
8454
- file: v.file,
8455
- line: v.line
8456
- });
8457
- }
8458
- }
8459
- }
8892
+ case "deps":
8893
+ issues.push(...await runDepsCheck(projectRoot, config));
8460
8894
  break;
8461
- }
8462
- case "docs": {
8463
- const docsDir = path12.join(projectRoot, config.docsDir ?? "docs");
8464
- const entropyConfig = config.entropy || {};
8465
- const result = await checkDocCoverage("project", {
8466
- docsDir,
8467
- sourceDir: projectRoot,
8468
- excludePatterns: entropyConfig.excludePatterns || [
8469
- "**/node_modules/**",
8470
- "**/dist/**",
8471
- "**/*.test.ts",
8472
- "**/fixtures/**"
8473
- ]
8474
- });
8475
- if (!result.ok) {
8476
- issues.push({ severity: "warning", message: result.error.message });
8477
- } else if (result.value.gaps.length > 0) {
8478
- for (const gap of result.value.gaps) {
8479
- issues.push({
8480
- severity: "warning",
8481
- message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
8482
- file: gap.file
8483
- });
8484
- }
8485
- }
8895
+ case "docs":
8896
+ issues.push(...await runDocsCheck(projectRoot, config));
8486
8897
  break;
8487
- }
8488
- case "entropy": {
8489
- const analyzer = new EntropyAnalyzer({
8490
- rootDir: projectRoot,
8491
- analyze: { drift: true, deadCode: true, patterns: false }
8492
- });
8493
- const result = await analyzer.analyze();
8494
- if (!result.ok) {
8495
- issues.push({ severity: "warning", message: result.error.message });
8496
- } else {
8497
- const report = result.value;
8498
- if (report.drift) {
8499
- for (const drift of report.drift.drifts) {
8500
- issues.push({
8501
- severity: "warning",
8502
- message: `Doc drift (${drift.type}): ${drift.details}`,
8503
- file: drift.docFile,
8504
- line: drift.line
8505
- });
8506
- }
8507
- }
8508
- if (report.deadCode) {
8509
- for (const dead of report.deadCode.deadExports) {
8510
- issues.push({
8511
- severity: "warning",
8512
- message: `Dead export: ${dead.name}`,
8513
- file: dead.file,
8514
- line: dead.line
8515
- });
8516
- }
8517
- }
8518
- }
8898
+ case "entropy":
8899
+ issues.push(...await runEntropyCheck(projectRoot, config));
8519
8900
  break;
8520
- }
8521
- case "security": {
8522
- const securityConfig = parseSecurityConfig(config.security);
8523
- if (!securityConfig.enabled) break;
8524
- const scanner = new SecurityScanner(securityConfig);
8525
- scanner.configureForProject(projectRoot);
8526
- const { glob: globFn } = await import("glob");
8527
- const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
8528
- cwd: projectRoot,
8529
- ignore: securityConfig.exclude ?? [
8530
- "**/node_modules/**",
8531
- "**/dist/**",
8532
- "**/*.test.ts",
8533
- "**/fixtures/**"
8534
- ],
8535
- absolute: true
8536
- });
8537
- const scanResult = await scanner.scanFiles(sourceFiles);
8538
- for (const finding of scanResult.findings) {
8539
- issues.push({
8540
- severity: finding.severity === "info" ? "warning" : finding.severity,
8541
- message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
8542
- file: finding.file,
8543
- line: finding.line
8544
- });
8545
- }
8901
+ case "security":
8902
+ issues.push(...await runSecurityCheck(projectRoot, config));
8546
8903
  break;
8547
- }
8548
- case "perf": {
8549
- const perfConfig = config.performance || {};
8550
- const perfAnalyzer = new EntropyAnalyzer({
8551
- rootDir: projectRoot,
8552
- analyze: {
8553
- complexity: perfConfig.complexity || true,
8554
- coupling: perfConfig.coupling || true,
8555
- sizeBudget: perfConfig.sizeBudget || false
8556
- }
8557
- });
8558
- const perfResult = await perfAnalyzer.analyze();
8559
- if (!perfResult.ok) {
8560
- issues.push({ severity: "warning", message: perfResult.error.message });
8561
- } else {
8562
- const perfReport = perfResult.value;
8563
- if (perfReport.complexity) {
8564
- for (const v of perfReport.complexity.violations) {
8565
- issues.push({
8566
- severity: v.severity === "info" ? "warning" : v.severity,
8567
- message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
8568
- file: v.file,
8569
- line: v.line
8570
- });
8571
- }
8572
- }
8573
- if (perfReport.coupling) {
8574
- for (const v of perfReport.coupling.violations) {
8575
- issues.push({
8576
- severity: v.severity === "info" ? "warning" : v.severity,
8577
- message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
8578
- file: v.file
8579
- });
8580
- }
8581
- }
8582
- }
8904
+ case "perf":
8905
+ issues.push(...await runPerfCheck(projectRoot, config));
8583
8906
  break;
8584
- }
8585
- case "phase-gate": {
8586
- const phaseGates = config.phaseGates;
8587
- if (!phaseGates?.enabled) {
8588
- break;
8589
- }
8590
- issues.push({
8591
- severity: "warning",
8592
- message: "Phase gate is enabled but requires CLI context. Run `harness check-phase-gate` separately for full validation."
8593
- });
8907
+ case "phase-gate":
8908
+ issues.push(...await runPhaseGateCheck(projectRoot, config));
8594
8909
  break;
8595
- }
8596
- case "arch": {
8597
- const rawArchConfig = config.architecture;
8598
- const archConfig = ArchConfigSchema.parse(rawArchConfig ?? {});
8599
- if (!archConfig.enabled) break;
8600
- const results = await runAll(archConfig, projectRoot);
8601
- const baselineManager = new ArchBaselineManager(projectRoot, archConfig.baselinePath);
8602
- const baseline = baselineManager.load();
8603
- if (baseline) {
8604
- const diffResult = diff(results, baseline);
8605
- if (!diffResult.passed) {
8606
- for (const v of diffResult.newViolations) {
8607
- issues.push({
8608
- severity: v.severity,
8609
- message: `[${v.category || "arch"}] NEW: ${v.detail}`,
8610
- file: v.file
8611
- });
8612
- }
8613
- for (const r of diffResult.regressions) {
8614
- issues.push({
8615
- severity: "error",
8616
- message: `[${r.category}] REGRESSION: ${r.currentValue} > ${r.baselineValue} (delta: ${r.delta})`
8617
- });
8618
- }
8619
- }
8620
- } else {
8621
- for (const result of results) {
8622
- for (const v of result.violations) {
8623
- issues.push({
8624
- severity: v.severity,
8625
- message: `[${result.category}] ${v.detail}`,
8626
- file: v.file
8627
- });
8628
- }
8629
- }
8630
- }
8910
+ case "arch":
8911
+ issues.push(...await runArchCheck(projectRoot, config));
8631
8912
  break;
8632
- }
8633
8913
  }
8634
8914
  } catch (error) {
8635
8915
  issues.push({
@@ -8698,7 +8978,7 @@ async function runCIChecks(input) {
8698
8978
  }
8699
8979
 
8700
8980
  // src/review/mechanical-checks.ts
8701
- var path13 = __toESM(require("path"));
8981
+ var path15 = __toESM(require("path"));
8702
8982
  async function runMechanicalChecks(options) {
8703
8983
  const { projectRoot, config, skip = [], changedFiles } = options;
8704
8984
  const findings = [];
@@ -8710,7 +8990,7 @@ async function runMechanicalChecks(options) {
8710
8990
  };
8711
8991
  if (!skip.includes("validate")) {
8712
8992
  try {
8713
- const agentsPath = path13.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8993
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8714
8994
  const result = await validateAgentsMap(agentsPath);
8715
8995
  if (!result.ok) {
8716
8996
  statuses.validate = "fail";
@@ -8747,7 +9027,7 @@ async function runMechanicalChecks(options) {
8747
9027
  statuses.validate = "fail";
8748
9028
  findings.push({
8749
9029
  tool: "validate",
8750
- file: path13.join(projectRoot, "AGENTS.md"),
9030
+ file: path15.join(projectRoot, "AGENTS.md"),
8751
9031
  message: err instanceof Error ? err.message : String(err),
8752
9032
  severity: "error"
8753
9033
  });
@@ -8811,7 +9091,7 @@ async function runMechanicalChecks(options) {
8811
9091
  (async () => {
8812
9092
  const localFindings = [];
8813
9093
  try {
8814
- const docsDir = path13.join(projectRoot, config.docsDir ?? "docs");
9094
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
8815
9095
  const result = await checkDocCoverage("project", { docsDir });
8816
9096
  if (!result.ok) {
8817
9097
  statuses["check-docs"] = "warn";
@@ -8838,7 +9118,7 @@ async function runMechanicalChecks(options) {
8838
9118
  statuses["check-docs"] = "warn";
8839
9119
  localFindings.push({
8840
9120
  tool: "check-docs",
8841
- file: path13.join(projectRoot, "docs"),
9121
+ file: path15.join(projectRoot, "docs"),
8842
9122
  message: err instanceof Error ? err.message : String(err),
8843
9123
  severity: "warning"
8844
9124
  });
@@ -8986,7 +9266,7 @@ function detectChangeType(commitMessage, diff2) {
8986
9266
  }
8987
9267
 
8988
9268
  // src/review/context-scoper.ts
8989
- var path14 = __toESM(require("path"));
9269
+ var path16 = __toESM(require("path"));
8990
9270
  var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
8991
9271
  var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
8992
9272
  function computeContextBudget(diffLines) {
@@ -8994,18 +9274,18 @@ function computeContextBudget(diffLines) {
8994
9274
  return diffLines;
8995
9275
  }
8996
9276
  function isWithinProject(absPath, projectRoot) {
8997
- const resolvedRoot = path14.resolve(projectRoot) + path14.sep;
8998
- const resolvedPath = path14.resolve(absPath);
8999
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path14.resolve(projectRoot);
9277
+ const resolvedRoot = path16.resolve(projectRoot) + path16.sep;
9278
+ const resolvedPath = path16.resolve(absPath);
9279
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path16.resolve(projectRoot);
9000
9280
  }
9001
9281
  async function readContextFile(projectRoot, filePath, reason) {
9002
- const absPath = path14.isAbsolute(filePath) ? filePath : path14.join(projectRoot, filePath);
9282
+ const absPath = path16.isAbsolute(filePath) ? filePath : path16.join(projectRoot, filePath);
9003
9283
  if (!isWithinProject(absPath, projectRoot)) return null;
9004
9284
  const result = await readFileContent(absPath);
9005
9285
  if (!result.ok) return null;
9006
9286
  const content = result.value;
9007
9287
  const lines = content.split("\n").length;
9008
- const relPath = path14.isAbsolute(filePath) ? path14.relative(projectRoot, filePath) : filePath;
9288
+ const relPath = path16.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
9009
9289
  return { path: relPath, content, reason, lines };
9010
9290
  }
9011
9291
  function extractImportSources2(content) {
@@ -9020,18 +9300,18 @@ function extractImportSources2(content) {
9020
9300
  }
9021
9301
  async function resolveImportPath2(projectRoot, fromFile, importSource) {
9022
9302
  if (!importSource.startsWith(".")) return null;
9023
- const fromDir = path14.dirname(path14.join(projectRoot, fromFile));
9024
- const basePath = path14.resolve(fromDir, importSource);
9303
+ const fromDir = path16.dirname(path16.join(projectRoot, fromFile));
9304
+ const basePath = path16.resolve(fromDir, importSource);
9025
9305
  if (!isWithinProject(basePath, projectRoot)) return null;
9026
- const relBase = path14.relative(projectRoot, basePath);
9306
+ const relBase = relativePosix(projectRoot, basePath);
9027
9307
  const candidates = [
9028
9308
  relBase + ".ts",
9029
9309
  relBase + ".tsx",
9030
9310
  relBase + ".mts",
9031
- path14.join(relBase, "index.ts")
9311
+ path16.join(relBase, "index.ts")
9032
9312
  ];
9033
9313
  for (const candidate of candidates) {
9034
- const absCandidate = path14.join(projectRoot, candidate);
9314
+ const absCandidate = path16.join(projectRoot, candidate);
9035
9315
  if (await fileExists(absCandidate)) {
9036
9316
  return candidate;
9037
9317
  }
@@ -9039,10 +9319,10 @@ async function resolveImportPath2(projectRoot, fromFile, importSource) {
9039
9319
  return null;
9040
9320
  }
9041
9321
  async function findTestFiles(projectRoot, sourceFile) {
9042
- const baseName = path14.basename(sourceFile, path14.extname(sourceFile));
9322
+ const baseName = path16.basename(sourceFile, path16.extname(sourceFile));
9043
9323
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
9044
9324
  const results = await findFiles(pattern, projectRoot);
9045
- return results.map((f) => path14.relative(projectRoot, f));
9325
+ return results.map((f) => relativePosix(projectRoot, f));
9046
9326
  }
9047
9327
  async function gatherImportContext(projectRoot, changedFiles, budget) {
9048
9328
  const contextFiles = [];
@@ -9328,101 +9608,102 @@ function findMissingJsDoc(bundle) {
9328
9608
  }
9329
9609
  return missing;
9330
9610
  }
9331
- function runComplianceAgent(bundle) {
9611
+ function checkMissingJsDoc(bundle, rules) {
9612
+ const jsDocRule = rules.find((r) => r.text.toLowerCase().includes("jsdoc"));
9613
+ if (!jsDocRule) return [];
9614
+ const missingDocs = findMissingJsDoc(bundle);
9615
+ return missingDocs.map((m) => ({
9616
+ id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
9617
+ file: m.file,
9618
+ lineRange: [m.line, m.line],
9619
+ domain: "compliance",
9620
+ severity: "important",
9621
+ title: `Missing JSDoc on exported \`${m.exportName}\``,
9622
+ rationale: `Convention requires all exports to have JSDoc comments (from ${jsDocRule.source}).`,
9623
+ suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
9624
+ evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${jsDocRule.text}"`],
9625
+ validatedBy: "heuristic"
9626
+ }));
9627
+ }
9628
+ function checkFeatureSpec(bundle) {
9629
+ const hasSpecContext = bundle.contextFiles.some(
9630
+ (f) => f.reason === "spec" || f.reason === "convention"
9631
+ );
9632
+ if (hasSpecContext || bundle.changedFiles.length === 0) return [];
9633
+ const firstFile = bundle.changedFiles[0];
9634
+ return [
9635
+ {
9636
+ id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
9637
+ file: firstFile.path,
9638
+ lineRange: [1, 1],
9639
+ domain: "compliance",
9640
+ severity: "suggestion",
9641
+ title: "No spec/design doc found for feature change",
9642
+ rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
9643
+ evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
9644
+ validatedBy: "heuristic"
9645
+ }
9646
+ ];
9647
+ }
9648
+ function checkBugfixHistory(bundle) {
9649
+ if (bundle.commitHistory.length > 0 || bundle.changedFiles.length === 0) return [];
9650
+ const firstFile = bundle.changedFiles[0];
9651
+ return [
9652
+ {
9653
+ id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
9654
+ file: firstFile.path,
9655
+ lineRange: [1, 1],
9656
+ domain: "compliance",
9657
+ severity: "suggestion",
9658
+ title: "Bugfix without commit history context",
9659
+ rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
9660
+ evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
9661
+ validatedBy: "heuristic"
9662
+ }
9663
+ ];
9664
+ }
9665
+ function checkChangeTypeSpecific(bundle) {
9666
+ switch (bundle.changeType) {
9667
+ case "feature":
9668
+ return checkFeatureSpec(bundle);
9669
+ case "bugfix":
9670
+ return checkBugfixHistory(bundle);
9671
+ default:
9672
+ return [];
9673
+ }
9674
+ }
9675
+ function checkResultTypeConvention(bundle, rules) {
9676
+ const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
9677
+ if (!resultTypeRule) return [];
9332
9678
  const findings = [];
9333
- const rules = extractConventionRules(bundle);
9334
- const jsDocRuleExists = rules.some((r) => r.text.toLowerCase().includes("jsdoc"));
9335
- if (jsDocRuleExists) {
9336
- const missingDocs = findMissingJsDoc(bundle);
9337
- for (const m of missingDocs) {
9679
+ for (const cf of bundle.changedFiles) {
9680
+ const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
9681
+ const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
9682
+ if (hasTryCatch && !usesResult) {
9338
9683
  findings.push({
9339
- id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
9340
- file: m.file,
9341
- lineRange: [m.line, m.line],
9684
+ id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
9685
+ file: cf.path,
9686
+ lineRange: [1, cf.lines],
9342
9687
  domain: "compliance",
9343
- severity: "important",
9344
- title: `Missing JSDoc on exported \`${m.exportName}\``,
9345
- rationale: `Convention requires all exports to have JSDoc comments (from ${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.source ?? "conventions"}).`,
9346
- suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
9347
- evidence: [
9348
- `changeType: ${bundle.changeType}`,
9349
- `Convention rule: "${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.text ?? ""}"`
9350
- ],
9688
+ severity: "suggestion",
9689
+ title: "Fallible operation uses try/catch instead of Result type",
9690
+ rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
9691
+ suggestion: "Refactor error handling to use the Result type pattern.",
9692
+ evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${resultTypeRule.text}"`],
9351
9693
  validatedBy: "heuristic"
9352
9694
  });
9353
9695
  }
9354
9696
  }
9355
- switch (bundle.changeType) {
9356
- case "feature": {
9357
- const hasSpecContext = bundle.contextFiles.some(
9358
- (f) => f.reason === "spec" || f.reason === "convention"
9359
- );
9360
- if (!hasSpecContext && bundle.changedFiles.length > 0) {
9361
- const firstFile = bundle.changedFiles[0];
9362
- findings.push({
9363
- id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
9364
- file: firstFile.path,
9365
- lineRange: [1, 1],
9366
- domain: "compliance",
9367
- severity: "suggestion",
9368
- title: "No spec/design doc found for feature change",
9369
- rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
9370
- evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
9371
- validatedBy: "heuristic"
9372
- });
9373
- }
9374
- break;
9375
- }
9376
- case "bugfix": {
9377
- if (bundle.commitHistory.length === 0 && bundle.changedFiles.length > 0) {
9378
- const firstFile = bundle.changedFiles[0];
9379
- findings.push({
9380
- id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
9381
- file: firstFile.path,
9382
- lineRange: [1, 1],
9383
- domain: "compliance",
9384
- severity: "suggestion",
9385
- title: "Bugfix without commit history context",
9386
- rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
9387
- evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
9388
- validatedBy: "heuristic"
9389
- });
9390
- }
9391
- break;
9392
- }
9393
- case "refactor": {
9394
- break;
9395
- }
9396
- case "docs": {
9397
- break;
9398
- }
9399
- }
9400
- const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
9401
- if (resultTypeRule) {
9402
- for (const cf of bundle.changedFiles) {
9403
- const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
9404
- const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
9405
- if (hasTryCatch && !usesResult) {
9406
- findings.push({
9407
- id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
9408
- file: cf.path,
9409
- lineRange: [1, cf.lines],
9410
- domain: "compliance",
9411
- severity: "suggestion",
9412
- title: "Fallible operation uses try/catch instead of Result type",
9413
- rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
9414
- suggestion: "Refactor error handling to use the Result type pattern.",
9415
- evidence: [
9416
- `changeType: ${bundle.changeType}`,
9417
- `Convention rule: "${resultTypeRule.text}"`
9418
- ],
9419
- validatedBy: "heuristic"
9420
- });
9421
- }
9422
- }
9423
- }
9424
9697
  return findings;
9425
9698
  }
9699
+ function runComplianceAgent(bundle) {
9700
+ const rules = extractConventionRules(bundle);
9701
+ return [
9702
+ ...checkMissingJsDoc(bundle, rules),
9703
+ ...checkChangeTypeSpecific(bundle),
9704
+ ...checkResultTypeConvention(bundle, rules)
9705
+ ];
9706
+ }
9426
9707
 
9427
9708
  // src/review/agents/bug-agent.ts
9428
9709
  var BUG_DETECTION_DESCRIPTOR = {
@@ -9699,31 +9980,32 @@ var ARCHITECTURE_DESCRIPTOR = {
9699
9980
  ]
9700
9981
  };
9701
9982
  var LARGE_FILE_THRESHOLD = 300;
9983
// True when a check-deps output line mentions a layering problem
// (case-insensitive substring match on "violation" or "layer").
function isViolationLine(line) {
  const normalized = line.toLowerCase();
  return ["violation", "layer"].some((keyword) => normalized.includes(keyword));
}
9987
// Builds a critical architecture finding for one violation line reported by
// check-deps. Tries to extract "path/to/file.ts:123" from the line; falls
// back to fallbackPath (and line 1) when no file reference is present.
function createLayerViolationFinding(line, fallbackPath) {
  const match = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
  const file = match?.[1] ?? fallbackPath;
  const lineNum = match?.[2] ? parseInt(match[2], 10) : 1;
  const trimmed = line.trim();
  return {
    id: makeFindingId("arch", file, lineNum, "layer violation"),
    file,
    lineRange: [lineNum, lineNum],
    domain: "architecture",
    severity: "critical",
    title: "Layer boundary violation detected by check-deps",
    rationale: `Architectural layer violation: ${trimmed}. Imports must flow in the correct direction per the project's layer definitions.`,
    suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
    evidence: [trimmed],
    validatedBy: "heuristic"
  };
}
9702
10004
// Parses the captured check-deps output (when present in the context files)
// and emits one finding per line that looks like a layer violation.
function detectLayerViolations(bundle) {
  const output = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
  if (!output) return [];
  const fallbackPath = bundle.changedFiles[0]?.path ?? "unknown";
  const violationLines = output.content.split("\n").filter(isViolationLine);
  return violationLines.map((line) => createLayerViolationFinding(line, fallbackPath));
}
9728
10010
  function detectLargeFiles(bundle) {
9729
10011
  const findings = [];
@@ -9745,45 +10027,61 @@ function detectLargeFiles(bundle) {
9745
10027
  }
9746
10028
  return findings;
9747
10029
  }
10030
// Collects the relative import specifiers found in a file's source text.
// Note only a single leading "./" or "../" segment is stripped, so
// "../../x" keeps one "../" prefix.
function extractRelativeImports(content) {
  const relativeImports = new Set();
  for (const match of content.matchAll(/import\s+.*?from\s+['"]([^'"]+)['"]/g)) {
    const specifier = match[1];
    if (!specifier.startsWith(".")) continue;
    relativeImports.add(specifier.replace(/^\.\//, "").replace(/^\.\.\//, ""));
  }
  return relativeImports;
}
10042
// Strips any directory prefix and a trailing .ts/.tsx/.js/.jsx extension,
// e.g. "src/utils/helpers.ts" -> "helpers". Other extensions are kept.
function fileBaseName(filePath) {
  const slashIndex = filePath.lastIndexOf("/");
  const name = slashIndex === -1 ? filePath : filePath.slice(slashIndex + 1);
  return name.replace(/\.(ts|tsx|js|jsx)$/, "");
}
10045
// Scans one context file's relative imports for evidence of an import cycle
// with any changed file, returning a finding attributed to changedFilePath
// or null. Matching is heuristic — it compares file base names only, so
// same-named files in different directories can produce false positives.
function findCircularImportInCtxFile(ctxFile, changedFilePath, changedPaths, fileImports) {
  // Loop-invariant: the changed file must import this context file for a
  // cycle to be possible at all (fileBaseName is pure, so hoisting is safe).
  const ctxBaseName = fileBaseName(ctxFile.path);
  if (!fileImports.has(ctxBaseName)) return null;
  for (const match of ctxFile.content.matchAll(/import\s+.*?from\s+['"]([^'"]+)['"]/g)) {
    const importSource = match[1];
    if (!importSource.startsWith(".")) continue;
    for (const changedPath of changedPaths) {
      if (!importSource.includes(fileBaseName(changedPath))) continue;
      return {
        id: makeFindingId("arch", changedFilePath, 1, `circular ${ctxFile.path}`),
        file: changedFilePath,
        lineRange: [1, 1],
        domain: "architecture",
        severity: "important",
        title: `Potential circular import between ${changedFilePath} and ${ctxFile.path}`,
        rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
        suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
        evidence: [
          `${changedFilePath} imports from a module that also imports from ${changedFilePath}`
        ],
        validatedBy: "heuristic"
      };
    }
  }
  return null;
}
9748
10074
// Cross-checks each changed file's relative imports against the
// import/graph-dependency context files to flag potential circular imports.
function detectCircularImports(bundle) {
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
  const relevantCtxFiles = bundle.contextFiles.filter(
    (f) => f.reason === "import" || f.reason === "graph-dependency"
  );
  const findings = [];
  for (const changedFile of bundle.changedFiles) {
    const relativeImports = extractRelativeImports(changedFile.content);
    for (const ctxFile of relevantCtxFiles) {
      const finding = findCircularImportInCtxFile(
        ctxFile,
        changedFile.path,
        changedPaths,
        relativeImports
      );
      if (finding) findings.push(finding);
    }
  }
  return findings;
}
@@ -9830,7 +10128,7 @@ async function fanOutReview(options) {
9830
10128
  }
9831
10129
 
9832
10130
  // src/review/validate-findings.ts
9833
- var path15 = __toESM(require("path"));
10131
+ var path17 = __toESM(require("path"));
9834
10132
  var DOWNGRADE_MAP = {
9835
10133
  critical: "important",
9836
10134
  important: "suggestion",
@@ -9851,7 +10149,7 @@ function normalizePath(filePath, projectRoot) {
9851
10149
  let normalized = filePath;
9852
10150
  normalized = normalized.replace(/\\/g, "/");
9853
10151
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
9854
- if (path15.isAbsolute(normalized)) {
10152
+ if (path17.isAbsolute(normalized)) {
9855
10153
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
9856
10154
  if (normalized.startsWith(root)) {
9857
10155
  normalized = normalized.slice(root.length);
@@ -9876,12 +10174,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
9876
10174
  while ((match = importRegex.exec(content)) !== null) {
9877
10175
  const importPath = match[1];
9878
10176
  if (!importPath.startsWith(".")) continue;
9879
- const dir = path15.dirname(current.file);
9880
- let resolved = path15.join(dir, importPath).replace(/\\/g, "/");
10177
+ const dir = path17.dirname(current.file);
10178
+ let resolved = path17.join(dir, importPath).replace(/\\/g, "/");
9881
10179
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
9882
10180
  resolved += ".ts";
9883
10181
  }
9884
- resolved = path15.normalize(resolved).replace(/\\/g, "/");
10182
+ resolved = path17.normalize(resolved).replace(/\\/g, "/");
9885
10183
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
9886
10184
  queue.push({ file: resolved, depth: current.depth + 1 });
9887
10185
  }
@@ -9898,7 +10196,7 @@ async function validateFindings(options) {
9898
10196
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
9899
10197
  continue;
9900
10198
  }
9901
- const absoluteFile = path15.isAbsolute(finding.file) ? finding.file : path15.join(projectRoot, finding.file).replace(/\\/g, "/");
10199
+ const absoluteFile = path17.isAbsolute(finding.file) ? finding.file : path17.join(projectRoot, finding.file).replace(/\\/g, "/");
9902
10200
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
9903
10201
  continue;
9904
10202
  }
@@ -9955,6 +10253,28 @@ async function validateFindings(options) {
9955
10253
// True when inclusive line ranges a and b overlap, or sit within `gap`
// lines of each other.
function rangesOverlap(a, b, gap) {
  const [aStart, aEnd] = a;
  const [bStart, bEnd] = b;
  return aStart <= bEnd + gap && bStart <= aEnd + gap;
}
10256
// Picks the longer of two optional strings; ties go to `a`. When either
// value is falsy (missing or empty string) the result is `a ?? b`.
function pickLongest(a, b) {
  if (!a || !b) return a ?? b;
  return b.length > a.length ? b : a;
}
10260
// Chooses the higher-severity finding as "primary" (ties go to `a`) and
// builds a merged title "[domainA, domainB] <primary title>". Returns both
// the title and the primary finding so the caller can reuse it.
function buildMergedTitle(a, b, domains) {
  const aOutranksB = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity];
  const primaryFinding = aOutranksB ? a : b;
  const domainList = [...domains].sort().join(", ");
  // Drop any existing "[...]" prefix before prepending the merged domain list.
  const title = `[${domainList}] ${primaryFinding.title.replace(/^\[.*?\]\s*/, "")}`;
  return { title, primaryFinding };
}
10266
// Copies security metadata (CWE id, OWASP category, confidence, remediation,
// references) onto an already-merged finding. The primary finding's value
// wins, then a's, then b's; the longer remediation wins; references are
// deduplicated and unioned. Mutates `merged` in place, setting keys only
// when a value exists.
function mergeSecurityFields(merged, primary, a, b) {
  const setIfPresent = (key, value) => {
    if (value !== undefined) merged[key] = value;
  };
  setIfPresent("cweId", primary.cweId ?? a.cweId ?? b.cweId);
  setIfPresent("owaspCategory", primary.owaspCategory ?? a.owaspCategory ?? b.owaspCategory);
  setIfPresent("confidence", primary.confidence ?? a.confidence ?? b.confidence);
  setIfPresent("remediation", pickLongest(a.remediation, b.remediation));
  const references = [...new Set([...(a.references ?? []), ...(b.references ?? [])])];
  if (references.length > 0) merged.references = references;
}
9958
10278
  function mergeFindings(a, b) {
9959
10279
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
9960
10280
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
@@ -9964,18 +10284,12 @@ function mergeFindings(a, b) {
9964
10284
  Math.min(a.lineRange[0], b.lineRange[0]),
9965
10285
  Math.max(a.lineRange[1], b.lineRange[1])
9966
10286
  ];
9967
- const domains = /* @__PURE__ */ new Set();
9968
- domains.add(a.domain);
9969
- domains.add(b.domain);
9970
- const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
9971
- const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
9972
- const domainList = [...domains].sort().join(", ");
9973
- const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
9974
- const title = `[${domainList}] ${cleanTitle}`;
10287
+ const domains = /* @__PURE__ */ new Set([a.domain, b.domain]);
10288
+ const suggestion = pickLongest(a.suggestion, b.suggestion);
10289
+ const { title, primaryFinding } = buildMergedTitle(a, b, domains);
9975
10290
  const merged = {
9976
10291
  id: primaryFinding.id,
9977
10292
  file: a.file,
9978
- // same file for all merged findings
9979
10293
  lineRange,
9980
10294
  domain: primaryFinding.domain,
9981
10295
  severity: highestSeverity,
@@ -9987,16 +10301,7 @@ function mergeFindings(a, b) {
9987
10301
  if (suggestion !== void 0) {
9988
10302
  merged.suggestion = suggestion;
9989
10303
  }
9990
- const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
9991
- const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
9992
- const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
9993
- const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
9994
- const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
9995
- if (cweId !== void 0) merged.cweId = cweId;
9996
- if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
9997
- if (confidence !== void 0) merged.confidence = confidence;
9998
- if (remediation !== void 0) merged.remediation = remediation;
9999
- if (mergedRefs.length > 0) merged.references = mergedRefs;
10304
+ mergeSecurityFields(merged, primaryFinding, a, b);
10000
10305
  return merged;
10001
10306
  }
10002
10307
  function deduplicateFindings(options) {
@@ -10168,6 +10473,17 @@ function formatTerminalOutput(options) {
10168
10473
  if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
10169
10474
  sections.push(` Found ${issueCount} issue(s): ${parts.join(", ")}.`);
10170
10475
  }
10476
+ if (options.evidenceCoverage) {
10477
+ const ec = options.evidenceCoverage;
10478
+ sections.push("");
10479
+ sections.push("## Evidence Coverage\n");
10480
+ sections.push(` Evidence entries: ${ec.totalEntries}`);
10481
+ sections.push(
10482
+ ` Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
10483
+ );
10484
+ sections.push(` Uncited findings: ${ec.uncitedCount} (flagged as [UNVERIFIED])`);
10485
+ sections.push(` Coverage: ${ec.coveragePercentage}%`);
10486
+ }
10171
10487
  return sections.join("\n");
10172
10488
  }
10173
10489
 
@@ -10244,9 +10560,108 @@ function formatGitHubSummary(options) {
10244
10560
  const assessment = determineAssessment(findings);
10245
10561
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
10246
10562
  sections.push(`## Assessment: ${assessmentLabel}`);
10563
+ if (options.evidenceCoverage) {
10564
+ const ec = options.evidenceCoverage;
10565
+ sections.push("");
10566
+ sections.push("## Evidence Coverage\n");
10567
+ sections.push(`- Evidence entries: ${ec.totalEntries}`);
10568
+ sections.push(
10569
+ `- Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
10570
+ );
10571
+ sections.push(`- Uncited findings: ${ec.uncitedCount} (flagged as \\[UNVERIFIED\\])`);
10572
+ sections.push(`- Coverage: ${ec.coveragePercentage}%`);
10573
+ }
10247
10574
  return sections.join("\n");
10248
10575
  }
10249
10576
 
10577
+ // src/review/evidence-gate.ts
10578
// Evidence references recognized in session evidence entries, most specific
// first: "file.ts:10-20", "file.ts:10", or a bare "file.ts " followed by
// text. Note the file-only form requires trailing whitespace, so an entry
// that is exactly a filename does not match.
var FILE_LINE_RANGE_PATTERN = /^([\w./@-]+\.\w+):(\d+)-(\d+)/;
var FILE_LINE_PATTERN = /^([\w./@-]+\.\w+):(\d+)/;
var FILE_ONLY_PATTERN = /^([\w./@-]+\.\w+)\s/;
// Parses an evidence entry's content into { file, lineStart?, lineEnd? },
// or null when the entry does not begin with a recognizable file reference.
function parseEvidenceRef(content) {
  const trimmed = content.trim();
  const rangeMatch = FILE_LINE_RANGE_PATTERN.exec(trimmed);
  if (rangeMatch) {
    return {
      file: rangeMatch[1],
      lineStart: parseInt(rangeMatch[2], 10),
      lineEnd: parseInt(rangeMatch[3], 10)
    };
  }
  const lineMatch = FILE_LINE_PATTERN.exec(trimmed);
  if (lineMatch) {
    return { file: lineMatch[1], lineStart: parseInt(lineMatch[2], 10) };
  }
  const fileMatch = FILE_ONLY_PATTERN.exec(trimmed);
  return fileMatch ? { file: fileMatch[1] } : null;
}
10604
// Decides whether a parsed evidence reference supports a finding: files
// must match exactly, and any line info must intersect the finding's
// inclusive lineRange. A file-only reference matches the whole file.
function evidenceMatchesFinding(ref, finding) {
  if (ref.file !== finding.file) return false;
  if (ref.lineStart === undefined) return true;
  const [start, end] = finding.lineRange;
  return ref.lineEnd === undefined
    ? ref.lineStart >= start && ref.lineStart <= end
    : ref.lineStart <= end && ref.lineEnd >= start;
}
10613
// Summarizes how well review findings are backed by "active" session
// evidence entries: totals, titles of uncited findings, and an integer
// coverage percentage. With no findings, coverage is trivially 100.
function checkEvidenceCoverage(findings, evidenceEntries) {
  const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
  if (findings.length === 0) {
    return {
      totalEntries: activeEvidence.length,
      findingsWithEvidence: 0,
      uncitedCount: 0,
      uncitedFindings: [],
      coveragePercentage: 100
    };
  }
  // Entries whose content does not parse into a file reference are ignored.
  const evidenceRefs = activeEvidence
    .map((entry) => parseEvidenceRef(entry.content))
    .filter((ref) => ref !== null);
  let findingsWithEvidence = 0;
  const uncitedFindings = [];
  for (const finding of findings) {
    if (evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding))) {
      findingsWithEvidence++;
    } else {
      uncitedFindings.push(finding.title);
    }
  }
  return {
    totalEntries: activeEvidence.length,
    findingsWithEvidence,
    uncitedCount: findings.length - findingsWithEvidence,
    uncitedFindings,
    coveragePercentage: Math.round(findingsWithEvidence / findings.length * 100)
  };
}
10649
// Prefixes "[UNVERIFIED] " onto the title of every finding that no active
// evidence entry supports. Mutates the findings in place (titles only) and
// returns the same array; already-tagged titles are not tagged twice.
function tagUncitedFindings(findings, evidenceEntries) {
  const evidenceRefs = evidenceEntries
    .filter((entry) => entry.status === "active")
    .map((entry) => parseEvidenceRef(entry.content))
    .filter((ref) => ref !== null);
  for (const finding of findings) {
    const cited = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
    if (!cited && !finding.title.startsWith("[UNVERIFIED]")) {
      finding.title = `[UNVERIFIED] ${finding.title}`;
    }
  }
  return findings;
}
10664
+
10250
10665
  // src/review/pipeline-orchestrator.ts
10251
10666
  async function runReviewPipeline(options) {
10252
10667
  const {
@@ -10259,7 +10674,8 @@ async function runReviewPipeline(options) {
10259
10674
  conventionFiles,
10260
10675
  checkDepsOutput,
10261
10676
  config = {},
10262
- commitHistory
10677
+ commitHistory,
10678
+ sessionSlug
10263
10679
  } = options;
10264
10680
  if (flags.ci && prMetadata) {
10265
10681
  const eligibility = checkEligibility(prMetadata, true);
@@ -10355,13 +10771,25 @@ async function runReviewPipeline(options) {
10355
10771
  projectRoot,
10356
10772
  fileContents
10357
10773
  });
10774
+ let evidenceCoverage;
10775
+ if (sessionSlug) {
10776
+ try {
10777
+ const evidenceResult = await readSessionSection(projectRoot, sessionSlug, "evidence");
10778
+ if (evidenceResult.ok) {
10779
+ evidenceCoverage = checkEvidenceCoverage(validatedFindings, evidenceResult.value);
10780
+ tagUncitedFindings(validatedFindings, evidenceResult.value);
10781
+ }
10782
+ } catch {
10783
+ }
10784
+ }
10358
10785
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
10359
10786
  const strengths = [];
10360
10787
  const assessment = determineAssessment(dedupedFindings);
10361
10788
  const exitCode = getExitCode(assessment);
10362
10789
  const terminalOutput = formatTerminalOutput({
10363
10790
  findings: dedupedFindings,
10364
- strengths
10791
+ strengths,
10792
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
10365
10793
  });
10366
10794
  let githubComments = [];
10367
10795
  if (flags.comment) {
@@ -10376,12 +10804,13 @@ async function runReviewPipeline(options) {
10376
10804
  terminalOutput,
10377
10805
  githubComments,
10378
10806
  exitCode,
10379
- ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
10807
+ ...mechanicalResult != null ? { mechanicalResult } : {},
10808
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
10380
10809
  };
10381
10810
  }
10382
10811
 
10383
10812
  // src/roadmap/parse.ts
10384
- var import_types18 = require("@harness-engineering/types");
10813
+ var import_types19 = require("@harness-engineering/types");
10385
10814
  var VALID_STATUSES = /* @__PURE__ */ new Set([
10386
10815
  "backlog",
10387
10816
  "planned",
@@ -10393,14 +10822,14 @@ var EM_DASH = "\u2014";
10393
10822
  function parseRoadmap(markdown) {
10394
10823
  const fmMatch = markdown.match(/^---\n([\s\S]*?)\n---/);
10395
10824
  if (!fmMatch) {
10396
- return (0, import_types18.Err)(new Error("Missing or malformed YAML frontmatter"));
10825
+ return (0, import_types19.Err)(new Error("Missing or malformed YAML frontmatter"));
10397
10826
  }
10398
10827
  const fmResult = parseFrontmatter(fmMatch[1]);
10399
10828
  if (!fmResult.ok) return fmResult;
10400
10829
  const body = markdown.slice(fmMatch[0].length);
10401
10830
  const milestonesResult = parseMilestones(body);
10402
10831
  if (!milestonesResult.ok) return milestonesResult;
10403
- return (0, import_types18.Ok)({
10832
+ return (0, import_types19.Ok)({
10404
10833
  frontmatter: fmResult.value,
10405
10834
  milestones: milestonesResult.value
10406
10835
  });
@@ -10422,7 +10851,7 @@ function parseFrontmatter(raw) {
10422
10851
  const created = map.get("created");
10423
10852
  const updated = map.get("updated");
10424
10853
  if (!project || !versionStr || !lastSynced || !lastManualEdit) {
10425
- return (0, import_types18.Err)(
10854
+ return (0, import_types19.Err)(
10426
10855
  new Error(
10427
10856
  "Frontmatter missing required fields: project, version, last_synced, last_manual_edit"
10428
10857
  )
@@ -10430,12 +10859,12 @@ function parseFrontmatter(raw) {
10430
10859
  }
10431
10860
  const version = parseInt(versionStr, 10);
10432
10861
  if (isNaN(version)) {
10433
- return (0, import_types18.Err)(new Error("Frontmatter version must be a number"));
10862
+ return (0, import_types19.Err)(new Error("Frontmatter version must be a number"));
10434
10863
  }
10435
10864
  const fm = { project, version, lastSynced, lastManualEdit };
10436
10865
  if (created) fm.created = created;
10437
10866
  if (updated) fm.updated = updated;
10438
- return (0, import_types18.Ok)(fm);
10867
+ return (0, import_types19.Ok)(fm);
10439
10868
  }
10440
10869
  function parseMilestones(body) {
10441
10870
  const milestones = [];
@@ -10459,7 +10888,7 @@ function parseMilestones(body) {
10459
10888
  features: featuresResult.value
10460
10889
  });
10461
10890
  }
10462
- return (0, import_types18.Ok)(milestones);
10891
+ return (0, import_types19.Ok)(milestones);
10463
10892
  }
10464
10893
  function parseFeatures(sectionBody) {
10465
10894
  const features = [];
@@ -10477,32 +10906,50 @@ function parseFeatures(sectionBody) {
10477
10906
  if (!featureResult.ok) return featureResult;
10478
10907
  features.push(featureResult.value);
10479
10908
  }
10480
- return (0, import_types18.Ok)(features);
10909
+ return (0, import_types19.Ok)(features);
10481
10910
  }
10482
- function parseFeatureFields(name, body) {
10911
// Extracts "- **Key:** value" bullet lines from a feature's markdown body
// into a Map of field name -> raw value string.
function extractFieldMap(body) {
  const fieldMap = new Map();
  for (const match of body.matchAll(/^- \*\*(.+?):\*\* (.+)$/gm)) {
    fieldMap.set(match[1], match[2]);
  }
  return fieldMap;
}
10920
// Reads the first of the given keys present in fieldMap and splits its
// value on commas, trimming each item. An absent field, an em dash
// placeholder, or the literal "none" all mean "empty list".
function parseListField(fieldMap, ...keys) {
  const presentKey = keys.find((key) => fieldMap.get(key) !== undefined);
  const raw = presentKey === undefined ? EM_DASH : fieldMap.get(presentKey);
  if (raw === EM_DASH || raw === "none") return [];
  return raw.split(",").map((item) => item.trim());
}
10932
// Parses one feature's markdown bullet fields into a feature record.
// Returns Err when the Status field is missing or not a recognized status.
function parseFeatureFields(name, body) {
  const fieldMap = extractFieldMap(body);
  const statusRaw = fieldMap.get("Status");
  const statusIsValid = statusRaw !== undefined && VALID_STATUSES.has(statusRaw);
  if (!statusIsValid) {
    return (0, import_types19.Err)(
      new Error(
        `Feature "${name}" has invalid status: "${statusRaw ?? "(missing)"}". Valid statuses: ${[...VALID_STATUSES].join(", ")}`
      )
    );
  }
  // An em dash placeholder means "no spec linked".
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
  return (0, import_types19.Ok)({
    name,
    status: statusRaw,
    spec: specRaw === EM_DASH ? null : specRaw,
    plans: parseListField(fieldMap, "Plans", "Plan"),
    blockedBy: parseListField(fieldMap, "Blocked by", "Blockers"),
    summary: fieldMap.get("Summary") ?? ""
  });
}
10507
10954
 
10508
10955
  // src/roadmap/serialize.ts
@@ -10553,9 +11000,9 @@ function serializeFeature(feature) {
10553
11000
  }
10554
11001
 
10555
11002
  // src/roadmap/sync.ts
10556
- var fs16 = __toESM(require("fs"));
10557
- var path16 = __toESM(require("path"));
10558
- var import_types19 = require("@harness-engineering/types");
11003
+ var fs18 = __toESM(require("fs"));
11004
+ var path18 = __toESM(require("path"));
11005
+ var import_types20 = require("@harness-engineering/types");
10559
11006
  function inferStatus(feature, projectPath, allFeatures) {
10560
11007
  if (feature.blockedBy.length > 0) {
10561
11008
  const blockerNotDone = feature.blockedBy.some((blockerName) => {
@@ -10569,10 +11016,10 @@ function inferStatus(feature, projectPath, allFeatures) {
10569
11016
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
10570
11017
  const useRootState = featuresWithPlans.length <= 1;
10571
11018
  if (useRootState) {
10572
- const rootStatePath = path16.join(projectPath, ".harness", "state.json");
10573
- if (fs16.existsSync(rootStatePath)) {
11019
+ const rootStatePath = path18.join(projectPath, ".harness", "state.json");
11020
+ if (fs18.existsSync(rootStatePath)) {
10574
11021
  try {
10575
- const raw = fs16.readFileSync(rootStatePath, "utf-8");
11022
+ const raw = fs18.readFileSync(rootStatePath, "utf-8");
10576
11023
  const state = JSON.parse(raw);
10577
11024
  if (state.progress) {
10578
11025
  for (const status of Object.values(state.progress)) {
@@ -10583,16 +11030,16 @@ function inferStatus(feature, projectPath, allFeatures) {
10583
11030
  }
10584
11031
  }
10585
11032
  }
10586
- const sessionsDir = path16.join(projectPath, ".harness", "sessions");
10587
- if (fs16.existsSync(sessionsDir)) {
11033
+ const sessionsDir = path18.join(projectPath, ".harness", "sessions");
11034
+ if (fs18.existsSync(sessionsDir)) {
10588
11035
  try {
10589
- const sessionDirs = fs16.readdirSync(sessionsDir, { withFileTypes: true });
11036
+ const sessionDirs = fs18.readdirSync(sessionsDir, { withFileTypes: true });
10590
11037
  for (const entry of sessionDirs) {
10591
11038
  if (!entry.isDirectory()) continue;
10592
- const autopilotPath = path16.join(sessionsDir, entry.name, "autopilot-state.json");
10593
- if (!fs16.existsSync(autopilotPath)) continue;
11039
+ const autopilotPath = path18.join(sessionsDir, entry.name, "autopilot-state.json");
11040
+ if (!fs18.existsSync(autopilotPath)) continue;
10594
11041
  try {
10595
- const raw = fs16.readFileSync(autopilotPath, "utf-8");
11042
+ const raw = fs18.readFileSync(autopilotPath, "utf-8");
10596
11043
  const autopilot = JSON.parse(raw);
10597
11044
  if (!autopilot.phases) continue;
10598
11045
  const linkedPhases = autopilot.phases.filter(
@@ -10639,7 +11086,7 @@ function syncRoadmap(options) {
10639
11086
  to: inferred
10640
11087
  });
10641
11088
  }
10642
- return (0, import_types19.Ok)(changes);
11089
+ return (0, import_types20.Ok)(changes);
10643
11090
  }
10644
11091
 
10645
11092
  // src/interaction/types.ts
@@ -10672,17 +11119,17 @@ var EmitInteractionInputSchema = import_zod7.z.object({
10672
11119
  });
10673
11120
 
10674
11121
  // src/blueprint/scanner.ts
10675
- var fs17 = __toESM(require("fs/promises"));
10676
- var path17 = __toESM(require("path"));
11122
+ var fs19 = __toESM(require("fs/promises"));
11123
+ var path19 = __toESM(require("path"));
10677
11124
  var ProjectScanner = class {
10678
11125
  constructor(rootDir) {
10679
11126
  this.rootDir = rootDir;
10680
11127
  }
10681
11128
  async scan() {
10682
- let projectName = path17.basename(this.rootDir);
11129
+ let projectName = path19.basename(this.rootDir);
10683
11130
  try {
10684
- const pkgPath = path17.join(this.rootDir, "package.json");
10685
- const pkgRaw = await fs17.readFile(pkgPath, "utf-8");
11131
+ const pkgPath = path19.join(this.rootDir, "package.json");
11132
+ const pkgRaw = await fs19.readFile(pkgPath, "utf-8");
10686
11133
  const pkg = JSON.parse(pkgRaw);
10687
11134
  if (pkg.name) projectName = pkg.name;
10688
11135
  } catch {
@@ -10723,8 +11170,8 @@ var ProjectScanner = class {
10723
11170
  };
10724
11171
 
10725
11172
  // src/blueprint/generator.ts
10726
- var fs18 = __toESM(require("fs/promises"));
10727
- var path18 = __toESM(require("path"));
11173
+ var fs20 = __toESM(require("fs/promises"));
11174
+ var path20 = __toESM(require("path"));
10728
11175
  var ejs = __toESM(require("ejs"));
10729
11176
 
10730
11177
  // src/blueprint/templates.ts
@@ -10808,19 +11255,19 @@ var BlueprintGenerator = class {
10808
11255
  styles: STYLES,
10809
11256
  scripts: SCRIPTS
10810
11257
  });
10811
- await fs18.mkdir(options.outputDir, { recursive: true });
10812
- await fs18.writeFile(path18.join(options.outputDir, "index.html"), html);
11258
+ await fs20.mkdir(options.outputDir, { recursive: true });
11259
+ await fs20.writeFile(path20.join(options.outputDir, "index.html"), html);
10813
11260
  }
10814
11261
  };
10815
11262
 
10816
11263
  // src/update-checker.ts
10817
- var fs19 = __toESM(require("fs"));
10818
- var path19 = __toESM(require("path"));
11264
+ var fs21 = __toESM(require("fs"));
11265
+ var path21 = __toESM(require("path"));
10819
11266
  var os = __toESM(require("os"));
10820
11267
  var import_child_process3 = require("child_process");
10821
11268
// Absolute path of the persisted update-check state file under the user's
// home directory ($HOME when set and non-empty, otherwise os.homedir()).
function getStatePath() {
  return path21.join(process.env["HOME"] || os.homedir(), ".harness", "update-check.json");
}
10825
11272
  function isUpdateCheckEnabled(configInterval) {
10826
11273
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -10833,7 +11280,7 @@ function shouldRunCheck(state, intervalMs) {
10833
11280
  }
10834
11281
  function readCheckState() {
10835
11282
  try {
10836
- const raw = fs19.readFileSync(getStatePath(), "utf-8");
11283
+ const raw = fs21.readFileSync(getStatePath(), "utf-8");
10837
11284
  const parsed = JSON.parse(raw);
10838
11285
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
10839
11286
  const state = parsed;
@@ -10850,7 +11297,7 @@ function readCheckState() {
10850
11297
  }
10851
11298
  function spawnBackgroundCheck(currentVersion) {
10852
11299
  const statePath = getStatePath();
10853
- const stateDir = path19.dirname(statePath);
11300
+ const stateDir = path21.dirname(statePath);
10854
11301
  const script = `
10855
11302
  const { execSync } = require('child_process');
10856
11303
  const fs = require('fs');
@@ -10904,7 +11351,7 @@ Run "harness update" to upgrade.`;
10904
11351
  }
10905
11352
 
10906
11353
  // src/index.ts
10907
- var VERSION = "0.11.0";
11354
+ var VERSION = "0.14.0";
10908
11355
  // Annotate the CommonJS export names for ESM import in node:
10909
11356
  0 && (module.exports = {
10910
11357
  AGENT_DESCRIPTORS,
@@ -10985,6 +11432,7 @@ var VERSION = "0.11.0";
10985
11432
  analyzeLearningPatterns,
10986
11433
  appendFailure,
10987
11434
  appendLearning,
11435
+ appendSessionEntry,
10988
11436
  applyFixes,
10989
11437
  applyHotspotDowngrade,
10990
11438
  archMatchers,
@@ -10992,12 +11440,14 @@ var VERSION = "0.11.0";
10992
11440
  architecture,
10993
11441
  archiveFailures,
10994
11442
  archiveLearnings,
11443
+ archiveSession,
10995
11444
  archiveStream,
10996
11445
  buildDependencyGraph,
10997
11446
  buildExclusionSet,
10998
11447
  buildSnapshot,
10999
11448
  checkDocCoverage,
11000
11449
  checkEligibility,
11450
+ checkEvidenceCoverage,
11001
11451
  classifyFinding,
11002
11452
  clearFailuresCache,
11003
11453
  clearLearningsCache,
@@ -11081,6 +11531,8 @@ var VERSION = "0.11.0";
11081
11531
  reactRules,
11082
11532
  readCheckState,
11083
11533
  readLockfile,
11534
+ readSessionSection,
11535
+ readSessionSections,
11084
11536
  removeContributions,
11085
11537
  removeProvenance,
11086
11538
  requestMultiplePeerReviews,
@@ -11114,8 +11566,10 @@ var VERSION = "0.11.0";
11114
11566
  spawnBackgroundCheck,
11115
11567
  syncConstraintNodes,
11116
11568
  syncRoadmap,
11569
+ tagUncitedFindings,
11117
11570
  touchStream,
11118
11571
  trackAction,
11572
+ updateSessionEntryStatus,
11119
11573
  updateSessionIndex,
11120
11574
  validateAgentsMap,
11121
11575
  validateBoundaries,