@harness-engineering/core 0.13.1 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -109,6 +109,7 @@ __export(index_exports, {
109
109
  analyzeLearningPatterns: () => analyzeLearningPatterns,
110
110
  appendFailure: () => appendFailure,
111
111
  appendLearning: () => appendLearning,
112
+ appendSessionEntry: () => appendSessionEntry,
112
113
  applyFixes: () => applyFixes,
113
114
  applyHotspotDowngrade: () => applyHotspotDowngrade,
114
115
  archMatchers: () => archMatchers,
@@ -116,12 +117,14 @@ __export(index_exports, {
116
117
  architecture: () => architecture,
117
118
  archiveFailures: () => archiveFailures,
118
119
  archiveLearnings: () => archiveLearnings,
120
+ archiveSession: () => archiveSession,
119
121
  archiveStream: () => archiveStream,
120
122
  buildDependencyGraph: () => buildDependencyGraph,
121
123
  buildExclusionSet: () => buildExclusionSet,
122
124
  buildSnapshot: () => buildSnapshot,
123
125
  checkDocCoverage: () => checkDocCoverage,
124
126
  checkEligibility: () => checkEligibility,
127
+ checkEvidenceCoverage: () => checkEvidenceCoverage,
125
128
  classifyFinding: () => classifyFinding,
126
129
  clearFailuresCache: () => clearFailuresCache,
127
130
  clearLearningsCache: () => clearLearningsCache,
@@ -205,6 +208,8 @@ __export(index_exports, {
205
208
  reactRules: () => reactRules,
206
209
  readCheckState: () => readCheckState,
207
210
  readLockfile: () => readLockfile,
211
+ readSessionSection: () => readSessionSection,
212
+ readSessionSections: () => readSessionSections,
208
213
  removeContributions: () => removeContributions,
209
214
  removeProvenance: () => removeProvenance,
210
215
  requestMultiplePeerReviews: () => requestMultiplePeerReviews,
@@ -215,7 +220,7 @@ __export(index_exports, {
215
220
  resolveRuleSeverity: () => resolveRuleSeverity,
216
221
  resolveSessionDir: () => resolveSessionDir,
217
222
  resolveStreamPath: () => resolveStreamPath,
218
- resolveThresholds: () => resolveThresholds,
223
+ resolveThresholds: () => resolveThresholds2,
219
224
  runAll: () => runAll,
220
225
  runArchitectureAgent: () => runArchitectureAgent,
221
226
  runBugDetectionAgent: () => runBugDetectionAgent,
@@ -238,8 +243,10 @@ __export(index_exports, {
238
243
  spawnBackgroundCheck: () => spawnBackgroundCheck,
239
244
  syncConstraintNodes: () => syncConstraintNodes,
240
245
  syncRoadmap: () => syncRoadmap,
246
+ tagUncitedFindings: () => tagUncitedFindings,
241
247
  touchStream: () => touchStream,
242
248
  trackAction: () => trackAction,
249
+ updateSessionEntryStatus: () => updateSessionEntryStatus,
243
250
  updateSessionIndex: () => updateSessionIndex,
244
251
  validateAgentsMap: () => validateAgentsMap,
245
252
  validateBoundaries: () => validateBoundaries,
@@ -277,17 +284,17 @@ var import_node_path = require("path");
277
284
  var import_glob = require("glob");
278
285
  var accessAsync = (0, import_util.promisify)(import_fs.access);
279
286
  var readFileAsync = (0, import_util.promisify)(import_fs.readFile);
280
- async function fileExists(path20) {
287
+ async function fileExists(path22) {
281
288
  try {
282
- await accessAsync(path20, import_fs.constants.F_OK);
289
+ await accessAsync(path22, import_fs.constants.F_OK);
283
290
  return true;
284
291
  } catch {
285
292
  return false;
286
293
  }
287
294
  }
288
- async function readFileContent(path20) {
295
+ async function readFileContent(path22) {
289
296
  try {
290
- const content = await readFileAsync(path20, "utf-8");
297
+ const content = await readFileAsync(path22, "utf-8");
291
298
  return (0, import_types.Ok)(content);
292
299
  } catch (error) {
293
300
  return (0, import_types.Err)(error);
@@ -338,15 +345,15 @@ function validateConfig(data, schema) {
338
345
  let message = "Configuration validation failed";
339
346
  const suggestions = [];
340
347
  if (firstError) {
341
- const path20 = firstError.path.join(".");
342
- const pathDisplay = path20 ? ` at "${path20}"` : "";
348
+ const path22 = firstError.path.join(".");
349
+ const pathDisplay = path22 ? ` at "${path22}"` : "";
343
350
  if (firstError.code === "invalid_type") {
344
351
  const received = firstError.received;
345
352
  const expected = firstError.expected;
346
353
  if (received === "undefined") {
347
354
  code = "MISSING_FIELD";
348
355
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
349
- suggestions.push(`Field "${path20}" is required and must be of type "${expected}"`);
356
+ suggestions.push(`Field "${path22}" is required and must be of type "${expected}"`);
350
357
  } else {
351
358
  code = "INVALID_TYPE";
352
359
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -500,6 +507,43 @@ function extractMarkdownLinks(content) {
500
507
  }
501
508
  return links;
502
509
  }
510
+ function isDescriptionTerminator(trimmed) {
511
+ return trimmed.startsWith("#") || trimmed.startsWith("-") || trimmed.startsWith("*") || trimmed.startsWith("```");
512
+ }
513
+ function extractDescription(sectionLines) {
514
+ const descriptionLines = [];
515
+ for (const line of sectionLines) {
516
+ const trimmed = line.trim();
517
+ if (trimmed === "") {
518
+ if (descriptionLines.length > 0) break;
519
+ continue;
520
+ }
521
+ if (isDescriptionTerminator(trimmed)) break;
522
+ descriptionLines.push(trimmed);
523
+ }
524
+ return descriptionLines.length > 0 ? descriptionLines.join(" ") : void 0;
525
+ }
526
+ function buildAgentMapSection(section, lines) {
527
+ const endIndex = section.endIndex ?? lines.length;
528
+ const sectionLines = lines.slice(section.startIndex + 1, endIndex);
529
+ const sectionContent = sectionLines.join("\n");
530
+ const links = extractMarkdownLinks(sectionContent).map((link) => ({
531
+ ...link,
532
+ line: link.line + section.startIndex + 1,
533
+ exists: false
534
+ }));
535
+ const result = {
536
+ title: section.title,
537
+ level: section.level,
538
+ line: section.line,
539
+ links
540
+ };
541
+ const description = extractDescription(sectionLines);
542
+ if (description) {
543
+ result.description = description;
544
+ }
545
+ return result;
546
+ }
503
547
  function extractSections(content) {
504
548
  const lines = content.split("\n");
505
549
  const sections = [];
@@ -512,7 +556,6 @@ function extractSections(content) {
512
556
  title: match[2].trim(),
513
557
  level: match[1].length,
514
558
  line: i + 1,
515
- // 1-indexed
516
559
  startIndex: i
517
560
  });
518
561
  }
@@ -524,62 +567,29 @@ function extractSections(content) {
524
567
  currentSection.endIndex = nextSection ? nextSection.startIndex : lines.length;
525
568
  }
526
569
  }
527
- return sections.map((section) => {
528
- const endIndex = section.endIndex ?? lines.length;
529
- const sectionLines = lines.slice(section.startIndex + 1, endIndex);
530
- const sectionContent = sectionLines.join("\n");
531
- const links = extractMarkdownLinks(sectionContent).map((link) => ({
532
- ...link,
533
- line: link.line + section.startIndex + 1,
534
- // Adjust line number
535
- exists: false
536
- // Will be set later by validateAgentsMap
537
- }));
538
- const descriptionLines = [];
539
- for (const line of sectionLines) {
540
- const trimmed = line.trim();
541
- if (trimmed === "") {
542
- if (descriptionLines.length > 0) break;
543
- continue;
544
- }
545
- if (trimmed.startsWith("#")) break;
546
- if (trimmed.startsWith("-") || trimmed.startsWith("*")) break;
547
- if (trimmed.startsWith("```")) break;
548
- descriptionLines.push(trimmed);
549
- }
550
- const result = {
551
- title: section.title,
552
- level: section.level,
553
- line: section.line,
554
- links
555
- };
556
- if (descriptionLines.length > 0) {
557
- result.description = descriptionLines.join(" ");
558
- }
559
- return result;
560
- });
570
+ return sections.map((section) => buildAgentMapSection(section, lines));
561
571
  }
562
- function isExternalLink(path20) {
563
- return path20.startsWith("http://") || path20.startsWith("https://") || path20.startsWith("#") || path20.startsWith("mailto:");
572
+ function isExternalLink(path22) {
573
+ return path22.startsWith("http://") || path22.startsWith("https://") || path22.startsWith("#") || path22.startsWith("mailto:");
564
574
  }
565
575
  function resolveLinkPath(linkPath, baseDir) {
566
576
  return linkPath.startsWith(".") ? (0, import_path.join)(baseDir, linkPath) : linkPath;
567
577
  }
568
- async function validateAgentsMap(path20 = "./AGENTS.md") {
569
- const contentResult = await readFileContent(path20);
578
+ async function validateAgentsMap(path22 = "./AGENTS.md") {
579
+ const contentResult = await readFileContent(path22);
570
580
  if (!contentResult.ok) {
571
581
  return (0, import_types.Err)(
572
582
  createError(
573
583
  "PARSE_ERROR",
574
584
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
575
- { path: path20 },
585
+ { path: path22 },
576
586
  ["Ensure the file exists", "Check file permissions"]
577
587
  )
578
588
  );
579
589
  }
580
590
  const content = contentResult.value;
581
591
  const sections = extractSections(content);
582
- const baseDir = (0, import_path.dirname)(path20);
592
+ const baseDir = (0, import_path.dirname)(path22);
583
593
  const sectionTitles = sections.map((s) => s.title);
584
594
  const missingSections = REQUIRED_SECTIONS.filter(
585
595
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -720,8 +730,8 @@ async function checkDocCoverage(domain, options = {}) {
720
730
 
721
731
  // src/context/knowledge-map.ts
722
732
  var import_path3 = require("path");
723
- function suggestFix(path20, existingFiles) {
724
- const targetName = (0, import_path3.basename)(path20).toLowerCase();
733
+ function suggestFix(path22, existingFiles) {
734
+ const targetName = (0, import_path3.basename)(path22).toLowerCase();
725
735
  const similar = existingFiles.find((file) => {
726
736
  const fileName = (0, import_path3.basename)(file).toLowerCase();
727
737
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -729,7 +739,7 @@ function suggestFix(path20, existingFiles) {
729
739
  if (similar) {
730
740
  return `Did you mean "${similar}"?`;
731
741
  }
732
- return `Create the file "${path20}" or remove the link`;
742
+ return `Create the file "${path22}" or remove the link`;
733
743
  }
734
744
  async function validateKnowledgeMap(rootDir = process.cwd()) {
735
745
  const agentsPath = (0, import_path3.join)(rootDir, "AGENTS.md");
@@ -1228,65 +1238,71 @@ async function validateDependencies(config) {
1228
1238
  }
1229
1239
 
1230
1240
  // src/constraints/circular-deps.ts
1231
- function tarjanSCC(graph) {
1232
- const nodeMap = /* @__PURE__ */ new Map();
1233
- const stack = [];
1234
- const sccs = [];
1235
- let index = 0;
1241
+ function buildAdjacencyList(graph) {
1236
1242
  const adjacency = /* @__PURE__ */ new Map();
1243
+ const nodeSet = new Set(graph.nodes);
1237
1244
  for (const node of graph.nodes) {
1238
1245
  adjacency.set(node, []);
1239
1246
  }
1240
1247
  for (const edge of graph.edges) {
1241
1248
  const neighbors = adjacency.get(edge.from);
1242
- if (neighbors && graph.nodes.includes(edge.to)) {
1249
+ if (neighbors && nodeSet.has(edge.to)) {
1243
1250
  neighbors.push(edge.to);
1244
1251
  }
1245
1252
  }
1246
- function strongConnect(node) {
1247
- nodeMap.set(node, {
1248
- index,
1249
- lowlink: index,
1250
- onStack: true
1251
- });
1252
- index++;
1253
- stack.push(node);
1254
- const neighbors = adjacency.get(node) ?? [];
1255
- for (const neighbor of neighbors) {
1256
- const neighborData = nodeMap.get(neighbor);
1257
- if (!neighborData) {
1258
- strongConnect(neighbor);
1259
- const nodeData2 = nodeMap.get(node);
1260
- const updatedNeighborData = nodeMap.get(neighbor);
1261
- nodeData2.lowlink = Math.min(nodeData2.lowlink, updatedNeighborData.lowlink);
1262
- } else if (neighborData.onStack) {
1263
- const nodeData2 = nodeMap.get(node);
1264
- nodeData2.lowlink = Math.min(nodeData2.lowlink, neighborData.index);
1265
- }
1266
- }
1267
- const nodeData = nodeMap.get(node);
1268
- if (nodeData.lowlink === nodeData.index) {
1269
- const scc = [];
1270
- let w;
1271
- do {
1272
- w = stack.pop();
1273
- nodeMap.get(w).onStack = false;
1274
- scc.push(w);
1275
- } while (w !== node);
1276
- if (scc.length > 1) {
1277
- sccs.push(scc);
1278
- } else if (scc.length === 1) {
1279
- const selfNode = scc[0];
1280
- const selfNeighbors = adjacency.get(selfNode) ?? [];
1281
- if (selfNeighbors.includes(selfNode)) {
1282
- sccs.push(scc);
1283
- }
1284
- }
1253
+ return adjacency;
1254
+ }
1255
+ function isCyclicSCC(scc, adjacency) {
1256
+ if (scc.length > 1) return true;
1257
+ if (scc.length === 1) {
1258
+ const selfNode = scc[0];
1259
+ const selfNeighbors = adjacency.get(selfNode) ?? [];
1260
+ return selfNeighbors.includes(selfNode);
1261
+ }
1262
+ return false;
1263
+ }
1264
+ function processNeighbors(node, neighbors, nodeMap, stack, adjacency, sccs, indexRef) {
1265
+ for (const neighbor of neighbors) {
1266
+ const neighborData = nodeMap.get(neighbor);
1267
+ if (!neighborData) {
1268
+ strongConnectImpl(neighbor, nodeMap, stack, adjacency, sccs, indexRef);
1269
+ const nodeData = nodeMap.get(node);
1270
+ const updatedNeighborData = nodeMap.get(neighbor);
1271
+ nodeData.lowlink = Math.min(nodeData.lowlink, updatedNeighborData.lowlink);
1272
+ } else if (neighborData.onStack) {
1273
+ const nodeData = nodeMap.get(node);
1274
+ nodeData.lowlink = Math.min(nodeData.lowlink, neighborData.index);
1275
+ }
1276
+ }
1277
+ }
1278
+ function strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef) {
1279
+ nodeMap.set(node, { index: indexRef.value, lowlink: indexRef.value, onStack: true });
1280
+ indexRef.value++;
1281
+ stack.push(node);
1282
+ processNeighbors(node, adjacency.get(node) ?? [], nodeMap, stack, adjacency, sccs, indexRef);
1283
+ const nodeData = nodeMap.get(node);
1284
+ if (nodeData.lowlink === nodeData.index) {
1285
+ const scc = [];
1286
+ let w;
1287
+ do {
1288
+ w = stack.pop();
1289
+ nodeMap.get(w).onStack = false;
1290
+ scc.push(w);
1291
+ } while (w !== node);
1292
+ if (isCyclicSCC(scc, adjacency)) {
1293
+ sccs.push(scc);
1285
1294
  }
1286
1295
  }
1296
+ }
1297
+ function tarjanSCC(graph) {
1298
+ const nodeMap = /* @__PURE__ */ new Map();
1299
+ const stack = [];
1300
+ const sccs = [];
1301
+ const indexRef = { value: 0 };
1302
+ const adjacency = buildAdjacencyList(graph);
1287
1303
  for (const node of graph.nodes) {
1288
1304
  if (!nodeMap.has(node)) {
1289
- strongConnect(node);
1305
+ strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef);
1290
1306
  }
1291
1307
  }
1292
1308
  return sccs;
@@ -1329,8 +1345,8 @@ function createBoundaryValidator(schema, name) {
1329
1345
  return (0, import_types.Ok)(result.data);
1330
1346
  }
1331
1347
  const suggestions = result.error.issues.map((issue) => {
1332
- const path20 = issue.path.join(".");
1333
- return path20 ? `${path20}: ${issue.message}` : issue.message;
1348
+ const path22 = issue.path.join(".");
1349
+ return path22 ? `${path22}: ${issue.message}` : issue.message;
1334
1350
  });
1335
1351
  return (0, import_types.Err)(
1336
1352
  createError(
@@ -1552,175 +1568,183 @@ function stringArraysEqual(a, b) {
1552
1568
  const sortedB = [...b].sort();
1553
1569
  return sortedA.every((val, i) => val === sortedB[i]);
1554
1570
  }
1555
- function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1556
- const config = { ...localConfig };
1557
- const contributions = {};
1558
- const conflicts = [];
1559
- if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1560
- const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1561
- const mergedLayers = [...localLayers];
1562
- const contributedLayerNames = [];
1563
- for (const bundleLayer of bundleConstraints.layers) {
1564
- const existing = localLayers.find((l) => l.name === bundleLayer.name);
1565
- if (!existing) {
1566
- mergedLayers.push(bundleLayer);
1567
- contributedLayerNames.push(bundleLayer.name);
1568
- } else {
1569
- const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1570
- if (!same) {
1571
- conflicts.push({
1572
- section: "layers",
1573
- key: bundleLayer.name,
1574
- localValue: existing,
1575
- packageValue: bundleLayer,
1576
- description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1577
- });
1578
- }
1571
+ function mergeLayers(localConfig, bundleLayers, config, contributions, conflicts) {
1572
+ const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1573
+ const mergedLayers = [...localLayers];
1574
+ const contributedLayerNames = [];
1575
+ for (const bundleLayer of bundleLayers) {
1576
+ const existing = localLayers.find((l) => l.name === bundleLayer.name);
1577
+ if (!existing) {
1578
+ mergedLayers.push(bundleLayer);
1579
+ contributedLayerNames.push(bundleLayer.name);
1580
+ } else {
1581
+ const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1582
+ if (!same) {
1583
+ conflicts.push({
1584
+ section: "layers",
1585
+ key: bundleLayer.name,
1586
+ localValue: existing,
1587
+ packageValue: bundleLayer,
1588
+ description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1589
+ });
1590
+ }
1591
+ }
1592
+ }
1593
+ config.layers = mergedLayers;
1594
+ if (contributedLayerNames.length > 0) contributions.layers = contributedLayerNames;
1595
+ }
1596
+ function mergeForbiddenImports(localConfig, bundleRules, config, contributions, conflicts) {
1597
+ const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1598
+ const mergedFI = [...localFI];
1599
+ const contributedFromKeys = [];
1600
+ for (const bundleRule of bundleRules) {
1601
+ const existing = localFI.find((r) => r.from === bundleRule.from);
1602
+ if (!existing) {
1603
+ const entry = { from: bundleRule.from, disallow: bundleRule.disallow };
1604
+ if (bundleRule.message !== void 0) entry.message = bundleRule.message;
1605
+ mergedFI.push(entry);
1606
+ contributedFromKeys.push(bundleRule.from);
1607
+ } else {
1608
+ if (!stringArraysEqual(existing.disallow, bundleRule.disallow)) {
1609
+ conflicts.push({
1610
+ section: "forbiddenImports",
1611
+ key: bundleRule.from,
1612
+ localValue: existing,
1613
+ packageValue: bundleRule,
1614
+ description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1615
+ });
1579
1616
  }
1580
1617
  }
1581
- config.layers = mergedLayers;
1582
- if (contributedLayerNames.length > 0) {
1583
- contributions.layers = contributedLayerNames;
1618
+ }
1619
+ config.forbiddenImports = mergedFI;
1620
+ if (contributedFromKeys.length > 0) contributions.forbiddenImports = contributedFromKeys;
1621
+ }
1622
+ function mergeBoundaries(localConfig, bundleBoundaries, config, contributions) {
1623
+ const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1624
+ const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1625
+ const newSchemas = [];
1626
+ for (const schema of bundleBoundaries.requireSchema ?? []) {
1627
+ if (!localSchemas.has(schema)) {
1628
+ newSchemas.push(schema);
1629
+ localSchemas.add(schema);
1630
+ }
1631
+ }
1632
+ config.boundaries = { requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas] };
1633
+ if (newSchemas.length > 0) contributions.boundaries = newSchemas;
1634
+ }
1635
+ function mergeArchitecture(localConfig, bundleArch, config, contributions, conflicts) {
1636
+ const localArch = localConfig.architecture ?? { thresholds: {}, modules: {} };
1637
+ const mergedThresholds = { ...localArch.thresholds };
1638
+ const contributedThresholdKeys = [];
1639
+ for (const [category, value] of Object.entries(bundleArch.thresholds ?? {})) {
1640
+ if (!(category in mergedThresholds)) {
1641
+ mergedThresholds[category] = value;
1642
+ contributedThresholdKeys.push(category);
1643
+ } else if (!deepEqual(mergedThresholds[category], value)) {
1644
+ conflicts.push({
1645
+ section: "architecture.thresholds",
1646
+ key: category,
1647
+ localValue: mergedThresholds[category],
1648
+ packageValue: value,
1649
+ description: `Architecture threshold '${category}' already exists locally with a different value`
1650
+ });
1584
1651
  }
1585
1652
  }
1586
- if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1587
- const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1588
- const mergedFI = [...localFI];
1589
- const contributedFromKeys = [];
1590
- for (const bundleRule of bundleConstraints.forbiddenImports) {
1591
- const existing = localFI.find((r) => r.from === bundleRule.from);
1592
- if (!existing) {
1593
- const entry = {
1594
- from: bundleRule.from,
1595
- disallow: bundleRule.disallow
1596
- };
1597
- if (bundleRule.message !== void 0) {
1598
- entry.message = bundleRule.message;
1599
- }
1600
- mergedFI.push(entry);
1601
- contributedFromKeys.push(bundleRule.from);
1602
- } else {
1603
- const same = stringArraysEqual(existing.disallow, bundleRule.disallow);
1604
- if (!same) {
1653
+ const mergedModules = { ...localArch.modules };
1654
+ const contributedModuleKeys = [];
1655
+ for (const [modulePath, bundleCategoryMap] of Object.entries(bundleArch.modules ?? {})) {
1656
+ if (!(modulePath in mergedModules)) {
1657
+ mergedModules[modulePath] = bundleCategoryMap;
1658
+ for (const cat of Object.keys(bundleCategoryMap))
1659
+ contributedModuleKeys.push(`${modulePath}:${cat}`);
1660
+ } else {
1661
+ const mergedCategoryMap = { ...mergedModules[modulePath] };
1662
+ for (const [category, value] of Object.entries(bundleCategoryMap)) {
1663
+ if (!(category in mergedCategoryMap)) {
1664
+ mergedCategoryMap[category] = value;
1665
+ contributedModuleKeys.push(`${modulePath}:${category}`);
1666
+ } else if (!deepEqual(mergedCategoryMap[category], value)) {
1605
1667
  conflicts.push({
1606
- section: "forbiddenImports",
1607
- key: bundleRule.from,
1608
- localValue: existing,
1609
- packageValue: bundleRule,
1610
- description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1668
+ section: "architecture.modules",
1669
+ key: `${modulePath}:${category}`,
1670
+ localValue: mergedCategoryMap[category],
1671
+ packageValue: value,
1672
+ description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1611
1673
  });
1612
1674
  }
1613
1675
  }
1676
+ mergedModules[modulePath] = mergedCategoryMap;
1677
+ }
1678
+ }
1679
+ config.architecture = { ...localArch, thresholds: mergedThresholds, modules: mergedModules };
1680
+ if (contributedThresholdKeys.length > 0)
1681
+ contributions["architecture.thresholds"] = contributedThresholdKeys;
1682
+ if (contributedModuleKeys.length > 0)
1683
+ contributions["architecture.modules"] = contributedModuleKeys;
1684
+ }
1685
+ function mergeSecurityRules(localConfig, bundleRules, config, contributions, conflicts) {
1686
+ const localSecurity = localConfig.security ?? { rules: {} };
1687
+ const localRules = localSecurity.rules ?? {};
1688
+ const mergedRules = { ...localRules };
1689
+ const contributedRuleIds = [];
1690
+ for (const [ruleId, severity] of Object.entries(bundleRules)) {
1691
+ if (!(ruleId in mergedRules)) {
1692
+ mergedRules[ruleId] = severity;
1693
+ contributedRuleIds.push(ruleId);
1694
+ } else if (mergedRules[ruleId] !== severity) {
1695
+ conflicts.push({
1696
+ section: "security.rules",
1697
+ key: ruleId,
1698
+ localValue: mergedRules[ruleId],
1699
+ packageValue: severity,
1700
+ description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1701
+ });
1614
1702
  }
1615
- config.forbiddenImports = mergedFI;
1616
- if (contributedFromKeys.length > 0) {
1617
- contributions.forbiddenImports = contributedFromKeys;
1618
- }
1703
+ }
1704
+ config.security = { ...localSecurity, rules: mergedRules };
1705
+ if (contributedRuleIds.length > 0) contributions["security.rules"] = contributedRuleIds;
1706
+ }
1707
+ function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1708
+ const config = { ...localConfig };
1709
+ const contributions = {};
1710
+ const conflicts = [];
1711
+ if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1712
+ mergeLayers(localConfig, bundleConstraints.layers, config, contributions, conflicts);
1713
+ }
1714
+ if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1715
+ mergeForbiddenImports(
1716
+ localConfig,
1717
+ bundleConstraints.forbiddenImports,
1718
+ config,
1719
+ contributions,
1720
+ conflicts
1721
+ );
1619
1722
  }
1620
1723
  if (bundleConstraints.boundaries) {
1621
- const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1622
- const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1623
- const bundleSchemas = bundleConstraints.boundaries.requireSchema ?? [];
1624
- const newSchemas = [];
1625
- for (const schema of bundleSchemas) {
1626
- if (!localSchemas.has(schema)) {
1627
- newSchemas.push(schema);
1628
- localSchemas.add(schema);
1629
- }
1630
- }
1631
- config.boundaries = {
1632
- requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas]
1633
- };
1634
- if (newSchemas.length > 0) {
1635
- contributions.boundaries = newSchemas;
1636
- }
1724
+ mergeBoundaries(
1725
+ localConfig,
1726
+ bundleConstraints.boundaries,
1727
+ config,
1728
+ contributions
1729
+ );
1637
1730
  }
1638
1731
  if (bundleConstraints.architecture) {
1639
- const localArch = localConfig.architecture ?? {
1640
- thresholds: {},
1641
- modules: {}
1642
- };
1643
- const mergedThresholds = { ...localArch.thresholds };
1644
- const contributedThresholdKeys = [];
1645
- const bundleThresholds = bundleConstraints.architecture.thresholds ?? {};
1646
- for (const [category, value] of Object.entries(bundleThresholds)) {
1647
- if (!(category in mergedThresholds)) {
1648
- mergedThresholds[category] = value;
1649
- contributedThresholdKeys.push(category);
1650
- } else if (!deepEqual(mergedThresholds[category], value)) {
1651
- conflicts.push({
1652
- section: "architecture.thresholds",
1653
- key: category,
1654
- localValue: mergedThresholds[category],
1655
- packageValue: value,
1656
- description: `Architecture threshold '${category}' already exists locally with a different value`
1657
- });
1658
- }
1659
- }
1660
- const mergedModules = { ...localArch.modules };
1661
- const contributedModuleKeys = [];
1662
- const bundleModules = bundleConstraints.architecture.modules ?? {};
1663
- for (const [modulePath, bundleCategoryMap] of Object.entries(bundleModules)) {
1664
- if (!(modulePath in mergedModules)) {
1665
- mergedModules[modulePath] = bundleCategoryMap;
1666
- for (const cat of Object.keys(bundleCategoryMap)) {
1667
- contributedModuleKeys.push(`${modulePath}:${cat}`);
1668
- }
1669
- } else {
1670
- const localCategoryMap = mergedModules[modulePath];
1671
- const mergedCategoryMap = { ...localCategoryMap };
1672
- for (const [category, value] of Object.entries(bundleCategoryMap)) {
1673
- if (!(category in mergedCategoryMap)) {
1674
- mergedCategoryMap[category] = value;
1675
- contributedModuleKeys.push(`${modulePath}:${category}`);
1676
- } else if (!deepEqual(mergedCategoryMap[category], value)) {
1677
- conflicts.push({
1678
- section: "architecture.modules",
1679
- key: `${modulePath}:${category}`,
1680
- localValue: mergedCategoryMap[category],
1681
- packageValue: value,
1682
- description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1683
- });
1684
- }
1685
- }
1686
- mergedModules[modulePath] = mergedCategoryMap;
1687
- }
1688
- }
1689
- config.architecture = {
1690
- ...localArch,
1691
- thresholds: mergedThresholds,
1692
- modules: mergedModules
1693
- };
1694
- if (contributedThresholdKeys.length > 0) {
1695
- contributions["architecture.thresholds"] = contributedThresholdKeys;
1696
- }
1697
- if (contributedModuleKeys.length > 0) {
1698
- contributions["architecture.modules"] = contributedModuleKeys;
1699
- }
1732
+ mergeArchitecture(
1733
+ localConfig,
1734
+ bundleConstraints.architecture,
1735
+ config,
1736
+ contributions,
1737
+ conflicts
1738
+ );
1700
1739
  }
1701
1740
  if (bundleConstraints.security?.rules) {
1702
- const localSecurity = localConfig.security ?? { rules: {} };
1703
- const localRules = localSecurity.rules ?? {};
1704
- const mergedRules = { ...localRules };
1705
- const contributedRuleIds = [];
1706
- for (const [ruleId, severity] of Object.entries(bundleConstraints.security.rules)) {
1707
- if (!(ruleId in mergedRules)) {
1708
- mergedRules[ruleId] = severity;
1709
- contributedRuleIds.push(ruleId);
1710
- } else if (mergedRules[ruleId] !== severity) {
1711
- conflicts.push({
1712
- section: "security.rules",
1713
- key: ruleId,
1714
- localValue: mergedRules[ruleId],
1715
- packageValue: severity,
1716
- description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1717
- });
1718
- }
1719
- }
1720
- config.security = { ...localSecurity, rules: mergedRules };
1721
- if (contributedRuleIds.length > 0) {
1722
- contributions["security.rules"] = contributedRuleIds;
1723
- }
1741
+ mergeSecurityRules(
1742
+ localConfig,
1743
+ bundleConstraints.security.rules,
1744
+ config,
1745
+ contributions,
1746
+ conflicts
1747
+ );
1724
1748
  }
1725
1749
  return { config, contributions, conflicts };
1726
1750
  }
@@ -1881,14 +1905,84 @@ function walk(node, visitor) {
1881
1905
  }
1882
1906
  }
1883
1907
  }
1908
+ function makeLocation(node) {
1909
+ return {
1910
+ file: "",
1911
+ line: node.loc?.start.line ?? 0,
1912
+ column: node.loc?.start.column ?? 0
1913
+ };
1914
+ }
1915
+ function processImportSpecifiers(importDecl, imp) {
1916
+ for (const spec of importDecl.specifiers) {
1917
+ if (spec.type === "ImportDefaultSpecifier") {
1918
+ imp.default = spec.local.name;
1919
+ } else if (spec.type === "ImportNamespaceSpecifier") {
1920
+ imp.namespace = spec.local.name;
1921
+ } else if (spec.type === "ImportSpecifier") {
1922
+ imp.specifiers.push(spec.local.name);
1923
+ if (spec.importKind === "type") {
1924
+ imp.kind = "type";
1925
+ }
1926
+ }
1927
+ }
1928
+ }
1929
+ function getExportedName(exported) {
1930
+ return exported.type === "Identifier" ? exported.name : String(exported.value);
1931
+ }
1932
+ function processReExportSpecifiers(exportDecl, exports2) {
1933
+ for (const spec of exportDecl.specifiers) {
1934
+ if (spec.type !== "ExportSpecifier") continue;
1935
+ exports2.push({
1936
+ name: getExportedName(spec.exported),
1937
+ type: "named",
1938
+ location: makeLocation(exportDecl),
1939
+ isReExport: true,
1940
+ source: exportDecl.source.value
1941
+ });
1942
+ }
1943
+ }
1944
+ function processExportDeclaration(exportDecl, exports2) {
1945
+ const decl = exportDecl.declaration;
1946
+ if (!decl) return;
1947
+ if (decl.type === "VariableDeclaration") {
1948
+ for (const declarator of decl.declarations) {
1949
+ if (declarator.id.type === "Identifier") {
1950
+ exports2.push({
1951
+ name: declarator.id.name,
1952
+ type: "named",
1953
+ location: makeLocation(decl),
1954
+ isReExport: false
1955
+ });
1956
+ }
1957
+ }
1958
+ } else if ((decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") && decl.id) {
1959
+ exports2.push({
1960
+ name: decl.id.name,
1961
+ type: "named",
1962
+ location: makeLocation(decl),
1963
+ isReExport: false
1964
+ });
1965
+ }
1966
+ }
1967
+ function processExportListSpecifiers(exportDecl, exports2) {
1968
+ for (const spec of exportDecl.specifiers) {
1969
+ if (spec.type !== "ExportSpecifier") continue;
1970
+ exports2.push({
1971
+ name: getExportedName(spec.exported),
1972
+ type: "named",
1973
+ location: makeLocation(exportDecl),
1974
+ isReExport: false
1975
+ });
1976
+ }
1977
+ }
1884
1978
  var TypeScriptParser = class {
1885
1979
  name = "typescript";
1886
1980
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1887
- async parseFile(path20) {
1888
- const contentResult = await readFileContent(path20);
1981
+ async parseFile(path22) {
1982
+ const contentResult = await readFileContent(path22);
1889
1983
  if (!contentResult.ok) {
1890
1984
  return (0, import_types.Err)(
1891
- createParseError("NOT_FOUND", `File not found: ${path20}`, { path: path20 }, [
1985
+ createParseError("NOT_FOUND", `File not found: ${path22}`, { path: path22 }, [
1892
1986
  "Check that the file exists",
1893
1987
  "Verify the path is correct"
1894
1988
  ])
@@ -1898,7 +1992,7 @@ var TypeScriptParser = class {
1898
1992
  const ast = (0, import_typescript_estree.parse)(contentResult.value, {
1899
1993
  loc: true,
1900
1994
  range: true,
1901
- jsx: path20.endsWith(".tsx"),
1995
+ jsx: path22.endsWith(".tsx"),
1902
1996
  errorOnUnknownASTType: false
1903
1997
  });
1904
1998
  return (0, import_types.Ok)({
@@ -1909,7 +2003,7 @@ var TypeScriptParser = class {
1909
2003
  } catch (e) {
1910
2004
  const error = e;
1911
2005
  return (0, import_types.Err)(
1912
- createParseError("SYNTAX_ERROR", `Failed to parse ${path20}: ${error.message}`, { path: path20 }, [
2006
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path22}: ${error.message}`, { path: path22 }, [
1913
2007
  "Check for syntax errors in the file",
1914
2008
  "Ensure valid TypeScript syntax"
1915
2009
  ])
@@ -1925,26 +2019,12 @@ var TypeScriptParser = class {
1925
2019
  const imp = {
1926
2020
  source: importDecl.source.value,
1927
2021
  specifiers: [],
1928
- location: {
1929
- file: "",
1930
- line: importDecl.loc?.start.line ?? 0,
1931
- column: importDecl.loc?.start.column ?? 0
1932
- },
2022
+ location: makeLocation(importDecl),
1933
2023
  kind: importDecl.importKind === "type" ? "type" : "value"
1934
2024
  };
1935
- for (const spec of importDecl.specifiers) {
1936
- if (spec.type === "ImportDefaultSpecifier") {
1937
- imp.default = spec.local.name;
1938
- } else if (spec.type === "ImportNamespaceSpecifier") {
1939
- imp.namespace = spec.local.name;
1940
- } else if (spec.type === "ImportSpecifier") {
1941
- imp.specifiers.push(spec.local.name);
1942
- if (spec.importKind === "type") {
1943
- imp.kind = "type";
1944
- }
1945
- }
1946
- }
2025
+ processImportSpecifiers(importDecl, imp);
1947
2026
  imports.push(imp);
2027
+ return;
1948
2028
  }
1949
2029
  if (node.type === "ImportExpression") {
1950
2030
  const importExpr = node;
@@ -1952,11 +2032,7 @@ var TypeScriptParser = class {
1952
2032
  imports.push({
1953
2033
  source: importExpr.source.value,
1954
2034
  specifiers: [],
1955
- location: {
1956
- file: "",
1957
- line: importExpr.loc?.start.line ?? 0,
1958
- column: importExpr.loc?.start.column ?? 0
1959
- },
2035
+ location: makeLocation(importExpr),
1960
2036
  kind: "value"
1961
2037
  });
1962
2038
  }
@@ -1971,97 +2047,29 @@ var TypeScriptParser = class {
1971
2047
  if (node.type === "ExportNamedDeclaration") {
1972
2048
  const exportDecl = node;
1973
2049
  if (exportDecl.source) {
1974
- for (const spec of exportDecl.specifiers) {
1975
- if (spec.type === "ExportSpecifier") {
1976
- const exported = spec.exported;
1977
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
1978
- exports2.push({
1979
- name,
1980
- type: "named",
1981
- location: {
1982
- file: "",
1983
- line: exportDecl.loc?.start.line ?? 0,
1984
- column: exportDecl.loc?.start.column ?? 0
1985
- },
1986
- isReExport: true,
1987
- source: exportDecl.source.value
1988
- });
1989
- }
1990
- }
2050
+ processReExportSpecifiers(exportDecl, exports2);
1991
2051
  return;
1992
2052
  }
1993
- if (exportDecl.declaration) {
1994
- const decl = exportDecl.declaration;
1995
- if (decl.type === "VariableDeclaration") {
1996
- for (const declarator of decl.declarations) {
1997
- if (declarator.id.type === "Identifier") {
1998
- exports2.push({
1999
- name: declarator.id.name,
2000
- type: "named",
2001
- location: {
2002
- file: "",
2003
- line: decl.loc?.start.line ?? 0,
2004
- column: decl.loc?.start.column ?? 0
2005
- },
2006
- isReExport: false
2007
- });
2008
- }
2009
- }
2010
- } else if (decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") {
2011
- if (decl.id) {
2012
- exports2.push({
2013
- name: decl.id.name,
2014
- type: "named",
2015
- location: {
2016
- file: "",
2017
- line: decl.loc?.start.line ?? 0,
2018
- column: decl.loc?.start.column ?? 0
2019
- },
2020
- isReExport: false
2021
- });
2022
- }
2023
- }
2024
- }
2025
- for (const spec of exportDecl.specifiers) {
2026
- if (spec.type === "ExportSpecifier") {
2027
- const exported = spec.exported;
2028
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
2029
- exports2.push({
2030
- name,
2031
- type: "named",
2032
- location: {
2033
- file: "",
2034
- line: exportDecl.loc?.start.line ?? 0,
2035
- column: exportDecl.loc?.start.column ?? 0
2036
- },
2037
- isReExport: false
2038
- });
2039
- }
2040
- }
2053
+ processExportDeclaration(exportDecl, exports2);
2054
+ processExportListSpecifiers(exportDecl, exports2);
2055
+ return;
2041
2056
  }
2042
2057
  if (node.type === "ExportDefaultDeclaration") {
2043
2058
  const exportDecl = node;
2044
2059
  exports2.push({
2045
2060
  name: "default",
2046
2061
  type: "default",
2047
- location: {
2048
- file: "",
2049
- line: exportDecl.loc?.start.line ?? 0,
2050
- column: exportDecl.loc?.start.column ?? 0
2051
- },
2062
+ location: makeLocation(exportDecl),
2052
2063
  isReExport: false
2053
2064
  });
2065
+ return;
2054
2066
  }
2055
2067
  if (node.type === "ExportAllDeclaration") {
2056
2068
  const exportDecl = node;
2057
2069
  exports2.push({
2058
2070
  name: exportDecl.exported?.name ?? "*",
2059
2071
  type: "namespace",
2060
- location: {
2061
- file: "",
2062
- line: exportDecl.loc?.start.line ?? 0,
2063
- column: exportDecl.loc?.start.column ?? 0
2064
- },
2072
+ location: makeLocation(exportDecl),
2065
2073
  isReExport: true,
2066
2074
  source: exportDecl.source.value
2067
2075
  });
@@ -2077,10 +2085,27 @@ var TypeScriptParser = class {
2077
2085
  // src/entropy/snapshot.ts
2078
2086
  var import_path6 = require("path");
2079
2087
  var import_minimatch3 = require("minimatch");
2088
+ function collectFieldEntries(rootDir, field) {
2089
+ if (typeof field === "string") return [(0, import_path6.resolve)(rootDir, field)];
2090
+ if (typeof field === "object" && field !== null) {
2091
+ return Object.values(field).filter((v) => typeof v === "string").map((v) => (0, import_path6.resolve)(rootDir, v));
2092
+ }
2093
+ return [];
2094
+ }
2095
+ function extractPackageEntries(rootDir, pkg) {
2096
+ const entries = [];
2097
+ entries.push(...collectFieldEntries(rootDir, pkg["exports"]));
2098
+ if (entries.length === 0 && typeof pkg["main"] === "string") {
2099
+ entries.push((0, import_path6.resolve)(rootDir, pkg["main"]));
2100
+ }
2101
+ if (pkg["bin"]) {
2102
+ entries.push(...collectFieldEntries(rootDir, pkg["bin"]));
2103
+ }
2104
+ return entries;
2105
+ }
2080
2106
  async function resolveEntryPoints(rootDir, explicitEntries) {
2081
2107
  if (explicitEntries && explicitEntries.length > 0) {
2082
- const resolved = explicitEntries.map((e) => (0, import_path6.resolve)(rootDir, e));
2083
- return (0, import_types.Ok)(resolved);
2108
+ return (0, import_types.Ok)(explicitEntries.map((e) => (0, import_path6.resolve)(rootDir, e)));
2084
2109
  }
2085
2110
  const pkgPath = (0, import_path6.join)(rootDir, "package.json");
2086
2111
  if (await fileExists(pkgPath)) {
@@ -2088,38 +2113,8 @@ async function resolveEntryPoints(rootDir, explicitEntries) {
2088
2113
  if (pkgContent.ok) {
2089
2114
  try {
2090
2115
  const pkg = JSON.parse(pkgContent.value);
2091
- const entries = [];
2092
- if (pkg["exports"]) {
2093
- const exports2 = pkg["exports"];
2094
- if (typeof exports2 === "string") {
2095
- entries.push((0, import_path6.resolve)(rootDir, exports2));
2096
- } else if (typeof exports2 === "object" && exports2 !== null) {
2097
- for (const value of Object.values(exports2)) {
2098
- if (typeof value === "string") {
2099
- entries.push((0, import_path6.resolve)(rootDir, value));
2100
- }
2101
- }
2102
- }
2103
- }
2104
- const main = pkg["main"];
2105
- if (typeof main === "string" && entries.length === 0) {
2106
- entries.push((0, import_path6.resolve)(rootDir, main));
2107
- }
2108
- const bin = pkg["bin"];
2109
- if (bin) {
2110
- if (typeof bin === "string") {
2111
- entries.push((0, import_path6.resolve)(rootDir, bin));
2112
- } else if (typeof bin === "object") {
2113
- for (const value of Object.values(bin)) {
2114
- if (typeof value === "string") {
2115
- entries.push((0, import_path6.resolve)(rootDir, value));
2116
- }
2117
- }
2118
- }
2119
- }
2120
- if (entries.length > 0) {
2121
- return (0, import_types.Ok)(entries);
2122
- }
2116
+ const entries = extractPackageEntries(rootDir, pkg);
2117
+ if (entries.length > 0) return (0, import_types.Ok)(entries);
2123
2118
  } catch {
2124
2119
  }
2125
2120
  }
@@ -2193,66 +2188,49 @@ function extractInlineRefs(content) {
2193
2188
  }
2194
2189
  return refs;
2195
2190
  }
2196
- async function parseDocumentationFile(path20) {
2197
- const contentResult = await readFileContent(path20);
2191
+ async function parseDocumentationFile(path22) {
2192
+ const contentResult = await readFileContent(path22);
2198
2193
  if (!contentResult.ok) {
2199
2194
  return (0, import_types.Err)(
2200
2195
  createEntropyError(
2201
2196
  "PARSE_ERROR",
2202
- `Failed to read documentation file: ${path20}`,
2203
- { file: path20 },
2197
+ `Failed to read documentation file: ${path22}`,
2198
+ { file: path22 },
2204
2199
  ["Check that the file exists"]
2205
2200
  )
2206
2201
  );
2207
2202
  }
2208
2203
  const content = contentResult.value;
2209
- const type = path20.endsWith(".md") ? "markdown" : "text";
2204
+ const type = path22.endsWith(".md") ? "markdown" : "text";
2210
2205
  return (0, import_types.Ok)({
2211
- path: path20,
2206
+ path: path22,
2212
2207
  type,
2213
2208
  content,
2214
2209
  codeBlocks: extractCodeBlocks(content),
2215
2210
  inlineRefs: extractInlineRefs(content)
2216
2211
  });
2217
2212
  }
2213
+ function makeInternalSymbol(name, type, line) {
2214
+ return { name, type, line, references: 0, calledBy: [] };
2215
+ }
2216
+ function extractSymbolsFromNode(node) {
2217
+ const line = node.loc?.start?.line || 0;
2218
+ if (node.type === "FunctionDeclaration" && node.id?.name) {
2219
+ return [makeInternalSymbol(node.id.name, "function", line)];
2220
+ }
2221
+ if (node.type === "VariableDeclaration") {
2222
+ return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
2223
+ }
2224
+ if (node.type === "ClassDeclaration" && node.id?.name) {
2225
+ return [makeInternalSymbol(node.id.name, "class", line)];
2226
+ }
2227
+ return [];
2228
+ }
2218
2229
  function extractInternalSymbols(ast) {
2219
- const symbols = [];
2220
2230
  const body = ast.body;
2221
- if (!body?.body) return symbols;
2222
- for (const node of body.body) {
2223
- if (node.type === "FunctionDeclaration" && node.id?.name) {
2224
- symbols.push({
2225
- name: node.id.name,
2226
- type: "function",
2227
- line: node.loc?.start?.line || 0,
2228
- references: 0,
2229
- calledBy: []
2230
- });
2231
- }
2232
- if (node.type === "VariableDeclaration") {
2233
- for (const decl of node.declarations || []) {
2234
- if (decl.id?.name) {
2235
- symbols.push({
2236
- name: decl.id.name,
2237
- type: "variable",
2238
- line: node.loc?.start?.line || 0,
2239
- references: 0,
2240
- calledBy: []
2241
- });
2242
- }
2243
- }
2244
- }
2245
- if (node.type === "ClassDeclaration" && node.id?.name) {
2246
- symbols.push({
2247
- name: node.id.name,
2248
- type: "class",
2249
- line: node.loc?.start?.line || 0,
2250
- references: 0,
2251
- calledBy: []
2252
- });
2253
- }
2254
- }
2255
- return symbols;
2231
+ if (!body?.body) return [];
2232
+ const nodes = body.body;
2233
+ return nodes.flatMap(extractSymbolsFromNode);
2256
2234
  }
2257
2235
  function extractJSDocComments(ast) {
2258
2236
  const comments = [];
@@ -2393,27 +2371,34 @@ async function buildSnapshot(config) {
2393
2371
 
2394
2372
  // src/entropy/detectors/drift.ts
2395
2373
  var import_path7 = require("path");
2396
- function levenshteinDistance(a, b) {
2374
+ function initLevenshteinMatrix(aLen, bLen) {
2397
2375
  const matrix = [];
2398
- for (let i = 0; i <= b.length; i++) {
2376
+ for (let i = 0; i <= bLen; i++) {
2399
2377
  matrix[i] = [i];
2400
2378
  }
2401
- for (let j = 0; j <= a.length; j++) {
2402
- const row = matrix[0];
2403
- if (row) {
2404
- row[j] = j;
2379
+ const firstRow = matrix[0];
2380
+ if (firstRow) {
2381
+ for (let j = 0; j <= aLen; j++) {
2382
+ firstRow[j] = j;
2405
2383
  }
2406
2384
  }
2385
+ return matrix;
2386
+ }
2387
+ function computeLevenshteinCell(row, prevRow, j, charsMatch) {
2388
+ if (charsMatch) {
2389
+ row[j] = prevRow[j - 1] ?? 0;
2390
+ } else {
2391
+ row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
2392
+ }
2393
+ }
2394
+ function levenshteinDistance(a, b) {
2395
+ const matrix = initLevenshteinMatrix(a.length, b.length);
2407
2396
  for (let i = 1; i <= b.length; i++) {
2408
2397
  for (let j = 1; j <= a.length; j++) {
2409
2398
  const row = matrix[i];
2410
2399
  const prevRow = matrix[i - 1];
2411
2400
  if (!row || !prevRow) continue;
2412
- if (b.charAt(i - 1) === a.charAt(j - 1)) {
2413
- row[j] = prevRow[j - 1] ?? 0;
2414
- } else {
2415
- row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
2416
- }
2401
+ computeLevenshteinCell(row, prevRow, j, b.charAt(i - 1) === a.charAt(j - 1));
2417
2402
  }
2418
2403
  }
2419
2404
  const lastRow = matrix[b.length];
@@ -2699,32 +2684,27 @@ function findDeadExports(snapshot, usageMap, reachability) {
2699
2684
  }
2700
2685
  return deadExports;
2701
2686
  }
2702
- function countLinesFromAST(ast) {
2703
- if (ast.body && Array.isArray(ast.body)) {
2704
- let maxLine = 0;
2705
- const traverse = (node) => {
2706
- if (node && typeof node === "object") {
2707
- const n = node;
2708
- if (n.loc?.end?.line && n.loc.end.line > maxLine) {
2709
- maxLine = n.loc.end.line;
2710
- }
2711
- for (const key of Object.keys(node)) {
2712
- const value = node[key];
2713
- if (Array.isArray(value)) {
2714
- for (const item of value) {
2715
- traverse(item);
2716
- }
2717
- } else if (value && typeof value === "object") {
2718
- traverse(value);
2719
- }
2720
- }
2687
+ function findMaxLineInNode(node) {
2688
+ if (!node || typeof node !== "object") return 0;
2689
+ const n = node;
2690
+ let maxLine = n.loc?.end?.line ?? 0;
2691
+ for (const key of Object.keys(node)) {
2692
+ const value = node[key];
2693
+ if (Array.isArray(value)) {
2694
+ for (const item of value) {
2695
+ maxLine = Math.max(maxLine, findMaxLineInNode(item));
2721
2696
  }
2722
- };
2723
- traverse(ast);
2724
- if (maxLine > 0) return maxLine;
2725
- return Math.max(ast.body.length * 3, 1);
2697
+ } else if (value && typeof value === "object") {
2698
+ maxLine = Math.max(maxLine, findMaxLineInNode(value));
2699
+ }
2726
2700
  }
2727
- return 1;
2701
+ return maxLine;
2702
+ }
2703
+ function countLinesFromAST(ast) {
2704
+ if (!ast.body || !Array.isArray(ast.body)) return 1;
2705
+ const maxLine = findMaxLineInNode(ast);
2706
+ if (maxLine > 0) return maxLine;
2707
+ return Math.max(ast.body.length * 3, 1);
2728
2708
  }
2729
2709
  function findDeadFiles(snapshot, reachability) {
2730
2710
  const deadFiles = [];
@@ -2875,130 +2855,146 @@ function fileMatchesPattern(filePath, pattern, rootDir) {
2875
2855
  const relativePath = relativePosix(rootDir, filePath);
2876
2856
  return (0, import_minimatch4.minimatch)(relativePath, pattern);
2877
2857
  }
2878
- function checkConfigPattern(pattern, file, rootDir) {
2858
+ var CONVENTION_DESCRIPTIONS = {
2859
+ camelCase: "camelCase (e.g., myFunction)",
2860
+ PascalCase: "PascalCase (e.g., MyClass)",
2861
+ UPPER_SNAKE: "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)",
2862
+ "kebab-case": "kebab-case (e.g., my-component)"
2863
+ };
2864
+ function checkMustExport(rule, file, message) {
2865
+ if (rule.type !== "must-export") return [];
2879
2866
  const matches = [];
2880
- const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
2881
- if (!fileMatches) {
2882
- return matches;
2883
- }
2884
- const rule = pattern.rule;
2885
- switch (rule.type) {
2886
- case "must-export": {
2887
- for (const name of rule.names) {
2888
- const hasExport = file.exports.some((e) => e.name === name);
2889
- if (!hasExport) {
2890
- matches.push({
2891
- line: 1,
2892
- message: pattern.message || `Missing required export: "${name}"`,
2893
- suggestion: `Add export for "${name}"`
2894
- });
2895
- }
2896
- }
2897
- break;
2898
- }
2899
- case "must-export-default": {
2900
- const hasDefault = file.exports.some((e) => e.type === "default");
2901
- if (!hasDefault) {
2902
- matches.push({
2903
- line: 1,
2904
- message: pattern.message || "File must have a default export",
2905
- suggestion: "Add a default export"
2906
- });
2907
- }
2908
- break;
2909
- }
2910
- case "no-export": {
2911
- for (const name of rule.names) {
2912
- const exp = file.exports.find((e) => e.name === name);
2913
- if (exp) {
2914
- matches.push({
2915
- line: exp.location.line,
2916
- message: pattern.message || `Forbidden export: "${name}"`,
2917
- suggestion: `Remove export "${name}"`
2918
- });
2919
- }
2920
- }
2921
- break;
2867
+ for (const name of rule.names) {
2868
+ if (!file.exports.some((e) => e.name === name)) {
2869
+ matches.push({
2870
+ line: 1,
2871
+ message: message || `Missing required export: "${name}"`,
2872
+ suggestion: `Add export for "${name}"`
2873
+ });
2922
2874
  }
2923
- case "must-import": {
2924
- const hasImport = file.imports.some(
2925
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2926
- );
2927
- if (!hasImport) {
2928
- matches.push({
2929
- line: 1,
2930
- message: pattern.message || `Missing required import from "${rule.from}"`,
2931
- suggestion: `Add import from "${rule.from}"`
2932
- });
2875
+ }
2876
+ return matches;
2877
+ }
2878
+ function checkMustExportDefault(_rule, file, message) {
2879
+ if (!file.exports.some((e) => e.type === "default")) {
2880
+ return [
2881
+ {
2882
+ line: 1,
2883
+ message: message || "File must have a default export",
2884
+ suggestion: "Add a default export"
2933
2885
  }
2934
- break;
2886
+ ];
2887
+ }
2888
+ return [];
2889
+ }
2890
+ function checkNoExport(rule, file, message) {
2891
+ if (rule.type !== "no-export") return [];
2892
+ const matches = [];
2893
+ for (const name of rule.names) {
2894
+ const exp = file.exports.find((e) => e.name === name);
2895
+ if (exp) {
2896
+ matches.push({
2897
+ line: exp.location.line,
2898
+ message: message || `Forbidden export: "${name}"`,
2899
+ suggestion: `Remove export "${name}"`
2900
+ });
2935
2901
  }
2936
- case "no-import": {
2937
- const forbiddenImport = file.imports.find(
2938
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2939
- );
2940
- if (forbiddenImport) {
2941
- matches.push({
2942
- line: forbiddenImport.location.line,
2943
- message: pattern.message || `Forbidden import from "${rule.from}"`,
2944
- suggestion: `Remove import from "${rule.from}"`
2945
- });
2902
+ }
2903
+ return matches;
2904
+ }
2905
+ function checkMustImport(rule, file, message) {
2906
+ if (rule.type !== "must-import") return [];
2907
+ const hasImport = file.imports.some(
2908
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2909
+ );
2910
+ if (!hasImport) {
2911
+ return [
2912
+ {
2913
+ line: 1,
2914
+ message: message || `Missing required import from "${rule.from}"`,
2915
+ suggestion: `Add import from "${rule.from}"`
2946
2916
  }
2947
- break;
2948
- }
2949
- case "naming": {
2950
- const regex = new RegExp(rule.match);
2951
- for (const exp of file.exports) {
2952
- if (!regex.test(exp.name)) {
2953
- let expected = "";
2954
- switch (rule.convention) {
2955
- case "camelCase":
2956
- expected = "camelCase (e.g., myFunction)";
2957
- break;
2958
- case "PascalCase":
2959
- expected = "PascalCase (e.g., MyClass)";
2960
- break;
2961
- case "UPPER_SNAKE":
2962
- expected = "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)";
2963
- break;
2964
- case "kebab-case":
2965
- expected = "kebab-case (e.g., my-component)";
2966
- break;
2967
- }
2968
- matches.push({
2969
- line: exp.location.line,
2970
- message: pattern.message || `"${exp.name}" does not follow ${rule.convention} convention`,
2971
- suggestion: `Rename to follow ${expected}`
2972
- });
2973
- }
2917
+ ];
2918
+ }
2919
+ return [];
2920
+ }
2921
+ function checkNoImport(rule, file, message) {
2922
+ if (rule.type !== "no-import") return [];
2923
+ const forbiddenImport = file.imports.find(
2924
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2925
+ );
2926
+ if (forbiddenImport) {
2927
+ return [
2928
+ {
2929
+ line: forbiddenImport.location.line,
2930
+ message: message || `Forbidden import from "${rule.from}"`,
2931
+ suggestion: `Remove import from "${rule.from}"`
2974
2932
  }
2975
- break;
2933
+ ];
2934
+ }
2935
+ return [];
2936
+ }
2937
+ function checkNaming(rule, file, message) {
2938
+ if (rule.type !== "naming") return [];
2939
+ const regex = new RegExp(rule.match);
2940
+ const matches = [];
2941
+ for (const exp of file.exports) {
2942
+ if (!regex.test(exp.name)) {
2943
+ const expected = CONVENTION_DESCRIPTIONS[rule.convention] ?? rule.convention;
2944
+ matches.push({
2945
+ line: exp.location.line,
2946
+ message: message || `"${exp.name}" does not follow ${rule.convention} convention`,
2947
+ suggestion: `Rename to follow ${expected}`
2948
+ });
2976
2949
  }
2977
- case "max-exports": {
2978
- if (file.exports.length > rule.count) {
2979
- matches.push({
2980
- line: 1,
2981
- message: pattern.message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2982
- suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2983
- });
2950
+ }
2951
+ return matches;
2952
+ }
2953
+ function checkMaxExports(rule, file, message) {
2954
+ if (rule.type !== "max-exports") return [];
2955
+ if (file.exports.length > rule.count) {
2956
+ return [
2957
+ {
2958
+ line: 1,
2959
+ message: message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2960
+ suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2984
2961
  }
2985
- break;
2986
- }
2987
- case "max-lines": {
2988
- break;
2989
- }
2990
- case "require-jsdoc": {
2991
- if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2992
- matches.push({
2993
- line: 1,
2994
- message: pattern.message || "Exported symbols require JSDoc documentation",
2995
- suggestion: "Add JSDoc comments to exports"
2996
- });
2962
+ ];
2963
+ }
2964
+ return [];
2965
+ }
2966
+ function checkMaxLines(_rule, _file, _message) {
2967
+ return [];
2968
+ }
2969
+ function checkRequireJsdoc(_rule, file, message) {
2970
+ if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2971
+ return [
2972
+ {
2973
+ line: 1,
2974
+ message: message || "Exported symbols require JSDoc documentation",
2975
+ suggestion: "Add JSDoc comments to exports"
2997
2976
  }
2998
- break;
2999
- }
2977
+ ];
3000
2978
  }
3001
- return matches;
2979
+ return [];
2980
+ }
2981
+ var RULE_CHECKERS = {
2982
+ "must-export": checkMustExport,
2983
+ "must-export-default": checkMustExportDefault,
2984
+ "no-export": checkNoExport,
2985
+ "must-import": checkMustImport,
2986
+ "no-import": checkNoImport,
2987
+ naming: checkNaming,
2988
+ "max-exports": checkMaxExports,
2989
+ "max-lines": checkMaxLines,
2990
+ "require-jsdoc": checkRequireJsdoc
2991
+ };
2992
+ function checkConfigPattern(pattern, file, rootDir) {
2993
+ const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
2994
+ if (!fileMatches) return [];
2995
+ const checker = RULE_CHECKERS[pattern.rule.type];
2996
+ if (!checker) return [];
2997
+ return checker(pattern.rule, file, pattern.message);
3002
2998
  }
3003
2999
  async function detectPatternViolations(snapshot, config) {
3004
3000
  const violations = [];
@@ -3064,22 +3060,22 @@ var DEFAULT_THRESHOLDS = {
3064
3060
  fileLength: { info: 300 },
3065
3061
  hotspotPercentile: { error: 95 }
3066
3062
  };
3063
+ var FUNCTION_PATTERNS = [
3064
+ // function declarations: function name(params) {
3065
+ /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
3066
+ // method declarations: name(params) {
3067
+ /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
3068
+ // arrow functions assigned to const/let/var: const name = (params) =>
3069
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
3070
+ // arrow functions assigned to const/let/var with single param: const name = param =>
3071
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
3072
+ ];
3067
3073
  function extractFunctions(content) {
3068
3074
  const functions = [];
3069
3075
  const lines = content.split("\n");
3070
- const patterns = [
3071
- // function declarations: function name(params) {
3072
- /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
3073
- // method declarations: name(params) {
3074
- /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
3075
- // arrow functions assigned to const/let/var: const name = (params) =>
3076
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
3077
- // arrow functions assigned to const/let/var with single param: const name = param =>
3078
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
3079
- ];
3080
3076
  for (let i = 0; i < lines.length; i++) {
3081
3077
  const line = lines[i];
3082
- for (const pattern of patterns) {
3078
+ for (const pattern of FUNCTION_PATTERNS) {
3083
3079
  const match = line.match(pattern);
3084
3080
  if (match) {
3085
3081
  const name = match[1] ?? "anonymous";
@@ -3168,26 +3164,155 @@ function computeNestingDepth(body) {
3168
3164
  }
3169
3165
  return maxDepth;
3170
3166
  }
3171
- async function detectComplexityViolations(snapshot, config, graphData) {
3172
- const violations = [];
3173
- const thresholds = {
3167
+ function resolveThresholds(config) {
3168
+ const userThresholds = config?.thresholds;
3169
+ if (!userThresholds) return { ...DEFAULT_THRESHOLDS };
3170
+ return {
3174
3171
  cyclomaticComplexity: {
3175
- error: config?.thresholds?.cyclomaticComplexity?.error ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.error,
3176
- warn: config?.thresholds?.cyclomaticComplexity?.warn ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.warn
3172
+ ...DEFAULT_THRESHOLDS.cyclomaticComplexity,
3173
+ ...stripUndefined(userThresholds.cyclomaticComplexity)
3177
3174
  },
3178
3175
  nestingDepth: {
3179
- warn: config?.thresholds?.nestingDepth?.warn ?? DEFAULT_THRESHOLDS.nestingDepth.warn
3176
+ ...DEFAULT_THRESHOLDS.nestingDepth,
3177
+ ...stripUndefined(userThresholds.nestingDepth)
3180
3178
  },
3181
3179
  functionLength: {
3182
- warn: config?.thresholds?.functionLength?.warn ?? DEFAULT_THRESHOLDS.functionLength.warn
3180
+ ...DEFAULT_THRESHOLDS.functionLength,
3181
+ ...stripUndefined(userThresholds.functionLength)
3183
3182
  },
3184
3183
  parameterCount: {
3185
- warn: config?.thresholds?.parameterCount?.warn ?? DEFAULT_THRESHOLDS.parameterCount.warn
3184
+ ...DEFAULT_THRESHOLDS.parameterCount,
3185
+ ...stripUndefined(userThresholds.parameterCount)
3186
3186
  },
3187
- fileLength: {
3188
- info: config?.thresholds?.fileLength?.info ?? DEFAULT_THRESHOLDS.fileLength.info
3189
- }
3187
+ fileLength: { ...DEFAULT_THRESHOLDS.fileLength, ...stripUndefined(userThresholds.fileLength) }
3188
+ };
3189
+ }
3190
+ function stripUndefined(obj) {
3191
+ if (!obj) return {};
3192
+ const result = {};
3193
+ for (const [key, val] of Object.entries(obj)) {
3194
+ if (val !== void 0) result[key] = val;
3195
+ }
3196
+ return result;
3197
+ }
3198
+ function checkFileLengthViolation(filePath, lineCount, threshold) {
3199
+ if (lineCount <= threshold) return null;
3200
+ return {
3201
+ file: filePath,
3202
+ function: "<file>",
3203
+ line: 1,
3204
+ metric: "fileLength",
3205
+ value: lineCount,
3206
+ threshold,
3207
+ tier: 3,
3208
+ severity: "info",
3209
+ message: `File has ${lineCount} lines (threshold: ${threshold})`
3210
+ };
3211
+ }
3212
+ function checkCyclomaticComplexity(filePath, fn, thresholds) {
3213
+ const complexity = computeCyclomaticComplexity(fn.body);
3214
+ if (complexity > thresholds.error) {
3215
+ return {
3216
+ file: filePath,
3217
+ function: fn.name,
3218
+ line: fn.line,
3219
+ metric: "cyclomaticComplexity",
3220
+ value: complexity,
3221
+ threshold: thresholds.error,
3222
+ tier: 1,
3223
+ severity: "error",
3224
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.error})`
3225
+ };
3226
+ }
3227
+ if (complexity > thresholds.warn) {
3228
+ return {
3229
+ file: filePath,
3230
+ function: fn.name,
3231
+ line: fn.line,
3232
+ metric: "cyclomaticComplexity",
3233
+ value: complexity,
3234
+ threshold: thresholds.warn,
3235
+ tier: 2,
3236
+ severity: "warning",
3237
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.warn})`
3238
+ };
3239
+ }
3240
+ return null;
3241
+ }
3242
+ function checkNestingDepth(filePath, fn, threshold) {
3243
+ const depth = computeNestingDepth(fn.body);
3244
+ if (depth <= threshold) return null;
3245
+ return {
3246
+ file: filePath,
3247
+ function: fn.name,
3248
+ line: fn.line,
3249
+ metric: "nestingDepth",
3250
+ value: depth,
3251
+ threshold,
3252
+ tier: 2,
3253
+ severity: "warning",
3254
+ message: `Function "${fn.name}" has nesting depth of ${depth} (threshold: ${threshold})`
3255
+ };
3256
+ }
3257
+ function checkFunctionLength(filePath, fn, threshold) {
3258
+ const fnLength = fn.endLine - fn.startLine + 1;
3259
+ if (fnLength <= threshold) return null;
3260
+ return {
3261
+ file: filePath,
3262
+ function: fn.name,
3263
+ line: fn.line,
3264
+ metric: "functionLength",
3265
+ value: fnLength,
3266
+ threshold,
3267
+ tier: 2,
3268
+ severity: "warning",
3269
+ message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${threshold})`
3270
+ };
3271
+ }
3272
+ function checkParameterCount(filePath, fn, threshold) {
3273
+ if (fn.params <= threshold) return null;
3274
+ return {
3275
+ file: filePath,
3276
+ function: fn.name,
3277
+ line: fn.line,
3278
+ metric: "parameterCount",
3279
+ value: fn.params,
3280
+ threshold,
3281
+ tier: 2,
3282
+ severity: "warning",
3283
+ message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${threshold})`
3284
+ };
3285
+ }
3286
+ function checkHotspot(filePath, fn, graphData) {
3287
+ const hotspot = graphData.hotspots.find((h) => h.file === filePath && h.function === fn.name);
3288
+ if (!hotspot || hotspot.hotspotScore <= graphData.percentile95Score) return null;
3289
+ return {
3290
+ file: filePath,
3291
+ function: fn.name,
3292
+ line: fn.line,
3293
+ metric: "hotspotScore",
3294
+ value: hotspot.hotspotScore,
3295
+ threshold: graphData.percentile95Score,
3296
+ tier: 1,
3297
+ severity: "error",
3298
+ message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
3190
3299
  };
3300
+ }
3301
+ function collectFunctionViolations(filePath, fn, thresholds, graphData) {
3302
+ const checks = [
3303
+ checkCyclomaticComplexity(filePath, fn, thresholds.cyclomaticComplexity),
3304
+ checkNestingDepth(filePath, fn, thresholds.nestingDepth.warn),
3305
+ checkFunctionLength(filePath, fn, thresholds.functionLength.warn),
3306
+ checkParameterCount(filePath, fn, thresholds.parameterCount.warn)
3307
+ ];
3308
+ if (graphData) {
3309
+ checks.push(checkHotspot(filePath, fn, graphData));
3310
+ }
3311
+ return checks.filter((v) => v !== null);
3312
+ }
3313
+ async function detectComplexityViolations(snapshot, config, graphData) {
3314
+ const violations = [];
3315
+ const thresholds = resolveThresholds(config);
3191
3316
  let totalFunctions = 0;
3192
3317
  for (const file of snapshot.files) {
3193
3318
  let content;
@@ -3197,107 +3322,16 @@ async function detectComplexityViolations(snapshot, config, graphData) {
3197
3322
  continue;
3198
3323
  }
3199
3324
  const lines = content.split("\n");
3200
- if (lines.length > thresholds.fileLength.info) {
3201
- violations.push({
3202
- file: file.path,
3203
- function: "<file>",
3204
- line: 1,
3205
- metric: "fileLength",
3206
- value: lines.length,
3207
- threshold: thresholds.fileLength.info,
3208
- tier: 3,
3209
- severity: "info",
3210
- message: `File has ${lines.length} lines (threshold: ${thresholds.fileLength.info})`
3211
- });
3212
- }
3325
+ const fileLenViolation = checkFileLengthViolation(
3326
+ file.path,
3327
+ lines.length,
3328
+ thresholds.fileLength.info
3329
+ );
3330
+ if (fileLenViolation) violations.push(fileLenViolation);
3213
3331
  const functions = extractFunctions(content);
3214
3332
  totalFunctions += functions.length;
3215
3333
  for (const fn of functions) {
3216
- const complexity = computeCyclomaticComplexity(fn.body);
3217
- if (complexity > thresholds.cyclomaticComplexity.error) {
3218
- violations.push({
3219
- file: file.path,
3220
- function: fn.name,
3221
- line: fn.line,
3222
- metric: "cyclomaticComplexity",
3223
- value: complexity,
3224
- threshold: thresholds.cyclomaticComplexity.error,
3225
- tier: 1,
3226
- severity: "error",
3227
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.cyclomaticComplexity.error})`
3228
- });
3229
- } else if (complexity > thresholds.cyclomaticComplexity.warn) {
3230
- violations.push({
3231
- file: file.path,
3232
- function: fn.name,
3233
- line: fn.line,
3234
- metric: "cyclomaticComplexity",
3235
- value: complexity,
3236
- threshold: thresholds.cyclomaticComplexity.warn,
3237
- tier: 2,
3238
- severity: "warning",
3239
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.cyclomaticComplexity.warn})`
3240
- });
3241
- }
3242
- const nestingDepth = computeNestingDepth(fn.body);
3243
- if (nestingDepth > thresholds.nestingDepth.warn) {
3244
- violations.push({
3245
- file: file.path,
3246
- function: fn.name,
3247
- line: fn.line,
3248
- metric: "nestingDepth",
3249
- value: nestingDepth,
3250
- threshold: thresholds.nestingDepth.warn,
3251
- tier: 2,
3252
- severity: "warning",
3253
- message: `Function "${fn.name}" has nesting depth of ${nestingDepth} (threshold: ${thresholds.nestingDepth.warn})`
3254
- });
3255
- }
3256
- const fnLength = fn.endLine - fn.startLine + 1;
3257
- if (fnLength > thresholds.functionLength.warn) {
3258
- violations.push({
3259
- file: file.path,
3260
- function: fn.name,
3261
- line: fn.line,
3262
- metric: "functionLength",
3263
- value: fnLength,
3264
- threshold: thresholds.functionLength.warn,
3265
- tier: 2,
3266
- severity: "warning",
3267
- message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${thresholds.functionLength.warn})`
3268
- });
3269
- }
3270
- if (fn.params > thresholds.parameterCount.warn) {
3271
- violations.push({
3272
- file: file.path,
3273
- function: fn.name,
3274
- line: fn.line,
3275
- metric: "parameterCount",
3276
- value: fn.params,
3277
- threshold: thresholds.parameterCount.warn,
3278
- tier: 2,
3279
- severity: "warning",
3280
- message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${thresholds.parameterCount.warn})`
3281
- });
3282
- }
3283
- if (graphData) {
3284
- const hotspot = graphData.hotspots.find(
3285
- (h) => h.file === file.path && h.function === fn.name
3286
- );
3287
- if (hotspot && hotspot.hotspotScore > graphData.percentile95Score) {
3288
- violations.push({
3289
- file: file.path,
3290
- function: fn.name,
3291
- line: fn.line,
3292
- metric: "hotspotScore",
3293
- value: hotspot.hotspotScore,
3294
- threshold: graphData.percentile95Score,
3295
- tier: 1,
3296
- severity: "error",
3297
- message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
3298
- });
3299
- }
3300
- }
3334
+ violations.push(...collectFunctionViolations(file.path, fn, thresholds, graphData));
3301
3335
  }
3302
3336
  }
3303
3337
  const errorCount = violations.filter((v) => v.severity === "error").length;
@@ -3929,17 +3963,35 @@ function createUnusedImportFixes(deadCodeReport) {
3929
3963
  reversible: true
3930
3964
  }));
3931
3965
  }
3966
+ var EXPORT_TYPE_KEYWORD = {
3967
+ class: "class",
3968
+ function: "function",
3969
+ variable: "const",
3970
+ type: "type",
3971
+ interface: "interface",
3972
+ enum: "enum"
3973
+ };
3974
+ function getExportKeyword(exportType) {
3975
+ return EXPORT_TYPE_KEYWORD[exportType] ?? "enum";
3976
+ }
3977
+ function getDefaultExportKeyword(exportType) {
3978
+ if (exportType === "class" || exportType === "function") return exportType;
3979
+ return "";
3980
+ }
3932
3981
  function createDeadExportFixes(deadCodeReport) {
3933
- return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
3934
- type: "dead-exports",
3935
- file: exp.file,
3936
- description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3937
- action: "replace",
3938
- oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3939
- newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3940
- safe: true,
3941
- reversible: true
3942
- }));
3982
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => {
3983
+ const keyword = exp.isDefault ? getDefaultExportKeyword(exp.type) : getExportKeyword(exp.type);
3984
+ return {
3985
+ type: "dead-exports",
3986
+ file: exp.file,
3987
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3988
+ action: "replace",
3989
+ oldContent: exp.isDefault ? `export default ${keyword} ${exp.name}` : `export ${keyword} ${exp.name}`,
3990
+ newContent: `${keyword} ${exp.name}`,
3991
+ safe: true,
3992
+ reversible: true
3993
+ };
3994
+ });
3943
3995
  }
3944
3996
  function createCommentedCodeFixes(blocks) {
3945
3997
  return blocks.map((block) => ({
@@ -4118,53 +4170,80 @@ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
4118
4170
  "dead-internal"
4119
4171
  ]);
4120
4172
  var idCounter = 0;
4173
+ var DEAD_CODE_FIX_ACTIONS = {
4174
+ "dead-export": "Remove export keyword",
4175
+ "dead-file": "Delete file",
4176
+ "commented-code": "Delete commented block",
4177
+ "unused-import": "Remove import"
4178
+ };
4179
+ function classifyDeadCode(input) {
4180
+ if (input.isPublicApi) {
4181
+ return {
4182
+ safety: "unsafe",
4183
+ safetyReason: "Public API export may have external consumers",
4184
+ suggestion: "Deprecate before removing"
4185
+ };
4186
+ }
4187
+ const fixAction = DEAD_CODE_FIX_ACTIONS[input.type];
4188
+ if (fixAction) {
4189
+ return {
4190
+ safety: "safe",
4191
+ safetyReason: "zero importers, non-public",
4192
+ fixAction,
4193
+ suggestion: fixAction
4194
+ };
4195
+ }
4196
+ if (input.type === "orphaned-dep") {
4197
+ return {
4198
+ safety: "probably-safe",
4199
+ safetyReason: "No imports found, but needs install+test verification",
4200
+ fixAction: "Remove from package.json",
4201
+ suggestion: "Remove from package.json"
4202
+ };
4203
+ }
4204
+ return {
4205
+ safety: "unsafe",
4206
+ safetyReason: "Unknown dead code type",
4207
+ suggestion: "Manual review required"
4208
+ };
4209
+ }
4210
+ function classifyArchitecture(input) {
4211
+ if (input.type === "import-ordering") {
4212
+ return {
4213
+ safety: "safe",
4214
+ safetyReason: "Mechanical reorder, no semantic change",
4215
+ fixAction: "Reorder imports",
4216
+ suggestion: "Reorder imports"
4217
+ };
4218
+ }
4219
+ if (input.type === "forbidden-import" && input.hasAlternative) {
4220
+ return {
4221
+ safety: "probably-safe",
4222
+ safetyReason: "Alternative configured, needs typecheck+test",
4223
+ fixAction: "Replace with configured alternative",
4224
+ suggestion: "Replace with configured alternative"
4225
+ };
4226
+ }
4227
+ return {
4228
+ safety: "unsafe",
4229
+ safetyReason: `${input.type} requires structural changes`,
4230
+ suggestion: "Restructure code to fix violation"
4231
+ };
4232
+ }
4121
4233
  function classifyFinding(input) {
4122
4234
  idCounter++;
4123
4235
  const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
4124
- let safety;
4125
- let safetyReason;
4126
- let fixAction;
4127
- let suggestion;
4236
+ let classification;
4128
4237
  if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
4129
- safety = "unsafe";
4130
- safetyReason = `${input.type} requires human judgment`;
4131
- suggestion = "Review and refactor manually";
4238
+ classification = {
4239
+ safety: "unsafe",
4240
+ safetyReason: `${input.type} requires human judgment`,
4241
+ suggestion: "Review and refactor manually"
4242
+ };
4132
4243
  } else if (input.concern === "dead-code") {
4133
- if (input.isPublicApi) {
4134
- safety = "unsafe";
4135
- safetyReason = "Public API export may have external consumers";
4136
- suggestion = "Deprecate before removing";
4137
- } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
4138
- safety = "safe";
4139
- safetyReason = "zero importers, non-public";
4140
- fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
4141
- suggestion = fixAction;
4142
- } else if (input.type === "orphaned-dep") {
4143
- safety = "probably-safe";
4144
- safetyReason = "No imports found, but needs install+test verification";
4145
- fixAction = "Remove from package.json";
4146
- suggestion = fixAction;
4147
- } else {
4148
- safety = "unsafe";
4149
- safetyReason = "Unknown dead code type";
4150
- suggestion = "Manual review required";
4151
- }
4244
+ classification = classifyDeadCode(input);
4152
4245
  } else {
4153
- if (input.type === "import-ordering") {
4154
- safety = "safe";
4155
- safetyReason = "Mechanical reorder, no semantic change";
4156
- fixAction = "Reorder imports";
4157
- suggestion = fixAction;
4158
- } else if (input.type === "forbidden-import" && input.hasAlternative) {
4159
- safety = "probably-safe";
4160
- safetyReason = "Alternative configured, needs typecheck+test";
4161
- fixAction = "Replace with configured alternative";
4162
- suggestion = fixAction;
4163
- } else {
4164
- safety = "unsafe";
4165
- safetyReason = `${input.type} requires structural changes`;
4166
- suggestion = "Restructure code to fix violation";
4167
- }
4246
+ classification = classifyArchitecture(input);
4168
4247
  }
4169
4248
  return {
4170
4249
  id,
@@ -4173,11 +4252,11 @@ function classifyFinding(input) {
4173
4252
  ...input.line !== void 0 ? { line: input.line } : {},
4174
4253
  type: input.type,
4175
4254
  description: input.description,
4176
- safety,
4177
- safetyReason,
4255
+ safety: classification.safety,
4256
+ safetyReason: classification.safetyReason,
4178
4257
  hotspotDowngraded: false,
4179
- ...fixAction !== void 0 ? { fixAction } : {},
4180
- suggestion
4258
+ ...classification.fixAction !== void 0 ? { fixAction: classification.fixAction } : {},
4259
+ suggestion: classification.suggestion
4181
4260
  };
4182
4261
  }
4183
4262
  function applyHotspotDowngrade(finding, hotspot) {
@@ -4471,43 +4550,57 @@ var BenchmarkRunner = class {
4471
4550
  };
4472
4551
  }
4473
4552
  }
4553
+ /**
4554
+ * Extract a BenchmarkResult from a single assertion with benchmark data.
4555
+ */
4556
+ parseBenchAssertion(assertion, file) {
4557
+ if (!assertion.benchmark) return null;
4558
+ const bench = assertion.benchmark;
4559
+ return {
4560
+ name: assertion.fullName || assertion.title || "unknown",
4561
+ file: file.replace(process.cwd() + "/", ""),
4562
+ opsPerSec: Math.round(bench.hz || 0),
4563
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
4564
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
4565
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
4566
+ };
4567
+ }
4568
+ /**
4569
+ * Extract JSON from output that may contain non-JSON preamble.
4570
+ */
4571
+ extractJson(output) {
4572
+ const jsonStart = output.indexOf("{");
4573
+ const jsonEnd = output.lastIndexOf("}");
4574
+ if (jsonStart === -1 || jsonEnd === -1) return null;
4575
+ return JSON.parse(output.slice(jsonStart, jsonEnd + 1));
4576
+ }
4474
4577
  /**
4475
4578
  * Parse vitest bench JSON reporter output into BenchmarkResult[].
4476
4579
  * Vitest bench JSON output contains testResults with benchmark data.
4477
4580
  */
4478
- parseVitestBenchOutput(output) {
4581
+ collectAssertionResults(testResults) {
4479
4582
  const results = [];
4480
- try {
4481
- const jsonStart = output.indexOf("{");
4482
- const jsonEnd = output.lastIndexOf("}");
4483
- if (jsonStart === -1 || jsonEnd === -1) return results;
4484
- const jsonStr = output.slice(jsonStart, jsonEnd + 1);
4485
- const parsed = JSON.parse(jsonStr);
4486
- if (parsed.testResults) {
4487
- for (const testResult of parsed.testResults) {
4488
- const file = testResult.name || testResult.filepath || "";
4489
- if (testResult.assertionResults) {
4490
- for (const assertion of testResult.assertionResults) {
4491
- if (assertion.benchmark) {
4492
- const bench = assertion.benchmark;
4493
- results.push({
4494
- name: assertion.fullName || assertion.title || "unknown",
4495
- file: file.replace(process.cwd() + "/", ""),
4496
- opsPerSec: Math.round(bench.hz || 0),
4497
- meanMs: bench.mean ? bench.mean * 1e3 : 0,
4498
- // p99: use actual p99 if available, otherwise estimate as 1.5× mean
4499
- p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
4500
- marginOfError: bench.rme ? bench.rme / 100 : 0.05
4501
- });
4502
- }
4503
- }
4504
- }
4505
- }
4583
+ for (const testResult of testResults) {
4584
+ const file = testResult.name || testResult.filepath || "";
4585
+ const assertions = testResult.assertionResults ?? [];
4586
+ for (const assertion of assertions) {
4587
+ const result = this.parseBenchAssertion(assertion, file);
4588
+ if (result) results.push(result);
4506
4589
  }
4507
- } catch {
4508
4590
  }
4509
4591
  return results;
4510
4592
  }
4593
+ parseVitestBenchOutput(output) {
4594
+ try {
4595
+ const parsed = this.extractJson(output);
4596
+ if (!parsed) return [];
4597
+ const testResults = parsed.testResults;
4598
+ if (!testResults) return [];
4599
+ return this.collectAssertionResults(testResults);
4600
+ } catch {
4601
+ return [];
4602
+ }
4603
+ }
4511
4604
  };
4512
4605
 
4513
4606
  // src/performance/regression-detector.ts
@@ -4817,39 +4910,31 @@ function resetFeedbackConfig() {
4817
4910
  }
4818
4911
 
4819
4912
  // src/feedback/review/diff-analyzer.ts
4913
+ function detectFileStatus(part) {
4914
+ if (/new file mode/.test(part)) return "added";
4915
+ if (/deleted file mode/.test(part)) return "deleted";
4916
+ if (part.includes("rename from")) return "renamed";
4917
+ return "modified";
4918
+ }
4919
+ function parseDiffPart(part) {
4920
+ if (!part.trim()) return null;
4921
+ const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
4922
+ if (!headerMatch || !headerMatch[2]) return null;
4923
+ const additionRegex = /^\+(?!\+\+)/gm;
4924
+ const deletionRegex = /^-(?!--)/gm;
4925
+ return {
4926
+ path: headerMatch[2],
4927
+ status: detectFileStatus(part),
4928
+ additions: (part.match(additionRegex) || []).length,
4929
+ deletions: (part.match(deletionRegex) || []).length
4930
+ };
4931
+ }
4820
4932
  function parseDiff(diff2) {
4821
4933
  try {
4822
4934
  if (!diff2.trim()) {
4823
4935
  return (0, import_types.Ok)({ diff: diff2, files: [] });
4824
4936
  }
4825
- const files = [];
4826
- const newFileRegex = /new file mode/;
4827
- const deletedFileRegex = /deleted file mode/;
4828
- const additionRegex = /^\+(?!\+\+)/gm;
4829
- const deletionRegex = /^-(?!--)/gm;
4830
- const diffParts = diff2.split(/(?=diff --git)/);
4831
- for (const part of diffParts) {
4832
- if (!part.trim()) continue;
4833
- const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
4834
- if (!headerMatch || !headerMatch[2]) continue;
4835
- const filePath = headerMatch[2];
4836
- let status = "modified";
4837
- if (newFileRegex.test(part)) {
4838
- status = "added";
4839
- } else if (deletedFileRegex.test(part)) {
4840
- status = "deleted";
4841
- } else if (part.includes("rename from")) {
4842
- status = "renamed";
4843
- }
4844
- const additions = (part.match(additionRegex) || []).length;
4845
- const deletions = (part.match(deletionRegex) || []).length;
4846
- files.push({
4847
- path: filePath,
4848
- status,
4849
- additions,
4850
- deletions
4851
- });
4852
- }
4937
+ const files = diff2.split(/(?=diff --git)/).map(parseDiffPart).filter((f) => f !== null);
4853
4938
  return (0, import_types.Ok)({ diff: diff2, files });
4854
4939
  } catch (error) {
4855
4940
  return (0, import_types.Err)({
@@ -5015,107 +5100,123 @@ var ChecklistBuilder = class {
5015
5100
  this.graphImpactData = graphImpactData;
5016
5101
  return this;
5017
5102
  }
5018
- async run(changes) {
5019
- const startTime = Date.now();
5103
+ /**
5104
+ * Build a single harness check item with or without graph data.
5105
+ */
5106
+ buildHarnessCheckItem(id, check, fallbackDetails, graphItemBuilder) {
5107
+ if (this.graphHarnessData && graphItemBuilder) {
5108
+ return graphItemBuilder();
5109
+ }
5110
+ return {
5111
+ id,
5112
+ category: "harness",
5113
+ check,
5114
+ passed: true,
5115
+ severity: "info",
5116
+ details: fallbackDetails
5117
+ };
5118
+ }
5119
+ /**
5120
+ * Build all harness check items based on harnessOptions and graph data.
5121
+ */
5122
+ buildHarnessItems() {
5123
+ if (!this.harnessOptions) return [];
5020
5124
  const items = [];
5021
- if (this.harnessOptions) {
5022
- if (this.harnessOptions.context !== false) {
5023
- if (this.graphHarnessData) {
5024
- items.push({
5025
- id: "harness-context",
5026
- category: "harness",
5027
- check: "Context validation",
5028
- passed: this.graphHarnessData.graphExists && this.graphHarnessData.nodeCount > 0,
5029
- severity: "info",
5030
- details: this.graphHarnessData.graphExists ? `Graph loaded: ${this.graphHarnessData.nodeCount} nodes, ${this.graphHarnessData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5031
- });
5032
- } else {
5033
- items.push({
5125
+ const graphData = this.graphHarnessData;
5126
+ if (this.harnessOptions.context !== false) {
5127
+ items.push(
5128
+ this.buildHarnessCheckItem(
5129
+ "harness-context",
5130
+ "Context validation",
5131
+ "Harness context validation not yet integrated (run with graph for real checks)",
5132
+ graphData ? () => ({
5034
5133
  id: "harness-context",
5035
5134
  category: "harness",
5036
5135
  check: "Context validation",
5037
- passed: true,
5038
- severity: "info",
5039
- details: "Harness context validation not yet integrated (run with graph for real checks)"
5040
- });
5041
- }
5042
- }
5043
- if (this.harnessOptions.constraints !== false) {
5044
- if (this.graphHarnessData) {
5045
- const violations = this.graphHarnessData.constraintViolations;
5046
- items.push({
5047
- id: "harness-constraints",
5048
- category: "harness",
5049
- check: "Constraint validation",
5050
- passed: violations === 0,
5051
- severity: violations > 0 ? "error" : "info",
5052
- details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5053
- });
5054
- } else {
5055
- items.push({
5056
- id: "harness-constraints",
5057
- category: "harness",
5058
- check: "Constraint validation",
5059
- passed: true,
5060
- severity: "info",
5061
- details: "Harness constraint validation not yet integrated (run with graph for real checks)"
5062
- });
5063
- }
5064
- }
5065
- if (this.harnessOptions.entropy !== false) {
5066
- if (this.graphHarnessData) {
5067
- const issues = this.graphHarnessData.unreachableNodes + this.graphHarnessData.undocumentedFiles;
5068
- items.push({
5069
- id: "harness-entropy",
5070
- category: "harness",
5071
- check: "Entropy detection",
5072
- passed: issues === 0,
5073
- severity: issues > 0 ? "warning" : "info",
5074
- details: issues === 0 ? "No entropy issues detected" : `${this.graphHarnessData.unreachableNodes} unreachable node(s), ${this.graphHarnessData.undocumentedFiles} undocumented file(s)`
5075
- });
5076
- } else {
5077
- items.push({
5078
- id: "harness-entropy",
5079
- category: "harness",
5080
- check: "Entropy detection",
5081
- passed: true,
5136
+ passed: graphData.graphExists && graphData.nodeCount > 0,
5082
5137
  severity: "info",
5083
- details: "Harness entropy detection not yet integrated (run with graph for real checks)"
5084
- });
5085
- }
5086
- }
5138
+ details: graphData.graphExists ? `Graph loaded: ${graphData.nodeCount} nodes, ${graphData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5139
+ }) : void 0
5140
+ )
5141
+ );
5142
+ }
5143
+ if (this.harnessOptions.constraints !== false) {
5144
+ items.push(
5145
+ this.buildHarnessCheckItem(
5146
+ "harness-constraints",
5147
+ "Constraint validation",
5148
+ "Harness constraint validation not yet integrated (run with graph for real checks)",
5149
+ graphData ? () => {
5150
+ const violations = graphData.constraintViolations;
5151
+ return {
5152
+ id: "harness-constraints",
5153
+ category: "harness",
5154
+ check: "Constraint validation",
5155
+ passed: violations === 0,
5156
+ severity: violations > 0 ? "error" : "info",
5157
+ details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5158
+ };
5159
+ } : void 0
5160
+ )
5161
+ );
5162
+ }
5163
+ if (this.harnessOptions.entropy !== false) {
5164
+ items.push(
5165
+ this.buildHarnessCheckItem(
5166
+ "harness-entropy",
5167
+ "Entropy detection",
5168
+ "Harness entropy detection not yet integrated (run with graph for real checks)",
5169
+ graphData ? () => {
5170
+ const issues = graphData.unreachableNodes + graphData.undocumentedFiles;
5171
+ return {
5172
+ id: "harness-entropy",
5173
+ category: "harness",
5174
+ check: "Entropy detection",
5175
+ passed: issues === 0,
5176
+ severity: issues > 0 ? "warning" : "info",
5177
+ details: issues === 0 ? "No entropy issues detected" : `${graphData.unreachableNodes} unreachable node(s), ${graphData.undocumentedFiles} undocumented file(s)`
5178
+ };
5179
+ } : void 0
5180
+ )
5181
+ );
5182
+ }
5183
+ return items;
5184
+ }
5185
+ /**
5186
+ * Execute a single custom rule and return a ReviewItem.
5187
+ */
5188
+ async executeCustomRule(rule, changes) {
5189
+ try {
5190
+ const result = await rule.check(changes, this.rootDir);
5191
+ const item = {
5192
+ id: rule.id,
5193
+ category: "custom",
5194
+ check: rule.name,
5195
+ passed: result.passed,
5196
+ severity: rule.severity,
5197
+ details: result.details
5198
+ };
5199
+ if (result.suggestion !== void 0) item.suggestion = result.suggestion;
5200
+ if (result.file !== void 0) item.file = result.file;
5201
+ if (result.line !== void 0) item.line = result.line;
5202
+ return item;
5203
+ } catch (error) {
5204
+ return {
5205
+ id: rule.id,
5206
+ category: "custom",
5207
+ check: rule.name,
5208
+ passed: false,
5209
+ severity: "error",
5210
+ details: `Rule execution failed: ${String(error)}`
5211
+ };
5087
5212
  }
5213
+ }
5214
+ async run(changes) {
5215
+ const startTime = Date.now();
5216
+ const items = [];
5217
+ items.push(...this.buildHarnessItems());
5088
5218
  for (const rule of this.customRules) {
5089
- try {
5090
- const result = await rule.check(changes, this.rootDir);
5091
- const item = {
5092
- id: rule.id,
5093
- category: "custom",
5094
- check: rule.name,
5095
- passed: result.passed,
5096
- severity: rule.severity,
5097
- details: result.details
5098
- };
5099
- if (result.suggestion !== void 0) {
5100
- item.suggestion = result.suggestion;
5101
- }
5102
- if (result.file !== void 0) {
5103
- item.file = result.file;
5104
- }
5105
- if (result.line !== void 0) {
5106
- item.line = result.line;
5107
- }
5108
- items.push(item);
5109
- } catch (error) {
5110
- items.push({
5111
- id: rule.id,
5112
- category: "custom",
5113
- check: rule.name,
5114
- passed: false,
5115
- severity: "error",
5116
- details: `Rule execution failed: ${String(error)}`
5117
- });
5118
- }
5219
+ items.push(await this.executeCustomRule(rule, changes));
5119
5220
  }
5120
5221
  if (this.diffOptions) {
5121
5222
  const diffResult = await analyzeDiff(changes, this.diffOptions, this.graphImpactData);
@@ -5130,7 +5231,6 @@ var ChecklistBuilder = class {
5130
5231
  const checklist = {
5131
5232
  items,
5132
5233
  passed: failed === 0,
5133
- // Pass if no failed items
5134
5234
  summary: {
5135
5235
  total: items.length,
5136
5236
  passed,
@@ -6365,7 +6465,32 @@ function aggregateByCategory(results) {
6365
6465
  });
6366
6466
  }
6367
6467
  }
6368
- return map;
6468
+ return map;
6469
+ }
6470
+ function classifyViolations(violations, baselineViolationIds) {
6471
+ const newViolations = [];
6472
+ const preExisting = [];
6473
+ for (const violation of violations) {
6474
+ if (baselineViolationIds.has(violation.id)) {
6475
+ preExisting.push(violation.id);
6476
+ } else {
6477
+ newViolations.push(violation);
6478
+ }
6479
+ }
6480
+ return { newViolations, preExisting };
6481
+ }
6482
+ function findResolvedViolations(baselineCategory, currentViolationIds) {
6483
+ if (!baselineCategory) return [];
6484
+ return baselineCategory.violationIds.filter((id) => !currentViolationIds.has(id));
6485
+ }
6486
+ function collectOrphanedBaselineViolations(baseline, visitedCategories) {
6487
+ const resolved = [];
6488
+ for (const [category, baselineCategory] of Object.entries(baseline.metrics)) {
6489
+ if (!visitedCategories.has(category) && baselineCategory) {
6490
+ resolved.push(...baselineCategory.violationIds);
6491
+ }
6492
+ }
6493
+ return resolved;
6369
6494
  }
6370
6495
  function diff(current, baseline) {
6371
6496
  const aggregated = aggregateByCategory(current);
@@ -6379,21 +6504,11 @@ function diff(current, baseline) {
6379
6504
  const baselineCategory = baseline.metrics[category];
6380
6505
  const baselineViolationIds = new Set(baselineCategory?.violationIds ?? []);
6381
6506
  const baselineValue = baselineCategory?.value ?? 0;
6382
- for (const violation of agg.violations) {
6383
- if (baselineViolationIds.has(violation.id)) {
6384
- preExisting.push(violation.id);
6385
- } else {
6386
- newViolations.push(violation);
6387
- }
6388
- }
6507
+ const classified = classifyViolations(agg.violations, baselineViolationIds);
6508
+ newViolations.push(...classified.newViolations);
6509
+ preExisting.push(...classified.preExisting);
6389
6510
  const currentViolationIds = new Set(agg.violations.map((v) => v.id));
6390
- if (baselineCategory) {
6391
- for (const id of baselineCategory.violationIds) {
6392
- if (!currentViolationIds.has(id)) {
6393
- resolvedViolations.push(id);
6394
- }
6395
- }
6396
- }
6511
+ resolvedViolations.push(...findResolvedViolations(baselineCategory, currentViolationIds));
6397
6512
  if (baselineCategory && agg.value > baselineValue) {
6398
6513
  regressions.push({
6399
6514
  category,
@@ -6403,16 +6518,9 @@ function diff(current, baseline) {
6403
6518
  });
6404
6519
  }
6405
6520
  }
6406
- for (const [category, baselineCategory] of Object.entries(baseline.metrics)) {
6407
- if (!visitedCategories.has(category) && baselineCategory) {
6408
- for (const id of baselineCategory.violationIds) {
6409
- resolvedViolations.push(id);
6410
- }
6411
- }
6412
- }
6413
- const passed = newViolations.length === 0 && regressions.length === 0;
6521
+ resolvedViolations.push(...collectOrphanedBaselineViolations(baseline, visitedCategories));
6414
6522
  return {
6415
- passed,
6523
+ passed: newViolations.length === 0 && regressions.length === 0,
6416
6524
  newViolations,
6417
6525
  resolvedViolations,
6418
6526
  preExisting,
@@ -6421,7 +6529,7 @@ function diff(current, baseline) {
6421
6529
  }
6422
6530
 
6423
6531
  // src/architecture/config.ts
6424
- function resolveThresholds(scope, config) {
6532
+ function resolveThresholds2(scope, config) {
6425
6533
  const projectThresholds = {};
6426
6534
  for (const [key, val] of Object.entries(config.thresholds)) {
6427
6535
  projectThresholds[key] = typeof val === "object" && val !== null && !Array.isArray(val) ? { ...val } : val;
@@ -6758,6 +6866,8 @@ var INDEX_FILE = "index.json";
6758
6866
  var SESSIONS_DIR = "sessions";
6759
6867
  var SESSION_INDEX_FILE = "index.md";
6760
6868
  var SUMMARY_FILE = "summary.md";
6869
+ var SESSION_STATE_FILE = "session-state.json";
6870
+ var ARCHIVE_DIR = "archive";
6761
6871
 
6762
6872
  // src/state/stream-resolver.ts
6763
6873
  var STREAMS_DIR = "streams";
@@ -7666,6 +7776,143 @@ function listActiveSessions(projectPath) {
7666
7776
  }
7667
7777
  }
7668
7778
 
7779
+ // src/state/session-sections.ts
7780
+ var fs14 = __toESM(require("fs"));
7781
+ var path11 = __toESM(require("path"));
7782
+ var import_types14 = require("@harness-engineering/types");
7783
// Produce a sections map with every known section name bound to a fresh empty list.
function emptySections() {
  return Object.fromEntries(
    import_types14.SESSION_SECTION_NAMES.map((name) => [name, []])
  );
}
7790
// Load the persisted session-state JSON for a session and normalize it into a
// complete sections map. A missing file yields empty sections (Ok); a read or
// parse failure yields Err. Sections whose stored value is not an array keep
// the empty default.
async function loadSessionState(projectPath, sessionSlug) {
  const dirResult = resolveSessionDir(projectPath, sessionSlug);
  if (!dirResult.ok) return dirResult;
  const statePath = path11.join(dirResult.value, SESSION_STATE_FILE);
  if (!fs14.existsSync(statePath)) {
    return (0, import_types.Ok)(emptySections());
  }
  try {
    const parsed = JSON.parse(fs14.readFileSync(statePath, "utf-8"));
    const sections = emptySections();
    for (const name of import_types14.SESSION_SECTION_NAMES) {
      // Only accept well-formed (array) section payloads from disk.
      if (Array.isArray(parsed[name])) {
        sections[name] = parsed[name];
      }
    }
    return (0, import_types.Ok)(sections);
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    return (0, import_types.Err)(new Error(`Failed to load session state: ${detail}`));
  }
}
7816
// Persist the sections map as pretty-printed JSON inside the session directory,
// creating the directory if it does not yet exist. Returns Ok(undefined) or Err.
async function saveSessionState(projectPath, sessionSlug, sections) {
  const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
  if (!dirResult.ok) return dirResult;
  const statePath = path11.join(dirResult.value, SESSION_STATE_FILE);
  try {
    fs14.writeFileSync(statePath, JSON.stringify(sections, null, 2));
    return (0, import_types.Ok)(void 0);
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    return (0, import_types.Err)(new Error(`Failed to save session state: ${detail}`));
  }
}
7832
// Public read API: returns the full sections map for a session (delegates to
// loadSessionState, which handles missing files and parse errors).
async function readSessionSections(projectPath, sessionSlug) {
  return await loadSessionState(projectPath, sessionSlug);
}
7835
// Public read API: returns the entry list of one named section, or the
// underlying load error.
async function readSessionSection(projectPath, sessionSlug, section) {
  const loaded = await loadSessionState(projectPath, sessionSlug);
  return loaded.ok ? (0, import_types.Ok)(loaded.value[section]) : loaded;
}
7840
// Append a new "active" entry, authored by the given skill, to one section of
// the session state and persist the result. Returns Ok(entry) on success.
async function appendSessionEntry(projectPath, sessionSlug, section, authorSkill, content) {
  const loaded = await loadSessionState(projectPath, sessionSlug);
  if (!loaded.ok) return loaded;
  const entry = {
    id: generateEntryId(),
    timestamp: new Date().toISOString(),
    authorSkill,
    content,
    status: "active"
  };
  loaded.value[section].push(entry);
  const saved = await saveSessionState(projectPath, sessionSlug, loaded.value);
  return saved.ok ? (0, import_types.Ok)(entry) : saved;
}
7856
// Change the status of an existing entry and persist the session state.
// Returns Err when the entry id is not present in the named section.
async function updateSessionEntryStatus(projectPath, sessionSlug, section, entryId, newStatus) {
  const loaded = await loadSessionState(projectPath, sessionSlug);
  if (!loaded.ok) return loaded;
  const sections = loaded.value;
  const target = sections[section].find((candidate) => candidate.id === entryId);
  if (target === undefined) {
    return (0, import_types.Err)(new Error(`Entry '${entryId}' not found in section '${section}'`));
  }
  target.status = newStatus;
  const saved = await saveSessionState(projectPath, sessionSlug, sections);
  return saved.ok ? (0, import_types.Ok)(target) : saved;
}
7869
// Generate a compact, mostly-unique entry id: base-36 millisecond timestamp
// plus a 6-character base-36 random suffix, joined with a dash.
function generateEntryId() {
  const timestamp = Date.now().toString(36);
  // padEnd guards against Math.random() values whose base-36 expansion is
  // shorter than 6 digits (e.g. 0.5 -> "0.i" -> suffix "i"), which would
  // otherwise shrink the random portion and raise collision odds.
  const random = Math.random().toString(36).substring(2, 8).padEnd(6, "0");
  return `${timestamp}-${random}`;
}
7874
+
7875
+ // src/state/session-archive.ts
7876
+ var fs15 = __toESM(require("fs"));
7877
+ var path12 = __toESM(require("path"));
7878
// Move a session directory into <HARNESS_DIR>/archive/sessions under a
// date-suffixed name (adding "-N" on collisions). When rename fails with
// EXDEV (cross-filesystem move) the directory is copied then removed instead.
async function archiveSession(projectPath, sessionSlug) {
  const dirResult = resolveSessionDir(projectPath, sessionSlug);
  if (!dirResult.ok) return dirResult;
  const sessionDir = dirResult.value;
  if (!fs15.existsSync(sessionDir)) {
    return (0, import_types.Err)(new Error(`Session '${sessionSlug}' not found at ${sessionDir}`));
  }
  const archiveBase = path12.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
  try {
    fs15.mkdirSync(archiveBase, { recursive: true });
    const date = new Date().toISOString().split("T")[0];
    // Probe for a destination name that does not collide with earlier
    // archives of the same session on the same day.
    let archiveName = `${sessionSlug}-${date}`;
    for (let counter = 1; fs15.existsSync(path12.join(archiveBase, archiveName)); counter++) {
      archiveName = `${sessionSlug}-${date}-${counter}`;
    }
    const dest = path12.join(archiveBase, archiveName);
    try {
      fs15.renameSync(sessionDir, dest);
    } catch (renameErr) {
      const crossDevice = renameErr instanceof Error && "code" in renameErr && renameErr.code === "EXDEV";
      if (!crossDevice) throw renameErr;
      // rename(2) cannot cross filesystem boundaries: copy the tree, then
      // remove the source.
      fs15.cpSync(sessionDir, dest, { recursive: true });
      fs15.rmSync(sessionDir, { recursive: true });
    }
    return (0, import_types.Ok)(void 0);
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    return (0, import_types.Err)(new Error(`Failed to archive session: ${detail}`));
  }
}
7915
+
7669
7916
  // src/workflow/runner.ts
7670
7917
  async function executeWorkflow(workflow, executor) {
7671
7918
  const stepResults = [];
@@ -7815,7 +8062,7 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
7815
8062
  }
7816
8063
 
7817
8064
  // src/security/scanner.ts
7818
- var fs15 = __toESM(require("fs/promises"));
8065
+ var fs17 = __toESM(require("fs/promises"));
7819
8066
 
7820
8067
  // src/security/rules/registry.ts
7821
8068
  var RuleRegistry = class {
@@ -7902,15 +8149,15 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
7902
8149
  }
7903
8150
 
7904
8151
  // src/security/stack-detector.ts
7905
- var fs14 = __toESM(require("fs"));
7906
- var path11 = __toESM(require("path"));
8152
+ var fs16 = __toESM(require("fs"));
8153
+ var path13 = __toESM(require("path"));
7907
8154
  function detectStack(projectRoot) {
7908
8155
  const stacks = [];
7909
- const pkgJsonPath = path11.join(projectRoot, "package.json");
7910
- if (fs14.existsSync(pkgJsonPath)) {
8156
+ const pkgJsonPath = path13.join(projectRoot, "package.json");
8157
+ if (fs16.existsSync(pkgJsonPath)) {
7911
8158
  stacks.push("node");
7912
8159
  try {
7913
- const pkgJson = JSON.parse(fs14.readFileSync(pkgJsonPath, "utf-8"));
8160
+ const pkgJson = JSON.parse(fs16.readFileSync(pkgJsonPath, "utf-8"));
7914
8161
  const allDeps = {
7915
8162
  ...pkgJson.dependencies,
7916
8163
  ...pkgJson.devDependencies
@@ -7925,13 +8172,13 @@ function detectStack(projectRoot) {
7925
8172
  } catch {
7926
8173
  }
7927
8174
  }
7928
- const goModPath = path11.join(projectRoot, "go.mod");
7929
- if (fs14.existsSync(goModPath)) {
8175
+ const goModPath = path13.join(projectRoot, "go.mod");
8176
+ if (fs16.existsSync(goModPath)) {
7930
8177
  stacks.push("go");
7931
8178
  }
7932
- const requirementsPath = path11.join(projectRoot, "requirements.txt");
7933
- const pyprojectPath = path11.join(projectRoot, "pyproject.toml");
7934
- if (fs14.existsSync(requirementsPath) || fs14.existsSync(pyprojectPath)) {
8179
+ const requirementsPath = path13.join(projectRoot, "requirements.txt");
8180
+ const pyprojectPath = path13.join(projectRoot, "pyproject.toml");
8181
+ if (fs16.existsSync(requirementsPath) || fs16.existsSync(pyprojectPath)) {
7935
8182
  stacks.push("python");
7936
8183
  }
7937
8184
  return stacks;
@@ -8358,7 +8605,7 @@ var SecurityScanner = class {
8358
8605
  }
8359
8606
  async scanFile(filePath) {
8360
8607
  if (!this.config.enabled) return [];
8361
- const content = await fs15.readFile(filePath, "utf-8");
8608
+ const content = await fs17.readFile(filePath, "utf-8");
8362
8609
  return this.scanContent(content, filePath, 1);
8363
8610
  }
8364
8611
  async scanFiles(filePaths) {
@@ -8383,7 +8630,7 @@ var SecurityScanner = class {
8383
8630
  };
8384
8631
 
8385
8632
  // src/ci/check-orchestrator.ts
8386
- var path12 = __toESM(require("path"));
8633
+ var path14 = __toESM(require("path"));
8387
8634
  var ALL_CHECKS = [
8388
8635
  "validate",
8389
8636
  "deps",
@@ -8396,7 +8643,7 @@ var ALL_CHECKS = [
8396
8643
  ];
8397
8644
  async function runValidateCheck(projectRoot, config) {
8398
8645
  const issues = [];
8399
- const agentsPath = path12.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8646
+ const agentsPath = path14.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8400
8647
  const result = await validateAgentsMap(agentsPath);
8401
8648
  if (!result.ok) {
8402
8649
  issues.push({ severity: "error", message: result.error.message });
@@ -8453,7 +8700,7 @@ async function runDepsCheck(projectRoot, config) {
8453
8700
  }
8454
8701
  async function runDocsCheck(projectRoot, config) {
8455
8702
  const issues = [];
8456
- const docsDir = path12.join(projectRoot, config.docsDir ?? "docs");
8703
+ const docsDir = path14.join(projectRoot, config.docsDir ?? "docs");
8457
8704
  const entropyConfig = config.entropy || {};
8458
8705
  const result = await checkDocCoverage("project", {
8459
8706
  docsDir,
@@ -8478,10 +8725,14 @@ async function runDocsCheck(projectRoot, config) {
8478
8725
  }
8479
8726
  return issues;
8480
8727
  }
8481
- async function runEntropyCheck(projectRoot, _config) {
8728
+ async function runEntropyCheck(projectRoot, config) {
8482
8729
  const issues = [];
8730
+ const entropyConfig = config.entropy || {};
8731
+ const perfConfig = config.performance || {};
8732
+ const entryPoints = entropyConfig.entryPoints ?? perfConfig.entryPoints;
8483
8733
  const analyzer = new EntropyAnalyzer({
8484
8734
  rootDir: projectRoot,
8735
+ ...entryPoints ? { entryPoints } : {},
8485
8736
  analyze: { drift: true, deadCode: true, patterns: false }
8486
8737
  });
8487
8738
  const result = await analyzer.analyze();
@@ -8543,8 +8794,10 @@ async function runSecurityCheck(projectRoot, config) {
8543
8794
  async function runPerfCheck(projectRoot, config) {
8544
8795
  const issues = [];
8545
8796
  const perfConfig = config.performance || {};
8797
+ const entryPoints = perfConfig.entryPoints;
8546
8798
  const perfAnalyzer = new EntropyAnalyzer({
8547
8799
  rootDir: projectRoot,
8800
+ ...entryPoints ? { entryPoints } : {},
8548
8801
  analyze: {
8549
8802
  complexity: perfConfig.complexity || true,
8550
8803
  coupling: perfConfig.coupling || true,
@@ -8725,7 +8978,7 @@ async function runCIChecks(input) {
8725
8978
  }
8726
8979
 
8727
8980
  // src/review/mechanical-checks.ts
8728
- var path13 = __toESM(require("path"));
8981
+ var path15 = __toESM(require("path"));
8729
8982
  async function runMechanicalChecks(options) {
8730
8983
  const { projectRoot, config, skip = [], changedFiles } = options;
8731
8984
  const findings = [];
@@ -8737,7 +8990,7 @@ async function runMechanicalChecks(options) {
8737
8990
  };
8738
8991
  if (!skip.includes("validate")) {
8739
8992
  try {
8740
- const agentsPath = path13.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8993
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8741
8994
  const result = await validateAgentsMap(agentsPath);
8742
8995
  if (!result.ok) {
8743
8996
  statuses.validate = "fail";
@@ -8774,7 +9027,7 @@ async function runMechanicalChecks(options) {
8774
9027
  statuses.validate = "fail";
8775
9028
  findings.push({
8776
9029
  tool: "validate",
8777
- file: path13.join(projectRoot, "AGENTS.md"),
9030
+ file: path15.join(projectRoot, "AGENTS.md"),
8778
9031
  message: err instanceof Error ? err.message : String(err),
8779
9032
  severity: "error"
8780
9033
  });
@@ -8838,7 +9091,7 @@ async function runMechanicalChecks(options) {
8838
9091
  (async () => {
8839
9092
  const localFindings = [];
8840
9093
  try {
8841
- const docsDir = path13.join(projectRoot, config.docsDir ?? "docs");
9094
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
8842
9095
  const result = await checkDocCoverage("project", { docsDir });
8843
9096
  if (!result.ok) {
8844
9097
  statuses["check-docs"] = "warn";
@@ -8865,7 +9118,7 @@ async function runMechanicalChecks(options) {
8865
9118
  statuses["check-docs"] = "warn";
8866
9119
  localFindings.push({
8867
9120
  tool: "check-docs",
8868
- file: path13.join(projectRoot, "docs"),
9121
+ file: path15.join(projectRoot, "docs"),
8869
9122
  message: err instanceof Error ? err.message : String(err),
8870
9123
  severity: "warning"
8871
9124
  });
@@ -9013,7 +9266,7 @@ function detectChangeType(commitMessage, diff2) {
9013
9266
  }
9014
9267
 
9015
9268
  // src/review/context-scoper.ts
9016
- var path14 = __toESM(require("path"));
9269
+ var path16 = __toESM(require("path"));
9017
9270
  var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
9018
9271
  var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
9019
9272
  function computeContextBudget(diffLines) {
@@ -9021,18 +9274,18 @@ function computeContextBudget(diffLines) {
9021
9274
  return diffLines;
9022
9275
  }
9023
9276
  function isWithinProject(absPath, projectRoot) {
9024
- const resolvedRoot = path14.resolve(projectRoot) + path14.sep;
9025
- const resolvedPath = path14.resolve(absPath);
9026
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path14.resolve(projectRoot);
9277
+ const resolvedRoot = path16.resolve(projectRoot) + path16.sep;
9278
+ const resolvedPath = path16.resolve(absPath);
9279
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path16.resolve(projectRoot);
9027
9280
  }
9028
9281
  async function readContextFile(projectRoot, filePath, reason) {
9029
- const absPath = path14.isAbsolute(filePath) ? filePath : path14.join(projectRoot, filePath);
9282
+ const absPath = path16.isAbsolute(filePath) ? filePath : path16.join(projectRoot, filePath);
9030
9283
  if (!isWithinProject(absPath, projectRoot)) return null;
9031
9284
  const result = await readFileContent(absPath);
9032
9285
  if (!result.ok) return null;
9033
9286
  const content = result.value;
9034
9287
  const lines = content.split("\n").length;
9035
- const relPath = path14.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
9288
+ const relPath = path16.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
9036
9289
  return { path: relPath, content, reason, lines };
9037
9290
  }
9038
9291
  function extractImportSources2(content) {
@@ -9047,18 +9300,18 @@ function extractImportSources2(content) {
9047
9300
  }
9048
9301
  async function resolveImportPath2(projectRoot, fromFile, importSource) {
9049
9302
  if (!importSource.startsWith(".")) return null;
9050
- const fromDir = path14.dirname(path14.join(projectRoot, fromFile));
9051
- const basePath = path14.resolve(fromDir, importSource);
9303
+ const fromDir = path16.dirname(path16.join(projectRoot, fromFile));
9304
+ const basePath = path16.resolve(fromDir, importSource);
9052
9305
  if (!isWithinProject(basePath, projectRoot)) return null;
9053
9306
  const relBase = relativePosix(projectRoot, basePath);
9054
9307
  const candidates = [
9055
9308
  relBase + ".ts",
9056
9309
  relBase + ".tsx",
9057
9310
  relBase + ".mts",
9058
- path14.join(relBase, "index.ts")
9311
+ path16.join(relBase, "index.ts")
9059
9312
  ];
9060
9313
  for (const candidate of candidates) {
9061
- const absCandidate = path14.join(projectRoot, candidate);
9314
+ const absCandidate = path16.join(projectRoot, candidate);
9062
9315
  if (await fileExists(absCandidate)) {
9063
9316
  return candidate;
9064
9317
  }
@@ -9066,7 +9319,7 @@ async function resolveImportPath2(projectRoot, fromFile, importSource) {
9066
9319
  return null;
9067
9320
  }
9068
9321
  async function findTestFiles(projectRoot, sourceFile) {
9069
- const baseName = path14.basename(sourceFile, path14.extname(sourceFile));
9322
+ const baseName = path16.basename(sourceFile, path16.extname(sourceFile));
9070
9323
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
9071
9324
  const results = await findFiles(pattern, projectRoot);
9072
9325
  return results.map((f) => relativePosix(projectRoot, f));
@@ -9355,101 +9608,102 @@ function findMissingJsDoc(bundle) {
9355
9608
  }
9356
9609
  return missing;
9357
9610
  }
9358
- function runComplianceAgent(bundle) {
9611
// Compliance check: when a convention rule mentioning "jsdoc" exists, flag
// every exported symbol that findMissingJsDoc reports as undocumented.
function checkMissingJsDoc(bundle, rules) {
  const jsDocRule = rules.find((rule) => rule.text.toLowerCase().includes("jsdoc"));
  if (jsDocRule === undefined) return [];
  const findings = [];
  for (const missing of findMissingJsDoc(bundle)) {
    findings.push({
      id: makeFindingId("compliance", missing.file, missing.line, `Missing JSDoc ${missing.exportName}`),
      file: missing.file,
      lineRange: [missing.line, missing.line],
      domain: "compliance",
      severity: "important",
      title: `Missing JSDoc on exported \`${missing.exportName}\``,
      rationale: `Convention requires all exports to have JSDoc comments (from ${jsDocRule.source}).`,
      suggestion: `Add a JSDoc comment above the export of \`${missing.exportName}\`.`,
      evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${jsDocRule.text}"`],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
9628
// Compliance check for feature changes: suggest attaching a spec/design doc
// when the review bundle contains no spec or convention context files.
function checkFeatureSpec(bundle) {
  const specProvided = bundle.contextFiles.some(
    (ctx) => ctx.reason === "spec" || ctx.reason === "convention"
  );
  if (specProvided) return [];
  if (bundle.changedFiles.length === 0) return [];
  const [firstFile] = bundle.changedFiles;
  const finding = {
    id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "No spec/design doc found for feature change",
    rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
    evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
    validatedBy: "heuristic"
  };
  return [finding];
}
9648
// Compliance check for bugfix changes: suggest supplying commit history when
// none was provided, since history helps verify the root cause was fixed.
function checkBugfixHistory(bundle) {
  const hasHistory = bundle.commitHistory.length > 0;
  if (hasHistory || bundle.changedFiles.length === 0) return [];
  const [firstFile] = bundle.changedFiles;
  return [{
    id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "Bugfix without commit history context",
    rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
    evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
    validatedBy: "heuristic"
  }];
}
9665
// Dispatch to the change-type-specific compliance check; change types with no
// dedicated check (refactor, docs, ...) contribute no findings.
function checkChangeTypeSpecific(bundle) {
  if (bundle.changeType === "feature") return checkFeatureSpec(bundle);
  if (bundle.changeType === "bugfix") return checkBugfixHistory(bundle);
  return [];
}
9675
// Compliance check: when a convention rule mentioning "result type" exists,
// flag changed files that use try/catch without any visible Result-type usage.
function checkResultTypeConvention(bundle, rules) {
  const resultTypeRule = rules.find((rule) => rule.text.toLowerCase().includes("result type"));
  if (resultTypeRule === undefined) return [];
  const findings = [];
  for (const changed of bundle.changedFiles) {
    const usesTryCatch = changed.content.includes("try {") || changed.content.includes("try{");
    const mentionsResult = changed.content.includes("Result<") || changed.content.includes("Result >") || changed.content.includes(": Result");
    if (!usesTryCatch || mentionsResult) continue;
    findings.push({
      id: makeFindingId("compliance", changed.path, 1, "try-catch not Result"),
      file: changed.path,
      lineRange: [1, changed.lines],
      domain: "compliance",
      severity: "suggestion",
      title: "Fallible operation uses try/catch instead of Result type",
      rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
      suggestion: "Refactor error handling to use the Result type pattern.",
      evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${resultTypeRule.text}"`],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
9699
// Entry point for the compliance agent: extract convention rules once, then
// concatenate the findings of each heuristic sub-check.
function runComplianceAgent(bundle) {
  const rules = extractConventionRules(bundle);
  const findings = [];
  findings.push(...checkMissingJsDoc(bundle, rules));
  findings.push(...checkChangeTypeSpecific(bundle));
  findings.push(...checkResultTypeConvention(bundle, rules));
  return findings;
}
9453
9707
 
9454
9708
  // src/review/agents/bug-agent.ts
9455
9709
  var BUG_DETECTION_DESCRIPTOR = {
@@ -9726,31 +9980,32 @@ var ARCHITECTURE_DESCRIPTOR = {
9726
9980
  ]
9727
9981
  };
9728
9982
  var LARGE_FILE_THRESHOLD = 300;
9983
// True when a check-deps output line mentions "violation" or "layer",
// case-insensitively.
function isViolationLine(line) {
  return /violation|layer/i.test(line);
}
9987
// Build a critical architecture finding from one check-deps violation line.
// Falls back to `fallbackPath` at line 1 when the line carries no recognizable
// file (and optional :line) reference.
function createLayerViolationFinding(line, fallbackPath) {
  const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
  const file = fileMatch?.[1] ?? fallbackPath;
  const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
  const trimmed = line.trim();
  return {
    id: makeFindingId("arch", file, lineNum, "layer violation"),
    file,
    lineRange: [lineNum, lineNum],
    domain: "architecture",
    severity: "critical",
    title: "Layer boundary violation detected by check-deps",
    rationale: `Architectural layer violation: ${trimmed}. Imports must flow in the correct direction per the project's layer definitions.`,
    suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
    evidence: [trimmed],
    validatedBy: "heuristic"
  };
}
9729
10004
// Scan the embedded check-deps output (when present in the context files) and
// turn every violation-looking line into an architecture finding.
function detectLayerViolations(bundle) {
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
  if (checkDepsFile === undefined) return [];
  const fallbackPath = bundle.changedFiles[0]?.path ?? "unknown";
  const findings = [];
  for (const line of checkDepsFile.content.split("\n")) {
    if (isViolationLine(line)) {
      findings.push(createLayerViolationFinding(line, fallbackPath));
    }
  }
  return findings;
}
9755
10010
  function detectLargeFiles(bundle) {
9756
10011
  const findings = [];
@@ -9772,45 +10027,61 @@ function detectLargeFiles(bundle) {
9772
10027
  }
9773
10028
  return findings;
9774
10029
  }
10030
// Collect the relative import sources in a file's content, with a single
// leading "./" or "../" stripped — a heuristic normalization for later
// basename matching.
function extractRelativeImports(content) {
  const found = new Set();
  const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
  for (const match of content.matchAll(importRegex)) {
    const source = match[1];
    if (!source.startsWith(".")) continue;
    found.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
  }
  return found;
}
10042
// Strip directory components and a trailing source extension (.ts/.tsx/.js/.jsx)
// from a path, yielding a bare module basename.
function fileBaseName(filePath) {
  const tail = filePath.replace(/.*\//, "");
  return tail.replace(/\.(ts|tsx|js|jsx)$/, "");
}
10045
// Heuristically detect a mutual-import pair between a changed file and one
// context file: the context file imports something named like a changed file
// while the changed file's imports reference the context file's basename.
// Returns a finding for the first such pair, or null.
function findCircularImportInCtxFile(ctxFile, changedFilePath, changedPaths, fileImports) {
  // These do not vary across the loop, so compute them once.
  const ctxBaseName = ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
  const importsBack = fileImports.has(ctxBaseName);
  const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
  for (const ctxMatch of ctxFile.content.matchAll(ctxImportRegex)) {
    const ctxSource = ctxMatch[1];
    if (!ctxSource.startsWith(".")) continue;
    for (const changedPath of changedPaths) {
      const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
      if (!ctxSource.includes(baseName) || !importsBack) continue;
      return {
        id: makeFindingId("arch", changedFilePath, 1, `circular ${ctxFile.path}`),
        file: changedFilePath,
        lineRange: [1, 1],
        domain: "architecture",
        severity: "important",
        title: `Potential circular import between ${changedFilePath} and ${ctxFile.path}`,
        rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
        suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
        evidence: [
          `${changedFilePath} imports from a module that also imports from ${changedFilePath}`
        ],
        validatedBy: "heuristic"
      };
    }
  }
  return null;
}
9775
10074
  function detectCircularImports(bundle) {
9776
10075
  const findings = [];
9777
10076
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
10077
+ const relevantCtxFiles = bundle.contextFiles.filter(
10078
+ (f) => f.reason === "import" || f.reason === "graph-dependency"
10079
+ );
9778
10080
  for (const cf of bundle.changedFiles) {
9779
- const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
9780
- let match;
9781
- const imports = /* @__PURE__ */ new Set();
9782
- while ((match = importRegex.exec(cf.content)) !== null) {
9783
- const source = match[1];
9784
- if (source.startsWith(".")) {
9785
- imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
9786
- }
9787
- }
9788
- for (const ctxFile of bundle.contextFiles) {
9789
- if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
9790
- const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
9791
- let ctxMatch;
9792
- while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
9793
- const ctxSource = ctxMatch[1];
9794
- if (ctxSource.startsWith(".")) {
9795
- for (const changedPath of changedPaths) {
9796
- const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
9797
- if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
9798
- findings.push({
9799
- id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
9800
- file: cf.path,
9801
- lineRange: [1, 1],
9802
- domain: "architecture",
9803
- severity: "important",
9804
- title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
9805
- rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
9806
- suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
9807
- evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
9808
- validatedBy: "heuristic"
9809
- });
9810
- }
9811
- }
9812
- }
9813
- }
10081
+ const imports = extractRelativeImports(cf.content);
10082
+ for (const ctxFile of relevantCtxFiles) {
10083
+ const finding = findCircularImportInCtxFile(ctxFile, cf.path, changedPaths, imports);
10084
+ if (finding) findings.push(finding);
9814
10085
  }
9815
10086
  }
9816
10087
  return findings;
@@ -9857,7 +10128,7 @@ async function fanOutReview(options) {
9857
10128
  }
9858
10129
 
9859
10130
  // src/review/validate-findings.ts
9860
- var path15 = __toESM(require("path"));
10131
+ var path17 = __toESM(require("path"));
9861
10132
  var DOWNGRADE_MAP = {
9862
10133
  critical: "important",
9863
10134
  important: "suggestion",
@@ -9878,7 +10149,7 @@ function normalizePath(filePath, projectRoot) {
9878
10149
  let normalized = filePath;
9879
10150
  normalized = normalized.replace(/\\/g, "/");
9880
10151
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
9881
- if (path15.isAbsolute(normalized)) {
10152
+ if (path17.isAbsolute(normalized)) {
9882
10153
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
9883
10154
  if (normalized.startsWith(root)) {
9884
10155
  normalized = normalized.slice(root.length);
@@ -9903,12 +10174,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
9903
10174
  while ((match = importRegex.exec(content)) !== null) {
9904
10175
  const importPath = match[1];
9905
10176
  if (!importPath.startsWith(".")) continue;
9906
- const dir = path15.dirname(current.file);
9907
- let resolved = path15.join(dir, importPath).replace(/\\/g, "/");
10177
+ const dir = path17.dirname(current.file);
10178
+ let resolved = path17.join(dir, importPath).replace(/\\/g, "/");
9908
10179
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
9909
10180
  resolved += ".ts";
9910
10181
  }
9911
- resolved = path15.normalize(resolved).replace(/\\/g, "/");
10182
+ resolved = path17.normalize(resolved).replace(/\\/g, "/");
9912
10183
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
9913
10184
  queue.push({ file: resolved, depth: current.depth + 1 });
9914
10185
  }
@@ -9925,7 +10196,7 @@ async function validateFindings(options) {
9925
10196
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
9926
10197
  continue;
9927
10198
  }
9928
- const absoluteFile = path15.isAbsolute(finding.file) ? finding.file : path15.join(projectRoot, finding.file).replace(/\\/g, "/");
10199
+ const absoluteFile = path17.isAbsolute(finding.file) ? finding.file : path17.join(projectRoot, finding.file).replace(/\\/g, "/");
9929
10200
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
9930
10201
  continue;
9931
10202
  }
@@ -9982,6 +10253,28 @@ async function validateFindings(options) {
9982
10253
  function rangesOverlap(a, b, gap) {
9983
10254
  return a[0] <= b[1] + gap && b[0] <= a[1] + gap;
9984
10255
  }
10256
+ function pickLongest(a, b) {
10257
+ if (a && b) return a.length >= b.length ? a : b;
10258
+ return a ?? b;
10259
+ }
10260
+ function buildMergedTitle(a, b, domains) {
10261
+ const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
10262
+ const domainList = [...domains].sort().join(", ");
10263
+ const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
10264
+ return { title: `[${domainList}] ${cleanTitle}`, primaryFinding };
10265
+ }
10266
+ function mergeSecurityFields(merged, primary, a, b) {
10267
+ const cweId = primary.cweId ?? a.cweId ?? b.cweId;
10268
+ const owaspCategory = primary.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
10269
+ const confidence = primary.confidence ?? a.confidence ?? b.confidence;
10270
+ const remediation = pickLongest(a.remediation, b.remediation);
10271
+ const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
10272
+ if (cweId !== void 0) merged.cweId = cweId;
10273
+ if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
10274
+ if (confidence !== void 0) merged.confidence = confidence;
10275
+ if (remediation !== void 0) merged.remediation = remediation;
10276
+ if (mergedRefs.length > 0) merged.references = mergedRefs;
10277
+ }
9985
10278
  function mergeFindings(a, b) {
9986
10279
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
9987
10280
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
@@ -9991,18 +10284,12 @@ function mergeFindings(a, b) {
9991
10284
  Math.min(a.lineRange[0], b.lineRange[0]),
9992
10285
  Math.max(a.lineRange[1], b.lineRange[1])
9993
10286
  ];
9994
- const domains = /* @__PURE__ */ new Set();
9995
- domains.add(a.domain);
9996
- domains.add(b.domain);
9997
- const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
9998
- const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
9999
- const domainList = [...domains].sort().join(", ");
10000
- const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
10001
- const title = `[${domainList}] ${cleanTitle}`;
10287
+ const domains = /* @__PURE__ */ new Set([a.domain, b.domain]);
10288
+ const suggestion = pickLongest(a.suggestion, b.suggestion);
10289
+ const { title, primaryFinding } = buildMergedTitle(a, b, domains);
10002
10290
  const merged = {
10003
10291
  id: primaryFinding.id,
10004
10292
  file: a.file,
10005
- // same file for all merged findings
10006
10293
  lineRange,
10007
10294
  domain: primaryFinding.domain,
10008
10295
  severity: highestSeverity,
@@ -10014,16 +10301,7 @@ function mergeFindings(a, b) {
10014
10301
  if (suggestion !== void 0) {
10015
10302
  merged.suggestion = suggestion;
10016
10303
  }
10017
- const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
10018
- const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
10019
- const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
10020
- const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
10021
- const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
10022
- if (cweId !== void 0) merged.cweId = cweId;
10023
- if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
10024
- if (confidence !== void 0) merged.confidence = confidence;
10025
- if (remediation !== void 0) merged.remediation = remediation;
10026
- if (mergedRefs.length > 0) merged.references = mergedRefs;
10304
+ mergeSecurityFields(merged, primaryFinding, a, b);
10027
10305
  return merged;
10028
10306
  }
10029
10307
  function deduplicateFindings(options) {
@@ -10195,6 +10473,17 @@ function formatTerminalOutput(options) {
10195
10473
  if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
10196
10474
  sections.push(` Found ${issueCount} issue(s): ${parts.join(", ")}.`);
10197
10475
  }
10476
+ if (options.evidenceCoverage) {
10477
+ const ec = options.evidenceCoverage;
10478
+ sections.push("");
10479
+ sections.push("## Evidence Coverage\n");
10480
+ sections.push(` Evidence entries: ${ec.totalEntries}`);
10481
+ sections.push(
10482
+ ` Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
10483
+ );
10484
+ sections.push(` Uncited findings: ${ec.uncitedCount} (flagged as [UNVERIFIED])`);
10485
+ sections.push(` Coverage: ${ec.coveragePercentage}%`);
10486
+ }
10198
10487
  return sections.join("\n");
10199
10488
  }
10200
10489
 
@@ -10271,9 +10560,108 @@ function formatGitHubSummary(options) {
10271
10560
  const assessment = determineAssessment(findings);
10272
10561
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
10273
10562
  sections.push(`## Assessment: ${assessmentLabel}`);
10563
+ if (options.evidenceCoverage) {
10564
+ const ec = options.evidenceCoverage;
10565
+ sections.push("");
10566
+ sections.push("## Evidence Coverage\n");
10567
+ sections.push(`- Evidence entries: ${ec.totalEntries}`);
10568
+ sections.push(
10569
+ `- Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
10570
+ );
10571
+ sections.push(`- Uncited findings: ${ec.uncitedCount} (flagged as \\[UNVERIFIED\\])`);
10572
+ sections.push(`- Coverage: ${ec.coveragePercentage}%`);
10573
+ }
10274
10574
  return sections.join("\n");
10275
10575
  }
10276
10576
 
10577
+ // src/review/evidence-gate.ts
10578
+ var FILE_LINE_RANGE_PATTERN = /^([\w./@-]+\.\w+):(\d+)-(\d+)/;
10579
+ var FILE_LINE_PATTERN = /^([\w./@-]+\.\w+):(\d+)/;
10580
+ var FILE_ONLY_PATTERN = /^([\w./@-]+\.\w+)\s/;
10581
+ function parseEvidenceRef(content) {
10582
+ const trimmed = content.trim();
10583
+ const rangeMatch = trimmed.match(FILE_LINE_RANGE_PATTERN);
10584
+ if (rangeMatch) {
10585
+ return {
10586
+ file: rangeMatch[1],
10587
+ lineStart: parseInt(rangeMatch[2], 10),
10588
+ lineEnd: parseInt(rangeMatch[3], 10)
10589
+ };
10590
+ }
10591
+ const lineMatch = trimmed.match(FILE_LINE_PATTERN);
10592
+ if (lineMatch) {
10593
+ return {
10594
+ file: lineMatch[1],
10595
+ lineStart: parseInt(lineMatch[2], 10)
10596
+ };
10597
+ }
10598
+ const fileMatch = trimmed.match(FILE_ONLY_PATTERN);
10599
+ if (fileMatch) {
10600
+ return { file: fileMatch[1] };
10601
+ }
10602
+ return null;
10603
+ }
10604
+ function evidenceMatchesFinding(ref, finding) {
10605
+ if (ref.file !== finding.file) return false;
10606
+ if (ref.lineStart === void 0) return true;
10607
+ const [findStart, findEnd] = finding.lineRange;
10608
+ if (ref.lineEnd !== void 0) {
10609
+ return ref.lineStart <= findEnd && ref.lineEnd >= findStart;
10610
+ }
10611
+ return ref.lineStart >= findStart && ref.lineStart <= findEnd;
10612
+ }
10613
+ function checkEvidenceCoverage(findings, evidenceEntries) {
10614
+ if (findings.length === 0) {
10615
+ return {
10616
+ totalEntries: evidenceEntries.filter((e) => e.status === "active").length,
10617
+ findingsWithEvidence: 0,
10618
+ uncitedCount: 0,
10619
+ uncitedFindings: [],
10620
+ coveragePercentage: 100
10621
+ };
10622
+ }
10623
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
10624
+ const evidenceRefs = [];
10625
+ for (const entry of activeEvidence) {
10626
+ const ref = parseEvidenceRef(entry.content);
10627
+ if (ref) evidenceRefs.push(ref);
10628
+ }
10629
+ let findingsWithEvidence = 0;
10630
+ const uncitedFindings = [];
10631
+ for (const finding of findings) {
10632
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
10633
+ if (hasEvidence) {
10634
+ findingsWithEvidence++;
10635
+ } else {
10636
+ uncitedFindings.push(finding.title);
10637
+ }
10638
+ }
10639
+ const uncitedCount = findings.length - findingsWithEvidence;
10640
+ const coveragePercentage = Math.round(findingsWithEvidence / findings.length * 100);
10641
+ return {
10642
+ totalEntries: activeEvidence.length,
10643
+ findingsWithEvidence,
10644
+ uncitedCount,
10645
+ uncitedFindings,
10646
+ coveragePercentage
10647
+ };
10648
+ }
10649
+ function tagUncitedFindings(findings, evidenceEntries) {
10650
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
10651
+ const evidenceRefs = [];
10652
+ for (const entry of activeEvidence) {
10653
+ const ref = parseEvidenceRef(entry.content);
10654
+ if (ref) evidenceRefs.push(ref);
10655
+ }
10656
+ for (const finding of findings) {
10657
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
10658
+ if (!hasEvidence && !finding.title.startsWith("[UNVERIFIED]")) {
10659
+ finding.title = `[UNVERIFIED] ${finding.title}`;
10660
+ }
10661
+ }
10662
+ return findings;
10663
+ }
10664
+
10277
10665
  // src/review/pipeline-orchestrator.ts
10278
10666
  async function runReviewPipeline(options) {
10279
10667
  const {
@@ -10286,7 +10674,8 @@ async function runReviewPipeline(options) {
10286
10674
  conventionFiles,
10287
10675
  checkDepsOutput,
10288
10676
  config = {},
10289
- commitHistory
10677
+ commitHistory,
10678
+ sessionSlug
10290
10679
  } = options;
10291
10680
  if (flags.ci && prMetadata) {
10292
10681
  const eligibility = checkEligibility(prMetadata, true);
@@ -10382,13 +10771,25 @@ async function runReviewPipeline(options) {
10382
10771
  projectRoot,
10383
10772
  fileContents
10384
10773
  });
10774
+ let evidenceCoverage;
10775
+ if (sessionSlug) {
10776
+ try {
10777
+ const evidenceResult = await readSessionSection(projectRoot, sessionSlug, "evidence");
10778
+ if (evidenceResult.ok) {
10779
+ evidenceCoverage = checkEvidenceCoverage(validatedFindings, evidenceResult.value);
10780
+ tagUncitedFindings(validatedFindings, evidenceResult.value);
10781
+ }
10782
+ } catch {
10783
+ }
10784
+ }
10385
10785
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
10386
10786
  const strengths = [];
10387
10787
  const assessment = determineAssessment(dedupedFindings);
10388
10788
  const exitCode = getExitCode(assessment);
10389
10789
  const terminalOutput = formatTerminalOutput({
10390
10790
  findings: dedupedFindings,
10391
- strengths
10791
+ strengths,
10792
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
10392
10793
  });
10393
10794
  let githubComments = [];
10394
10795
  if (flags.comment) {
@@ -10403,12 +10804,13 @@ async function runReviewPipeline(options) {
10403
10804
  terminalOutput,
10404
10805
  githubComments,
10405
10806
  exitCode,
10406
- ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
10807
+ ...mechanicalResult != null ? { mechanicalResult } : {},
10808
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
10407
10809
  };
10408
10810
  }
10409
10811
 
10410
10812
  // src/roadmap/parse.ts
10411
- var import_types18 = require("@harness-engineering/types");
10813
+ var import_types19 = require("@harness-engineering/types");
10412
10814
  var VALID_STATUSES = /* @__PURE__ */ new Set([
10413
10815
  "backlog",
10414
10816
  "planned",
@@ -10420,14 +10822,14 @@ var EM_DASH = "\u2014";
10420
10822
  function parseRoadmap(markdown) {
10421
10823
  const fmMatch = markdown.match(/^---\n([\s\S]*?)\n---/);
10422
10824
  if (!fmMatch) {
10423
- return (0, import_types18.Err)(new Error("Missing or malformed YAML frontmatter"));
10825
+ return (0, import_types19.Err)(new Error("Missing or malformed YAML frontmatter"));
10424
10826
  }
10425
10827
  const fmResult = parseFrontmatter(fmMatch[1]);
10426
10828
  if (!fmResult.ok) return fmResult;
10427
10829
  const body = markdown.slice(fmMatch[0].length);
10428
10830
  const milestonesResult = parseMilestones(body);
10429
10831
  if (!milestonesResult.ok) return milestonesResult;
10430
- return (0, import_types18.Ok)({
10832
+ return (0, import_types19.Ok)({
10431
10833
  frontmatter: fmResult.value,
10432
10834
  milestones: milestonesResult.value
10433
10835
  });
@@ -10449,7 +10851,7 @@ function parseFrontmatter(raw) {
10449
10851
  const created = map.get("created");
10450
10852
  const updated = map.get("updated");
10451
10853
  if (!project || !versionStr || !lastSynced || !lastManualEdit) {
10452
- return (0, import_types18.Err)(
10854
+ return (0, import_types19.Err)(
10453
10855
  new Error(
10454
10856
  "Frontmatter missing required fields: project, version, last_synced, last_manual_edit"
10455
10857
  )
@@ -10457,12 +10859,12 @@ function parseFrontmatter(raw) {
10457
10859
  }
10458
10860
  const version = parseInt(versionStr, 10);
10459
10861
  if (isNaN(version)) {
10460
- return (0, import_types18.Err)(new Error("Frontmatter version must be a number"));
10862
+ return (0, import_types19.Err)(new Error("Frontmatter version must be a number"));
10461
10863
  }
10462
10864
  const fm = { project, version, lastSynced, lastManualEdit };
10463
10865
  if (created) fm.created = created;
10464
10866
  if (updated) fm.updated = updated;
10465
- return (0, import_types18.Ok)(fm);
10867
+ return (0, import_types19.Ok)(fm);
10466
10868
  }
10467
10869
  function parseMilestones(body) {
10468
10870
  const milestones = [];
@@ -10486,7 +10888,7 @@ function parseMilestones(body) {
10486
10888
  features: featuresResult.value
10487
10889
  });
10488
10890
  }
10489
- return (0, import_types18.Ok)(milestones);
10891
+ return (0, import_types19.Ok)(milestones);
10490
10892
  }
10491
10893
  function parseFeatures(sectionBody) {
10492
10894
  const features = [];
@@ -10504,32 +10906,50 @@ function parseFeatures(sectionBody) {
10504
10906
  if (!featureResult.ok) return featureResult;
10505
10907
  features.push(featureResult.value);
10506
10908
  }
10507
- return (0, import_types18.Ok)(features);
10909
+ return (0, import_types19.Ok)(features);
10508
10910
  }
10509
- function parseFeatureFields(name, body) {
10911
+ function extractFieldMap(body) {
10510
10912
  const fieldMap = /* @__PURE__ */ new Map();
10511
10913
  const fieldPattern = /^- \*\*(.+?):\*\* (.+)$/gm;
10512
10914
  let match;
10513
10915
  while ((match = fieldPattern.exec(body)) !== null) {
10514
10916
  fieldMap.set(match[1], match[2]);
10515
10917
  }
10918
+ return fieldMap;
10919
+ }
10920
+ function parseListField(fieldMap, ...keys) {
10921
+ let raw = EM_DASH;
10922
+ for (const key of keys) {
10923
+ const val = fieldMap.get(key);
10924
+ if (val !== void 0) {
10925
+ raw = val;
10926
+ break;
10927
+ }
10928
+ }
10929
+ if (raw === EM_DASH || raw === "none") return [];
10930
+ return raw.split(",").map((s) => s.trim());
10931
+ }
10932
+ function parseFeatureFields(name, body) {
10933
+ const fieldMap = extractFieldMap(body);
10516
10934
  const statusRaw = fieldMap.get("Status");
10517
10935
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
10518
- return (0, import_types18.Err)(
10936
+ return (0, import_types19.Err)(
10519
10937
  new Error(
10520
10938
  `Feature "${name}" has invalid status: "${statusRaw ?? "(missing)"}". Valid statuses: ${[...VALID_STATUSES].join(", ")}`
10521
10939
  )
10522
10940
  );
10523
10941
  }
10524
- const status = statusRaw;
10525
10942
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
10526
- const spec = specRaw === EM_DASH ? null : specRaw;
10527
- const plansRaw = fieldMap.get("Plans") ?? fieldMap.get("Plan") ?? EM_DASH;
10528
- const plans = plansRaw === EM_DASH || plansRaw === "none" ? [] : plansRaw.split(",").map((p) => p.trim());
10529
- const blockedByRaw = fieldMap.get("Blocked by") ?? fieldMap.get("Blockers") ?? EM_DASH;
10530
- const blockedBy = blockedByRaw === EM_DASH || blockedByRaw === "none" ? [] : blockedByRaw.split(",").map((b) => b.trim());
10531
- const summary = fieldMap.get("Summary") ?? "";
10532
- return (0, import_types18.Ok)({ name, status, spec, plans, blockedBy, summary });
10943
+ const plans = parseListField(fieldMap, "Plans", "Plan");
10944
+ const blockedBy = parseListField(fieldMap, "Blocked by", "Blockers");
10945
+ return (0, import_types19.Ok)({
10946
+ name,
10947
+ status: statusRaw,
10948
+ spec: specRaw === EM_DASH ? null : specRaw,
10949
+ plans,
10950
+ blockedBy,
10951
+ summary: fieldMap.get("Summary") ?? ""
10952
+ });
10533
10953
  }
10534
10954
 
10535
10955
  // src/roadmap/serialize.ts
@@ -10580,9 +11000,9 @@ function serializeFeature(feature) {
10580
11000
  }
10581
11001
 
10582
11002
  // src/roadmap/sync.ts
10583
- var fs16 = __toESM(require("fs"));
10584
- var path16 = __toESM(require("path"));
10585
- var import_types19 = require("@harness-engineering/types");
11003
+ var fs18 = __toESM(require("fs"));
11004
+ var path18 = __toESM(require("path"));
11005
+ var import_types20 = require("@harness-engineering/types");
10586
11006
  function inferStatus(feature, projectPath, allFeatures) {
10587
11007
  if (feature.blockedBy.length > 0) {
10588
11008
  const blockerNotDone = feature.blockedBy.some((blockerName) => {
@@ -10596,10 +11016,10 @@ function inferStatus(feature, projectPath, allFeatures) {
10596
11016
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
10597
11017
  const useRootState = featuresWithPlans.length <= 1;
10598
11018
  if (useRootState) {
10599
- const rootStatePath = path16.join(projectPath, ".harness", "state.json");
10600
- if (fs16.existsSync(rootStatePath)) {
11019
+ const rootStatePath = path18.join(projectPath, ".harness", "state.json");
11020
+ if (fs18.existsSync(rootStatePath)) {
10601
11021
  try {
10602
- const raw = fs16.readFileSync(rootStatePath, "utf-8");
11022
+ const raw = fs18.readFileSync(rootStatePath, "utf-8");
10603
11023
  const state = JSON.parse(raw);
10604
11024
  if (state.progress) {
10605
11025
  for (const status of Object.values(state.progress)) {
@@ -10610,16 +11030,16 @@ function inferStatus(feature, projectPath, allFeatures) {
10610
11030
  }
10611
11031
  }
10612
11032
  }
10613
- const sessionsDir = path16.join(projectPath, ".harness", "sessions");
10614
- if (fs16.existsSync(sessionsDir)) {
11033
+ const sessionsDir = path18.join(projectPath, ".harness", "sessions");
11034
+ if (fs18.existsSync(sessionsDir)) {
10615
11035
  try {
10616
- const sessionDirs = fs16.readdirSync(sessionsDir, { withFileTypes: true });
11036
+ const sessionDirs = fs18.readdirSync(sessionsDir, { withFileTypes: true });
10617
11037
  for (const entry of sessionDirs) {
10618
11038
  if (!entry.isDirectory()) continue;
10619
- const autopilotPath = path16.join(sessionsDir, entry.name, "autopilot-state.json");
10620
- if (!fs16.existsSync(autopilotPath)) continue;
11039
+ const autopilotPath = path18.join(sessionsDir, entry.name, "autopilot-state.json");
11040
+ if (!fs18.existsSync(autopilotPath)) continue;
10621
11041
  try {
10622
- const raw = fs16.readFileSync(autopilotPath, "utf-8");
11042
+ const raw = fs18.readFileSync(autopilotPath, "utf-8");
10623
11043
  const autopilot = JSON.parse(raw);
10624
11044
  if (!autopilot.phases) continue;
10625
11045
  const linkedPhases = autopilot.phases.filter(
@@ -10666,7 +11086,7 @@ function syncRoadmap(options) {
10666
11086
  to: inferred
10667
11087
  });
10668
11088
  }
10669
- return (0, import_types19.Ok)(changes);
11089
+ return (0, import_types20.Ok)(changes);
10670
11090
  }
10671
11091
 
10672
11092
  // src/interaction/types.ts
@@ -10699,17 +11119,17 @@ var EmitInteractionInputSchema = import_zod7.z.object({
10699
11119
  });
10700
11120
 
10701
11121
  // src/blueprint/scanner.ts
10702
- var fs17 = __toESM(require("fs/promises"));
10703
- var path17 = __toESM(require("path"));
11122
+ var fs19 = __toESM(require("fs/promises"));
11123
+ var path19 = __toESM(require("path"));
10704
11124
  var ProjectScanner = class {
10705
11125
  constructor(rootDir) {
10706
11126
  this.rootDir = rootDir;
10707
11127
  }
10708
11128
  async scan() {
10709
- let projectName = path17.basename(this.rootDir);
11129
+ let projectName = path19.basename(this.rootDir);
10710
11130
  try {
10711
- const pkgPath = path17.join(this.rootDir, "package.json");
10712
- const pkgRaw = await fs17.readFile(pkgPath, "utf-8");
11131
+ const pkgPath = path19.join(this.rootDir, "package.json");
11132
+ const pkgRaw = await fs19.readFile(pkgPath, "utf-8");
10713
11133
  const pkg = JSON.parse(pkgRaw);
10714
11134
  if (pkg.name) projectName = pkg.name;
10715
11135
  } catch {
@@ -10750,8 +11170,8 @@ var ProjectScanner = class {
10750
11170
  };
10751
11171
 
10752
11172
  // src/blueprint/generator.ts
10753
- var fs18 = __toESM(require("fs/promises"));
10754
- var path18 = __toESM(require("path"));
11173
+ var fs20 = __toESM(require("fs/promises"));
11174
+ var path20 = __toESM(require("path"));
10755
11175
  var ejs = __toESM(require("ejs"));
10756
11176
 
10757
11177
  // src/blueprint/templates.ts
@@ -10835,19 +11255,19 @@ var BlueprintGenerator = class {
10835
11255
  styles: STYLES,
10836
11256
  scripts: SCRIPTS
10837
11257
  });
10838
- await fs18.mkdir(options.outputDir, { recursive: true });
10839
- await fs18.writeFile(path18.join(options.outputDir, "index.html"), html);
11258
+ await fs20.mkdir(options.outputDir, { recursive: true });
11259
+ await fs20.writeFile(path20.join(options.outputDir, "index.html"), html);
10840
11260
  }
10841
11261
  };
10842
11262
 
10843
11263
  // src/update-checker.ts
10844
- var fs19 = __toESM(require("fs"));
10845
- var path19 = __toESM(require("path"));
11264
+ var fs21 = __toESM(require("fs"));
11265
+ var path21 = __toESM(require("path"));
10846
11266
  var os = __toESM(require("os"));
10847
11267
  var import_child_process3 = require("child_process");
10848
11268
  function getStatePath() {
10849
11269
  const home = process.env["HOME"] || os.homedir();
10850
- return path19.join(home, ".harness", "update-check.json");
11270
+ return path21.join(home, ".harness", "update-check.json");
10851
11271
  }
10852
11272
  function isUpdateCheckEnabled(configInterval) {
10853
11273
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -10860,7 +11280,7 @@ function shouldRunCheck(state, intervalMs) {
10860
11280
  }
10861
11281
  function readCheckState() {
10862
11282
  try {
10863
- const raw = fs19.readFileSync(getStatePath(), "utf-8");
11283
+ const raw = fs21.readFileSync(getStatePath(), "utf-8");
10864
11284
  const parsed = JSON.parse(raw);
10865
11285
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
10866
11286
  const state = parsed;
@@ -10877,7 +11297,7 @@ function readCheckState() {
10877
11297
  }
10878
11298
  function spawnBackgroundCheck(currentVersion) {
10879
11299
  const statePath = getStatePath();
10880
- const stateDir = path19.dirname(statePath);
11300
+ const stateDir = path21.dirname(statePath);
10881
11301
  const script = `
10882
11302
  const { execSync } = require('child_process');
10883
11303
  const fs = require('fs');
@@ -10931,7 +11351,7 @@ Run "harness update" to upgrade.`;
10931
11351
  }
10932
11352
 
10933
11353
  // src/index.ts
10934
- var VERSION = "0.13.0";
11354
+ var VERSION = "0.14.0";
10935
11355
  // Annotate the CommonJS export names for ESM import in node:
10936
11356
  0 && (module.exports = {
10937
11357
  AGENT_DESCRIPTORS,
@@ -11012,6 +11432,7 @@ var VERSION = "0.13.0";
11012
11432
  analyzeLearningPatterns,
11013
11433
  appendFailure,
11014
11434
  appendLearning,
11435
+ appendSessionEntry,
11015
11436
  applyFixes,
11016
11437
  applyHotspotDowngrade,
11017
11438
  archMatchers,
@@ -11019,12 +11440,14 @@ var VERSION = "0.13.0";
11019
11440
  architecture,
11020
11441
  archiveFailures,
11021
11442
  archiveLearnings,
11443
+ archiveSession,
11022
11444
  archiveStream,
11023
11445
  buildDependencyGraph,
11024
11446
  buildExclusionSet,
11025
11447
  buildSnapshot,
11026
11448
  checkDocCoverage,
11027
11449
  checkEligibility,
11450
+ checkEvidenceCoverage,
11028
11451
  classifyFinding,
11029
11452
  clearFailuresCache,
11030
11453
  clearLearningsCache,
@@ -11108,6 +11531,8 @@ var VERSION = "0.13.0";
11108
11531
  reactRules,
11109
11532
  readCheckState,
11110
11533
  readLockfile,
11534
+ readSessionSection,
11535
+ readSessionSections,
11111
11536
  removeContributions,
11112
11537
  removeProvenance,
11113
11538
  requestMultiplePeerReviews,
@@ -11141,8 +11566,10 @@ var VERSION = "0.13.0";
11141
11566
  spawnBackgroundCheck,
11142
11567
  syncConstraintNodes,
11143
11568
  syncRoadmap,
11569
+ tagUncitedFindings,
11144
11570
  touchStream,
11145
11571
  trackAction,
11572
+ updateSessionEntryStatus,
11146
11573
  updateSessionIndex,
11147
11574
  validateAgentsMap,
11148
11575
  validateBoundaries,