@harness-engineering/cli 1.13.1 → 1.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (147) hide show
  1. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +240 -39
  2. package/dist/agents/skills/claude-code/harness-autopilot/skill.yaml +6 -0
  3. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +39 -0
  4. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +44 -0
  5. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +44 -0
  6. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +39 -0
  7. package/dist/agents/skills/claude-code/harness-product-spec/SKILL.md +5 -5
  8. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +3 -3
  9. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +35 -0
  10. package/dist/agents/skills/claude-code/initialize-harness-project/SKILL.md +11 -3
  11. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +240 -39
  12. package/dist/agents/skills/gemini-cli/harness-autopilot/skill.yaml +6 -0
  13. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +39 -0
  14. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +44 -0
  15. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +44 -0
  16. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +39 -0
  17. package/dist/agents/skills/gemini-cli/harness-product-spec/SKILL.md +5 -5
  18. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +3 -3
  19. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +35 -0
  20. package/dist/agents/skills/gemini-cli/initialize-harness-project/SKILL.md +11 -3
  21. package/dist/agents/skills/package.json +1 -0
  22. package/dist/agents/skills/vitest.config.mts +5 -0
  23. package/dist/agents-md-ZGNIDWAF.js +8 -0
  24. package/dist/{architecture-2R5Z4ZAF.js → architecture-ZLIH5533.js} +4 -4
  25. package/dist/bin/harness-mcp.js +14 -14
  26. package/dist/bin/harness.js +27 -25
  27. package/dist/{check-phase-gate-2OFZ7OWW.js → check-phase-gate-ZOXVBDCN.js} +4 -4
  28. package/dist/{chunk-ND6PNADU.js → chunk-2BKLWLY6.js} +9 -9
  29. package/dist/{chunk-65FRIL4D.js → chunk-3ZZKVN62.js} +1 -1
  30. package/dist/{chunk-C2ERUR3L.js → chunk-7MJAPE3Z.js} +165 -49
  31. package/dist/{chunk-Z77YQRQT.js → chunk-B2HKP423.js} +16 -5
  32. package/dist/{chunk-QPEH2QPG.js → chunk-DBSOCI3G.js} +53 -54
  33. package/dist/{chunk-TKJZKICB.js → chunk-EDXIVMAP.js} +7 -7
  34. package/dist/{chunk-MHBMTPW7.js → chunk-ERS5EVUZ.js} +9 -0
  35. package/dist/{chunk-JSTQ3AWB.js → chunk-FIAPHX37.js} +1 -1
  36. package/dist/{chunk-IMFVFNJE.js → chunk-FTMXDOR6.js} +1 -1
  37. package/dist/{chunk-72GHBOL2.js → chunk-GZKSBLQL.js} +1 -1
  38. package/dist/{chunk-K6XAPGML.js → chunk-H7Y5CKTM.js} +1 -1
  39. package/dist/{chunk-SSKDAOX5.js → chunk-J4RAX7YB.js} +1164 -516
  40. package/dist/{chunk-UAX4I5ZE.js → chunk-LGYBN7Y6.js} +2 -2
  41. package/dist/{chunk-QY4T6YAZ.js → chunk-N25INEIX.js} +4 -4
  42. package/dist/{chunk-4ZMOCPYO.js → chunk-ND2ENWDM.js} +1 -1
  43. package/dist/{chunk-NERR4TAO.js → chunk-NNHDDXYT.js} +1250 -765
  44. package/dist/{chunk-NKDM3FMH.js → chunk-OD3S2NHN.js} +1 -1
  45. package/dist/{chunk-NOPU4RZ4.js → chunk-OFXQSFOW.js} +3 -3
  46. package/dist/{chunk-TS3XWPW5.js → chunk-RCWZBSK5.js} +1 -1
  47. package/dist/{chunk-VUCPTQ6G.js → chunk-SD3SQOZ2.js} +1 -1
  48. package/dist/{chunk-DZS7CJKL.js → chunk-VEPAJXBW.js} +45 -47
  49. package/dist/{chunk-IM32EEDM.js → chunk-YLXFKVJE.js} +9 -9
  50. package/dist/{chunk-Q6AB7W5Z.js → chunk-YQ6KC6TE.js} +1 -1
  51. package/dist/{chunk-PQ5YK4AY.js → chunk-Z2OOPXJO.js} +2740 -1221
  52. package/dist/ci-workflow-765LSHRD.js +8 -0
  53. package/dist/{dist-2B363XUH.js → dist-ALQDD67R.js} +64 -2
  54. package/dist/{dist-HXHWB7SV.js → dist-B26DFXMP.js} +571 -478
  55. package/dist/{dist-L7LAAQAS.js → dist-DZ63LLUD.js} +1 -1
  56. package/dist/{dist-D4RYGUZE.js → dist-USY2C5JL.js} +3 -1
  57. package/dist/{docs-FZOPM4GK.js → docs-NRMQCOJ6.js} +4 -4
  58. package/dist/engine-3RB7MXPP.js +8 -0
  59. package/dist/{entropy-LVHJMFGH.js → entropy-6AGX2ZUN.js} +3 -3
  60. package/dist/{feedback-IHLVLMRD.js → feedback-MY4QZIFD.js} +1 -1
  61. package/dist/{generate-agent-definitions-64S3CLEZ.js → generate-agent-definitions-ZAE726AU.js} +4 -4
  62. package/dist/{graph-loader-GJZ4FN4Y.js → graph-loader-2M2HXDQI.js} +1 -1
  63. package/dist/index.d.ts +156 -17
  64. package/dist/index.js +24 -24
  65. package/dist/loader-UUTVMQCC.js +10 -0
  66. package/dist/{mcp-JQUI7BVZ.js → mcp-VU5FMO52.js} +14 -14
  67. package/dist/{performance-ZTVSUANN.js → performance-2D7G6NMJ.js} +3 -3
  68. package/dist/{review-pipeline-76JHKGSV.js → review-pipeline-RAQ55ISU.js} +1 -1
  69. package/dist/runtime-BCK5RRZQ.js +9 -0
  70. package/dist/{security-FWQZF2IZ.js → security-2RPQEN62.js} +1 -1
  71. package/dist/templates/axum/Cargo.toml.hbs +8 -0
  72. package/dist/templates/axum/src/main.rs +12 -0
  73. package/dist/templates/axum/template.json +16 -0
  74. package/dist/templates/django/manage.py.hbs +19 -0
  75. package/dist/templates/django/requirements.txt.hbs +1 -0
  76. package/dist/templates/django/src/settings.py.hbs +44 -0
  77. package/dist/templates/django/src/urls.py +6 -0
  78. package/dist/templates/django/src/wsgi.py.hbs +9 -0
  79. package/dist/templates/django/template.json +21 -0
  80. package/dist/templates/express/package.json.hbs +15 -0
  81. package/dist/templates/express/src/app.ts +12 -0
  82. package/dist/templates/express/src/lib/.gitkeep +0 -0
  83. package/dist/templates/express/template.json +16 -0
  84. package/dist/templates/fastapi/requirements.txt.hbs +2 -0
  85. package/dist/templates/fastapi/src/main.py +8 -0
  86. package/dist/templates/fastapi/template.json +20 -0
  87. package/dist/templates/gin/go.mod.hbs +5 -0
  88. package/dist/templates/gin/main.go +15 -0
  89. package/dist/templates/gin/template.json +19 -0
  90. package/dist/templates/go-base/.golangci.yml +16 -0
  91. package/dist/templates/go-base/AGENTS.md.hbs +35 -0
  92. package/dist/templates/go-base/go.mod.hbs +3 -0
  93. package/dist/templates/go-base/harness.config.json.hbs +17 -0
  94. package/dist/templates/go-base/main.go +7 -0
  95. package/dist/templates/go-base/template.json +14 -0
  96. package/dist/templates/java-base/AGENTS.md.hbs +35 -0
  97. package/dist/templates/java-base/checkstyle.xml +20 -0
  98. package/dist/templates/java-base/harness.config.json.hbs +16 -0
  99. package/dist/templates/java-base/pom.xml.hbs +39 -0
  100. package/dist/templates/java-base/src/main/java/App.java.hbs +5 -0
  101. package/dist/templates/java-base/template.json +13 -0
  102. package/dist/templates/nestjs/nest-cli.json +5 -0
  103. package/dist/templates/nestjs/package.json.hbs +18 -0
  104. package/dist/templates/nestjs/src/app.module.ts +8 -0
  105. package/dist/templates/nestjs/src/lib/.gitkeep +0 -0
  106. package/dist/templates/nestjs/src/main.ts +11 -0
  107. package/dist/templates/nestjs/template.json +16 -0
  108. package/dist/templates/nextjs/template.json +15 -1
  109. package/dist/templates/python-base/.python-version +1 -0
  110. package/dist/templates/python-base/AGENTS.md.hbs +32 -0
  111. package/dist/templates/python-base/harness.config.json.hbs +16 -0
  112. package/dist/templates/python-base/pyproject.toml.hbs +18 -0
  113. package/dist/templates/python-base/ruff.toml +5 -0
  114. package/dist/templates/python-base/src/__init__.py +0 -0
  115. package/dist/templates/python-base/template.json +13 -0
  116. package/dist/templates/react-vite/index.html +12 -0
  117. package/dist/templates/react-vite/package.json.hbs +18 -0
  118. package/dist/templates/react-vite/src/App.tsx +7 -0
  119. package/dist/templates/react-vite/src/lib/.gitkeep +0 -0
  120. package/dist/templates/react-vite/src/main.tsx +9 -0
  121. package/dist/templates/react-vite/template.json +19 -0
  122. package/dist/templates/react-vite/vite.config.ts +6 -0
  123. package/dist/templates/rust-base/AGENTS.md.hbs +35 -0
  124. package/dist/templates/rust-base/Cargo.toml.hbs +6 -0
  125. package/dist/templates/rust-base/clippy.toml +2 -0
  126. package/dist/templates/rust-base/harness.config.json.hbs +17 -0
  127. package/dist/templates/rust-base/src/main.rs +3 -0
  128. package/dist/templates/rust-base/template.json +14 -0
  129. package/dist/templates/spring-boot/pom.xml.hbs +50 -0
  130. package/dist/templates/spring-boot/src/main/java/Application.java.hbs +19 -0
  131. package/dist/templates/spring-boot/template.json +15 -0
  132. package/dist/templates/vue/index.html +12 -0
  133. package/dist/templates/vue/package.json.hbs +16 -0
  134. package/dist/templates/vue/src/App.vue +7 -0
  135. package/dist/templates/vue/src/lib/.gitkeep +0 -0
  136. package/dist/templates/vue/src/main.ts +4 -0
  137. package/dist/templates/vue/template.json +19 -0
  138. package/dist/templates/vue/vite.config.ts +6 -0
  139. package/dist/{validate-GCHZJIL7.js → validate-KBYQAEWE.js} +4 -4
  140. package/dist/validate-cross-check-OABMREW4.js +8 -0
  141. package/package.json +7 -5
  142. package/dist/agents-md-XU3BHE22.js +0 -8
  143. package/dist/ci-workflow-EHV65NQB.js +0 -8
  144. package/dist/engine-OL4T6NZS.js +0 -8
  145. package/dist/loader-DPYFB6R6.js +0 -10
  146. package/dist/runtime-X7U6SC7K.js +0 -9
  147. package/dist/validate-cross-check-STFHYMAZ.js +0 -8
@@ -1,9 +1,10 @@
1
1
  import {
2
2
  Err,
3
- Ok
4
- } from "./chunk-MHBMTPW7.js";
3
+ Ok,
4
+ SESSION_SECTION_NAMES
5
+ } from "./chunk-ERS5EVUZ.js";
5
6
 
6
- // ../core/dist/chunk-D6VFA6AS.mjs
7
+ // ../core/dist/chunk-BQUWXBGR.mjs
7
8
  import { z } from "zod";
8
9
  import { createHash } from "crypto";
9
10
  import { minimatch } from "minimatch";
@@ -134,17 +135,17 @@ function resolveFileToLayer(file, layers) {
134
135
  }
135
136
  var accessAsync = promisify(access);
136
137
  var readFileAsync = promisify(readFile);
137
- async function fileExists(path20) {
138
+ async function fileExists(path23) {
138
139
  try {
139
- await accessAsync(path20, constants.F_OK);
140
+ await accessAsync(path23, constants.F_OK);
140
141
  return true;
141
142
  } catch {
142
143
  return false;
143
144
  }
144
145
  }
145
- async function readFileContent(path20) {
146
+ async function readFileContent(path23) {
146
147
  try {
147
- const content = await readFileAsync(path20, "utf-8");
148
+ const content = await readFileAsync(path23, "utf-8");
148
149
  return Ok(content);
149
150
  } catch (error) {
150
151
  return Err(error);
@@ -290,65 +291,71 @@ async function validateDependencies(config) {
290
291
  graph: graphResult.value
291
292
  });
292
293
  }
293
- function tarjanSCC(graph) {
294
- const nodeMap = /* @__PURE__ */ new Map();
295
- const stack = [];
296
- const sccs = [];
297
- let index = 0;
294
+ function buildAdjacencyList(graph) {
298
295
  const adjacency = /* @__PURE__ */ new Map();
296
+ const nodeSet = new Set(graph.nodes);
299
297
  for (const node of graph.nodes) {
300
298
  adjacency.set(node, []);
301
299
  }
302
300
  for (const edge of graph.edges) {
303
301
  const neighbors = adjacency.get(edge.from);
304
- if (neighbors && graph.nodes.includes(edge.to)) {
302
+ if (neighbors && nodeSet.has(edge.to)) {
305
303
  neighbors.push(edge.to);
306
304
  }
307
305
  }
308
- function strongConnect(node) {
309
- nodeMap.set(node, {
310
- index,
311
- lowlink: index,
312
- onStack: true
313
- });
314
- index++;
315
- stack.push(node);
316
- const neighbors = adjacency.get(node) ?? [];
317
- for (const neighbor of neighbors) {
318
- const neighborData = nodeMap.get(neighbor);
319
- if (!neighborData) {
320
- strongConnect(neighbor);
321
- const nodeData2 = nodeMap.get(node);
322
- const updatedNeighborData = nodeMap.get(neighbor);
323
- nodeData2.lowlink = Math.min(nodeData2.lowlink, updatedNeighborData.lowlink);
324
- } else if (neighborData.onStack) {
325
- const nodeData2 = nodeMap.get(node);
326
- nodeData2.lowlink = Math.min(nodeData2.lowlink, neighborData.index);
327
- }
328
- }
329
- const nodeData = nodeMap.get(node);
330
- if (nodeData.lowlink === nodeData.index) {
331
- const scc = [];
332
- let w;
333
- do {
334
- w = stack.pop();
335
- nodeMap.get(w).onStack = false;
336
- scc.push(w);
337
- } while (w !== node);
338
- if (scc.length > 1) {
339
- sccs.push(scc);
340
- } else if (scc.length === 1) {
341
- const selfNode = scc[0];
342
- const selfNeighbors = adjacency.get(selfNode) ?? [];
343
- if (selfNeighbors.includes(selfNode)) {
344
- sccs.push(scc);
345
- }
346
- }
306
+ return adjacency;
307
+ }
308
+ function isCyclicSCC(scc, adjacency) {
309
+ if (scc.length > 1) return true;
310
+ if (scc.length === 1) {
311
+ const selfNode = scc[0];
312
+ const selfNeighbors = adjacency.get(selfNode) ?? [];
313
+ return selfNeighbors.includes(selfNode);
314
+ }
315
+ return false;
316
+ }
317
+ function processNeighbors(node, neighbors, nodeMap, stack, adjacency, sccs, indexRef) {
318
+ for (const neighbor of neighbors) {
319
+ const neighborData = nodeMap.get(neighbor);
320
+ if (!neighborData) {
321
+ strongConnectImpl(neighbor, nodeMap, stack, adjacency, sccs, indexRef);
322
+ const nodeData = nodeMap.get(node);
323
+ const updatedNeighborData = nodeMap.get(neighbor);
324
+ nodeData.lowlink = Math.min(nodeData.lowlink, updatedNeighborData.lowlink);
325
+ } else if (neighborData.onStack) {
326
+ const nodeData = nodeMap.get(node);
327
+ nodeData.lowlink = Math.min(nodeData.lowlink, neighborData.index);
328
+ }
329
+ }
330
+ }
331
+ function strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef) {
332
+ nodeMap.set(node, { index: indexRef.value, lowlink: indexRef.value, onStack: true });
333
+ indexRef.value++;
334
+ stack.push(node);
335
+ processNeighbors(node, adjacency.get(node) ?? [], nodeMap, stack, adjacency, sccs, indexRef);
336
+ const nodeData = nodeMap.get(node);
337
+ if (nodeData.lowlink === nodeData.index) {
338
+ const scc = [];
339
+ let w;
340
+ do {
341
+ w = stack.pop();
342
+ nodeMap.get(w).onStack = false;
343
+ scc.push(w);
344
+ } while (w !== node);
345
+ if (isCyclicSCC(scc, adjacency)) {
346
+ sccs.push(scc);
347
347
  }
348
348
  }
349
+ }
350
+ function tarjanSCC(graph) {
351
+ const nodeMap = /* @__PURE__ */ new Map();
352
+ const stack = [];
353
+ const sccs = [];
354
+ const indexRef = { value: 0 };
355
+ const adjacency = buildAdjacencyList(graph);
349
356
  for (const node of graph.nodes) {
350
357
  if (!nodeMap.has(node)) {
351
- strongConnect(node);
358
+ strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef);
352
359
  }
353
360
  }
354
361
  return sccs;
@@ -607,6 +614,31 @@ function aggregateByCategory(results) {
607
614
  }
608
615
  return map;
609
616
  }
617
+ function classifyViolations(violations, baselineViolationIds) {
618
+ const newViolations = [];
619
+ const preExisting = [];
620
+ for (const violation of violations) {
621
+ if (baselineViolationIds.has(violation.id)) {
622
+ preExisting.push(violation.id);
623
+ } else {
624
+ newViolations.push(violation);
625
+ }
626
+ }
627
+ return { newViolations, preExisting };
628
+ }
629
+ function findResolvedViolations(baselineCategory, currentViolationIds) {
630
+ if (!baselineCategory) return [];
631
+ return baselineCategory.violationIds.filter((id) => !currentViolationIds.has(id));
632
+ }
633
+ function collectOrphanedBaselineViolations(baseline, visitedCategories) {
634
+ const resolved = [];
635
+ for (const [category, baselineCategory] of Object.entries(baseline.metrics)) {
636
+ if (!visitedCategories.has(category) && baselineCategory) {
637
+ resolved.push(...baselineCategory.violationIds);
638
+ }
639
+ }
640
+ return resolved;
641
+ }
610
642
  function diff(current, baseline) {
611
643
  const aggregated = aggregateByCategory(current);
612
644
  const newViolations = [];
@@ -619,21 +651,11 @@ function diff(current, baseline) {
619
651
  const baselineCategory = baseline.metrics[category];
620
652
  const baselineViolationIds = new Set(baselineCategory?.violationIds ?? []);
621
653
  const baselineValue = baselineCategory?.value ?? 0;
622
- for (const violation of agg.violations) {
623
- if (baselineViolationIds.has(violation.id)) {
624
- preExisting.push(violation.id);
625
- } else {
626
- newViolations.push(violation);
627
- }
628
- }
654
+ const classified = classifyViolations(agg.violations, baselineViolationIds);
655
+ newViolations.push(...classified.newViolations);
656
+ preExisting.push(...classified.preExisting);
629
657
  const currentViolationIds = new Set(agg.violations.map((v) => v.id));
630
- if (baselineCategory) {
631
- for (const id of baselineCategory.violationIds) {
632
- if (!currentViolationIds.has(id)) {
633
- resolvedViolations.push(id);
634
- }
635
- }
636
- }
658
+ resolvedViolations.push(...findResolvedViolations(baselineCategory, currentViolationIds));
637
659
  if (baselineCategory && agg.value > baselineValue) {
638
660
  regressions.push({
639
661
  category,
@@ -643,16 +665,9 @@ function diff(current, baseline) {
643
665
  });
644
666
  }
645
667
  }
646
- for (const [category, baselineCategory] of Object.entries(baseline.metrics)) {
647
- if (!visitedCategories.has(category) && baselineCategory) {
648
- for (const id of baselineCategory.violationIds) {
649
- resolvedViolations.push(id);
650
- }
651
- }
652
- }
653
- const passed = newViolations.length === 0 && regressions.length === 0;
668
+ resolvedViolations.push(...collectOrphanedBaselineViolations(baseline, visitedCategories));
654
669
  return {
655
- passed,
670
+ passed: newViolations.length === 0 && regressions.length === 0,
656
671
  newViolations,
657
672
  resolvedViolations,
658
673
  preExisting,
@@ -667,22 +682,22 @@ var DEFAULT_THRESHOLDS = {
667
682
  fileLength: { info: 300 },
668
683
  hotspotPercentile: { error: 95 }
669
684
  };
685
+ var FUNCTION_PATTERNS = [
686
+ // function declarations: function name(params) {
687
+ /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
688
+ // method declarations: name(params) {
689
+ /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
690
+ // arrow functions assigned to const/let/var: const name = (params) =>
691
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
692
+ // arrow functions assigned to const/let/var with single param: const name = param =>
693
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
694
+ ];
670
695
  function extractFunctions(content) {
671
696
  const functions = [];
672
697
  const lines = content.split("\n");
673
- const patterns = [
674
- // function declarations: function name(params) {
675
- /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
676
- // method declarations: name(params) {
677
- /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
678
- // arrow functions assigned to const/let/var: const name = (params) =>
679
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
680
- // arrow functions assigned to const/let/var with single param: const name = param =>
681
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
682
- ];
683
698
  for (let i = 0; i < lines.length; i++) {
684
699
  const line = lines[i];
685
- for (const pattern of patterns) {
700
+ for (const pattern of FUNCTION_PATTERNS) {
686
701
  const match = line.match(pattern);
687
702
  if (match) {
688
703
  const name = match[1] ?? "anonymous";
@@ -771,26 +786,155 @@ function computeNestingDepth(body) {
771
786
  }
772
787
  return maxDepth;
773
788
  }
774
- async function detectComplexityViolations(snapshot, config, graphData) {
775
- const violations = [];
776
- const thresholds = {
789
+ function resolveThresholds(config) {
790
+ const userThresholds = config?.thresholds;
791
+ if (!userThresholds) return { ...DEFAULT_THRESHOLDS };
792
+ return {
777
793
  cyclomaticComplexity: {
778
- error: config?.thresholds?.cyclomaticComplexity?.error ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.error,
779
- warn: config?.thresholds?.cyclomaticComplexity?.warn ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.warn
794
+ ...DEFAULT_THRESHOLDS.cyclomaticComplexity,
795
+ ...stripUndefined(userThresholds.cyclomaticComplexity)
780
796
  },
781
797
  nestingDepth: {
782
- warn: config?.thresholds?.nestingDepth?.warn ?? DEFAULT_THRESHOLDS.nestingDepth.warn
798
+ ...DEFAULT_THRESHOLDS.nestingDepth,
799
+ ...stripUndefined(userThresholds.nestingDepth)
783
800
  },
784
801
  functionLength: {
785
- warn: config?.thresholds?.functionLength?.warn ?? DEFAULT_THRESHOLDS.functionLength.warn
802
+ ...DEFAULT_THRESHOLDS.functionLength,
803
+ ...stripUndefined(userThresholds.functionLength)
786
804
  },
787
805
  parameterCount: {
788
- warn: config?.thresholds?.parameterCount?.warn ?? DEFAULT_THRESHOLDS.parameterCount.warn
806
+ ...DEFAULT_THRESHOLDS.parameterCount,
807
+ ...stripUndefined(userThresholds.parameterCount)
789
808
  },
790
- fileLength: {
791
- info: config?.thresholds?.fileLength?.info ?? DEFAULT_THRESHOLDS.fileLength.info
792
- }
809
+ fileLength: { ...DEFAULT_THRESHOLDS.fileLength, ...stripUndefined(userThresholds.fileLength) }
810
+ };
811
+ }
812
+ function stripUndefined(obj) {
813
+ if (!obj) return {};
814
+ const result = {};
815
+ for (const [key, val] of Object.entries(obj)) {
816
+ if (val !== void 0) result[key] = val;
817
+ }
818
+ return result;
819
+ }
820
+ function checkFileLengthViolation(filePath, lineCount, threshold) {
821
+ if (lineCount <= threshold) return null;
822
+ return {
823
+ file: filePath,
824
+ function: "<file>",
825
+ line: 1,
826
+ metric: "fileLength",
827
+ value: lineCount,
828
+ threshold,
829
+ tier: 3,
830
+ severity: "info",
831
+ message: `File has ${lineCount} lines (threshold: ${threshold})`
832
+ };
833
+ }
834
+ function checkCyclomaticComplexity(filePath, fn, thresholds) {
835
+ const complexity = computeCyclomaticComplexity(fn.body);
836
+ if (complexity > thresholds.error) {
837
+ return {
838
+ file: filePath,
839
+ function: fn.name,
840
+ line: fn.line,
841
+ metric: "cyclomaticComplexity",
842
+ value: complexity,
843
+ threshold: thresholds.error,
844
+ tier: 1,
845
+ severity: "error",
846
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.error})`
847
+ };
848
+ }
849
+ if (complexity > thresholds.warn) {
850
+ return {
851
+ file: filePath,
852
+ function: fn.name,
853
+ line: fn.line,
854
+ metric: "cyclomaticComplexity",
855
+ value: complexity,
856
+ threshold: thresholds.warn,
857
+ tier: 2,
858
+ severity: "warning",
859
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.warn})`
860
+ };
861
+ }
862
+ return null;
863
+ }
864
+ function checkNestingDepth(filePath, fn, threshold) {
865
+ const depth = computeNestingDepth(fn.body);
866
+ if (depth <= threshold) return null;
867
+ return {
868
+ file: filePath,
869
+ function: fn.name,
870
+ line: fn.line,
871
+ metric: "nestingDepth",
872
+ value: depth,
873
+ threshold,
874
+ tier: 2,
875
+ severity: "warning",
876
+ message: `Function "${fn.name}" has nesting depth of ${depth} (threshold: ${threshold})`
877
+ };
878
+ }
879
+ function checkFunctionLength(filePath, fn, threshold) {
880
+ const fnLength = fn.endLine - fn.startLine + 1;
881
+ if (fnLength <= threshold) return null;
882
+ return {
883
+ file: filePath,
884
+ function: fn.name,
885
+ line: fn.line,
886
+ metric: "functionLength",
887
+ value: fnLength,
888
+ threshold,
889
+ tier: 2,
890
+ severity: "warning",
891
+ message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${threshold})`
892
+ };
893
+ }
894
+ function checkParameterCount(filePath, fn, threshold) {
895
+ if (fn.params <= threshold) return null;
896
+ return {
897
+ file: filePath,
898
+ function: fn.name,
899
+ line: fn.line,
900
+ metric: "parameterCount",
901
+ value: fn.params,
902
+ threshold,
903
+ tier: 2,
904
+ severity: "warning",
905
+ message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${threshold})`
906
+ };
907
+ }
908
+ function checkHotspot(filePath, fn, graphData) {
909
+ const hotspot = graphData.hotspots.find((h) => h.file === filePath && h.function === fn.name);
910
+ if (!hotspot || hotspot.hotspotScore <= graphData.percentile95Score) return null;
911
+ return {
912
+ file: filePath,
913
+ function: fn.name,
914
+ line: fn.line,
915
+ metric: "hotspotScore",
916
+ value: hotspot.hotspotScore,
917
+ threshold: graphData.percentile95Score,
918
+ tier: 1,
919
+ severity: "error",
920
+ message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
793
921
  };
922
+ }
923
+ function collectFunctionViolations(filePath, fn, thresholds, graphData) {
924
+ const checks = [
925
+ checkCyclomaticComplexity(filePath, fn, thresholds.cyclomaticComplexity),
926
+ checkNestingDepth(filePath, fn, thresholds.nestingDepth.warn),
927
+ checkFunctionLength(filePath, fn, thresholds.functionLength.warn),
928
+ checkParameterCount(filePath, fn, thresholds.parameterCount.warn)
929
+ ];
930
+ if (graphData) {
931
+ checks.push(checkHotspot(filePath, fn, graphData));
932
+ }
933
+ return checks.filter((v) => v !== null);
934
+ }
935
+ async function detectComplexityViolations(snapshot, config, graphData) {
936
+ const violations = [];
937
+ const thresholds = resolveThresholds(config);
794
938
  let totalFunctions = 0;
795
939
  for (const file of snapshot.files) {
796
940
  let content;
@@ -800,107 +944,16 @@ async function detectComplexityViolations(snapshot, config, graphData) {
800
944
  continue;
801
945
  }
802
946
  const lines = content.split("\n");
803
- if (lines.length > thresholds.fileLength.info) {
804
- violations.push({
805
- file: file.path,
806
- function: "<file>",
807
- line: 1,
808
- metric: "fileLength",
809
- value: lines.length,
810
- threshold: thresholds.fileLength.info,
811
- tier: 3,
812
- severity: "info",
813
- message: `File has ${lines.length} lines (threshold: ${thresholds.fileLength.info})`
814
- });
815
- }
947
+ const fileLenViolation = checkFileLengthViolation(
948
+ file.path,
949
+ lines.length,
950
+ thresholds.fileLength.info
951
+ );
952
+ if (fileLenViolation) violations.push(fileLenViolation);
816
953
  const functions = extractFunctions(content);
817
954
  totalFunctions += functions.length;
818
955
  for (const fn of functions) {
819
- const complexity = computeCyclomaticComplexity(fn.body);
820
- if (complexity > thresholds.cyclomaticComplexity.error) {
821
- violations.push({
822
- file: file.path,
823
- function: fn.name,
824
- line: fn.line,
825
- metric: "cyclomaticComplexity",
826
- value: complexity,
827
- threshold: thresholds.cyclomaticComplexity.error,
828
- tier: 1,
829
- severity: "error",
830
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.cyclomaticComplexity.error})`
831
- });
832
- } else if (complexity > thresholds.cyclomaticComplexity.warn) {
833
- violations.push({
834
- file: file.path,
835
- function: fn.name,
836
- line: fn.line,
837
- metric: "cyclomaticComplexity",
838
- value: complexity,
839
- threshold: thresholds.cyclomaticComplexity.warn,
840
- tier: 2,
841
- severity: "warning",
842
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.cyclomaticComplexity.warn})`
843
- });
844
- }
845
- const nestingDepth = computeNestingDepth(fn.body);
846
- if (nestingDepth > thresholds.nestingDepth.warn) {
847
- violations.push({
848
- file: file.path,
849
- function: fn.name,
850
- line: fn.line,
851
- metric: "nestingDepth",
852
- value: nestingDepth,
853
- threshold: thresholds.nestingDepth.warn,
854
- tier: 2,
855
- severity: "warning",
856
- message: `Function "${fn.name}" has nesting depth of ${nestingDepth} (threshold: ${thresholds.nestingDepth.warn})`
857
- });
858
- }
859
- const fnLength = fn.endLine - fn.startLine + 1;
860
- if (fnLength > thresholds.functionLength.warn) {
861
- violations.push({
862
- file: file.path,
863
- function: fn.name,
864
- line: fn.line,
865
- metric: "functionLength",
866
- value: fnLength,
867
- threshold: thresholds.functionLength.warn,
868
- tier: 2,
869
- severity: "warning",
870
- message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${thresholds.functionLength.warn})`
871
- });
872
- }
873
- if (fn.params > thresholds.parameterCount.warn) {
874
- violations.push({
875
- file: file.path,
876
- function: fn.name,
877
- line: fn.line,
878
- metric: "parameterCount",
879
- value: fn.params,
880
- threshold: thresholds.parameterCount.warn,
881
- tier: 2,
882
- severity: "warning",
883
- message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${thresholds.parameterCount.warn})`
884
- });
885
- }
886
- if (graphData) {
887
- const hotspot = graphData.hotspots.find(
888
- (h) => h.file === file.path && h.function === fn.name
889
- );
890
- if (hotspot && hotspot.hotspotScore > graphData.percentile95Score) {
891
- violations.push({
892
- file: file.path,
893
- function: fn.name,
894
- line: fn.line,
895
- metric: "hotspotScore",
896
- value: hotspot.hotspotScore,
897
- threshold: graphData.percentile95Score,
898
- tier: 1,
899
- severity: "error",
900
- message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
901
- });
902
- }
903
- }
956
+ violations.push(...collectFunctionViolations(file.path, fn, thresholds, graphData));
904
957
  }
905
958
  }
906
959
  const errorCount = violations.filter((v) => v.severity === "error").length;
@@ -1779,6 +1832,7 @@ import * as fs6 from "fs";
1779
1832
  import * as path3 from "path";
1780
1833
  import * as fs9 from "fs";
1781
1834
  import * as path6 from "path";
1835
+ import * as crypto from "crypto";
1782
1836
  import * as fs10 from "fs";
1783
1837
  import * as path7 from "path";
1784
1838
  import * as fs11 from "fs";
@@ -1788,26 +1842,35 @@ import * as path9 from "path";
1788
1842
  import { execSync as execSync2 } from "child_process";
1789
1843
  import * as fs13 from "fs";
1790
1844
  import * as path10 from "path";
1791
- import * as fs15 from "fs/promises";
1792
- import { z as z5 } from "zod";
1793
1845
  import * as fs14 from "fs";
1794
1846
  import * as path11 from "path";
1847
+ import * as fs15 from "fs";
1795
1848
  import * as path12 from "path";
1849
+ import * as fs16 from "fs";
1796
1850
  import * as path13 from "path";
1851
+ import { z as z5 } from "zod";
1852
+ import * as fs18 from "fs/promises";
1853
+ import { minimatch as minimatch4 } from "minimatch";
1854
+ import { z as z6 } from "zod";
1855
+ import * as fs17 from "fs";
1797
1856
  import * as path14 from "path";
1798
1857
  import * as path15 from "path";
1799
- import * as fs16 from "fs";
1800
1858
  import * as path16 from "path";
1801
- import { z as z6 } from "zod";
1802
- import * as fs17 from "fs/promises";
1803
1859
  import * as path17 from "path";
1804
- import * as fs18 from "fs/promises";
1805
1860
  import * as path18 from "path";
1806
- import * as ejs from "ejs";
1807
1861
  import * as fs19 from "fs";
1808
1862
  import * as path19 from "path";
1863
+ import { z as z7 } from "zod";
1864
+ import * as fs20 from "fs/promises";
1865
+ import * as path20 from "path";
1866
+ import * as fs21 from "fs/promises";
1867
+ import * as path21 from "path";
1868
+ import * as ejs from "ejs";
1869
+ import * as fs22 from "fs";
1870
+ import * as path22 from "path";
1809
1871
  import * as os from "os";
1810
1872
  import { spawn } from "child_process";
1873
+ import Parser from "web-tree-sitter";
1811
1874
  async function validateFileStructure(projectPath, conventions) {
1812
1875
  const missing = [];
1813
1876
  const unexpected = [];
@@ -1843,15 +1906,15 @@ function validateConfig(data, schema) {
1843
1906
  let message = "Configuration validation failed";
1844
1907
  const suggestions = [];
1845
1908
  if (firstError) {
1846
- const path20 = firstError.path.join(".");
1847
- const pathDisplay = path20 ? ` at "${path20}"` : "";
1909
+ const path23 = firstError.path.join(".");
1910
+ const pathDisplay = path23 ? ` at "${path23}"` : "";
1848
1911
  if (firstError.code === "invalid_type") {
1849
1912
  const received = firstError.received;
1850
1913
  const expected = firstError.expected;
1851
1914
  if (received === "undefined") {
1852
1915
  code = "MISSING_FIELD";
1853
1916
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
1854
- suggestions.push(`Field "${path20}" is required and must be of type "${expected}"`);
1917
+ suggestions.push(`Field "${path23}" is required and must be of type "${expected}"`);
1855
1918
  } else {
1856
1919
  code = "INVALID_TYPE";
1857
1920
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -1998,6 +2061,43 @@ function extractMarkdownLinks(content) {
1998
2061
  }
1999
2062
  return links;
2000
2063
  }
2064
+ function isDescriptionTerminator(trimmed) {
2065
+ return trimmed.startsWith("#") || trimmed.startsWith("-") || trimmed.startsWith("*") || trimmed.startsWith("```");
2066
+ }
2067
+ function extractDescription(sectionLines) {
2068
+ const descriptionLines = [];
2069
+ for (const line of sectionLines) {
2070
+ const trimmed = line.trim();
2071
+ if (trimmed === "") {
2072
+ if (descriptionLines.length > 0) break;
2073
+ continue;
2074
+ }
2075
+ if (isDescriptionTerminator(trimmed)) break;
2076
+ descriptionLines.push(trimmed);
2077
+ }
2078
+ return descriptionLines.length > 0 ? descriptionLines.join(" ") : void 0;
2079
+ }
2080
+ function buildAgentMapSection(section, lines) {
2081
+ const endIndex = section.endIndex ?? lines.length;
2082
+ const sectionLines = lines.slice(section.startIndex + 1, endIndex);
2083
+ const sectionContent = sectionLines.join("\n");
2084
+ const links = extractMarkdownLinks(sectionContent).map((link) => ({
2085
+ ...link,
2086
+ line: link.line + section.startIndex + 1,
2087
+ exists: false
2088
+ }));
2089
+ const result = {
2090
+ title: section.title,
2091
+ level: section.level,
2092
+ line: section.line,
2093
+ links
2094
+ };
2095
+ const description = extractDescription(sectionLines);
2096
+ if (description) {
2097
+ result.description = description;
2098
+ }
2099
+ return result;
2100
+ }
2001
2101
  function extractSections(content) {
2002
2102
  const lines = content.split("\n");
2003
2103
  const sections = [];
@@ -2010,7 +2110,6 @@ function extractSections(content) {
2010
2110
  title: match[2].trim(),
2011
2111
  level: match[1].length,
2012
2112
  line: i + 1,
2013
- // 1-indexed
2014
2113
  startIndex: i
2015
2114
  });
2016
2115
  }
@@ -2022,62 +2121,29 @@ function extractSections(content) {
2022
2121
  currentSection.endIndex = nextSection ? nextSection.startIndex : lines.length;
2023
2122
  }
2024
2123
  }
2025
- return sections.map((section) => {
2026
- const endIndex = section.endIndex ?? lines.length;
2027
- const sectionLines = lines.slice(section.startIndex + 1, endIndex);
2028
- const sectionContent = sectionLines.join("\n");
2029
- const links = extractMarkdownLinks(sectionContent).map((link) => ({
2030
- ...link,
2031
- line: link.line + section.startIndex + 1,
2032
- // Adjust line number
2033
- exists: false
2034
- // Will be set later by validateAgentsMap
2035
- }));
2036
- const descriptionLines = [];
2037
- for (const line of sectionLines) {
2038
- const trimmed = line.trim();
2039
- if (trimmed === "") {
2040
- if (descriptionLines.length > 0) break;
2041
- continue;
2042
- }
2043
- if (trimmed.startsWith("#")) break;
2044
- if (trimmed.startsWith("-") || trimmed.startsWith("*")) break;
2045
- if (trimmed.startsWith("```")) break;
2046
- descriptionLines.push(trimmed);
2047
- }
2048
- const result = {
2049
- title: section.title,
2050
- level: section.level,
2051
- line: section.line,
2052
- links
2053
- };
2054
- if (descriptionLines.length > 0) {
2055
- result.description = descriptionLines.join(" ");
2056
- }
2057
- return result;
2058
- });
2124
+ return sections.map((section) => buildAgentMapSection(section, lines));
2059
2125
  }
2060
- function isExternalLink(path20) {
2061
- return path20.startsWith("http://") || path20.startsWith("https://") || path20.startsWith("#") || path20.startsWith("mailto:");
2126
+ function isExternalLink(path23) {
2127
+ return path23.startsWith("http://") || path23.startsWith("https://") || path23.startsWith("#") || path23.startsWith("mailto:");
2062
2128
  }
2063
2129
  function resolveLinkPath(linkPath, baseDir) {
2064
2130
  return linkPath.startsWith(".") ? join4(baseDir, linkPath) : linkPath;
2065
2131
  }
2066
- async function validateAgentsMap(path20 = "./AGENTS.md") {
2067
- const contentResult = await readFileContent(path20);
2132
+ async function validateAgentsMap(path23 = "./AGENTS.md") {
2133
+ const contentResult = await readFileContent(path23);
2068
2134
  if (!contentResult.ok) {
2069
2135
  return Err(
2070
2136
  createError(
2071
2137
  "PARSE_ERROR",
2072
2138
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
2073
- { path: path20 },
2139
+ { path: path23 },
2074
2140
  ["Ensure the file exists", "Check file permissions"]
2075
2141
  )
2076
2142
  );
2077
2143
  }
2078
2144
  const content = contentResult.value;
2079
2145
  const sections = extractSections(content);
2080
- const baseDir = dirname4(path20);
2146
+ const baseDir = dirname4(path23);
2081
2147
  const sectionTitles = sections.map((s) => s.title);
2082
2148
  const missingSections = REQUIRED_SECTIONS.filter(
2083
2149
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -2211,8 +2277,8 @@ async function checkDocCoverage(domain, options = {}) {
2211
2277
  );
2212
2278
  }
2213
2279
  }
2214
- function suggestFix(path20, existingFiles) {
2215
- const targetName = basename2(path20).toLowerCase();
2280
+ function suggestFix(path23, existingFiles) {
2281
+ const targetName = basename2(path23).toLowerCase();
2216
2282
  const similar = existingFiles.find((file) => {
2217
2283
  const fileName = basename2(file).toLowerCase();
2218
2284
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -2220,7 +2286,7 @@ function suggestFix(path20, existingFiles) {
2220
2286
  if (similar) {
2221
2287
  return `Did you mean "${similar}"?`;
2222
2288
  }
2223
- return `Create the file "${path20}" or remove the link`;
2289
+ return `Create the file "${path23}" or remove the link`;
2224
2290
  }
2225
2291
  async function validateKnowledgeMap(rootDir = process.cwd()) {
2226
2292
  const agentsPath = join22(rootDir, "AGENTS.md");
@@ -2563,8 +2629,8 @@ function createBoundaryValidator(schema, name) {
2563
2629
  return Ok(result.data);
2564
2630
  }
2565
2631
  const suggestions = result.error.issues.map((issue) => {
2566
- const path20 = issue.path.join(".");
2567
- return path20 ? `${path20}: ${issue.message}` : issue.message;
2632
+ const path23 = issue.path.join(".");
2633
+ return path23 ? `${path23}: ${issue.message}` : issue.message;
2568
2634
  });
2569
2635
  return Err(
2570
2636
  createError(
@@ -2774,175 +2840,183 @@ function stringArraysEqual(a, b) {
2774
2840
  const sortedB = [...b].sort();
2775
2841
  return sortedA.every((val, i) => val === sortedB[i]);
2776
2842
  }
2777
- function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
2778
- const config = { ...localConfig };
2779
- const contributions = {};
2780
- const conflicts = [];
2781
- if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
2782
- const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
2783
- const mergedLayers = [...localLayers];
2784
- const contributedLayerNames = [];
2785
- for (const bundleLayer of bundleConstraints.layers) {
2786
- const existing = localLayers.find((l) => l.name === bundleLayer.name);
2787
- if (!existing) {
2788
- mergedLayers.push(bundleLayer);
2789
- contributedLayerNames.push(bundleLayer.name);
2790
- } else {
2791
- const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
2792
- if (!same) {
2793
- conflicts.push({
2794
- section: "layers",
2795
- key: bundleLayer.name,
2796
- localValue: existing,
2797
- packageValue: bundleLayer,
2798
- description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
2799
- });
2800
- }
2843
+ function mergeLayers(localConfig, bundleLayers, config, contributions, conflicts) {
2844
+ const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
2845
+ const mergedLayers = [...localLayers];
2846
+ const contributedLayerNames = [];
2847
+ for (const bundleLayer of bundleLayers) {
2848
+ const existing = localLayers.find((l) => l.name === bundleLayer.name);
2849
+ if (!existing) {
2850
+ mergedLayers.push(bundleLayer);
2851
+ contributedLayerNames.push(bundleLayer.name);
2852
+ } else {
2853
+ const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
2854
+ if (!same) {
2855
+ conflicts.push({
2856
+ section: "layers",
2857
+ key: bundleLayer.name,
2858
+ localValue: existing,
2859
+ packageValue: bundleLayer,
2860
+ description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
2861
+ });
2862
+ }
2863
+ }
2864
+ }
2865
+ config.layers = mergedLayers;
2866
+ if (contributedLayerNames.length > 0) contributions.layers = contributedLayerNames;
2867
+ }
2868
+ function mergeForbiddenImports(localConfig, bundleRules, config, contributions, conflicts) {
2869
+ const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
2870
+ const mergedFI = [...localFI];
2871
+ const contributedFromKeys = [];
2872
+ for (const bundleRule of bundleRules) {
2873
+ const existing = localFI.find((r) => r.from === bundleRule.from);
2874
+ if (!existing) {
2875
+ const entry = { from: bundleRule.from, disallow: bundleRule.disallow };
2876
+ if (bundleRule.message !== void 0) entry.message = bundleRule.message;
2877
+ mergedFI.push(entry);
2878
+ contributedFromKeys.push(bundleRule.from);
2879
+ } else {
2880
+ if (!stringArraysEqual(existing.disallow, bundleRule.disallow)) {
2881
+ conflicts.push({
2882
+ section: "forbiddenImports",
2883
+ key: bundleRule.from,
2884
+ localValue: existing,
2885
+ packageValue: bundleRule,
2886
+ description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
2887
+ });
2801
2888
  }
2802
2889
  }
2803
- config.layers = mergedLayers;
2804
- if (contributedLayerNames.length > 0) {
2805
- contributions.layers = contributedLayerNames;
2890
+ }
2891
+ config.forbiddenImports = mergedFI;
2892
+ if (contributedFromKeys.length > 0) contributions.forbiddenImports = contributedFromKeys;
2893
+ }
2894
+ function mergeBoundaries(localConfig, bundleBoundaries, config, contributions) {
2895
+ const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
2896
+ const localSchemas = new Set(localBoundaries.requireSchema ?? []);
2897
+ const newSchemas = [];
2898
+ for (const schema of bundleBoundaries.requireSchema ?? []) {
2899
+ if (!localSchemas.has(schema)) {
2900
+ newSchemas.push(schema);
2901
+ localSchemas.add(schema);
2902
+ }
2903
+ }
2904
+ config.boundaries = { requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas] };
2905
+ if (newSchemas.length > 0) contributions.boundaries = newSchemas;
2906
+ }
2907
+ function mergeArchitecture(localConfig, bundleArch, config, contributions, conflicts) {
2908
+ const localArch = localConfig.architecture ?? { thresholds: {}, modules: {} };
2909
+ const mergedThresholds = { ...localArch.thresholds };
2910
+ const contributedThresholdKeys = [];
2911
+ for (const [category, value] of Object.entries(bundleArch.thresholds ?? {})) {
2912
+ if (!(category in mergedThresholds)) {
2913
+ mergedThresholds[category] = value;
2914
+ contributedThresholdKeys.push(category);
2915
+ } else if (!deepEqual(mergedThresholds[category], value)) {
2916
+ conflicts.push({
2917
+ section: "architecture.thresholds",
2918
+ key: category,
2919
+ localValue: mergedThresholds[category],
2920
+ packageValue: value,
2921
+ description: `Architecture threshold '${category}' already exists locally with a different value`
2922
+ });
2806
2923
  }
2807
2924
  }
2808
- if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
2809
- const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
2810
- const mergedFI = [...localFI];
2811
- const contributedFromKeys = [];
2812
- for (const bundleRule of bundleConstraints.forbiddenImports) {
2813
- const existing = localFI.find((r) => r.from === bundleRule.from);
2814
- if (!existing) {
2815
- const entry = {
2816
- from: bundleRule.from,
2817
- disallow: bundleRule.disallow
2818
- };
2819
- if (bundleRule.message !== void 0) {
2820
- entry.message = bundleRule.message;
2821
- }
2822
- mergedFI.push(entry);
2823
- contributedFromKeys.push(bundleRule.from);
2824
- } else {
2825
- const same = stringArraysEqual(existing.disallow, bundleRule.disallow);
2826
- if (!same) {
2925
+ const mergedModules = { ...localArch.modules };
2926
+ const contributedModuleKeys = [];
2927
+ for (const [modulePath, bundleCategoryMap] of Object.entries(bundleArch.modules ?? {})) {
2928
+ if (!(modulePath in mergedModules)) {
2929
+ mergedModules[modulePath] = bundleCategoryMap;
2930
+ for (const cat of Object.keys(bundleCategoryMap))
2931
+ contributedModuleKeys.push(`${modulePath}:${cat}`);
2932
+ } else {
2933
+ const mergedCategoryMap = { ...mergedModules[modulePath] };
2934
+ for (const [category, value] of Object.entries(bundleCategoryMap)) {
2935
+ if (!(category in mergedCategoryMap)) {
2936
+ mergedCategoryMap[category] = value;
2937
+ contributedModuleKeys.push(`${modulePath}:${category}`);
2938
+ } else if (!deepEqual(mergedCategoryMap[category], value)) {
2827
2939
  conflicts.push({
2828
- section: "forbiddenImports",
2829
- key: bundleRule.from,
2830
- localValue: existing,
2831
- packageValue: bundleRule,
2832
- description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
2940
+ section: "architecture.modules",
2941
+ key: `${modulePath}:${category}`,
2942
+ localValue: mergedCategoryMap[category],
2943
+ packageValue: value,
2944
+ description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
2833
2945
  });
2834
2946
  }
2835
2947
  }
2948
+ mergedModules[modulePath] = mergedCategoryMap;
2949
+ }
2950
+ }
2951
+ config.architecture = { ...localArch, thresholds: mergedThresholds, modules: mergedModules };
2952
+ if (contributedThresholdKeys.length > 0)
2953
+ contributions["architecture.thresholds"] = contributedThresholdKeys;
2954
+ if (contributedModuleKeys.length > 0)
2955
+ contributions["architecture.modules"] = contributedModuleKeys;
2956
+ }
2957
+ function mergeSecurityRules(localConfig, bundleRules, config, contributions, conflicts) {
2958
+ const localSecurity = localConfig.security ?? { rules: {} };
2959
+ const localRules = localSecurity.rules ?? {};
2960
+ const mergedRules = { ...localRules };
2961
+ const contributedRuleIds = [];
2962
+ for (const [ruleId, severity] of Object.entries(bundleRules)) {
2963
+ if (!(ruleId in mergedRules)) {
2964
+ mergedRules[ruleId] = severity;
2965
+ contributedRuleIds.push(ruleId);
2966
+ } else if (mergedRules[ruleId] !== severity) {
2967
+ conflicts.push({
2968
+ section: "security.rules",
2969
+ key: ruleId,
2970
+ localValue: mergedRules[ruleId],
2971
+ packageValue: severity,
2972
+ description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
2973
+ });
2836
2974
  }
2837
- config.forbiddenImports = mergedFI;
2838
- if (contributedFromKeys.length > 0) {
2839
- contributions.forbiddenImports = contributedFromKeys;
2840
- }
2975
+ }
2976
+ config.security = { ...localSecurity, rules: mergedRules };
2977
+ if (contributedRuleIds.length > 0) contributions["security.rules"] = contributedRuleIds;
2978
+ }
2979
+ function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
2980
+ const config = { ...localConfig };
2981
+ const contributions = {};
2982
+ const conflicts = [];
2983
+ if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
2984
+ mergeLayers(localConfig, bundleConstraints.layers, config, contributions, conflicts);
2985
+ }
2986
+ if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
2987
+ mergeForbiddenImports(
2988
+ localConfig,
2989
+ bundleConstraints.forbiddenImports,
2990
+ config,
2991
+ contributions,
2992
+ conflicts
2993
+ );
2841
2994
  }
2842
2995
  if (bundleConstraints.boundaries) {
2843
- const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
2844
- const localSchemas = new Set(localBoundaries.requireSchema ?? []);
2845
- const bundleSchemas = bundleConstraints.boundaries.requireSchema ?? [];
2846
- const newSchemas = [];
2847
- for (const schema of bundleSchemas) {
2848
- if (!localSchemas.has(schema)) {
2849
- newSchemas.push(schema);
2850
- localSchemas.add(schema);
2851
- }
2852
- }
2853
- config.boundaries = {
2854
- requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas]
2855
- };
2856
- if (newSchemas.length > 0) {
2857
- contributions.boundaries = newSchemas;
2858
- }
2996
+ mergeBoundaries(
2997
+ localConfig,
2998
+ bundleConstraints.boundaries,
2999
+ config,
3000
+ contributions
3001
+ );
2859
3002
  }
2860
3003
  if (bundleConstraints.architecture) {
2861
- const localArch = localConfig.architecture ?? {
2862
- thresholds: {},
2863
- modules: {}
2864
- };
2865
- const mergedThresholds = { ...localArch.thresholds };
2866
- const contributedThresholdKeys = [];
2867
- const bundleThresholds = bundleConstraints.architecture.thresholds ?? {};
2868
- for (const [category, value] of Object.entries(bundleThresholds)) {
2869
- if (!(category in mergedThresholds)) {
2870
- mergedThresholds[category] = value;
2871
- contributedThresholdKeys.push(category);
2872
- } else if (!deepEqual(mergedThresholds[category], value)) {
2873
- conflicts.push({
2874
- section: "architecture.thresholds",
2875
- key: category,
2876
- localValue: mergedThresholds[category],
2877
- packageValue: value,
2878
- description: `Architecture threshold '${category}' already exists locally with a different value`
2879
- });
2880
- }
2881
- }
2882
- const mergedModules = { ...localArch.modules };
2883
- const contributedModuleKeys = [];
2884
- const bundleModules = bundleConstraints.architecture.modules ?? {};
2885
- for (const [modulePath, bundleCategoryMap] of Object.entries(bundleModules)) {
2886
- if (!(modulePath in mergedModules)) {
2887
- mergedModules[modulePath] = bundleCategoryMap;
2888
- for (const cat of Object.keys(bundleCategoryMap)) {
2889
- contributedModuleKeys.push(`${modulePath}:${cat}`);
2890
- }
2891
- } else {
2892
- const localCategoryMap = mergedModules[modulePath];
2893
- const mergedCategoryMap = { ...localCategoryMap };
2894
- for (const [category, value] of Object.entries(bundleCategoryMap)) {
2895
- if (!(category in mergedCategoryMap)) {
2896
- mergedCategoryMap[category] = value;
2897
- contributedModuleKeys.push(`${modulePath}:${category}`);
2898
- } else if (!deepEqual(mergedCategoryMap[category], value)) {
2899
- conflicts.push({
2900
- section: "architecture.modules",
2901
- key: `${modulePath}:${category}`,
2902
- localValue: mergedCategoryMap[category],
2903
- packageValue: value,
2904
- description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
2905
- });
2906
- }
2907
- }
2908
- mergedModules[modulePath] = mergedCategoryMap;
2909
- }
2910
- }
2911
- config.architecture = {
2912
- ...localArch,
2913
- thresholds: mergedThresholds,
2914
- modules: mergedModules
2915
- };
2916
- if (contributedThresholdKeys.length > 0) {
2917
- contributions["architecture.thresholds"] = contributedThresholdKeys;
2918
- }
2919
- if (contributedModuleKeys.length > 0) {
2920
- contributions["architecture.modules"] = contributedModuleKeys;
2921
- }
3004
+ mergeArchitecture(
3005
+ localConfig,
3006
+ bundleConstraints.architecture,
3007
+ config,
3008
+ contributions,
3009
+ conflicts
3010
+ );
2922
3011
  }
2923
3012
  if (bundleConstraints.security?.rules) {
2924
- const localSecurity = localConfig.security ?? { rules: {} };
2925
- const localRules = localSecurity.rules ?? {};
2926
- const mergedRules = { ...localRules };
2927
- const contributedRuleIds = [];
2928
- for (const [ruleId, severity] of Object.entries(bundleConstraints.security.rules)) {
2929
- if (!(ruleId in mergedRules)) {
2930
- mergedRules[ruleId] = severity;
2931
- contributedRuleIds.push(ruleId);
2932
- } else if (mergedRules[ruleId] !== severity) {
2933
- conflicts.push({
2934
- section: "security.rules",
2935
- key: ruleId,
2936
- localValue: mergedRules[ruleId],
2937
- packageValue: severity,
2938
- description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
2939
- });
2940
- }
2941
- }
2942
- config.security = { ...localSecurity, rules: mergedRules };
2943
- if (contributedRuleIds.length > 0) {
2944
- contributions["security.rules"] = contributedRuleIds;
2945
- }
3013
+ mergeSecurityRules(
3014
+ localConfig,
3015
+ bundleConstraints.security.rules,
3016
+ config,
3017
+ contributions,
3018
+ conflicts
3019
+ );
2946
3020
  }
2947
3021
  return { config, contributions, conflicts };
2948
3022
  }
@@ -3091,14 +3165,84 @@ function walk(node, visitor) {
3091
3165
  }
3092
3166
  }
3093
3167
  }
3168
+ function makeLocation(node) {
3169
+ return {
3170
+ file: "",
3171
+ line: node.loc?.start.line ?? 0,
3172
+ column: node.loc?.start.column ?? 0
3173
+ };
3174
+ }
3175
+ function processImportSpecifiers(importDecl, imp) {
3176
+ for (const spec of importDecl.specifiers) {
3177
+ if (spec.type === "ImportDefaultSpecifier") {
3178
+ imp.default = spec.local.name;
3179
+ } else if (spec.type === "ImportNamespaceSpecifier") {
3180
+ imp.namespace = spec.local.name;
3181
+ } else if (spec.type === "ImportSpecifier") {
3182
+ imp.specifiers.push(spec.local.name);
3183
+ if (spec.importKind === "type") {
3184
+ imp.kind = "type";
3185
+ }
3186
+ }
3187
+ }
3188
+ }
3189
+ function getExportedName(exported) {
3190
+ return exported.type === "Identifier" ? exported.name : String(exported.value);
3191
+ }
3192
+ function processReExportSpecifiers(exportDecl, exports) {
3193
+ for (const spec of exportDecl.specifiers) {
3194
+ if (spec.type !== "ExportSpecifier") continue;
3195
+ exports.push({
3196
+ name: getExportedName(spec.exported),
3197
+ type: "named",
3198
+ location: makeLocation(exportDecl),
3199
+ isReExport: true,
3200
+ source: exportDecl.source.value
3201
+ });
3202
+ }
3203
+ }
3204
+ function processExportDeclaration(exportDecl, exports) {
3205
+ const decl = exportDecl.declaration;
3206
+ if (!decl) return;
3207
+ if (decl.type === "VariableDeclaration") {
3208
+ for (const declarator of decl.declarations) {
3209
+ if (declarator.id.type === "Identifier") {
3210
+ exports.push({
3211
+ name: declarator.id.name,
3212
+ type: "named",
3213
+ location: makeLocation(decl),
3214
+ isReExport: false
3215
+ });
3216
+ }
3217
+ }
3218
+ } else if ((decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") && decl.id) {
3219
+ exports.push({
3220
+ name: decl.id.name,
3221
+ type: "named",
3222
+ location: makeLocation(decl),
3223
+ isReExport: false
3224
+ });
3225
+ }
3226
+ }
3227
+ function processExportListSpecifiers(exportDecl, exports) {
3228
+ for (const spec of exportDecl.specifiers) {
3229
+ if (spec.type !== "ExportSpecifier") continue;
3230
+ exports.push({
3231
+ name: getExportedName(spec.exported),
3232
+ type: "named",
3233
+ location: makeLocation(exportDecl),
3234
+ isReExport: false
3235
+ });
3236
+ }
3237
+ }
3094
3238
  var TypeScriptParser = class {
3095
3239
  name = "typescript";
3096
3240
  extensions = [".ts", ".tsx", ".mts", ".cts"];
3097
- async parseFile(path20) {
3098
- const contentResult = await readFileContent(path20);
3241
+ async parseFile(path23) {
3242
+ const contentResult = await readFileContent(path23);
3099
3243
  if (!contentResult.ok) {
3100
3244
  return Err(
3101
- createParseError("NOT_FOUND", `File not found: ${path20}`, { path: path20 }, [
3245
+ createParseError("NOT_FOUND", `File not found: ${path23}`, { path: path23 }, [
3102
3246
  "Check that the file exists",
3103
3247
  "Verify the path is correct"
3104
3248
  ])
@@ -3108,7 +3252,7 @@ var TypeScriptParser = class {
3108
3252
  const ast = parse(contentResult.value, {
3109
3253
  loc: true,
3110
3254
  range: true,
3111
- jsx: path20.endsWith(".tsx"),
3255
+ jsx: path23.endsWith(".tsx"),
3112
3256
  errorOnUnknownASTType: false
3113
3257
  });
3114
3258
  return Ok({
@@ -3119,7 +3263,7 @@ var TypeScriptParser = class {
3119
3263
  } catch (e) {
3120
3264
  const error = e;
3121
3265
  return Err(
3122
- createParseError("SYNTAX_ERROR", `Failed to parse ${path20}: ${error.message}`, { path: path20 }, [
3266
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path23}: ${error.message}`, { path: path23 }, [
3123
3267
  "Check for syntax errors in the file",
3124
3268
  "Ensure valid TypeScript syntax"
3125
3269
  ])
@@ -3135,26 +3279,12 @@ var TypeScriptParser = class {
3135
3279
  const imp = {
3136
3280
  source: importDecl.source.value,
3137
3281
  specifiers: [],
3138
- location: {
3139
- file: "",
3140
- line: importDecl.loc?.start.line ?? 0,
3141
- column: importDecl.loc?.start.column ?? 0
3142
- },
3282
+ location: makeLocation(importDecl),
3143
3283
  kind: importDecl.importKind === "type" ? "type" : "value"
3144
3284
  };
3145
- for (const spec of importDecl.specifiers) {
3146
- if (spec.type === "ImportDefaultSpecifier") {
3147
- imp.default = spec.local.name;
3148
- } else if (spec.type === "ImportNamespaceSpecifier") {
3149
- imp.namespace = spec.local.name;
3150
- } else if (spec.type === "ImportSpecifier") {
3151
- imp.specifiers.push(spec.local.name);
3152
- if (spec.importKind === "type") {
3153
- imp.kind = "type";
3154
- }
3155
- }
3156
- }
3285
+ processImportSpecifiers(importDecl, imp);
3157
3286
  imports.push(imp);
3287
+ return;
3158
3288
  }
3159
3289
  if (node.type === "ImportExpression") {
3160
3290
  const importExpr = node;
@@ -3162,11 +3292,7 @@ var TypeScriptParser = class {
3162
3292
  imports.push({
3163
3293
  source: importExpr.source.value,
3164
3294
  specifiers: [],
3165
- location: {
3166
- file: "",
3167
- line: importExpr.loc?.start.line ?? 0,
3168
- column: importExpr.loc?.start.column ?? 0
3169
- },
3295
+ location: makeLocation(importExpr),
3170
3296
  kind: "value"
3171
3297
  });
3172
3298
  }
@@ -3181,97 +3307,29 @@ var TypeScriptParser = class {
3181
3307
  if (node.type === "ExportNamedDeclaration") {
3182
3308
  const exportDecl = node;
3183
3309
  if (exportDecl.source) {
3184
- for (const spec of exportDecl.specifiers) {
3185
- if (spec.type === "ExportSpecifier") {
3186
- const exported = spec.exported;
3187
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
3188
- exports.push({
3189
- name,
3190
- type: "named",
3191
- location: {
3192
- file: "",
3193
- line: exportDecl.loc?.start.line ?? 0,
3194
- column: exportDecl.loc?.start.column ?? 0
3195
- },
3196
- isReExport: true,
3197
- source: exportDecl.source.value
3198
- });
3199
- }
3200
- }
3310
+ processReExportSpecifiers(exportDecl, exports);
3201
3311
  return;
3202
3312
  }
3203
- if (exportDecl.declaration) {
3204
- const decl = exportDecl.declaration;
3205
- if (decl.type === "VariableDeclaration") {
3206
- for (const declarator of decl.declarations) {
3207
- if (declarator.id.type === "Identifier") {
3208
- exports.push({
3209
- name: declarator.id.name,
3210
- type: "named",
3211
- location: {
3212
- file: "",
3213
- line: decl.loc?.start.line ?? 0,
3214
- column: decl.loc?.start.column ?? 0
3215
- },
3216
- isReExport: false
3217
- });
3218
- }
3219
- }
3220
- } else if (decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") {
3221
- if (decl.id) {
3222
- exports.push({
3223
- name: decl.id.name,
3224
- type: "named",
3225
- location: {
3226
- file: "",
3227
- line: decl.loc?.start.line ?? 0,
3228
- column: decl.loc?.start.column ?? 0
3229
- },
3230
- isReExport: false
3231
- });
3232
- }
3233
- }
3234
- }
3235
- for (const spec of exportDecl.specifiers) {
3236
- if (spec.type === "ExportSpecifier") {
3237
- const exported = spec.exported;
3238
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
3239
- exports.push({
3240
- name,
3241
- type: "named",
3242
- location: {
3243
- file: "",
3244
- line: exportDecl.loc?.start.line ?? 0,
3245
- column: exportDecl.loc?.start.column ?? 0
3246
- },
3247
- isReExport: false
3248
- });
3249
- }
3250
- }
3313
+ processExportDeclaration(exportDecl, exports);
3314
+ processExportListSpecifiers(exportDecl, exports);
3315
+ return;
3251
3316
  }
3252
3317
  if (node.type === "ExportDefaultDeclaration") {
3253
3318
  const exportDecl = node;
3254
3319
  exports.push({
3255
3320
  name: "default",
3256
3321
  type: "default",
3257
- location: {
3258
- file: "",
3259
- line: exportDecl.loc?.start.line ?? 0,
3260
- column: exportDecl.loc?.start.column ?? 0
3261
- },
3322
+ location: makeLocation(exportDecl),
3262
3323
  isReExport: false
3263
3324
  });
3325
+ return;
3264
3326
  }
3265
3327
  if (node.type === "ExportAllDeclaration") {
3266
3328
  const exportDecl = node;
3267
3329
  exports.push({
3268
3330
  name: exportDecl.exported?.name ?? "*",
3269
3331
  type: "namespace",
3270
- location: {
3271
- file: "",
3272
- line: exportDecl.loc?.start.line ?? 0,
3273
- column: exportDecl.loc?.start.column ?? 0
3274
- },
3332
+ location: makeLocation(exportDecl),
3275
3333
  isReExport: true,
3276
3334
  source: exportDecl.source.value
3277
3335
  });
@@ -3283,10 +3341,27 @@ var TypeScriptParser = class {
3283
3341
  return Ok({ available: true, version: "7.0.0" });
3284
3342
  }
3285
3343
  };
3344
+ function collectFieldEntries(rootDir, field) {
3345
+ if (typeof field === "string") return [resolve3(rootDir, field)];
3346
+ if (typeof field === "object" && field !== null) {
3347
+ return Object.values(field).filter((v) => typeof v === "string").map((v) => resolve3(rootDir, v));
3348
+ }
3349
+ return [];
3350
+ }
3351
+ function extractPackageEntries(rootDir, pkg) {
3352
+ const entries = [];
3353
+ entries.push(...collectFieldEntries(rootDir, pkg["exports"]));
3354
+ if (entries.length === 0 && typeof pkg["main"] === "string") {
3355
+ entries.push(resolve3(rootDir, pkg["main"]));
3356
+ }
3357
+ if (pkg["bin"]) {
3358
+ entries.push(...collectFieldEntries(rootDir, pkg["bin"]));
3359
+ }
3360
+ return entries;
3361
+ }
3286
3362
  async function resolveEntryPoints(rootDir, explicitEntries) {
3287
3363
  if (explicitEntries && explicitEntries.length > 0) {
3288
- const resolved = explicitEntries.map((e) => resolve3(rootDir, e));
3289
- return Ok(resolved);
3364
+ return Ok(explicitEntries.map((e) => resolve3(rootDir, e)));
3290
3365
  }
3291
3366
  const pkgPath = join32(rootDir, "package.json");
3292
3367
  if (await fileExists(pkgPath)) {
@@ -3294,38 +3369,8 @@ async function resolveEntryPoints(rootDir, explicitEntries) {
3294
3369
  if (pkgContent.ok) {
3295
3370
  try {
3296
3371
  const pkg = JSON.parse(pkgContent.value);
3297
- const entries = [];
3298
- if (pkg["exports"]) {
3299
- const exports = pkg["exports"];
3300
- if (typeof exports === "string") {
3301
- entries.push(resolve3(rootDir, exports));
3302
- } else if (typeof exports === "object" && exports !== null) {
3303
- for (const value of Object.values(exports)) {
3304
- if (typeof value === "string") {
3305
- entries.push(resolve3(rootDir, value));
3306
- }
3307
- }
3308
- }
3309
- }
3310
- const main = pkg["main"];
3311
- if (typeof main === "string" && entries.length === 0) {
3312
- entries.push(resolve3(rootDir, main));
3313
- }
3314
- const bin = pkg["bin"];
3315
- if (bin) {
3316
- if (typeof bin === "string") {
3317
- entries.push(resolve3(rootDir, bin));
3318
- } else if (typeof bin === "object") {
3319
- for (const value of Object.values(bin)) {
3320
- if (typeof value === "string") {
3321
- entries.push(resolve3(rootDir, value));
3322
- }
3323
- }
3324
- }
3325
- }
3326
- if (entries.length > 0) {
3327
- return Ok(entries);
3328
- }
3372
+ const entries = extractPackageEntries(rootDir, pkg);
3373
+ if (entries.length > 0) return Ok(entries);
3329
3374
  } catch {
3330
3375
  }
3331
3376
  }
@@ -3399,66 +3444,49 @@ function extractInlineRefs(content) {
3399
3444
  }
3400
3445
  return refs;
3401
3446
  }
3402
- async function parseDocumentationFile(path20) {
3403
- const contentResult = await readFileContent(path20);
3447
+ async function parseDocumentationFile(path23) {
3448
+ const contentResult = await readFileContent(path23);
3404
3449
  if (!contentResult.ok) {
3405
3450
  return Err(
3406
3451
  createEntropyError(
3407
3452
  "PARSE_ERROR",
3408
- `Failed to read documentation file: ${path20}`,
3409
- { file: path20 },
3453
+ `Failed to read documentation file: ${path23}`,
3454
+ { file: path23 },
3410
3455
  ["Check that the file exists"]
3411
3456
  )
3412
3457
  );
3413
3458
  }
3414
3459
  const content = contentResult.value;
3415
- const type = path20.endsWith(".md") ? "markdown" : "text";
3460
+ const type = path23.endsWith(".md") ? "markdown" : "text";
3416
3461
  return Ok({
3417
- path: path20,
3462
+ path: path23,
3418
3463
  type,
3419
3464
  content,
3420
3465
  codeBlocks: extractCodeBlocks(content),
3421
3466
  inlineRefs: extractInlineRefs(content)
3422
3467
  });
3423
3468
  }
3469
+ function makeInternalSymbol(name, type, line) {
3470
+ return { name, type, line, references: 0, calledBy: [] };
3471
+ }
3472
+ function extractSymbolsFromNode(node) {
3473
+ const line = node.loc?.start?.line || 0;
3474
+ if (node.type === "FunctionDeclaration" && node.id?.name) {
3475
+ return [makeInternalSymbol(node.id.name, "function", line)];
3476
+ }
3477
+ if (node.type === "VariableDeclaration") {
3478
+ return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
3479
+ }
3480
+ if (node.type === "ClassDeclaration" && node.id?.name) {
3481
+ return [makeInternalSymbol(node.id.name, "class", line)];
3482
+ }
3483
+ return [];
3484
+ }
3424
3485
  function extractInternalSymbols(ast) {
3425
- const symbols = [];
3426
3486
  const body = ast.body;
3427
- if (!body?.body) return symbols;
3428
- for (const node of body.body) {
3429
- if (node.type === "FunctionDeclaration" && node.id?.name) {
3430
- symbols.push({
3431
- name: node.id.name,
3432
- type: "function",
3433
- line: node.loc?.start?.line || 0,
3434
- references: 0,
3435
- calledBy: []
3436
- });
3437
- }
3438
- if (node.type === "VariableDeclaration") {
3439
- for (const decl of node.declarations || []) {
3440
- if (decl.id?.name) {
3441
- symbols.push({
3442
- name: decl.id.name,
3443
- type: "variable",
3444
- line: node.loc?.start?.line || 0,
3445
- references: 0,
3446
- calledBy: []
3447
- });
3448
- }
3449
- }
3450
- }
3451
- if (node.type === "ClassDeclaration" && node.id?.name) {
3452
- symbols.push({
3453
- name: node.id.name,
3454
- type: "class",
3455
- line: node.loc?.start?.line || 0,
3456
- references: 0,
3457
- calledBy: []
3458
- });
3459
- }
3460
- }
3461
- return symbols;
3487
+ if (!body?.body) return [];
3488
+ const nodes = body.body;
3489
+ return nodes.flatMap(extractSymbolsFromNode);
3462
3490
  }
3463
3491
  function extractJSDocComments(ast) {
3464
3492
  const comments = [];
@@ -3596,27 +3624,34 @@ async function buildSnapshot(config) {
3596
3624
  buildTime
3597
3625
  });
3598
3626
  }
3599
- function levenshteinDistance(a, b) {
3627
+ function initLevenshteinMatrix(aLen, bLen) {
3600
3628
  const matrix = [];
3601
- for (let i = 0; i <= b.length; i++) {
3629
+ for (let i = 0; i <= bLen; i++) {
3602
3630
  matrix[i] = [i];
3603
3631
  }
3604
- for (let j = 0; j <= a.length; j++) {
3605
- const row = matrix[0];
3606
- if (row) {
3607
- row[j] = j;
3632
+ const firstRow = matrix[0];
3633
+ if (firstRow) {
3634
+ for (let j = 0; j <= aLen; j++) {
3635
+ firstRow[j] = j;
3608
3636
  }
3609
3637
  }
3638
+ return matrix;
3639
+ }
3640
+ function computeLevenshteinCell(row, prevRow, j, charsMatch) {
3641
+ if (charsMatch) {
3642
+ row[j] = prevRow[j - 1] ?? 0;
3643
+ } else {
3644
+ row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
3645
+ }
3646
+ }
3647
+ function levenshteinDistance(a, b) {
3648
+ const matrix = initLevenshteinMatrix(a.length, b.length);
3610
3649
  for (let i = 1; i <= b.length; i++) {
3611
3650
  for (let j = 1; j <= a.length; j++) {
3612
3651
  const row = matrix[i];
3613
3652
  const prevRow = matrix[i - 1];
3614
3653
  if (!row || !prevRow) continue;
3615
- if (b.charAt(i - 1) === a.charAt(j - 1)) {
3616
- row[j] = prevRow[j - 1] ?? 0;
3617
- } else {
3618
- row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
3619
- }
3654
+ computeLevenshteinCell(row, prevRow, j, b.charAt(i - 1) === a.charAt(j - 1));
3620
3655
  }
3621
3656
  }
3622
3657
  const lastRow = matrix[b.length];
@@ -3899,32 +3934,27 @@ function findDeadExports(snapshot, usageMap, reachability) {
3899
3934
  }
3900
3935
  return deadExports;
3901
3936
  }
3902
- function countLinesFromAST(ast) {
3903
- if (ast.body && Array.isArray(ast.body)) {
3904
- let maxLine = 0;
3905
- const traverse = (node) => {
3906
- if (node && typeof node === "object") {
3907
- const n = node;
3908
- if (n.loc?.end?.line && n.loc.end.line > maxLine) {
3909
- maxLine = n.loc.end.line;
3910
- }
3911
- for (const key of Object.keys(node)) {
3912
- const value = node[key];
3913
- if (Array.isArray(value)) {
3914
- for (const item of value) {
3915
- traverse(item);
3916
- }
3917
- } else if (value && typeof value === "object") {
3918
- traverse(value);
3919
- }
3920
- }
3937
+ function findMaxLineInNode(node) {
3938
+ if (!node || typeof node !== "object") return 0;
3939
+ const n = node;
3940
+ let maxLine = n.loc?.end?.line ?? 0;
3941
+ for (const key of Object.keys(node)) {
3942
+ const value = node[key];
3943
+ if (Array.isArray(value)) {
3944
+ for (const item of value) {
3945
+ maxLine = Math.max(maxLine, findMaxLineInNode(item));
3921
3946
  }
3922
- };
3923
- traverse(ast);
3924
- if (maxLine > 0) return maxLine;
3925
- return Math.max(ast.body.length * 3, 1);
3947
+ } else if (value && typeof value === "object") {
3948
+ maxLine = Math.max(maxLine, findMaxLineInNode(value));
3949
+ }
3926
3950
  }
3927
- return 1;
3951
+ return maxLine;
3952
+ }
3953
+ function countLinesFromAST(ast) {
3954
+ if (!ast.body || !Array.isArray(ast.body)) return 1;
3955
+ const maxLine = findMaxLineInNode(ast);
3956
+ if (maxLine > 0) return maxLine;
3957
+ return Math.max(ast.body.length * 3, 1);
3928
3958
  }
3929
3959
  function findDeadFiles(snapshot, reachability) {
3930
3960
  const deadFiles = [];
@@ -4072,130 +4102,146 @@ function fileMatchesPattern(filePath, pattern, rootDir) {
4072
4102
  const relativePath = relativePosix(rootDir, filePath);
4073
4103
  return minimatch3(relativePath, pattern);
4074
4104
  }
4075
- function checkConfigPattern(pattern, file, rootDir) {
4105
+ var CONVENTION_DESCRIPTIONS = {
4106
+ camelCase: "camelCase (e.g., myFunction)",
4107
+ PascalCase: "PascalCase (e.g., MyClass)",
4108
+ UPPER_SNAKE: "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)",
4109
+ "kebab-case": "kebab-case (e.g., my-component)"
4110
+ };
4111
+ function checkMustExport(rule, file, message) {
4112
+ if (rule.type !== "must-export") return [];
4076
4113
  const matches = [];
4077
- const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
4078
- if (!fileMatches) {
4079
- return matches;
4080
- }
4081
- const rule = pattern.rule;
4082
- switch (rule.type) {
4083
- case "must-export": {
4084
- for (const name of rule.names) {
4085
- const hasExport = file.exports.some((e) => e.name === name);
4086
- if (!hasExport) {
4087
- matches.push({
4088
- line: 1,
4089
- message: pattern.message || `Missing required export: "${name}"`,
4090
- suggestion: `Add export for "${name}"`
4091
- });
4092
- }
4093
- }
4094
- break;
4095
- }
4096
- case "must-export-default": {
4097
- const hasDefault = file.exports.some((e) => e.type === "default");
4098
- if (!hasDefault) {
4099
- matches.push({
4100
- line: 1,
4101
- message: pattern.message || "File must have a default export",
4102
- suggestion: "Add a default export"
4103
- });
4104
- }
4105
- break;
4106
- }
4107
- case "no-export": {
4108
- for (const name of rule.names) {
4109
- const exp = file.exports.find((e) => e.name === name);
4110
- if (exp) {
4111
- matches.push({
4112
- line: exp.location.line,
4113
- message: pattern.message || `Forbidden export: "${name}"`,
4114
- suggestion: `Remove export "${name}"`
4115
- });
4116
- }
4117
- }
4118
- break;
4114
+ for (const name of rule.names) {
4115
+ if (!file.exports.some((e) => e.name === name)) {
4116
+ matches.push({
4117
+ line: 1,
4118
+ message: message || `Missing required export: "${name}"`,
4119
+ suggestion: `Add export for "${name}"`
4120
+ });
4119
4121
  }
4120
- case "must-import": {
4121
- const hasImport = file.imports.some(
4122
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
4123
- );
4124
- if (!hasImport) {
4125
- matches.push({
4126
- line: 1,
4127
- message: pattern.message || `Missing required import from "${rule.from}"`,
4128
- suggestion: `Add import from "${rule.from}"`
4129
- });
4122
+ }
4123
+ return matches;
4124
+ }
4125
+ function checkMustExportDefault(_rule, file, message) {
4126
+ if (!file.exports.some((e) => e.type === "default")) {
4127
+ return [
4128
+ {
4129
+ line: 1,
4130
+ message: message || "File must have a default export",
4131
+ suggestion: "Add a default export"
4130
4132
  }
4131
- break;
4133
+ ];
4134
+ }
4135
+ return [];
4136
+ }
4137
+ function checkNoExport(rule, file, message) {
4138
+ if (rule.type !== "no-export") return [];
4139
+ const matches = [];
4140
+ for (const name of rule.names) {
4141
+ const exp = file.exports.find((e) => e.name === name);
4142
+ if (exp) {
4143
+ matches.push({
4144
+ line: exp.location.line,
4145
+ message: message || `Forbidden export: "${name}"`,
4146
+ suggestion: `Remove export "${name}"`
4147
+ });
4132
4148
  }
4133
- case "no-import": {
4134
- const forbiddenImport = file.imports.find(
4135
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
4136
- );
4137
- if (forbiddenImport) {
4138
- matches.push({
4139
- line: forbiddenImport.location.line,
4140
- message: pattern.message || `Forbidden import from "${rule.from}"`,
4141
- suggestion: `Remove import from "${rule.from}"`
4142
- });
4149
+ }
4150
+ return matches;
4151
+ }
4152
+ function checkMustImport(rule, file, message) {
4153
+ if (rule.type !== "must-import") return [];
4154
+ const hasImport = file.imports.some(
4155
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
4156
+ );
4157
+ if (!hasImport) {
4158
+ return [
4159
+ {
4160
+ line: 1,
4161
+ message: message || `Missing required import from "${rule.from}"`,
4162
+ suggestion: `Add import from "${rule.from}"`
4143
4163
  }
4144
- break;
4145
- }
4146
- case "naming": {
4147
- const regex = new RegExp(rule.match);
4148
- for (const exp of file.exports) {
4149
- if (!regex.test(exp.name)) {
4150
- let expected = "";
4151
- switch (rule.convention) {
4152
- case "camelCase":
4153
- expected = "camelCase (e.g., myFunction)";
4154
- break;
4155
- case "PascalCase":
4156
- expected = "PascalCase (e.g., MyClass)";
4157
- break;
4158
- case "UPPER_SNAKE":
4159
- expected = "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)";
4160
- break;
4161
- case "kebab-case":
4162
- expected = "kebab-case (e.g., my-component)";
4163
- break;
4164
- }
4165
- matches.push({
4166
- line: exp.location.line,
4167
- message: pattern.message || `"${exp.name}" does not follow ${rule.convention} convention`,
4168
- suggestion: `Rename to follow ${expected}`
4169
- });
4170
- }
4164
+ ];
4165
+ }
4166
+ return [];
4167
+ }
4168
+ function checkNoImport(rule, file, message) {
4169
+ if (rule.type !== "no-import") return [];
4170
+ const forbiddenImport = file.imports.find(
4171
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
4172
+ );
4173
+ if (forbiddenImport) {
4174
+ return [
4175
+ {
4176
+ line: forbiddenImport.location.line,
4177
+ message: message || `Forbidden import from "${rule.from}"`,
4178
+ suggestion: `Remove import from "${rule.from}"`
4171
4179
  }
4172
- break;
4180
+ ];
4181
+ }
4182
+ return [];
4183
+ }
4184
+ function checkNaming(rule, file, message) {
4185
+ if (rule.type !== "naming") return [];
4186
+ const regex = new RegExp(rule.match);
4187
+ const matches = [];
4188
+ for (const exp of file.exports) {
4189
+ if (!regex.test(exp.name)) {
4190
+ const expected = CONVENTION_DESCRIPTIONS[rule.convention] ?? rule.convention;
4191
+ matches.push({
4192
+ line: exp.location.line,
4193
+ message: message || `"${exp.name}" does not follow ${rule.convention} convention`,
4194
+ suggestion: `Rename to follow ${expected}`
4195
+ });
4173
4196
  }
4174
- case "max-exports": {
4175
- if (file.exports.length > rule.count) {
4176
- matches.push({
4177
- line: 1,
4178
- message: pattern.message || `File has ${file.exports.length} exports, max is ${rule.count}`,
4179
- suggestion: `Split into multiple files or reduce exports to ${rule.count}`
4180
- });
4197
+ }
4198
+ return matches;
4199
+ }
4200
+ function checkMaxExports(rule, file, message) {
4201
+ if (rule.type !== "max-exports") return [];
4202
+ if (file.exports.length > rule.count) {
4203
+ return [
4204
+ {
4205
+ line: 1,
4206
+ message: message || `File has ${file.exports.length} exports, max is ${rule.count}`,
4207
+ suggestion: `Split into multiple files or reduce exports to ${rule.count}`
4181
4208
  }
4182
- break;
4183
- }
4184
- case "max-lines": {
4185
- break;
4186
- }
4187
- case "require-jsdoc": {
4188
- if (file.jsDocComments.length === 0 && file.exports.length > 0) {
4189
- matches.push({
4190
- line: 1,
4191
- message: pattern.message || "Exported symbols require JSDoc documentation",
4192
- suggestion: "Add JSDoc comments to exports"
4193
- });
4209
+ ];
4210
+ }
4211
+ return [];
4212
+ }
4213
+ function checkMaxLines(_rule, _file, _message) {
4214
+ return [];
4215
+ }
4216
+ function checkRequireJsdoc(_rule, file, message) {
4217
+ if (file.jsDocComments.length === 0 && file.exports.length > 0) {
4218
+ return [
4219
+ {
4220
+ line: 1,
4221
+ message: message || "Exported symbols require JSDoc documentation",
4222
+ suggestion: "Add JSDoc comments to exports"
4194
4223
  }
4195
- break;
4196
- }
4224
+ ];
4197
4225
  }
4198
- return matches;
4226
+ return [];
4227
+ }
4228
+ var RULE_CHECKERS = {
4229
+ "must-export": checkMustExport,
4230
+ "must-export-default": checkMustExportDefault,
4231
+ "no-export": checkNoExport,
4232
+ "must-import": checkMustImport,
4233
+ "no-import": checkNoImport,
4234
+ naming: checkNaming,
4235
+ "max-exports": checkMaxExports,
4236
+ "max-lines": checkMaxLines,
4237
+ "require-jsdoc": checkRequireJsdoc
4238
+ };
4239
+ function checkConfigPattern(pattern, file, rootDir) {
4240
+ const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
4241
+ if (!fileMatches) return [];
4242
+ const checker = RULE_CHECKERS[pattern.rule.type];
4243
+ if (!checker) return [];
4244
+ return checker(pattern.rule, file, pattern.message);
4199
4245
  }
4200
4246
  async function detectPatternViolations(snapshot, config) {
4201
4247
  const violations = [];
@@ -4701,17 +4747,35 @@ function createUnusedImportFixes(deadCodeReport) {
4701
4747
  reversible: true
4702
4748
  }));
4703
4749
  }
4750
+ var EXPORT_TYPE_KEYWORD = {
4751
+ class: "class",
4752
+ function: "function",
4753
+ variable: "const",
4754
+ type: "type",
4755
+ interface: "interface",
4756
+ enum: "enum"
4757
+ };
4758
+ function getExportKeyword(exportType) {
4759
+ return EXPORT_TYPE_KEYWORD[exportType] ?? "enum";
4760
+ }
4761
+ function getDefaultExportKeyword(exportType) {
4762
+ if (exportType === "class" || exportType === "function") return exportType;
4763
+ return "";
4764
+ }
4704
4765
  function createDeadExportFixes(deadCodeReport) {
4705
- return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
4706
- type: "dead-exports",
4707
- file: exp.file,
4708
- description: `Remove export keyword from ${exp.name} (${exp.reason})`,
4709
- action: "replace",
4710
- oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
4711
- newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
4712
- safe: true,
4713
- reversible: true
4714
- }));
4766
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => {
4767
+ const keyword = exp.isDefault ? getDefaultExportKeyword(exp.type) : getExportKeyword(exp.type);
4768
+ return {
4769
+ type: "dead-exports",
4770
+ file: exp.file,
4771
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
4772
+ action: "replace",
4773
+ oldContent: exp.isDefault ? `export default ${keyword} ${exp.name}` : `export ${keyword} ${exp.name}`,
4774
+ newContent: `${keyword} ${exp.name}`,
4775
+ safe: true,
4776
+ reversible: true
4777
+ };
4778
+ });
4715
4779
  }
4716
4780
  function createCommentedCodeFixes(blocks) {
4717
4781
  return blocks.map((block) => ({
@@ -4886,53 +4950,80 @@ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
4886
4950
  "dead-internal"
4887
4951
  ]);
4888
4952
  var idCounter = 0;
4953
+ var DEAD_CODE_FIX_ACTIONS = {
4954
+ "dead-export": "Remove export keyword",
4955
+ "dead-file": "Delete file",
4956
+ "commented-code": "Delete commented block",
4957
+ "unused-import": "Remove import"
4958
+ };
4959
+ function classifyDeadCode(input) {
4960
+ if (input.isPublicApi) {
4961
+ return {
4962
+ safety: "unsafe",
4963
+ safetyReason: "Public API export may have external consumers",
4964
+ suggestion: "Deprecate before removing"
4965
+ };
4966
+ }
4967
+ const fixAction = DEAD_CODE_FIX_ACTIONS[input.type];
4968
+ if (fixAction) {
4969
+ return {
4970
+ safety: "safe",
4971
+ safetyReason: "zero importers, non-public",
4972
+ fixAction,
4973
+ suggestion: fixAction
4974
+ };
4975
+ }
4976
+ if (input.type === "orphaned-dep") {
4977
+ return {
4978
+ safety: "probably-safe",
4979
+ safetyReason: "No imports found, but needs install+test verification",
4980
+ fixAction: "Remove from package.json",
4981
+ suggestion: "Remove from package.json"
4982
+ };
4983
+ }
4984
+ return {
4985
+ safety: "unsafe",
4986
+ safetyReason: "Unknown dead code type",
4987
+ suggestion: "Manual review required"
4988
+ };
4989
+ }
4990
+ function classifyArchitecture(input) {
4991
+ if (input.type === "import-ordering") {
4992
+ return {
4993
+ safety: "safe",
4994
+ safetyReason: "Mechanical reorder, no semantic change",
4995
+ fixAction: "Reorder imports",
4996
+ suggestion: "Reorder imports"
4997
+ };
4998
+ }
4999
+ if (input.type === "forbidden-import" && input.hasAlternative) {
5000
+ return {
5001
+ safety: "probably-safe",
5002
+ safetyReason: "Alternative configured, needs typecheck+test",
5003
+ fixAction: "Replace with configured alternative",
5004
+ suggestion: "Replace with configured alternative"
5005
+ };
5006
+ }
5007
+ return {
5008
+ safety: "unsafe",
5009
+ safetyReason: `${input.type} requires structural changes`,
5010
+ suggestion: "Restructure code to fix violation"
5011
+ };
5012
+ }
4889
5013
  function classifyFinding(input) {
4890
5014
  idCounter++;
4891
5015
  const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
4892
- let safety;
4893
- let safetyReason;
4894
- let fixAction;
4895
- let suggestion;
5016
+ let classification;
4896
5017
  if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
4897
- safety = "unsafe";
4898
- safetyReason = `${input.type} requires human judgment`;
4899
- suggestion = "Review and refactor manually";
5018
+ classification = {
5019
+ safety: "unsafe",
5020
+ safetyReason: `${input.type} requires human judgment`,
5021
+ suggestion: "Review and refactor manually"
5022
+ };
4900
5023
  } else if (input.concern === "dead-code") {
4901
- if (input.isPublicApi) {
4902
- safety = "unsafe";
4903
- safetyReason = "Public API export may have external consumers";
4904
- suggestion = "Deprecate before removing";
4905
- } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
4906
- safety = "safe";
4907
- safetyReason = "zero importers, non-public";
4908
- fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
4909
- suggestion = fixAction;
4910
- } else if (input.type === "orphaned-dep") {
4911
- safety = "probably-safe";
4912
- safetyReason = "No imports found, but needs install+test verification";
4913
- fixAction = "Remove from package.json";
4914
- suggestion = fixAction;
4915
- } else {
4916
- safety = "unsafe";
4917
- safetyReason = "Unknown dead code type";
4918
- suggestion = "Manual review required";
4919
- }
5024
+ classification = classifyDeadCode(input);
4920
5025
  } else {
4921
- if (input.type === "import-ordering") {
4922
- safety = "safe";
4923
- safetyReason = "Mechanical reorder, no semantic change";
4924
- fixAction = "Reorder imports";
4925
- suggestion = fixAction;
4926
- } else if (input.type === "forbidden-import" && input.hasAlternative) {
4927
- safety = "probably-safe";
4928
- safetyReason = "Alternative configured, needs typecheck+test";
4929
- fixAction = "Replace with configured alternative";
4930
- suggestion = fixAction;
4931
- } else {
4932
- safety = "unsafe";
4933
- safetyReason = `${input.type} requires structural changes`;
4934
- suggestion = "Restructure code to fix violation";
4935
- }
5026
+ classification = classifyArchitecture(input);
4936
5027
  }
4937
5028
  return {
4938
5029
  id,
@@ -4941,11 +5032,11 @@ function classifyFinding(input) {
4941
5032
  ...input.line !== void 0 ? { line: input.line } : {},
4942
5033
  type: input.type,
4943
5034
  description: input.description,
4944
- safety,
4945
- safetyReason,
5035
+ safety: classification.safety,
5036
+ safetyReason: classification.safetyReason,
4946
5037
  hotspotDowngraded: false,
4947
- ...fixAction !== void 0 ? { fixAction } : {},
4948
- suggestion
5038
+ ...classification.fixAction !== void 0 ? { fixAction: classification.fixAction } : {},
5039
+ suggestion: classification.suggestion
4949
5040
  };
4950
5041
  }
4951
5042
  function applyHotspotDowngrade(finding, hotspot) {
@@ -5229,43 +5320,57 @@ var BenchmarkRunner = class {
5229
5320
  };
5230
5321
  }
5231
5322
  }
5323
+ /**
5324
+ * Extract a BenchmarkResult from a single assertion with benchmark data.
5325
+ */
5326
+ parseBenchAssertion(assertion, file) {
5327
+ if (!assertion.benchmark) return null;
5328
+ const bench = assertion.benchmark;
5329
+ return {
5330
+ name: assertion.fullName || assertion.title || "unknown",
5331
+ file: file.replace(process.cwd() + "/", ""),
5332
+ opsPerSec: Math.round(bench.hz || 0),
5333
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
5334
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
5335
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
5336
+ };
5337
+ }
5338
+ /**
5339
+ * Extract JSON from output that may contain non-JSON preamble.
5340
+ */
5341
+ extractJson(output) {
5342
+ const jsonStart = output.indexOf("{");
5343
+ const jsonEnd = output.lastIndexOf("}");
5344
+ if (jsonStart === -1 || jsonEnd === -1) return null;
5345
+ return JSON.parse(output.slice(jsonStart, jsonEnd + 1));
5346
+ }
5232
5347
  /**
5233
5348
  * Parse vitest bench JSON reporter output into BenchmarkResult[].
5234
5349
  * Vitest bench JSON output contains testResults with benchmark data.
5235
5350
  */
5236
- parseVitestBenchOutput(output) {
5351
+ collectAssertionResults(testResults) {
5237
5352
  const results = [];
5238
- try {
5239
- const jsonStart = output.indexOf("{");
5240
- const jsonEnd = output.lastIndexOf("}");
5241
- if (jsonStart === -1 || jsonEnd === -1) return results;
5242
- const jsonStr = output.slice(jsonStart, jsonEnd + 1);
5243
- const parsed = JSON.parse(jsonStr);
5244
- if (parsed.testResults) {
5245
- for (const testResult of parsed.testResults) {
5246
- const file = testResult.name || testResult.filepath || "";
5247
- if (testResult.assertionResults) {
5248
- for (const assertion of testResult.assertionResults) {
5249
- if (assertion.benchmark) {
5250
- const bench = assertion.benchmark;
5251
- results.push({
5252
- name: assertion.fullName || assertion.title || "unknown",
5253
- file: file.replace(process.cwd() + "/", ""),
5254
- opsPerSec: Math.round(bench.hz || 0),
5255
- meanMs: bench.mean ? bench.mean * 1e3 : 0,
5256
- // p99: use actual p99 if available, otherwise estimate as 1.5× mean
5257
- p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
5258
- marginOfError: bench.rme ? bench.rme / 100 : 0.05
5259
- });
5260
- }
5261
- }
5262
- }
5263
- }
5353
+ for (const testResult of testResults) {
5354
+ const file = testResult.name || testResult.filepath || "";
5355
+ const assertions = testResult.assertionResults ?? [];
5356
+ for (const assertion of assertions) {
5357
+ const result = this.parseBenchAssertion(assertion, file);
5358
+ if (result) results.push(result);
5264
5359
  }
5265
- } catch {
5266
5360
  }
5267
5361
  return results;
5268
5362
  }
5363
+ parseVitestBenchOutput(output) {
5364
+ try {
5365
+ const parsed = this.extractJson(output);
5366
+ if (!parsed) return [];
5367
+ const testResults = parsed.testResults;
5368
+ if (!testResults) return [];
5369
+ return this.collectAssertionResults(testResults);
5370
+ } catch {
5371
+ return [];
5372
+ }
5373
+ }
5269
5374
  };
5270
5375
  var RegressionDetector = class {
5271
5376
  detect(results, baselines, criticalPaths) {
@@ -5557,39 +5662,31 @@ function getFeedbackConfig() {
5557
5662
  function resetFeedbackConfig() {
5558
5663
  feedbackConfig = null;
5559
5664
  }
5665
+ function detectFileStatus(part) {
5666
+ if (/new file mode/.test(part)) return "added";
5667
+ if (/deleted file mode/.test(part)) return "deleted";
5668
+ if (part.includes("rename from")) return "renamed";
5669
+ return "modified";
5670
+ }
5671
+ function parseDiffPart(part) {
5672
+ if (!part.trim()) return null;
5673
+ const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
5674
+ if (!headerMatch || !headerMatch[2]) return null;
5675
+ const additionRegex = /^\+(?!\+\+)/gm;
5676
+ const deletionRegex = /^-(?!--)/gm;
5677
+ return {
5678
+ path: headerMatch[2],
5679
+ status: detectFileStatus(part),
5680
+ additions: (part.match(additionRegex) || []).length,
5681
+ deletions: (part.match(deletionRegex) || []).length
5682
+ };
5683
+ }
5560
5684
  function parseDiff(diff2) {
5561
5685
  try {
5562
5686
  if (!diff2.trim()) {
5563
5687
  return Ok({ diff: diff2, files: [] });
5564
5688
  }
5565
- const files = [];
5566
- const newFileRegex = /new file mode/;
5567
- const deletedFileRegex = /deleted file mode/;
5568
- const additionRegex = /^\+(?!\+\+)/gm;
5569
- const deletionRegex = /^-(?!--)/gm;
5570
- const diffParts = diff2.split(/(?=diff --git)/);
5571
- for (const part of diffParts) {
5572
- if (!part.trim()) continue;
5573
- const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
5574
- if (!headerMatch || !headerMatch[2]) continue;
5575
- const filePath = headerMatch[2];
5576
- let status = "modified";
5577
- if (newFileRegex.test(part)) {
5578
- status = "added";
5579
- } else if (deletedFileRegex.test(part)) {
5580
- status = "deleted";
5581
- } else if (part.includes("rename from")) {
5582
- status = "renamed";
5583
- }
5584
- const additions = (part.match(additionRegex) || []).length;
5585
- const deletions = (part.match(deletionRegex) || []).length;
5586
- files.push({
5587
- path: filePath,
5588
- status,
5589
- additions,
5590
- deletions
5591
- });
5592
- }
5689
+ const files = diff2.split(/(?=diff --git)/).map(parseDiffPart).filter((f) => f !== null);
5593
5690
  return Ok({ diff: diff2, files });
5594
5691
  } catch (error) {
5595
5692
  return Err({
@@ -5753,107 +5850,123 @@ var ChecklistBuilder = class {
5753
5850
  this.graphImpactData = graphImpactData;
5754
5851
  return this;
5755
5852
  }
5756
- async run(changes) {
5757
- const startTime = Date.now();
5853
+ /**
5854
+ * Build a single harness check item with or without graph data.
5855
+ */
5856
+ buildHarnessCheckItem(id, check, fallbackDetails, graphItemBuilder) {
5857
+ if (this.graphHarnessData && graphItemBuilder) {
5858
+ return graphItemBuilder();
5859
+ }
5860
+ return {
5861
+ id,
5862
+ category: "harness",
5863
+ check,
5864
+ passed: true,
5865
+ severity: "info",
5866
+ details: fallbackDetails
5867
+ };
5868
+ }
5869
+ /**
5870
+ * Build all harness check items based on harnessOptions and graph data.
5871
+ */
5872
+ buildHarnessItems() {
5873
+ if (!this.harnessOptions) return [];
5758
5874
  const items = [];
5759
- if (this.harnessOptions) {
5760
- if (this.harnessOptions.context !== false) {
5761
- if (this.graphHarnessData) {
5762
- items.push({
5763
- id: "harness-context",
5764
- category: "harness",
5765
- check: "Context validation",
5766
- passed: this.graphHarnessData.graphExists && this.graphHarnessData.nodeCount > 0,
5767
- severity: "info",
5768
- details: this.graphHarnessData.graphExists ? `Graph loaded: ${this.graphHarnessData.nodeCount} nodes, ${this.graphHarnessData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5769
- });
5770
- } else {
5771
- items.push({
5875
+ const graphData = this.graphHarnessData;
5876
+ if (this.harnessOptions.context !== false) {
5877
+ items.push(
5878
+ this.buildHarnessCheckItem(
5879
+ "harness-context",
5880
+ "Context validation",
5881
+ "Harness context validation not yet integrated (run with graph for real checks)",
5882
+ graphData ? () => ({
5772
5883
  id: "harness-context",
5773
5884
  category: "harness",
5774
5885
  check: "Context validation",
5775
- passed: true,
5886
+ passed: graphData.graphExists && graphData.nodeCount > 0,
5776
5887
  severity: "info",
5777
- details: "Harness context validation not yet integrated (run with graph for real checks)"
5778
- });
5779
- }
5780
- }
5781
- if (this.harnessOptions.constraints !== false) {
5782
- if (this.graphHarnessData) {
5783
- const violations = this.graphHarnessData.constraintViolations;
5784
- items.push({
5785
- id: "harness-constraints",
5786
- category: "harness",
5787
- check: "Constraint validation",
5788
- passed: violations === 0,
5789
- severity: violations > 0 ? "error" : "info",
5790
- details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5791
- });
5792
- } else {
5793
- items.push({
5794
- id: "harness-constraints",
5795
- category: "harness",
5796
- check: "Constraint validation",
5797
- passed: true,
5798
- severity: "info",
5799
- details: "Harness constraint validation not yet integrated (run with graph for real checks)"
5800
- });
5801
- }
5802
- }
5803
- if (this.harnessOptions.entropy !== false) {
5804
- if (this.graphHarnessData) {
5805
- const issues = this.graphHarnessData.unreachableNodes + this.graphHarnessData.undocumentedFiles;
5806
- items.push({
5807
- id: "harness-entropy",
5808
- category: "harness",
5809
- check: "Entropy detection",
5810
- passed: issues === 0,
5811
- severity: issues > 0 ? "warning" : "info",
5812
- details: issues === 0 ? "No entropy issues detected" : `${this.graphHarnessData.unreachableNodes} unreachable node(s), ${this.graphHarnessData.undocumentedFiles} undocumented file(s)`
5813
- });
5814
- } else {
5815
- items.push({
5816
- id: "harness-entropy",
5817
- category: "harness",
5818
- check: "Entropy detection",
5819
- passed: true,
5820
- severity: "info",
5821
- details: "Harness entropy detection not yet integrated (run with graph for real checks)"
5822
- });
5823
- }
5824
- }
5888
+ details: graphData.graphExists ? `Graph loaded: ${graphData.nodeCount} nodes, ${graphData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5889
+ }) : void 0
5890
+ )
5891
+ );
5892
+ }
5893
+ if (this.harnessOptions.constraints !== false) {
5894
+ items.push(
5895
+ this.buildHarnessCheckItem(
5896
+ "harness-constraints",
5897
+ "Constraint validation",
5898
+ "Harness constraint validation not yet integrated (run with graph for real checks)",
5899
+ graphData ? () => {
5900
+ const violations = graphData.constraintViolations;
5901
+ return {
5902
+ id: "harness-constraints",
5903
+ category: "harness",
5904
+ check: "Constraint validation",
5905
+ passed: violations === 0,
5906
+ severity: violations > 0 ? "error" : "info",
5907
+ details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5908
+ };
5909
+ } : void 0
5910
+ )
5911
+ );
5912
+ }
5913
+ if (this.harnessOptions.entropy !== false) {
5914
+ items.push(
5915
+ this.buildHarnessCheckItem(
5916
+ "harness-entropy",
5917
+ "Entropy detection",
5918
+ "Harness entropy detection not yet integrated (run with graph for real checks)",
5919
+ graphData ? () => {
5920
+ const issues = graphData.unreachableNodes + graphData.undocumentedFiles;
5921
+ return {
5922
+ id: "harness-entropy",
5923
+ category: "harness",
5924
+ check: "Entropy detection",
5925
+ passed: issues === 0,
5926
+ severity: issues > 0 ? "warning" : "info",
5927
+ details: issues === 0 ? "No entropy issues detected" : `${graphData.unreachableNodes} unreachable node(s), ${graphData.undocumentedFiles} undocumented file(s)`
5928
+ };
5929
+ } : void 0
5930
+ )
5931
+ );
5932
+ }
5933
+ return items;
5934
+ }
5935
+ /**
5936
+ * Execute a single custom rule and return a ReviewItem.
5937
+ */
5938
+ async executeCustomRule(rule, changes) {
5939
+ try {
5940
+ const result = await rule.check(changes, this.rootDir);
5941
+ const item = {
5942
+ id: rule.id,
5943
+ category: "custom",
5944
+ check: rule.name,
5945
+ passed: result.passed,
5946
+ severity: rule.severity,
5947
+ details: result.details
5948
+ };
5949
+ if (result.suggestion !== void 0) item.suggestion = result.suggestion;
5950
+ if (result.file !== void 0) item.file = result.file;
5951
+ if (result.line !== void 0) item.line = result.line;
5952
+ return item;
5953
+ } catch (error) {
5954
+ return {
5955
+ id: rule.id,
5956
+ category: "custom",
5957
+ check: rule.name,
5958
+ passed: false,
5959
+ severity: "error",
5960
+ details: `Rule execution failed: ${String(error)}`
5961
+ };
5825
5962
  }
5963
+ }
5964
+ async run(changes) {
5965
+ const startTime = Date.now();
5966
+ const items = [];
5967
+ items.push(...this.buildHarnessItems());
5826
5968
  for (const rule of this.customRules) {
5827
- try {
5828
- const result = await rule.check(changes, this.rootDir);
5829
- const item = {
5830
- id: rule.id,
5831
- category: "custom",
5832
- check: rule.name,
5833
- passed: result.passed,
5834
- severity: rule.severity,
5835
- details: result.details
5836
- };
5837
- if (result.suggestion !== void 0) {
5838
- item.suggestion = result.suggestion;
5839
- }
5840
- if (result.file !== void 0) {
5841
- item.file = result.file;
5842
- }
5843
- if (result.line !== void 0) {
5844
- item.line = result.line;
5845
- }
5846
- items.push(item);
5847
- } catch (error) {
5848
- items.push({
5849
- id: rule.id,
5850
- category: "custom",
5851
- check: rule.name,
5852
- passed: false,
5853
- severity: "error",
5854
- details: `Rule execution failed: ${String(error)}`
5855
- });
5856
- }
5969
+ items.push(await this.executeCustomRule(rule, changes));
5857
5970
  }
5858
5971
  if (this.diffOptions) {
5859
5972
  const diffResult = await analyzeDiff(changes, this.diffOptions, this.graphImpactData);
@@ -5868,7 +5981,6 @@ var ChecklistBuilder = class {
5868
5981
  const checklist = {
5869
5982
  items,
5870
5983
  passed: failed === 0,
5871
- // Pass if no failed items
5872
5984
  summary: {
5873
5985
  total: items.length,
5874
5986
  passed,
@@ -6252,7 +6364,7 @@ function detectStaleConstraints(store, windowDays = 30, category) {
6252
6364
  staleConstraints.sort((a, b) => b.daysSinceLastViolation - a.daysSinceLastViolation);
6253
6365
  return { staleConstraints, totalConstraints, windowDays };
6254
6366
  }
6255
- function resolveThresholds(scope, config) {
6367
+ function resolveThresholds2(scope, config) {
6256
6368
  const projectThresholds = {};
6257
6369
  for (const [key, val] of Object.entries(config.thresholds)) {
6258
6370
  projectThresholds[key] = typeof val === "object" && val !== null && !Array.isArray(val) ? { ...val } : val;
@@ -6382,6 +6494,10 @@ var INDEX_FILE = "index.json";
6382
6494
  var SESSIONS_DIR = "sessions";
6383
6495
  var SESSION_INDEX_FILE = "index.md";
6384
6496
  var SUMMARY_FILE = "summary.md";
6497
+ var SESSION_STATE_FILE = "session-state.json";
6498
+ var ARCHIVE_DIR = "archive";
6499
+ var CONTENT_HASHES_FILE = "content-hashes.json";
6500
+ var EVENTS_FILE = "events.jsonl";
6385
6501
  var STREAMS_DIR = "streams";
6386
6502
  var STREAM_NAME_REGEX = /^[a-z0-9][a-z0-9._-]*$/;
6387
6503
  function streamsDir(projectPath) {
@@ -6710,6 +6826,84 @@ async function saveState(projectPath, state, stream, session) {
6710
6826
  );
6711
6827
  }
6712
6828
  }
6829
+ function parseFrontmatter(line) {
6830
+ const match = line.match(/^<!--\s+hash:([a-f0-9]+)(?:\s+tags:([^\s]+))?\s+-->/);
6831
+ if (!match) return null;
6832
+ const hash = match[1];
6833
+ const tags = match[2] ? match[2].split(",").filter(Boolean) : [];
6834
+ return { hash, tags };
6835
+ }
6836
+ function computeEntryHash(text) {
6837
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 8);
6838
+ }
6839
+ function normalizeLearningContent(text) {
6840
+ let normalized = text;
6841
+ normalized = normalized.replace(/\d{4}-\d{2}-\d{2}/g, "");
6842
+ normalized = normalized.replace(/\[skill:[^\]]*\]/g, "");
6843
+ normalized = normalized.replace(/\[outcome:[^\]]*\]/g, "");
6844
+ normalized = normalized.replace(/^[\s]*[-*]\s+/gm, "");
6845
+ normalized = normalized.replace(/\*\*/g, "");
6846
+ normalized = normalized.replace(/:\s*/g, " ");
6847
+ normalized = normalized.toLowerCase();
6848
+ normalized = normalized.replace(/\s+/g, " ").trim();
6849
+ return normalized;
6850
+ }
6851
+ function computeContentHash(text) {
6852
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 16);
6853
+ }
6854
+ function loadContentHashes(stateDir) {
6855
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
6856
+ if (!fs9.existsSync(hashesPath)) return {};
6857
+ try {
6858
+ const raw = fs9.readFileSync(hashesPath, "utf-8");
6859
+ const parsed = JSON.parse(raw);
6860
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return {};
6861
+ return parsed;
6862
+ } catch {
6863
+ return {};
6864
+ }
6865
+ }
6866
+ function saveContentHashes(stateDir, index) {
6867
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
6868
+ fs9.writeFileSync(hashesPath, JSON.stringify(index, null, 2) + "\n");
6869
+ }
6870
+ function rebuildContentHashes(stateDir) {
6871
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
6872
+ if (!fs9.existsSync(learningsPath)) return {};
6873
+ const content = fs9.readFileSync(learningsPath, "utf-8");
6874
+ const lines = content.split("\n");
6875
+ const index = {};
6876
+ for (let i = 0; i < lines.length; i++) {
6877
+ const line = lines[i];
6878
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
6879
+ if (isDatedBullet) {
6880
+ const learningMatch = line.match(/:\*\*\s*(.+)$/);
6881
+ if (learningMatch?.[1]) {
6882
+ const normalized = normalizeLearningContent(learningMatch[1]);
6883
+ const hash = computeContentHash(normalized);
6884
+ const dateMatch = line.match(/(\d{4}-\d{2}-\d{2})/);
6885
+ index[hash] = { date: dateMatch?.[1] ?? "", line: i + 1 };
6886
+ }
6887
+ }
6888
+ }
6889
+ saveContentHashes(stateDir, index);
6890
+ return index;
6891
+ }
6892
+ function extractIndexEntry(entry) {
6893
+ const lines = entry.split("\n");
6894
+ const summary = lines[0] ?? entry;
6895
+ const tags = [];
6896
+ const skillMatch = entry.match(/\[skill:([^\]]+)\]/);
6897
+ if (skillMatch?.[1]) tags.push(skillMatch[1]);
6898
+ const outcomeMatch = entry.match(/\[outcome:([^\]]+)\]/);
6899
+ if (outcomeMatch?.[1]) tags.push(outcomeMatch[1]);
6900
+ return {
6901
+ hash: computeEntryHash(entry),
6902
+ tags,
6903
+ summary,
6904
+ fullText: entry
6905
+ };
6906
+ }
6713
6907
  var learningsCacheMap = /* @__PURE__ */ new Map();
6714
6908
  function clearLearningsCache() {
6715
6909
  learningsCacheMap.clear();
@@ -6721,27 +6915,55 @@ async function appendLearning(projectPath, learning, skillName, outcome, stream,
6721
6915
  const stateDir = dirResult.value;
6722
6916
  const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
6723
6917
  fs9.mkdirSync(stateDir, { recursive: true });
6918
+ const normalizedContent = normalizeLearningContent(learning);
6919
+ const contentHash = computeContentHash(normalizedContent);
6920
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
6921
+ let contentHashes;
6922
+ if (fs9.existsSync(hashesPath)) {
6923
+ contentHashes = loadContentHashes(stateDir);
6924
+ if (Object.keys(contentHashes).length === 0 && fs9.existsSync(learningsPath)) {
6925
+ contentHashes = rebuildContentHashes(stateDir);
6926
+ }
6927
+ } else if (fs9.existsSync(learningsPath)) {
6928
+ contentHashes = rebuildContentHashes(stateDir);
6929
+ } else {
6930
+ contentHashes = {};
6931
+ }
6932
+ if (contentHashes[contentHash]) {
6933
+ return Ok(void 0);
6934
+ }
6724
6935
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6725
- let entry;
6936
+ const fmTags = [];
6937
+ if (skillName) fmTags.push(skillName);
6938
+ if (outcome) fmTags.push(outcome);
6939
+ let bulletLine;
6726
6940
  if (skillName && outcome) {
6727
- entry = `
6728
- - **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}
6729
- `;
6941
+ bulletLine = `- **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}`;
6730
6942
  } else if (skillName) {
6731
- entry = `
6732
- - **${timestamp} [skill:${skillName}]:** ${learning}
6733
- `;
6943
+ bulletLine = `- **${timestamp} [skill:${skillName}]:** ${learning}`;
6734
6944
  } else {
6735
- entry = `
6736
- - **${timestamp}:** ${learning}
6737
- `;
6945
+ bulletLine = `- **${timestamp}:** ${learning}`;
6738
6946
  }
6947
+ const hash = crypto.createHash("sha256").update(bulletLine).digest("hex").slice(0, 8);
6948
+ const tagsStr = fmTags.length > 0 ? ` tags:${fmTags.join(",")}` : "";
6949
+ const frontmatter = `<!-- hash:${hash}${tagsStr} -->`;
6950
+ const entry = `
6951
+ ${frontmatter}
6952
+ ${bulletLine}
6953
+ `;
6954
+ let existingLineCount;
6739
6955
  if (!fs9.existsSync(learningsPath)) {
6740
6956
  fs9.writeFileSync(learningsPath, `# Learnings
6741
6957
  ${entry}`);
6958
+ existingLineCount = 1;
6742
6959
  } else {
6960
+ const existingContent = fs9.readFileSync(learningsPath, "utf-8");
6961
+ existingLineCount = existingContent.split("\n").length;
6743
6962
  fs9.appendFileSync(learningsPath, entry);
6744
6963
  }
6964
+ const bulletLine_lineNum = existingLineCount + 2;
6965
+ contentHashes[contentHash] = { date: timestamp ?? "", line: bulletLine_lineNum };
6966
+ saveContentHashes(stateDir, contentHashes);
6745
6967
  learningsCacheMap.delete(learningsPath);
6746
6968
  return Ok(void 0);
6747
6969
  } catch (error) {
@@ -6789,7 +7011,30 @@ function analyzeLearningPatterns(entries) {
6789
7011
  return patterns.sort((a, b) => b.count - a.count);
6790
7012
  }
6791
7013
  async function loadBudgetedLearnings(projectPath, options) {
6792
- const { intent, tokenBudget = 1e3, skill, session, stream } = options;
7014
+ const { intent, tokenBudget = 1e3, skill, session, stream, depth = "summary" } = options;
7015
+ if (depth === "index") {
7016
+ const indexEntries = [];
7017
+ if (session) {
7018
+ const sessionResult = await loadIndexEntries(projectPath, skill, stream, session);
7019
+ if (sessionResult.ok) indexEntries.push(...sessionResult.value);
7020
+ }
7021
+ const globalResult2 = await loadIndexEntries(projectPath, skill, stream);
7022
+ if (globalResult2.ok) {
7023
+ const sessionHashes = new Set(indexEntries.map((e) => e.hash));
7024
+ const uniqueGlobal = globalResult2.value.filter((e) => !sessionHashes.has(e.hash));
7025
+ indexEntries.push(...uniqueGlobal);
7026
+ }
7027
+ const budgeted2 = [];
7028
+ let totalTokens2 = 0;
7029
+ for (const entry of indexEntries) {
7030
+ const separator = budgeted2.length > 0 ? "\n" : "";
7031
+ const entryCost = estimateTokens(entry.summary + separator);
7032
+ if (totalTokens2 + entryCost > tokenBudget) break;
7033
+ budgeted2.push(entry.summary);
7034
+ totalTokens2 += entryCost;
7035
+ }
7036
+ return Ok(budgeted2);
7037
+ }
6793
7038
  const sortByRecencyAndRelevance = (entries) => {
6794
7039
  return [...entries].sort((a, b) => {
6795
7040
  const dateA = parseDateFromEntry(a) ?? "0000-00-00";
@@ -6808,7 +7053,9 @@ async function loadBudgetedLearnings(projectPath, options) {
6808
7053
  }
6809
7054
  const globalResult = await loadRelevantLearnings(projectPath, skill, stream);
6810
7055
  if (globalResult.ok) {
6811
- allEntries.push(...sortByRecencyAndRelevance(globalResult.value));
7056
+ const sessionSet = new Set(allEntries.map((e) => e.trim()));
7057
+ const uniqueGlobal = globalResult.value.filter((e) => !sessionSet.has(e.trim()));
7058
+ allEntries.push(...sortByRecencyAndRelevance(uniqueGlobal));
6812
7059
  }
6813
7060
  const budgeted = [];
6814
7061
  let totalTokens = 0;
@@ -6821,6 +7068,68 @@ async function loadBudgetedLearnings(projectPath, options) {
6821
7068
  }
6822
7069
  return Ok(budgeted);
6823
7070
  }
7071
+ async function loadIndexEntries(projectPath, skillName, stream, session) {
7072
+ try {
7073
+ const dirResult = await getStateDir(projectPath, stream, session);
7074
+ if (!dirResult.ok) return dirResult;
7075
+ const stateDir = dirResult.value;
7076
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
7077
+ if (!fs9.existsSync(learningsPath)) {
7078
+ return Ok([]);
7079
+ }
7080
+ const content = fs9.readFileSync(learningsPath, "utf-8");
7081
+ const lines = content.split("\n");
7082
+ const indexEntries = [];
7083
+ let pendingFrontmatter = null;
7084
+ let currentBlock = [];
7085
+ for (const line of lines) {
7086
+ if (line.startsWith("# ")) continue;
7087
+ const fm = parseFrontmatter(line);
7088
+ if (fm) {
7089
+ pendingFrontmatter = fm;
7090
+ continue;
7091
+ }
7092
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
7093
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
7094
+ if (isDatedBullet || isHeading) {
7095
+ if (pendingFrontmatter) {
7096
+ indexEntries.push({
7097
+ hash: pendingFrontmatter.hash,
7098
+ tags: pendingFrontmatter.tags,
7099
+ summary: line,
7100
+ fullText: ""
7101
+ // Placeholder — full text not loaded in index mode
7102
+ });
7103
+ pendingFrontmatter = null;
7104
+ } else {
7105
+ const idx = extractIndexEntry(line);
7106
+ indexEntries.push({
7107
+ hash: idx.hash,
7108
+ tags: idx.tags,
7109
+ summary: line,
7110
+ fullText: ""
7111
+ });
7112
+ }
7113
+ currentBlock = [line];
7114
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
7115
+ currentBlock.push(line);
7116
+ }
7117
+ }
7118
+ if (skillName) {
7119
+ const filtered = indexEntries.filter(
7120
+ (e) => e.tags.includes(skillName) || e.summary.includes(`[skill:${skillName}]`)
7121
+ );
7122
+ return Ok(filtered);
7123
+ }
7124
+ return Ok(indexEntries);
7125
+ } catch (error) {
7126
+ return Err(
7127
+ new Error(
7128
+ `Failed to load index entries: ${error instanceof Error ? error.message : String(error)}`
7129
+ )
7130
+ );
7131
+ }
7132
+ }
6824
7133
  async function loadRelevantLearnings(projectPath, skillName, stream, session) {
6825
7134
  try {
6826
7135
  const dirResult = await getStateDir(projectPath, stream, session);
@@ -6843,6 +7152,7 @@ async function loadRelevantLearnings(projectPath, skillName, stream, session) {
6843
7152
  let currentBlock = [];
6844
7153
  for (const line of lines) {
6845
7154
  if (line.startsWith("# ")) continue;
7155
+ if (/^<!--\s+hash:[a-f0-9]+/.test(line)) continue;
6846
7156
  const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
6847
7157
  const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
6848
7158
  if (isDatedBullet || isHeading) {
@@ -6952,6 +7262,68 @@ async function pruneLearnings(projectPath, stream) {
6952
7262
  );
6953
7263
  }
6954
7264
  }
7265
+ var PROMOTABLE_OUTCOMES = ["gotcha", "decision", "observation"];
7266
+ function isGeneralizable(entry) {
7267
+ for (const outcome of PROMOTABLE_OUTCOMES) {
7268
+ if (entry.includes(`[outcome:${outcome}]`)) return true;
7269
+ }
7270
+ return false;
7271
+ }
7272
+ async function promoteSessionLearnings(projectPath, sessionSlug, stream) {
7273
+ try {
7274
+ const sessionResult = await loadRelevantLearnings(projectPath, void 0, stream, sessionSlug);
7275
+ if (!sessionResult.ok) return sessionResult;
7276
+ const sessionEntries = sessionResult.value;
7277
+ if (sessionEntries.length === 0) {
7278
+ return Ok({ promoted: 0, skipped: 0 });
7279
+ }
7280
+ const toPromote = [];
7281
+ let skipped = 0;
7282
+ for (const entry of sessionEntries) {
7283
+ if (isGeneralizable(entry)) {
7284
+ toPromote.push(entry);
7285
+ } else {
7286
+ skipped++;
7287
+ }
7288
+ }
7289
+ if (toPromote.length === 0) {
7290
+ return Ok({ promoted: 0, skipped });
7291
+ }
7292
+ const dirResult = await getStateDir(projectPath, stream);
7293
+ if (!dirResult.ok) return dirResult;
7294
+ const stateDir = dirResult.value;
7295
+ const globalPath = path6.join(stateDir, LEARNINGS_FILE);
7296
+ const existingGlobal = fs9.existsSync(globalPath) ? fs9.readFileSync(globalPath, "utf-8") : "";
7297
+ const newEntries = toPromote.filter((entry) => !existingGlobal.includes(entry.trim()));
7298
+ if (newEntries.length === 0) {
7299
+ return Ok({ promoted: 0, skipped: skipped + toPromote.length });
7300
+ }
7301
+ const promotedContent = newEntries.join("\n\n") + "\n";
7302
+ if (!existingGlobal) {
7303
+ fs9.writeFileSync(globalPath, `# Learnings
7304
+
7305
+ ${promotedContent}`);
7306
+ } else {
7307
+ fs9.appendFileSync(globalPath, "\n\n" + promotedContent);
7308
+ }
7309
+ learningsCacheMap.delete(globalPath);
7310
+ return Ok({
7311
+ promoted: newEntries.length,
7312
+ skipped: skipped + (toPromote.length - newEntries.length)
7313
+ });
7314
+ } catch (error) {
7315
+ return Err(
7316
+ new Error(
7317
+ `Failed to promote session learnings: ${error instanceof Error ? error.message : String(error)}`
7318
+ )
7319
+ );
7320
+ }
7321
+ }
7322
+ async function countLearningEntries(projectPath, stream) {
7323
+ const loadResult = await loadRelevantLearnings(projectPath, void 0, stream);
7324
+ if (!loadResult.ok) return 0;
7325
+ return loadResult.value.length;
7326
+ }
6955
7327
  var failuresCacheMap = /* @__PURE__ */ new Map();
6956
7328
  function clearFailuresCache() {
6957
7329
  failuresCacheMap.clear();
@@ -7258,6 +7630,274 @@ function listActiveSessions(projectPath) {
7258
7630
  );
7259
7631
  }
7260
7632
  }
7633
+ function emptySections() {
7634
+ const sections = {};
7635
+ for (const name of SESSION_SECTION_NAMES) {
7636
+ sections[name] = [];
7637
+ }
7638
+ return sections;
7639
+ }
7640
+ async function loadSessionState(projectPath, sessionSlug) {
7641
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
7642
+ if (!dirResult.ok) return dirResult;
7643
+ const sessionDir = dirResult.value;
7644
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
7645
+ if (!fs14.existsSync(filePath)) {
7646
+ return Ok(emptySections());
7647
+ }
7648
+ try {
7649
+ const raw = fs14.readFileSync(filePath, "utf-8");
7650
+ const parsed = JSON.parse(raw);
7651
+ const sections = emptySections();
7652
+ for (const name of SESSION_SECTION_NAMES) {
7653
+ if (Array.isArray(parsed[name])) {
7654
+ sections[name] = parsed[name];
7655
+ }
7656
+ }
7657
+ return Ok(sections);
7658
+ } catch (error) {
7659
+ return Err(
7660
+ new Error(
7661
+ `Failed to load session state: ${error instanceof Error ? error.message : String(error)}`
7662
+ )
7663
+ );
7664
+ }
7665
+ }
7666
+ async function saveSessionState(projectPath, sessionSlug, sections) {
7667
+ const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
7668
+ if (!dirResult.ok) return dirResult;
7669
+ const sessionDir = dirResult.value;
7670
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
7671
+ try {
7672
+ fs14.writeFileSync(filePath, JSON.stringify(sections, null, 2));
7673
+ return Ok(void 0);
7674
+ } catch (error) {
7675
+ return Err(
7676
+ new Error(
7677
+ `Failed to save session state: ${error instanceof Error ? error.message : String(error)}`
7678
+ )
7679
+ );
7680
+ }
7681
+ }
7682
+ async function readSessionSections(projectPath, sessionSlug) {
7683
+ return loadSessionState(projectPath, sessionSlug);
7684
+ }
7685
+ async function readSessionSection(projectPath, sessionSlug, section) {
7686
+ const result = await loadSessionState(projectPath, sessionSlug);
7687
+ if (!result.ok) return result;
7688
+ return Ok(result.value[section]);
7689
+ }
7690
+ async function appendSessionEntry(projectPath, sessionSlug, section, authorSkill, content) {
7691
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
7692
+ if (!loadResult.ok) return loadResult;
7693
+ const sections = loadResult.value;
7694
+ const entry = {
7695
+ id: generateEntryId(),
7696
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7697
+ authorSkill,
7698
+ content,
7699
+ status: "active"
7700
+ };
7701
+ sections[section].push(entry);
7702
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
7703
+ if (!saveResult.ok) return saveResult;
7704
+ return Ok(entry);
7705
+ }
7706
+ async function updateSessionEntryStatus(projectPath, sessionSlug, section, entryId, newStatus) {
7707
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
7708
+ if (!loadResult.ok) return loadResult;
7709
+ const sections = loadResult.value;
7710
+ const entry = sections[section].find((e) => e.id === entryId);
7711
+ if (!entry) {
7712
+ return Err(new Error(`Entry '${entryId}' not found in section '${section}'`));
7713
+ }
7714
+ entry.status = newStatus;
7715
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
7716
+ if (!saveResult.ok) return saveResult;
7717
+ return Ok(entry);
7718
+ }
7719
+ function generateEntryId() {
7720
+ const timestamp = Date.now().toString(36);
7721
+ const random = Math.random().toString(36).substring(2, 8);
7722
+ return `${timestamp}-${random}`;
7723
+ }
7724
+ async function archiveSession(projectPath, sessionSlug) {
7725
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
7726
+ if (!dirResult.ok) return dirResult;
7727
+ const sessionDir = dirResult.value;
7728
+ if (!fs15.existsSync(sessionDir)) {
7729
+ return Err(new Error(`Session '${sessionSlug}' not found at ${sessionDir}`));
7730
+ }
7731
+ const archiveBase = path12.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
7732
+ try {
7733
+ fs15.mkdirSync(archiveBase, { recursive: true });
7734
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
7735
+ let archiveName = `${sessionSlug}-${date}`;
7736
+ let counter = 1;
7737
+ while (fs15.existsSync(path12.join(archiveBase, archiveName))) {
7738
+ archiveName = `${sessionSlug}-${date}-${counter}`;
7739
+ counter++;
7740
+ }
7741
+ const dest = path12.join(archiveBase, archiveName);
7742
+ try {
7743
+ fs15.renameSync(sessionDir, dest);
7744
+ } catch (renameErr) {
7745
+ if (renameErr instanceof Error && "code" in renameErr && renameErr.code === "EXDEV") {
7746
+ fs15.cpSync(sessionDir, dest, { recursive: true });
7747
+ fs15.rmSync(sessionDir, { recursive: true });
7748
+ } else {
7749
+ throw renameErr;
7750
+ }
7751
+ }
7752
+ return Ok(void 0);
7753
+ } catch (error) {
7754
+ return Err(
7755
+ new Error(
7756
+ `Failed to archive session: ${error instanceof Error ? error.message : String(error)}`
7757
+ )
7758
+ );
7759
+ }
7760
+ }
7761
+ var SkillEventSchema = z5.object({
7762
+ timestamp: z5.string(),
7763
+ skill: z5.string(),
7764
+ session: z5.string().optional(),
7765
+ type: z5.enum(["phase_transition", "decision", "gate_result", "handoff", "error", "checkpoint"]),
7766
+ summary: z5.string(),
7767
+ data: z5.record(z5.unknown()).optional(),
7768
+ refs: z5.array(z5.string()).optional(),
7769
+ contentHash: z5.string().optional()
7770
+ });
7771
+ function computeEventHash(event, session) {
7772
+ const identity = `${event.skill}|${event.type}|${event.summary}|${session ?? ""}`;
7773
+ return computeContentHash(identity);
7774
+ }
7775
+ var knownHashesCache = /* @__PURE__ */ new Map();
7776
+ function loadKnownHashes(eventsPath) {
7777
+ const cached = knownHashesCache.get(eventsPath);
7778
+ if (cached) return cached;
7779
+ const hashes = /* @__PURE__ */ new Set();
7780
+ if (fs16.existsSync(eventsPath)) {
7781
+ const content = fs16.readFileSync(eventsPath, "utf-8");
7782
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
7783
+ for (const line of lines) {
7784
+ try {
7785
+ const existing = JSON.parse(line);
7786
+ if (existing.contentHash) {
7787
+ hashes.add(existing.contentHash);
7788
+ }
7789
+ } catch {
7790
+ }
7791
+ }
7792
+ }
7793
+ knownHashesCache.set(eventsPath, hashes);
7794
+ return hashes;
7795
+ }
7796
+ function clearEventHashCache() {
7797
+ knownHashesCache.clear();
7798
+ }
7799
+ async function emitEvent(projectPath, event, options) {
7800
+ try {
7801
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
7802
+ if (!dirResult.ok) return dirResult;
7803
+ const stateDir = dirResult.value;
7804
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
7805
+ fs16.mkdirSync(stateDir, { recursive: true });
7806
+ const contentHash = computeEventHash(event, options?.session);
7807
+ const knownHashes = loadKnownHashes(eventsPath);
7808
+ if (knownHashes.has(contentHash)) {
7809
+ return Ok({ written: false, reason: "duplicate" });
7810
+ }
7811
+ const fullEvent = {
7812
+ ...event,
7813
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7814
+ contentHash
7815
+ };
7816
+ if (options?.session) {
7817
+ fullEvent.session = options.session;
7818
+ }
7819
+ fs16.appendFileSync(eventsPath, JSON.stringify(fullEvent) + "\n");
7820
+ knownHashes.add(contentHash);
7821
+ return Ok({ written: true });
7822
+ } catch (error) {
7823
+ return Err(
7824
+ new Error(`Failed to emit event: ${error instanceof Error ? error.message : String(error)}`)
7825
+ );
7826
+ }
7827
+ }
7828
+ async function loadEvents(projectPath, options) {
7829
+ try {
7830
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
7831
+ if (!dirResult.ok) return dirResult;
7832
+ const stateDir = dirResult.value;
7833
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
7834
+ if (!fs16.existsSync(eventsPath)) {
7835
+ return Ok([]);
7836
+ }
7837
+ const content = fs16.readFileSync(eventsPath, "utf-8");
7838
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
7839
+ const events = [];
7840
+ for (const line of lines) {
7841
+ try {
7842
+ const parsed = JSON.parse(line);
7843
+ const result = SkillEventSchema.safeParse(parsed);
7844
+ if (result.success) {
7845
+ events.push(result.data);
7846
+ }
7847
+ } catch {
7848
+ }
7849
+ }
7850
+ return Ok(events);
7851
+ } catch (error) {
7852
+ return Err(
7853
+ new Error(`Failed to load events: ${error instanceof Error ? error.message : String(error)}`)
7854
+ );
7855
+ }
7856
+ }
7857
+ function formatPhaseTransition(event) {
7858
+ const data = event.data;
7859
+ const suffix = data?.taskCount ? ` (${data.taskCount} tasks)` : "";
7860
+ return `phase: ${data?.from ?? "?"} -> ${data?.to ?? "?"}${suffix}`;
7861
+ }
7862
+ function formatGateResult(event) {
7863
+ const data = event.data;
7864
+ const status = data?.passed ? "passed" : "failed";
7865
+ const checks = data?.checks?.map((c) => `${c.name} ${c.passed ? "Y" : "N"}`).join(", ");
7866
+ return checks ? `gate: ${status} (${checks})` : `gate: ${status}`;
7867
+ }
7868
+ function formatHandoffDetail(event) {
7869
+ const data = event.data;
7870
+ const direction = data?.toSkill ? ` -> ${data.toSkill}` : "";
7871
+ return `handoff: ${event.summary}${direction}`;
7872
+ }
7873
+ var EVENT_FORMATTERS = {
7874
+ phase_transition: formatPhaseTransition,
7875
+ gate_result: formatGateResult,
7876
+ decision: (event) => `decision: ${event.summary}`,
7877
+ handoff: formatHandoffDetail,
7878
+ error: (event) => `error: ${event.summary}`,
7879
+ checkpoint: (event) => `checkpoint: ${event.summary}`
7880
+ };
7881
+ function formatEventTimeline(events, limit = 20) {
7882
+ if (events.length === 0) return "";
7883
+ const recent = events.slice(-limit);
7884
+ return recent.map((event) => {
7885
+ const time = formatTime(event.timestamp);
7886
+ const formatter = EVENT_FORMATTERS[event.type];
7887
+ const detail = formatter ? formatter(event) : event.summary;
7888
+ return `- ${time} [${event.skill}] ${detail}`;
7889
+ }).join("\n");
7890
+ }
7891
+ function formatTime(timestamp) {
7892
+ try {
7893
+ const date = new Date(timestamp);
7894
+ const hours = String(date.getHours()).padStart(2, "0");
7895
+ const minutes = String(date.getMinutes()).padStart(2, "0");
7896
+ return `${hours}:${minutes}`;
7897
+ } catch {
7898
+ return "??:??";
7899
+ }
7900
+ }
7261
7901
  async function executeWorkflow(workflow, executor) {
7262
7902
  const stepResults = [];
7263
7903
  const startTime = Date.now();
@@ -7434,19 +8074,19 @@ var DEFAULT_SECURITY_CONFIG = {
7434
8074
  rules: {},
7435
8075
  exclude: ["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]
7436
8076
  };
7437
- var RuleOverrideSchema = z5.enum(["off", "error", "warning", "info"]);
7438
- var SecurityConfigSchema = z5.object({
7439
- enabled: z5.boolean().default(true),
7440
- strict: z5.boolean().default(false),
7441
- rules: z5.record(z5.string(), RuleOverrideSchema).optional().default({}),
7442
- exclude: z5.array(z5.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
7443
- external: z5.object({
7444
- semgrep: z5.object({
7445
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto"),
7446
- rulesets: z5.array(z5.string()).optional()
8077
+ var RuleOverrideSchema = z6.enum(["off", "error", "warning", "info"]);
8078
+ var SecurityConfigSchema = z6.object({
8079
+ enabled: z6.boolean().default(true),
8080
+ strict: z6.boolean().default(false),
8081
+ rules: z6.record(z6.string(), RuleOverrideSchema).optional().default({}),
8082
+ exclude: z6.array(z6.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
8083
+ external: z6.object({
8084
+ semgrep: z6.object({
8085
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto"),
8086
+ rulesets: z6.array(z6.string()).optional()
7447
8087
  }).optional(),
7448
- gitleaks: z5.object({
7449
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto")
8088
+ gitleaks: z6.object({
8089
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto")
7450
8090
  }).optional()
7451
8091
  }).optional()
7452
8092
  });
@@ -7479,11 +8119,11 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
7479
8119
  }
7480
8120
  function detectStack(projectRoot) {
7481
8121
  const stacks = [];
7482
- const pkgJsonPath = path11.join(projectRoot, "package.json");
7483
- if (fs14.existsSync(pkgJsonPath)) {
8122
+ const pkgJsonPath = path14.join(projectRoot, "package.json");
8123
+ if (fs17.existsSync(pkgJsonPath)) {
7484
8124
  stacks.push("node");
7485
8125
  try {
7486
- const pkgJson = JSON.parse(fs14.readFileSync(pkgJsonPath, "utf-8"));
8126
+ const pkgJson = JSON.parse(fs17.readFileSync(pkgJsonPath, "utf-8"));
7487
8127
  const allDeps = {
7488
8128
  ...pkgJson.dependencies,
7489
8129
  ...pkgJson.devDependencies
@@ -7498,13 +8138,13 @@ function detectStack(projectRoot) {
7498
8138
  } catch {
7499
8139
  }
7500
8140
  }
7501
- const goModPath = path11.join(projectRoot, "go.mod");
7502
- if (fs14.existsSync(goModPath)) {
8141
+ const goModPath = path14.join(projectRoot, "go.mod");
8142
+ if (fs17.existsSync(goModPath)) {
7503
8143
  stacks.push("go");
7504
8144
  }
7505
- const requirementsPath = path11.join(projectRoot, "requirements.txt");
7506
- const pyprojectPath = path11.join(projectRoot, "pyproject.toml");
7507
- if (fs14.existsSync(requirementsPath) || fs14.existsSync(pyprojectPath)) {
8145
+ const requirementsPath = path14.join(projectRoot, "requirements.txt");
8146
+ const pyprojectPath = path14.join(projectRoot, "pyproject.toml");
8147
+ if (fs17.existsSync(requirementsPath) || fs17.existsSync(pyprojectPath)) {
7508
8148
  stacks.push("python");
7509
8149
  }
7510
8150
  return stacks;
@@ -7566,6 +8206,72 @@ var secretRules = [
7566
8206
  message: "Hardcoded JWT token detected",
7567
8207
  remediation: "Tokens should be fetched at runtime, not embedded in source",
7568
8208
  references: ["CWE-798"]
8209
+ },
8210
+ {
8211
+ id: "SEC-SEC-006",
8212
+ name: "Anthropic API Key",
8213
+ category: "secrets",
8214
+ severity: "error",
8215
+ confidence: "high",
8216
+ patterns: [/sk-ant-api\d{2}-[A-Za-z0-9_-]{20,}/],
8217
+ message: "Hardcoded Anthropic API key detected",
8218
+ remediation: "Use environment variables: process.env.ANTHROPIC_API_KEY",
8219
+ references: ["CWE-798"]
8220
+ },
8221
+ {
8222
+ id: "SEC-SEC-007",
8223
+ name: "OpenAI API Key",
8224
+ category: "secrets",
8225
+ severity: "error",
8226
+ confidence: "high",
8227
+ patterns: [/sk-proj-[A-Za-z0-9_-]{20,}/],
8228
+ message: "Hardcoded OpenAI API key detected",
8229
+ remediation: "Use environment variables: process.env.OPENAI_API_KEY",
8230
+ references: ["CWE-798"]
8231
+ },
8232
+ {
8233
+ id: "SEC-SEC-008",
8234
+ name: "Google API Key",
8235
+ category: "secrets",
8236
+ severity: "error",
8237
+ confidence: "high",
8238
+ patterns: [/AIza[A-Za-z0-9_-]{35}/],
8239
+ message: "Hardcoded Google API key detected",
8240
+ remediation: "Use environment variables or a secrets manager for Google API keys",
8241
+ references: ["CWE-798"]
8242
+ },
8243
+ {
8244
+ id: "SEC-SEC-009",
8245
+ name: "GitHub Personal Access Token",
8246
+ category: "secrets",
8247
+ severity: "error",
8248
+ confidence: "high",
8249
+ patterns: [/gh[pous]_[A-Za-z0-9_]{36,}/],
8250
+ message: "Hardcoded GitHub personal access token detected",
8251
+ remediation: "Use environment variables: process.env.GITHUB_TOKEN",
8252
+ references: ["CWE-798"]
8253
+ },
8254
+ {
8255
+ id: "SEC-SEC-010",
8256
+ name: "Stripe Live Key",
8257
+ category: "secrets",
8258
+ severity: "error",
8259
+ confidence: "high",
8260
+ patterns: [/\b[spr]k_live_[A-Za-z0-9]{24,}/],
8261
+ message: "Hardcoded Stripe live key detected",
8262
+ remediation: "Use environment variables for Stripe keys; never commit live keys",
8263
+ references: ["CWE-798"]
8264
+ },
8265
+ {
8266
+ id: "SEC-SEC-011",
8267
+ name: "Database Connection String with Credentials",
8268
+ category: "secrets",
8269
+ severity: "error",
8270
+ confidence: "high",
8271
+ patterns: [/(?:postgres|mysql|mongodb|redis|amqp|mssql)(?:\+\w+)?:\/\/[^/\s:]+:[^@/\s]+@/i],
8272
+ message: "Database connection string with embedded credentials detected",
8273
+ remediation: "Use environment variables for connection strings; separate credentials from URIs",
8274
+ references: ["CWE-798"]
7569
8275
  }
7570
8276
  ];
7571
8277
  var injectionRules = [
@@ -7739,14 +8445,162 @@ var deserializationRules = [
7739
8445
  references: ["CWE-502"]
7740
8446
  }
7741
8447
  ];
7742
- var nodeRules = [
8448
// Heuristic security rules targeting AI-agent configuration files
// (CLAUDE.md, AGENTS.md, skill YAML, .claude settings, hooks). Each rule's
// `fileGlob` is a comma-separated glob list restricting which files it
// applies to; the glob filter is honored only by fileGlob-aware scanning
// (scanContentForFile), not by plain scanContent.
var agentConfigRules = [
  {
    // Zero-width/invisible Unicode can hide instructions from human review.
    id: "SEC-AGT-001",
    name: "Hidden Unicode Characters",
    category: "agent-config",
    severity: "error",
    confidence: "high",
    patterns: [/\u200B|\u200C|\u200D|\uFEFF|\u2060/],
    fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/*.yaml",
    message: "Hidden zero-width Unicode characters detected in agent configuration",
    remediation: "Remove invisible Unicode characters; they may hide malicious instructions",
    references: ["CWE-116"]
  },
  {
    // curl/wget/fetch directives could make an agent pull and run remote content.
    id: "SEC-AGT-002",
    name: "URL Execution Directives",
    category: "agent-config",
    severity: "warning",
    confidence: "medium",
    patterns: [/\b(?:curl|wget)\s+\S+/i, /\bfetch\s*\(/i],
    fileGlob: "**/CLAUDE.md,**/AGENTS.md",
    message: "URL execution directive found in agent configuration",
    remediation: "Avoid instructing agents to download and execute remote content",
    references: ["CWE-94"]
  },
  {
    // Bash(*)/Write(*)/Edit(*) grants blanket tool access.
    id: "SEC-AGT-003",
    name: "Wildcard Tool Permissions",
    category: "agent-config",
    severity: "warning",
    confidence: "high",
    patterns: [/(?:Bash|Write|Edit)\s*\(\s*\*\s*\)/],
    fileGlob: "**/.claude/**,**/settings*.json",
    message: "Wildcard tool permissions grant unrestricted access",
    remediation: "Scope tool permissions to specific patterns instead of wildcards",
    references: ["CWE-250"]
  },
  {
    // autoApprove/auto_approve skips human confirmation of tool calls.
    id: "SEC-AGT-004",
    name: "Auto-approve Patterns",
    category: "agent-config",
    severity: "warning",
    confidence: "high",
    patterns: [/\bautoApprove\b/i, /\bauto_approve\b/i],
    fileGlob: "**/.claude/**,**/.mcp.json",
    message: "Auto-approve configuration bypasses human review of tool calls",
    remediation: "Review auto-approved tools carefully; prefer explicit approval for destructive operations",
    references: ["CWE-862"]
  },
  {
    // ${...} / {{...}} interpolation in skill YAML descriptions.
    id: "SEC-AGT-005",
    name: "Prompt Injection Surface",
    category: "agent-config",
    severity: "warning",
    confidence: "medium",
    patterns: [/\$\{[^}]*\}/, /\{\{[^}]*\}\}/],
    fileGlob: "**/skill.yaml",
    message: "Template interpolation syntax in skill YAML may enable prompt injection",
    remediation: "Avoid dynamic interpolation in skill descriptions; use static text",
    references: ["CWE-94"]
  },
  {
    // Flags that disable permission prompts or git hooks.
    id: "SEC-AGT-006",
    name: "Permission Bypass Flags",
    category: "agent-config",
    severity: "error",
    confidence: "high",
    patterns: [/--dangerously-skip-permissions/, /--no-verify/],
    fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/.claude/**",
    message: "Permission bypass flag detected in agent configuration",
    remediation: "Remove flags that bypass safety checks; they undermine enforcement",
    references: ["CWE-863"]
  },
  {
    // Shell metacharacters ($(…), backticks, &&, ||) in hook commands.
    // Low confidence: these characters are legitimate in many scripts.
    id: "SEC-AGT-007",
    name: "Hook Injection Surface",
    category: "agent-config",
    severity: "error",
    confidence: "low",
    patterns: [/\$\(/, /`[^`]+`/, /\s&&\s/, /\s\|\|\s/],
    fileGlob: "**/settings*.json,**/hooks.json",
    message: "Shell metacharacters in hook commands may enable command injection",
    remediation: "Use simple, single-command hooks without shell operators; chain logic inside the script",
    references: ["CWE-78"]
  }
];
8534
// Heuristic security rules for MCP (Model Context Protocol) server
// configuration. Every rule is scoped to **/.mcp.json via `fileGlob`
// (applied by fileGlob-aware scanning only).
var mcpRules = [
  {
    // Inline API_KEY/SECRET/TOKEN/PASSWORD/CREDENTIAL values of 8+ chars.
    id: "SEC-MCP-001",
    name: "Hardcoded MCP Secrets",
    category: "mcp",
    severity: "error",
    confidence: "medium",
    patterns: [/(?:API_KEY|SECRET|TOKEN|PASSWORD|CREDENTIAL)\s*["']?\s*:\s*["'][^"']{8,}["']/i],
    fileGlob: "**/.mcp.json",
    message: "Hardcoded secret detected in MCP server configuration",
    remediation: "Use environment variable references instead of inline secrets in .mcp.json",
    references: ["CWE-798"]
  },
  {
    // Command substitution ($(…) or backticks) inside server args.
    id: "SEC-MCP-002",
    name: "Shell Injection in MCP Args",
    category: "mcp",
    severity: "error",
    confidence: "medium",
    patterns: [/\$\(/, /`[^`]+`/],
    fileGlob: "**/.mcp.json",
    message: "Shell metacharacters detected in MCP server arguments",
    remediation: "Use literal argument values; avoid shell interpolation in MCP args",
    references: ["CWE-78"]
  },
  {
    // Binding to 0.0.0.0 or a wildcard host exposes the server beyond localhost.
    id: "SEC-MCP-003",
    name: "Network Exposure",
    category: "mcp",
    severity: "warning",
    confidence: "high",
    patterns: [/0\.0\.0\.0/, /["']\*["']\s*:\s*\d/, /host["']?\s*:\s*["']\*["']/i],
    fileGlob: "**/.mcp.json",
    message: "MCP server binding to all network interfaces (0.0.0.0 or wildcard *)",
    remediation: "Bind to 127.0.0.1 or localhost to restrict access to local machine",
    references: ["CWE-668"]
  },
  {
    // npx -y/--yes installs whatever package name resolves, unprompted.
    id: "SEC-MCP-004",
    name: "Typosquatting Vector",
    category: "mcp",
    severity: "warning",
    confidence: "medium",
    patterns: [/\bnpx\s+(?:-y|--yes)\b/],
    fileGlob: "**/.mcp.json",
    message: "npx -y auto-installs packages without confirmation, enabling typosquatting",
    remediation: "Pin exact package versions or install packages explicitly before use",
    references: ["CWE-427"]
  },
  {
    // Plain http:// to any host other than localhost/127.0.0.1.
    id: "SEC-MCP-005",
    name: "Unencrypted Transport",
    category: "mcp",
    severity: "warning",
    confidence: "medium",
    patterns: [/http:\/\/(?!localhost\b|127\.0\.0\.1\b)/],
    fileGlob: "**/.mcp.json",
    message: "Unencrypted HTTP transport detected for MCP server connection",
    remediation: "Use https:// for all non-localhost MCP server connections",
    references: ["CWE-319"]
  }
];
8596
+ var nodeRules = [
8597
+ {
8598
+ id: "SEC-NODE-001",
8599
+ name: "Prototype Pollution",
8600
+ category: "injection",
8601
+ severity: "warning",
8602
+ confidence: "medium",
8603
+ patterns: [
7750
8604
  /__proto__/,
7751
8605
  /\bconstructor\s*\[/,
7752
8606
  /\bprototype\s*\[/,
@@ -7857,7 +8711,9 @@ var SecurityScanner = class {
7857
8711
  ...cryptoRules,
7858
8712
  ...pathTraversalRules,
7859
8713
  ...networkRules,
7860
- ...deserializationRules
8714
+ ...deserializationRules,
8715
+ ...agentConfigRules,
8716
+ ...mcpRules
7861
8717
  ]);
7862
8718
  this.registry.registerAll([...nodeRules, ...expressRules, ...reactRules, ...goRules]);
7863
8719
  this.activeRules = this.registry.getAll();
@@ -7866,6 +8722,12 @@ var SecurityScanner = class {
7866
8722
  const stacks = detectStack(projectRoot);
7867
8723
  this.activeRules = this.registry.getForStacks(stacks.length > 0 ? stacks : []);
7868
8724
  }
8725
+ /**
8726
+ * Scan raw content against all active rules. Note: this method does NOT apply
8727
+ * fileGlob filtering — every active rule is evaluated regardless of filePath.
8728
+ * If you are scanning a specific file and want fileGlob-based rule filtering,
8729
+ * use {@link scanFile} instead.
8730
+ */
7869
8731
  scanContent(content, filePath, startLine = 1) {
7870
8732
  if (!this.config.enabled) return [];
7871
8733
  const findings = [];
@@ -7907,8 +8769,52 @@ var SecurityScanner = class {
7907
8769
  }
7908
8770
  async scanFile(filePath) {
7909
8771
  if (!this.config.enabled) return [];
7910
- const content = await fs15.readFile(filePath, "utf-8");
7911
- return this.scanContent(content, filePath, 1);
8772
+ const content = await fs18.readFile(filePath, "utf-8");
8773
+ return this.scanContentForFile(content, filePath, 1);
8774
+ }
8775
  /**
   * Scan `content` as the contents of `filePath`, applying fileGlob-aware
   * rule filtering (unlike scanContent, which evaluates every active rule
   * regardless of path).
   *
   * @param {string} content - Raw file contents to scan.
   * @param {string} filePath - Path used both for glob matching and in findings.
   * @param {number} [startLine=1] - Line number attributed to the first line.
   * @returns {Array} Findings; at most one finding per rule per line.
   */
  scanContentForFile(content, filePath, startLine = 1) {
    if (!this.config.enabled) return [];
    const findings = [];
    const lines = content.split("\n");
    // A rule with a fileGlob applies only when filePath matches one of its
    // comma-separated globs; dot: true so dotfiles (e.g. .mcp.json) match.
    // Rules without a fileGlob always apply.
    const applicableRules = this.activeRules.filter((rule) => {
      if (!rule.fileGlob) return true;
      const globs = rule.fileGlob.split(",").map((g) => g.trim());
      return globs.some((glob2) => minimatch4(filePath, glob2, { dot: true }));
    });
    for (const rule of applicableRules) {
      // Severity may be overridden per-rule by config (including "off").
      const resolved = resolveRuleSeverity(
        rule.id,
        rule.severity,
        this.config.rules ?? {},
        this.config.strict
      );
      if (resolved === "off") continue;
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i] ?? "";
        // Inline suppression: a line mentioning "harness-ignore" plus the
        // rule id is skipped for that rule.
        if (line.includes("harness-ignore") && line.includes(rule.id)) continue;
        for (const pattern of rule.patterns) {
          // Reset stateful /g//y regexes so a prior .test() cannot make
          // this one start mid-line and miss a match.
          pattern.lastIndex = 0;
          if (pattern.test(line)) {
            findings.push({
              ruleId: rule.id,
              ruleName: rule.name,
              category: rule.category,
              severity: resolved,
              confidence: rule.confidence,
              file: filePath,
              line: startLine + i,
              match: line.trim(),
              context: line,
              message: rule.message,
              remediation: rule.remediation,
              ...rule.references ? { references: rule.references } : {}
            });
            // First matching pattern wins: one finding per rule per line.
            break;
          }
        }
      }
    }
    return findings;
  }
7913
8819
  async scanFiles(filePaths) {
7914
8820
  const allFindings = [];
@@ -7942,7 +8848,7 @@ var ALL_CHECKS = [
7942
8848
  ];
7943
8849
  async function runValidateCheck(projectRoot, config) {
7944
8850
  const issues = [];
7945
- const agentsPath = path12.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8851
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7946
8852
  const result = await validateAgentsMap(agentsPath);
7947
8853
  if (!result.ok) {
7948
8854
  issues.push({ severity: "error", message: result.error.message });
@@ -7999,7 +8905,7 @@ async function runDepsCheck(projectRoot, config) {
7999
8905
  }
8000
8906
  async function runDocsCheck(projectRoot, config) {
8001
8907
  const issues = [];
8002
- const docsDir = path12.join(projectRoot, config.docsDir ?? "docs");
8908
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
8003
8909
  const entropyConfig = config.entropy || {};
8004
8910
  const result = await checkDocCoverage("project", {
8005
8911
  docsDir,
@@ -8024,10 +8930,14 @@ async function runDocsCheck(projectRoot, config) {
8024
8930
  }
8025
8931
  return issues;
8026
8932
  }
8027
- async function runEntropyCheck(projectRoot, _config) {
8933
+ async function runEntropyCheck(projectRoot, config) {
8028
8934
  const issues = [];
8935
+ const entropyConfig = config.entropy || {};
8936
+ const perfConfig = config.performance || {};
8937
+ const entryPoints = entropyConfig.entryPoints ?? perfConfig.entryPoints;
8029
8938
  const analyzer = new EntropyAnalyzer({
8030
8939
  rootDir: projectRoot,
8940
+ ...entryPoints ? { entryPoints } : {},
8031
8941
  analyze: { drift: true, deadCode: true, patterns: false }
8032
8942
  });
8033
8943
  const result = await analyzer.analyze();
@@ -8089,8 +8999,10 @@ async function runSecurityCheck(projectRoot, config) {
8089
8999
  async function runPerfCheck(projectRoot, config) {
8090
9000
  const issues = [];
8091
9001
  const perfConfig = config.performance || {};
9002
+ const entryPoints = perfConfig.entryPoints;
8092
9003
  const perfAnalyzer = new EntropyAnalyzer({
8093
9004
  rootDir: projectRoot,
9005
+ ...entryPoints ? { entryPoints } : {},
8094
9006
  analyze: {
8095
9007
  complexity: perfConfig.complexity || true,
8096
9008
  coupling: perfConfig.coupling || true,
@@ -8280,7 +9192,7 @@ async function runMechanicalChecks(options) {
8280
9192
  };
8281
9193
  if (!skip.includes("validate")) {
8282
9194
  try {
8283
- const agentsPath = path13.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
9195
+ const agentsPath = path16.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8284
9196
  const result = await validateAgentsMap(agentsPath);
8285
9197
  if (!result.ok) {
8286
9198
  statuses.validate = "fail";
@@ -8317,7 +9229,7 @@ async function runMechanicalChecks(options) {
8317
9229
  statuses.validate = "fail";
8318
9230
  findings.push({
8319
9231
  tool: "validate",
8320
- file: path13.join(projectRoot, "AGENTS.md"),
9232
+ file: path16.join(projectRoot, "AGENTS.md"),
8321
9233
  message: err instanceof Error ? err.message : String(err),
8322
9234
  severity: "error"
8323
9235
  });
@@ -8381,7 +9293,7 @@ async function runMechanicalChecks(options) {
8381
9293
  (async () => {
8382
9294
  const localFindings = [];
8383
9295
  try {
8384
- const docsDir = path13.join(projectRoot, config.docsDir ?? "docs");
9296
+ const docsDir = path16.join(projectRoot, config.docsDir ?? "docs");
8385
9297
  const result = await checkDocCoverage("project", { docsDir });
8386
9298
  if (!result.ok) {
8387
9299
  statuses["check-docs"] = "warn";
@@ -8408,7 +9320,7 @@ async function runMechanicalChecks(options) {
8408
9320
  statuses["check-docs"] = "warn";
8409
9321
  localFindings.push({
8410
9322
  tool: "check-docs",
8411
- file: path13.join(projectRoot, "docs"),
9323
+ file: path16.join(projectRoot, "docs"),
8412
9324
  message: err instanceof Error ? err.message : String(err),
8413
9325
  severity: "warning"
8414
9326
  });
@@ -8557,18 +9469,18 @@ function computeContextBudget(diffLines) {
8557
9469
  return diffLines;
8558
9470
  }
8559
9471
  function isWithinProject(absPath, projectRoot) {
8560
- const resolvedRoot = path14.resolve(projectRoot) + path14.sep;
8561
- const resolvedPath = path14.resolve(absPath);
8562
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path14.resolve(projectRoot);
9472
+ const resolvedRoot = path17.resolve(projectRoot) + path17.sep;
9473
+ const resolvedPath = path17.resolve(absPath);
9474
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path17.resolve(projectRoot);
8563
9475
  }
8564
9476
  async function readContextFile(projectRoot, filePath, reason) {
8565
- const absPath = path14.isAbsolute(filePath) ? filePath : path14.join(projectRoot, filePath);
9477
+ const absPath = path17.isAbsolute(filePath) ? filePath : path17.join(projectRoot, filePath);
8566
9478
  if (!isWithinProject(absPath, projectRoot)) return null;
8567
9479
  const result = await readFileContent(absPath);
8568
9480
  if (!result.ok) return null;
8569
9481
  const content = result.value;
8570
9482
  const lines = content.split("\n").length;
8571
- const relPath = path14.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
9483
+ const relPath = path17.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
8572
9484
  return { path: relPath, content, reason, lines };
8573
9485
  }
8574
9486
  function extractImportSources2(content) {
@@ -8583,18 +9495,18 @@ function extractImportSources2(content) {
8583
9495
  }
8584
9496
  async function resolveImportPath2(projectRoot, fromFile, importSource) {
8585
9497
  if (!importSource.startsWith(".")) return null;
8586
- const fromDir = path14.dirname(path14.join(projectRoot, fromFile));
8587
- const basePath = path14.resolve(fromDir, importSource);
9498
+ const fromDir = path17.dirname(path17.join(projectRoot, fromFile));
9499
+ const basePath = path17.resolve(fromDir, importSource);
8588
9500
  if (!isWithinProject(basePath, projectRoot)) return null;
8589
9501
  const relBase = relativePosix(projectRoot, basePath);
8590
9502
  const candidates = [
8591
9503
  relBase + ".ts",
8592
9504
  relBase + ".tsx",
8593
9505
  relBase + ".mts",
8594
- path14.join(relBase, "index.ts")
9506
+ path17.join(relBase, "index.ts")
8595
9507
  ];
8596
9508
  for (const candidate of candidates) {
8597
- const absCandidate = path14.join(projectRoot, candidate);
9509
+ const absCandidate = path17.join(projectRoot, candidate);
8598
9510
  if (await fileExists(absCandidate)) {
8599
9511
  return candidate;
8600
9512
  }
@@ -8602,7 +9514,7 @@ async function resolveImportPath2(projectRoot, fromFile, importSource) {
8602
9514
  return null;
8603
9515
  }
8604
9516
  async function findTestFiles(projectRoot, sourceFile) {
8605
- const baseName = path14.basename(sourceFile, path14.extname(sourceFile));
9517
+ const baseName = path17.basename(sourceFile, path17.extname(sourceFile));
8606
9518
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
8607
9519
  const results = await findFiles(pattern, projectRoot);
8608
9520
  return results.map((f) => relativePosix(projectRoot, f));
@@ -8887,101 +9799,102 @@ function findMissingJsDoc(bundle) {
8887
9799
  }
8888
9800
  return missing;
8889
9801
  }
8890
- function runComplianceAgent(bundle) {
9802
// Emit one compliance finding per exported symbol lacking JSDoc — but only
// when the project's convention rules actually mention "jsdoc"; otherwise
// the check is a no-op.
function checkMissingJsDoc(bundle, rules) {
  const jsDocRule = rules.find((rule) => rule.text.toLowerCase().includes("jsdoc"));
  if (!jsDocRule) return [];
  const findings = [];
  for (const missing of findMissingJsDoc(bundle)) {
    findings.push({
      id: makeFindingId("compliance", missing.file, missing.line, `Missing JSDoc ${missing.exportName}`),
      file: missing.file,
      lineRange: [missing.line, missing.line],
      domain: "compliance",
      severity: "important",
      title: `Missing JSDoc on exported \`${missing.exportName}\``,
      rationale: `Convention requires all exports to have JSDoc comments (from ${jsDocRule.source}).`,
      suggestion: `Add a JSDoc comment above the export of \`${missing.exportName}\`.`,
      evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${jsDocRule.text}"`],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
9819
// Flag a feature change that arrives without any spec/convention context
// file in the bundle. Produces at most one suggestion-level finding,
// anchored to the first changed file.
function checkFeatureSpec(bundle) {
  const hasSpecContext = bundle.contextFiles.some(
    (file) => file.reason === "spec" || file.reason === "convention"
  );
  if (hasSpecContext) return [];
  if (bundle.changedFiles.length === 0) return [];
  const [firstFile] = bundle.changedFiles;
  const finding = {
    id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "No spec/design doc found for feature change",
    rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
    evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
    validatedBy: "heuristic"
  };
  return [finding];
}
9839
// Flag a bugfix change that arrives with no commit history in the bundle
// (history helps verify the root cause, not just the symptom, is fixed).
// Produces at most one suggestion-level finding on the first changed file.
function checkBugfixHistory(bundle) {
  if (bundle.commitHistory.length > 0) return [];
  if (bundle.changedFiles.length === 0) return [];
  const [firstFile] = bundle.changedFiles;
  const finding = {
    id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "Bugfix without commit history context",
    rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
    evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
    validatedBy: "heuristic"
  };
  return [finding];
}
9856
// Route to the compliance check specific to the bundle's change type.
// Change types with no dedicated check (refactor, docs, ...) yield no
// findings.
function checkChangeTypeSpecific(bundle) {
  if (bundle.changeType === "feature") {
    return checkFeatureSpec(bundle);
  }
  if (bundle.changeType === "bugfix") {
    return checkBugfixHistory(bundle);
  }
  return [];
}
9866
+ function checkResultTypeConvention(bundle, rules) {
9867
+ const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
9868
+ if (!resultTypeRule) return [];
8891
9869
  const findings = [];
8892
- const rules = extractConventionRules(bundle);
8893
- const jsDocRuleExists = rules.some((r) => r.text.toLowerCase().includes("jsdoc"));
8894
- if (jsDocRuleExists) {
8895
- const missingDocs = findMissingJsDoc(bundle);
8896
- for (const m of missingDocs) {
9870
+ for (const cf of bundle.changedFiles) {
9871
+ const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
9872
+ const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
9873
+ if (hasTryCatch && !usesResult) {
8897
9874
  findings.push({
8898
- id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
8899
- file: m.file,
8900
- lineRange: [m.line, m.line],
9875
+ id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
9876
+ file: cf.path,
9877
+ lineRange: [1, cf.lines],
8901
9878
  domain: "compliance",
8902
- severity: "important",
8903
- title: `Missing JSDoc on exported \`${m.exportName}\``,
8904
- rationale: `Convention requires all exports to have JSDoc comments (from ${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.source ?? "conventions"}).`,
8905
- suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
8906
- evidence: [
8907
- `changeType: ${bundle.changeType}`,
8908
- `Convention rule: "${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.text ?? ""}"`
8909
- ],
9879
+ severity: "suggestion",
9880
+ title: "Fallible operation uses try/catch instead of Result type",
9881
+ rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
9882
+ suggestion: "Refactor error handling to use the Result type pattern.",
9883
+ evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${resultTypeRule.text}"`],
8910
9884
  validatedBy: "heuristic"
8911
9885
  });
8912
9886
  }
8913
9887
  }
8914
- switch (bundle.changeType) {
8915
- case "feature": {
8916
- const hasSpecContext = bundle.contextFiles.some(
8917
- (f) => f.reason === "spec" || f.reason === "convention"
8918
- );
8919
- if (!hasSpecContext && bundle.changedFiles.length > 0) {
8920
- const firstFile = bundle.changedFiles[0];
8921
- findings.push({
8922
- id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
8923
- file: firstFile.path,
8924
- lineRange: [1, 1],
8925
- domain: "compliance",
8926
- severity: "suggestion",
8927
- title: "No spec/design doc found for feature change",
8928
- rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
8929
- evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
8930
- validatedBy: "heuristic"
8931
- });
8932
- }
8933
- break;
8934
- }
8935
- case "bugfix": {
8936
- if (bundle.commitHistory.length === 0 && bundle.changedFiles.length > 0) {
8937
- const firstFile = bundle.changedFiles[0];
8938
- findings.push({
8939
- id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
8940
- file: firstFile.path,
8941
- lineRange: [1, 1],
8942
- domain: "compliance",
8943
- severity: "suggestion",
8944
- title: "Bugfix without commit history context",
8945
- rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
8946
- evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
8947
- validatedBy: "heuristic"
8948
- });
8949
- }
8950
- break;
8951
- }
8952
- case "refactor": {
8953
- break;
8954
- }
8955
- case "docs": {
8956
- break;
8957
- }
8958
- }
8959
- const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
8960
- if (resultTypeRule) {
8961
- for (const cf of bundle.changedFiles) {
8962
- const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
8963
- const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
8964
- if (hasTryCatch && !usesResult) {
8965
- findings.push({
8966
- id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
8967
- file: cf.path,
8968
- lineRange: [1, cf.lines],
8969
- domain: "compliance",
8970
- severity: "suggestion",
8971
- title: "Fallible operation uses try/catch instead of Result type",
8972
- rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
8973
- suggestion: "Refactor error handling to use the Result type pattern.",
8974
- evidence: [
8975
- `changeType: ${bundle.changeType}`,
8976
- `Convention rule: "${resultTypeRule.text}"`
8977
- ],
8978
- validatedBy: "heuristic"
8979
- });
8980
- }
8981
- }
8982
- }
8983
9888
  return findings;
8984
9889
  }
9890
// Compliance agent entry point: extracts the project's convention rules
// from the bundle, then concatenates the findings of each heuristic check
// (missing JSDoc, change-type-specific checks, Result-type convention).
function runComplianceAgent(bundle) {
  const rules = extractConventionRules(bundle);
  return [
    ...checkMissingJsDoc(bundle, rules),
    ...checkChangeTypeSpecific(bundle),
    ...checkResultTypeConvention(bundle, rules)
  ];
}
8985
9898
  var BUG_DETECTION_DESCRIPTOR = {
8986
9899
  domain: "bug",
8987
9900
  tier: "strong",
@@ -9252,31 +10165,32 @@ var ARCHITECTURE_DESCRIPTOR = {
9252
10165
  ]
9253
10166
  };
9254
10167
  var LARGE_FILE_THRESHOLD = 300;
10168
+ function isViolationLine(line) {
10169
+ const lower = line.toLowerCase();
10170
+ return lower.includes("violation") || lower.includes("layer");
10171
+ }
10172
+ function createLayerViolationFinding(line, fallbackPath) {
10173
+ const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
10174
+ const file = fileMatch?.[1] ?? fallbackPath;
10175
+ const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
10176
+ return {
10177
+ id: makeFindingId("arch", file, lineNum, "layer violation"),
10178
+ file,
10179
+ lineRange: [lineNum, lineNum],
10180
+ domain: "architecture",
10181
+ severity: "critical",
10182
+ title: "Layer boundary violation detected by check-deps",
10183
+ rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
10184
+ suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
10185
+ evidence: [line.trim()],
10186
+ validatedBy: "heuristic"
10187
+ };
10188
+ }
9255
10189
  function detectLayerViolations(bundle) {
9256
- const findings = [];
9257
10190
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
9258
- if (!checkDepsFile) return findings;
9259
- const lines = checkDepsFile.content.split("\n");
9260
- for (const line of lines) {
9261
- if (line.toLowerCase().includes("violation") || line.toLowerCase().includes("layer")) {
9262
- const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
9263
- const file = fileMatch?.[1] ?? bundle.changedFiles[0]?.path ?? "unknown";
9264
- const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
9265
- findings.push({
9266
- id: makeFindingId("arch", file, lineNum, "layer violation"),
9267
- file,
9268
- lineRange: [lineNum, lineNum],
9269
- domain: "architecture",
9270
- severity: "critical",
9271
- title: "Layer boundary violation detected by check-deps",
9272
- rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
9273
- suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
9274
- evidence: [line.trim()],
9275
- validatedBy: "heuristic"
9276
- });
9277
- }
9278
- }
9279
- return findings;
10191
+ if (!checkDepsFile) return [];
10192
+ const fallbackPath = bundle.changedFiles[0]?.path ?? "unknown";
10193
+ return checkDepsFile.content.split("\n").filter(isViolationLine).map((line) => createLayerViolationFinding(line, fallbackPath));
9280
10194
  }
9281
10195
  function detectLargeFiles(bundle) {
9282
10196
  const findings = [];
@@ -9298,45 +10212,61 @@ function detectLargeFiles(bundle) {
9298
10212
  }
9299
10213
  return findings;
9300
10214
  }
10215
+ function extractRelativeImports(content) {
10216
+ const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
10217
+ let match;
10218
+ const imports = /* @__PURE__ */ new Set();
10219
+ while ((match = importRegex.exec(content)) !== null) {
10220
+ const source = match[1];
10221
+ if (source.startsWith(".")) {
10222
+ imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
10223
+ }
10224
+ }
10225
+ return imports;
10226
+ }
10227
+ function fileBaseName(filePath) {
10228
+ return filePath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
10229
+ }
10230
+ function findCircularImportInCtxFile(ctxFile, changedFilePath, changedPaths, fileImports) {
10231
+ const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
10232
+ let ctxMatch;
10233
+ while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
10234
+ const ctxSource = ctxMatch[1];
10235
+ if (!ctxSource.startsWith(".")) continue;
10236
+ for (const changedPath of changedPaths) {
10237
+ const baseName = fileBaseName(changedPath);
10238
+ const ctxBaseName = fileBaseName(ctxFile.path);
10239
+ if (ctxSource.includes(baseName) && fileImports.has(ctxBaseName)) {
10240
+ return {
10241
+ id: makeFindingId("arch", changedFilePath, 1, `circular ${ctxFile.path}`),
10242
+ file: changedFilePath,
10243
+ lineRange: [1, 1],
10244
+ domain: "architecture",
10245
+ severity: "important",
10246
+ title: `Potential circular import between ${changedFilePath} and ${ctxFile.path}`,
10247
+ rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
10248
+ suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
10249
+ evidence: [
10250
+ `${changedFilePath} imports from a module that also imports from ${changedFilePath}`
10251
+ ],
10252
+ validatedBy: "heuristic"
10253
+ };
10254
+ }
10255
+ }
10256
+ }
10257
+ return null;
10258
+ }
9301
10259
  function detectCircularImports(bundle) {
9302
10260
  const findings = [];
9303
10261
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
10262
+ const relevantCtxFiles = bundle.contextFiles.filter(
10263
+ (f) => f.reason === "import" || f.reason === "graph-dependency"
10264
+ );
9304
10265
  for (const cf of bundle.changedFiles) {
9305
- const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
9306
- let match;
9307
- const imports = /* @__PURE__ */ new Set();
9308
- while ((match = importRegex.exec(cf.content)) !== null) {
9309
- const source = match[1];
9310
- if (source.startsWith(".")) {
9311
- imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
9312
- }
9313
- }
9314
- for (const ctxFile of bundle.contextFiles) {
9315
- if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
9316
- const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
9317
- let ctxMatch;
9318
- while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
9319
- const ctxSource = ctxMatch[1];
9320
- if (ctxSource.startsWith(".")) {
9321
- for (const changedPath of changedPaths) {
9322
- const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
9323
- if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
9324
- findings.push({
9325
- id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
9326
- file: cf.path,
9327
- lineRange: [1, 1],
9328
- domain: "architecture",
9329
- severity: "important",
9330
- title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
9331
- rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
9332
- suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
9333
- evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
9334
- validatedBy: "heuristic"
9335
- });
9336
- }
9337
- }
9338
- }
9339
- }
10266
+ const imports = extractRelativeImports(cf.content);
10267
+ for (const ctxFile of relevantCtxFiles) {
10268
+ const finding = findCircularImportInCtxFile(ctxFile, cf.path, changedPaths, imports);
10269
+ if (finding) findings.push(finding);
9340
10270
  }
9341
10271
  }
9342
10272
  return findings;
@@ -9397,7 +10327,7 @@ function normalizePath(filePath, projectRoot) {
9397
10327
  let normalized = filePath;
9398
10328
  normalized = normalized.replace(/\\/g, "/");
9399
10329
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
9400
- if (path15.isAbsolute(normalized)) {
10330
+ if (path18.isAbsolute(normalized)) {
9401
10331
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
9402
10332
  if (normalized.startsWith(root)) {
9403
10333
  normalized = normalized.slice(root.length);
@@ -9422,12 +10352,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
9422
10352
  while ((match = importRegex.exec(content)) !== null) {
9423
10353
  const importPath = match[1];
9424
10354
  if (!importPath.startsWith(".")) continue;
9425
- const dir = path15.dirname(current.file);
9426
- let resolved = path15.join(dir, importPath).replace(/\\/g, "/");
10355
+ const dir = path18.dirname(current.file);
10356
+ let resolved = path18.join(dir, importPath).replace(/\\/g, "/");
9427
10357
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
9428
10358
  resolved += ".ts";
9429
10359
  }
9430
- resolved = path15.normalize(resolved).replace(/\\/g, "/");
10360
+ resolved = path18.normalize(resolved).replace(/\\/g, "/");
9431
10361
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
9432
10362
  queue.push({ file: resolved, depth: current.depth + 1 });
9433
10363
  }
@@ -9444,7 +10374,7 @@ async function validateFindings(options) {
9444
10374
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
9445
10375
  continue;
9446
10376
  }
9447
- const absoluteFile = path15.isAbsolute(finding.file) ? finding.file : path15.join(projectRoot, finding.file).replace(/\\/g, "/");
10377
+ const absoluteFile = path18.isAbsolute(finding.file) ? finding.file : path18.join(projectRoot, finding.file).replace(/\\/g, "/");
9448
10378
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
9449
10379
  continue;
9450
10380
  }
@@ -9499,6 +10429,28 @@ async function validateFindings(options) {
9499
10429
  function rangesOverlap(a, b, gap) {
9500
10430
  return a[0] <= b[1] + gap && b[0] <= a[1] + gap;
9501
10431
  }
10432
+ function pickLongest(a, b) {
10433
+ if (a && b) return a.length >= b.length ? a : b;
10434
+ return a ?? b;
10435
+ }
10436
+ function buildMergedTitle(a, b, domains) {
10437
+ const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
10438
+ const domainList = [...domains].sort().join(", ");
10439
+ const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
10440
+ return { title: `[${domainList}] ${cleanTitle}`, primaryFinding };
10441
+ }
10442
+ function mergeSecurityFields(merged, primary, a, b) {
10443
+ const cweId = primary.cweId ?? a.cweId ?? b.cweId;
10444
+ const owaspCategory = primary.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
10445
+ const confidence = primary.confidence ?? a.confidence ?? b.confidence;
10446
+ const remediation = pickLongest(a.remediation, b.remediation);
10447
+ const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
10448
+ if (cweId !== void 0) merged.cweId = cweId;
10449
+ if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
10450
+ if (confidence !== void 0) merged.confidence = confidence;
10451
+ if (remediation !== void 0) merged.remediation = remediation;
10452
+ if (mergedRefs.length > 0) merged.references = mergedRefs;
10453
+ }
9502
10454
  function mergeFindings(a, b) {
9503
10455
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
9504
10456
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
@@ -9508,18 +10460,12 @@ function mergeFindings(a, b) {
9508
10460
  Math.min(a.lineRange[0], b.lineRange[0]),
9509
10461
  Math.max(a.lineRange[1], b.lineRange[1])
9510
10462
  ];
9511
- const domains = /* @__PURE__ */ new Set();
9512
- domains.add(a.domain);
9513
- domains.add(b.domain);
9514
- const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
9515
- const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
9516
- const domainList = [...domains].sort().join(", ");
9517
- const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
9518
- const title = `[${domainList}] ${cleanTitle}`;
10463
+ const domains = /* @__PURE__ */ new Set([a.domain, b.domain]);
10464
+ const suggestion = pickLongest(a.suggestion, b.suggestion);
10465
+ const { title, primaryFinding } = buildMergedTitle(a, b, domains);
9519
10466
  const merged = {
9520
10467
  id: primaryFinding.id,
9521
10468
  file: a.file,
9522
- // same file for all merged findings
9523
10469
  lineRange,
9524
10470
  domain: primaryFinding.domain,
9525
10471
  severity: highestSeverity,
@@ -9531,16 +10477,7 @@ function mergeFindings(a, b) {
9531
10477
  if (suggestion !== void 0) {
9532
10478
  merged.suggestion = suggestion;
9533
10479
  }
9534
- const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
9535
- const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
9536
- const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
9537
- const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
9538
- const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
9539
- if (cweId !== void 0) merged.cweId = cweId;
9540
- if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
9541
- if (confidence !== void 0) merged.confidence = confidence;
9542
- if (remediation !== void 0) merged.remediation = remediation;
9543
- if (mergedRefs.length > 0) merged.references = mergedRefs;
10480
+ mergeSecurityFields(merged, primaryFinding, a, b);
9544
10481
  return merged;
9545
10482
  }
9546
10483
  function deduplicateFindings(options) {
@@ -9704,6 +10641,17 @@ function formatTerminalOutput(options) {
9704
10641
  if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
9705
10642
  sections.push(` Found ${issueCount} issue(s): ${parts.join(", ")}.`);
9706
10643
  }
10644
+ if (options.evidenceCoverage) {
10645
+ const ec = options.evidenceCoverage;
10646
+ sections.push("");
10647
+ sections.push("## Evidence Coverage\n");
10648
+ sections.push(` Evidence entries: ${ec.totalEntries}`);
10649
+ sections.push(
10650
+ ` Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
10651
+ );
10652
+ sections.push(` Uncited findings: ${ec.uncitedCount} (flagged as [UNVERIFIED])`);
10653
+ sections.push(` Coverage: ${ec.coveragePercentage}%`);
10654
+ }
9707
10655
  return sections.join("\n");
9708
10656
  }
9709
10657
  var SMALL_SUGGESTION_LINE_LIMIT = 10;
@@ -9778,8 +10726,105 @@ function formatGitHubSummary(options) {
9778
10726
  const assessment = determineAssessment(findings);
9779
10727
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
9780
10728
  sections.push(`## Assessment: ${assessmentLabel}`);
10729
+ if (options.evidenceCoverage) {
10730
+ const ec = options.evidenceCoverage;
10731
+ sections.push("");
10732
+ sections.push("## Evidence Coverage\n");
10733
+ sections.push(`- Evidence entries: ${ec.totalEntries}`);
10734
+ sections.push(
10735
+ `- Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
10736
+ );
10737
+ sections.push(`- Uncited findings: ${ec.uncitedCount} (flagged as \\[UNVERIFIED\\])`);
10738
+ sections.push(`- Coverage: ${ec.coveragePercentage}%`);
10739
+ }
9781
10740
  return sections.join("\n");
9782
10741
  }
10742
+ var FILE_LINE_RANGE_PATTERN = /^([\w./@-]+\.\w+):(\d+)-(\d+)/;
10743
+ var FILE_LINE_PATTERN = /^([\w./@-]+\.\w+):(\d+)/;
10744
+ var FILE_ONLY_PATTERN = /^([\w./@-]+\.\w+)\s/;
10745
+ function parseEvidenceRef(content) {
10746
+ const trimmed = content.trim();
10747
+ const rangeMatch = trimmed.match(FILE_LINE_RANGE_PATTERN);
10748
+ if (rangeMatch) {
10749
+ return {
10750
+ file: rangeMatch[1],
10751
+ lineStart: parseInt(rangeMatch[2], 10),
10752
+ lineEnd: parseInt(rangeMatch[3], 10)
10753
+ };
10754
+ }
10755
+ const lineMatch = trimmed.match(FILE_LINE_PATTERN);
10756
+ if (lineMatch) {
10757
+ return {
10758
+ file: lineMatch[1],
10759
+ lineStart: parseInt(lineMatch[2], 10)
10760
+ };
10761
+ }
10762
+ const fileMatch = trimmed.match(FILE_ONLY_PATTERN);
10763
+ if (fileMatch) {
10764
+ return { file: fileMatch[1] };
10765
+ }
10766
+ return null;
10767
+ }
10768
+ function evidenceMatchesFinding(ref, finding) {
10769
+ if (ref.file !== finding.file) return false;
10770
+ if (ref.lineStart === void 0) return true;
10771
+ const [findStart, findEnd] = finding.lineRange;
10772
+ if (ref.lineEnd !== void 0) {
10773
+ return ref.lineStart <= findEnd && ref.lineEnd >= findStart;
10774
+ }
10775
+ return ref.lineStart >= findStart && ref.lineStart <= findEnd;
10776
+ }
10777
+ function checkEvidenceCoverage(findings, evidenceEntries) {
10778
+ if (findings.length === 0) {
10779
+ return {
10780
+ totalEntries: evidenceEntries.filter((e) => e.status === "active").length,
10781
+ findingsWithEvidence: 0,
10782
+ uncitedCount: 0,
10783
+ uncitedFindings: [],
10784
+ coveragePercentage: 100
10785
+ };
10786
+ }
10787
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
10788
+ const evidenceRefs = [];
10789
+ for (const entry of activeEvidence) {
10790
+ const ref = parseEvidenceRef(entry.content);
10791
+ if (ref) evidenceRefs.push(ref);
10792
+ }
10793
+ let findingsWithEvidence = 0;
10794
+ const uncitedFindings = [];
10795
+ for (const finding of findings) {
10796
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
10797
+ if (hasEvidence) {
10798
+ findingsWithEvidence++;
10799
+ } else {
10800
+ uncitedFindings.push(finding.title);
10801
+ }
10802
+ }
10803
+ const uncitedCount = findings.length - findingsWithEvidence;
10804
+ const coveragePercentage = Math.round(findingsWithEvidence / findings.length * 100);
10805
+ return {
10806
+ totalEntries: activeEvidence.length,
10807
+ findingsWithEvidence,
10808
+ uncitedCount,
10809
+ uncitedFindings,
10810
+ coveragePercentage
10811
+ };
10812
+ }
10813
+ function tagUncitedFindings(findings, evidenceEntries) {
10814
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
10815
+ const evidenceRefs = [];
10816
+ for (const entry of activeEvidence) {
10817
+ const ref = parseEvidenceRef(entry.content);
10818
+ if (ref) evidenceRefs.push(ref);
10819
+ }
10820
+ for (const finding of findings) {
10821
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
10822
+ if (!hasEvidence && !finding.title.startsWith("[UNVERIFIED]")) {
10823
+ finding.title = `[UNVERIFIED] ${finding.title}`;
10824
+ }
10825
+ }
10826
+ return findings;
10827
+ }
9783
10828
  async function runReviewPipeline(options) {
9784
10829
  const {
9785
10830
  projectRoot,
@@ -9791,7 +10836,8 @@ async function runReviewPipeline(options) {
9791
10836
  conventionFiles,
9792
10837
  checkDepsOutput,
9793
10838
  config = {},
9794
- commitHistory
10839
+ commitHistory,
10840
+ sessionSlug
9795
10841
  } = options;
9796
10842
  if (flags.ci && prMetadata) {
9797
10843
  const eligibility = checkEligibility(prMetadata, true);
@@ -9887,13 +10933,25 @@ async function runReviewPipeline(options) {
9887
10933
  projectRoot,
9888
10934
  fileContents
9889
10935
  });
10936
+ let evidenceCoverage;
10937
+ if (sessionSlug) {
10938
+ try {
10939
+ const evidenceResult = await readSessionSection(projectRoot, sessionSlug, "evidence");
10940
+ if (evidenceResult.ok) {
10941
+ evidenceCoverage = checkEvidenceCoverage(validatedFindings, evidenceResult.value);
10942
+ tagUncitedFindings(validatedFindings, evidenceResult.value);
10943
+ }
10944
+ } catch {
10945
+ }
10946
+ }
9890
10947
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
9891
10948
  const strengths = [];
9892
10949
  const assessment = determineAssessment(dedupedFindings);
9893
10950
  const exitCode = getExitCode(assessment);
9894
10951
  const terminalOutput = formatTerminalOutput({
9895
10952
  findings: dedupedFindings,
9896
- strengths
10953
+ strengths,
10954
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
9897
10955
  });
9898
10956
  let githubComments = [];
9899
10957
  if (flags.comment) {
@@ -9908,7 +10966,8 @@ async function runReviewPipeline(options) {
9908
10966
  terminalOutput,
9909
10967
  githubComments,
9910
10968
  exitCode,
9911
- ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
10969
+ ...mechanicalResult != null ? { mechanicalResult } : {},
10970
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
9912
10971
  };
9913
10972
  }
9914
10973
  var VALID_STATUSES = /* @__PURE__ */ new Set([
@@ -9924,7 +10983,7 @@ function parseRoadmap(markdown) {
9924
10983
  if (!fmMatch) {
9925
10984
  return Err(new Error("Missing or malformed YAML frontmatter"));
9926
10985
  }
9927
- const fmResult = parseFrontmatter(fmMatch[1]);
10986
+ const fmResult = parseFrontmatter2(fmMatch[1]);
9928
10987
  if (!fmResult.ok) return fmResult;
9929
10988
  const body = markdown.slice(fmMatch[0].length);
9930
10989
  const milestonesResult = parseMilestones(body);
@@ -9934,7 +10993,7 @@ function parseRoadmap(markdown) {
9934
10993
  milestones: milestonesResult.value
9935
10994
  });
9936
10995
  }
9937
- function parseFrontmatter(raw) {
10996
+ function parseFrontmatter2(raw) {
9938
10997
  const lines = raw.split("\n");
9939
10998
  const map = /* @__PURE__ */ new Map();
9940
10999
  for (const line of lines) {
@@ -10008,13 +11067,29 @@ function parseFeatures(sectionBody) {
10008
11067
  }
10009
11068
  return Ok(features);
10010
11069
  }
10011
- function parseFeatureFields(name, body) {
11070
+ function extractFieldMap(body) {
10012
11071
  const fieldMap = /* @__PURE__ */ new Map();
10013
11072
  const fieldPattern = /^- \*\*(.+?):\*\* (.+)$/gm;
10014
11073
  let match;
10015
11074
  while ((match = fieldPattern.exec(body)) !== null) {
10016
11075
  fieldMap.set(match[1], match[2]);
10017
11076
  }
11077
+ return fieldMap;
11078
+ }
11079
+ function parseListField(fieldMap, ...keys) {
11080
+ let raw = EM_DASH;
11081
+ for (const key of keys) {
11082
+ const val = fieldMap.get(key);
11083
+ if (val !== void 0) {
11084
+ raw = val;
11085
+ break;
11086
+ }
11087
+ }
11088
+ if (raw === EM_DASH || raw === "none") return [];
11089
+ return raw.split(",").map((s) => s.trim());
11090
+ }
11091
+ function parseFeatureFields(name, body) {
11092
+ const fieldMap = extractFieldMap(body);
10018
11093
  const statusRaw = fieldMap.get("Status");
10019
11094
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
10020
11095
  return Err(
@@ -10023,15 +11098,17 @@ function parseFeatureFields(name, body) {
10023
11098
  )
10024
11099
  );
10025
11100
  }
10026
- const status = statusRaw;
10027
11101
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
10028
- const spec = specRaw === EM_DASH ? null : specRaw;
10029
- const plansRaw = fieldMap.get("Plans") ?? fieldMap.get("Plan") ?? EM_DASH;
10030
- const plans = plansRaw === EM_DASH || plansRaw === "none" ? [] : plansRaw.split(",").map((p) => p.trim());
10031
- const blockedByRaw = fieldMap.get("Blocked by") ?? fieldMap.get("Blockers") ?? EM_DASH;
10032
- const blockedBy = blockedByRaw === EM_DASH || blockedByRaw === "none" ? [] : blockedByRaw.split(",").map((b) => b.trim());
10033
- const summary = fieldMap.get("Summary") ?? "";
10034
- return Ok({ name, status, spec, plans, blockedBy, summary });
11102
+ const plans = parseListField(fieldMap, "Plans", "Plan");
11103
+ const blockedBy = parseListField(fieldMap, "Blocked by", "Blockers");
11104
+ return Ok({
11105
+ name,
11106
+ status: statusRaw,
11107
+ spec: specRaw === EM_DASH ? null : specRaw,
11108
+ plans,
11109
+ blockedBy,
11110
+ summary: fieldMap.get("Summary") ?? ""
11111
+ });
10035
11112
  }
10036
11113
  var EM_DASH2 = "\u2014";
10037
11114
  function serializeRoadmap(roadmap) {
@@ -10091,10 +11168,10 @@ function inferStatus(feature, projectPath, allFeatures) {
10091
11168
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
10092
11169
  const useRootState = featuresWithPlans.length <= 1;
10093
11170
  if (useRootState) {
10094
- const rootStatePath = path16.join(projectPath, ".harness", "state.json");
10095
- if (fs16.existsSync(rootStatePath)) {
11171
+ const rootStatePath = path19.join(projectPath, ".harness", "state.json");
11172
+ if (fs19.existsSync(rootStatePath)) {
10096
11173
  try {
10097
- const raw = fs16.readFileSync(rootStatePath, "utf-8");
11174
+ const raw = fs19.readFileSync(rootStatePath, "utf-8");
10098
11175
  const state = JSON.parse(raw);
10099
11176
  if (state.progress) {
10100
11177
  for (const status of Object.values(state.progress)) {
@@ -10105,16 +11182,16 @@ function inferStatus(feature, projectPath, allFeatures) {
10105
11182
  }
10106
11183
  }
10107
11184
  }
10108
- const sessionsDir = path16.join(projectPath, ".harness", "sessions");
10109
- if (fs16.existsSync(sessionsDir)) {
11185
+ const sessionsDir = path19.join(projectPath, ".harness", "sessions");
11186
+ if (fs19.existsSync(sessionsDir)) {
10110
11187
  try {
10111
- const sessionDirs = fs16.readdirSync(sessionsDir, { withFileTypes: true });
11188
+ const sessionDirs = fs19.readdirSync(sessionsDir, { withFileTypes: true });
10112
11189
  for (const entry of sessionDirs) {
10113
11190
  if (!entry.isDirectory()) continue;
10114
- const autopilotPath = path16.join(sessionsDir, entry.name, "autopilot-state.json");
10115
- if (!fs16.existsSync(autopilotPath)) continue;
11191
+ const autopilotPath = path19.join(sessionsDir, entry.name, "autopilot-state.json");
11192
+ if (!fs19.existsSync(autopilotPath)) continue;
10116
11193
  try {
10117
- const raw = fs16.readFileSync(autopilotPath, "utf-8");
11194
+ const raw = fs19.readFileSync(autopilotPath, "utf-8");
10118
11195
  const autopilot = JSON.parse(raw);
10119
11196
  if (!autopilot.phases) continue;
10120
11197
  const linkedPhases = autopilot.phases.filter(
@@ -10144,17 +11221,26 @@ function inferStatus(feature, projectPath, allFeatures) {
10144
11221
  if (anyStarted) return "in-progress";
10145
11222
  return null;
10146
11223
  }
11224
+ var STATUS_RANK = {
11225
+ backlog: 0,
11226
+ planned: 1,
11227
+ blocked: 1,
11228
+ // lateral to planned — sync can move to/from blocked freely
11229
+ "in-progress": 2,
11230
+ done: 3
11231
+ };
11232
+ function isRegression(from, to) {
11233
+ return STATUS_RANK[to] < STATUS_RANK[from];
11234
+ }
10147
11235
  function syncRoadmap(options) {
10148
11236
  const { projectPath, roadmap, forceSync } = options;
10149
- const isManuallyEdited = new Date(roadmap.frontmatter.lastManualEdit) > new Date(roadmap.frontmatter.lastSynced);
10150
- const skipOverride = isManuallyEdited && !forceSync;
10151
11237
  const allFeatures = roadmap.milestones.flatMap((m) => m.features);
10152
11238
  const changes = [];
10153
11239
  for (const feature of allFeatures) {
10154
- if (skipOverride) continue;
10155
11240
  const inferred = inferStatus(feature, projectPath, allFeatures);
10156
11241
  if (inferred === null) continue;
10157
11242
  if (inferred === feature.status) continue;
11243
+ if (!forceSync && isRegression(feature.status, inferred)) continue;
10158
11244
  changes.push({
10159
11245
  feature: feature.name,
10160
11246
  from: feature.status,
@@ -10163,28 +11249,40 @@ function syncRoadmap(options) {
10163
11249
  }
10164
11250
  return Ok(changes);
10165
11251
  }
10166
- var InteractionTypeSchema = z6.enum(["question", "confirmation", "transition"]);
10167
- var QuestionSchema = z6.object({
10168
- text: z6.string(),
10169
- options: z6.array(z6.string()).optional(),
10170
- default: z6.string().optional()
11252
+ function applySyncChanges(roadmap, changes) {
11253
+ for (const change of changes) {
11254
+ for (const m of roadmap.milestones) {
11255
+ const feature = m.features.find((f) => f.name.toLowerCase() === change.feature.toLowerCase());
11256
+ if (feature) {
11257
+ feature.status = change.to;
11258
+ break;
11259
+ }
11260
+ }
11261
+ }
11262
+ roadmap.frontmatter.lastSynced = (/* @__PURE__ */ new Date()).toISOString();
11263
+ }
11264
+ var InteractionTypeSchema = z7.enum(["question", "confirmation", "transition"]);
11265
+ var QuestionSchema = z7.object({
11266
+ text: z7.string(),
11267
+ options: z7.array(z7.string()).optional(),
11268
+ default: z7.string().optional()
10171
11269
  });
10172
- var ConfirmationSchema = z6.object({
10173
- text: z6.string(),
10174
- context: z6.string()
11270
+ var ConfirmationSchema = z7.object({
11271
+ text: z7.string(),
11272
+ context: z7.string()
10175
11273
  });
10176
- var TransitionSchema = z6.object({
10177
- completedPhase: z6.string(),
10178
- suggestedNext: z6.string(),
10179
- reason: z6.string(),
10180
- artifacts: z6.array(z6.string()),
10181
- requiresConfirmation: z6.boolean(),
10182
- summary: z6.string()
11274
+ var TransitionSchema = z7.object({
11275
+ completedPhase: z7.string(),
11276
+ suggestedNext: z7.string(),
11277
+ reason: z7.string(),
11278
+ artifacts: z7.array(z7.string()),
11279
+ requiresConfirmation: z7.boolean(),
11280
+ summary: z7.string()
10183
11281
  });
10184
- var EmitInteractionInputSchema = z6.object({
10185
- path: z6.string(),
11282
+ var EmitInteractionInputSchema = z7.object({
11283
+ path: z7.string(),
10186
11284
  type: InteractionTypeSchema,
10187
- stream: z6.string().optional(),
11285
+ stream: z7.string().optional(),
10188
11286
  question: QuestionSchema.optional(),
10189
11287
  confirmation: ConfirmationSchema.optional(),
10190
11288
  transition: TransitionSchema.optional()
@@ -10194,10 +11292,10 @@ var ProjectScanner = class {
10194
11292
  this.rootDir = rootDir;
10195
11293
  }
10196
11294
  async scan() {
10197
- let projectName = path17.basename(this.rootDir);
11295
+ let projectName = path20.basename(this.rootDir);
10198
11296
  try {
10199
- const pkgPath = path17.join(this.rootDir, "package.json");
10200
- const pkgRaw = await fs17.readFile(pkgPath, "utf-8");
11297
+ const pkgPath = path20.join(this.rootDir, "package.json");
11298
+ const pkgRaw = await fs20.readFile(pkgPath, "utf-8");
10201
11299
  const pkg = JSON.parse(pkgRaw);
10202
11300
  if (pkg.name) projectName = pkg.name;
10203
11301
  } catch {
@@ -10310,13 +11408,13 @@ var BlueprintGenerator = class {
10310
11408
  styles: STYLES,
10311
11409
  scripts: SCRIPTS
10312
11410
  });
10313
- await fs18.mkdir(options.outputDir, { recursive: true });
10314
- await fs18.writeFile(path18.join(options.outputDir, "index.html"), html);
11411
+ await fs21.mkdir(options.outputDir, { recursive: true });
11412
+ await fs21.writeFile(path21.join(options.outputDir, "index.html"), html);
10315
11413
  }
10316
11414
  };
10317
11415
  function getStatePath() {
10318
11416
  const home = process.env["HOME"] || os.homedir();
10319
- return path19.join(home, ".harness", "update-check.json");
11417
+ return path22.join(home, ".harness", "update-check.json");
10320
11418
  }
10321
11419
  function isUpdateCheckEnabled(configInterval) {
10322
11420
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -10329,7 +11427,7 @@ function shouldRunCheck(state, intervalMs) {
10329
11427
  }
10330
11428
  function readCheckState() {
10331
11429
  try {
10332
- const raw = fs19.readFileSync(getStatePath(), "utf-8");
11430
+ const raw = fs22.readFileSync(getStatePath(), "utf-8");
10333
11431
  const parsed = JSON.parse(raw);
10334
11432
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
10335
11433
  const state = parsed;
@@ -10346,7 +11444,7 @@ function readCheckState() {
10346
11444
  }
10347
11445
  function spawnBackgroundCheck(currentVersion) {
10348
11446
  const statePath = getStatePath();
10349
- const stateDir = path19.dirname(statePath);
11447
+ const stateDir = path22.dirname(statePath);
10350
11448
  const script = `
10351
11449
  const { execSync } = require('child_process');
10352
11450
  const fs = require('fs');
@@ -10398,7 +11496,398 @@ function getUpdateNotification(currentVersion) {
10398
11496
  return `Update available: v${currentVersion} -> v${state.latestVersion}
10399
11497
  Run "harness update" to upgrade.`;
10400
11498
  }
10401
- var VERSION = "0.13.0";
11499
// Maps file extensions (including the leading dot, lower-case) to the
// language identifiers understood by the parser layer.
var EXTENSION_MAP = {
  ".ts": "typescript",
  ".tsx": "typescript",
  ".mts": "typescript",
  ".cts": "typescript",
  ".js": "javascript",
  ".jsx": "javascript",
  ".mjs": "javascript",
  ".cjs": "javascript",
  ".py": "python"
};
// Detects the language of `filePath` from its extension.
// Returns an EXTENSION_MAP value, or null for files with no extension or
// an unrecognized one. Matching is case-insensitive, so "App.TSX" resolves
// the same as "app.tsx" (previously upper-case extensions were rejected,
// and extension-less names were probed with their last character).
function detectLanguage(filePath) {
  const dot = filePath.lastIndexOf(".");
  if (dot === -1) return null; // no extension at all (e.g. "Makefile")
  const ext = filePath.slice(dot).toLowerCase();
  return EXTENSION_MAP[ext] ?? null;
}
11514
// Cache of configured Parser instances, keyed by language identifier, so
// each grammar is loaded at most once per process.
var parserCache = /* @__PURE__ */ new Map();
// Tracks whether Parser.init() has completed for this process.
var initialized = false;
// Maps language identifiers to the grammar package whose .wasm artifact
// is loaded from `tree-sitter-wasms`.
var GRAMMAR_MAP = {
  typescript: "tree-sitter-typescript",
  javascript: "tree-sitter-javascript",
  python: "tree-sitter-python"
};
11521
// Runs Parser.init() lazily, at most once per process.
async function ensureInit() {
  if (initialized) return;
  await Parser.init();
  initialized = true;
}
11527
// Locates the compiled grammar .wasm artifact shipped by the
// `tree-sitter-wasms` package: <pkgDir>/out/<grammarName>.wasm.
async function resolveWasmPath(grammarName) {
  const { createRequire } = await import("module");
  // Works both as ESM (import.meta.url) and CJS bundle (__filename).
  const requireFn = createRequire(import.meta.url ?? __filename);
  const manifestPath = requireFn.resolve("tree-sitter-wasms/package.json");
  const pathMod = await import("path");
  return pathMod.join(pathMod.dirname(manifestPath), "out", `${grammarName}.wasm`);
}
11535
// Loads the tree-sitter Language object for a supported language id.
async function loadLanguage(lang) {
  const wasmFile = await resolveWasmPath(GRAMMAR_MAP[lang]);
  return Parser.Language.load(wasmFile);
}
11540
// Returns a parser configured for `lang`, creating and caching it on first
// use; later calls for the same language reuse the cached instance.
async function getParser(lang) {
  const existing = parserCache.get(lang);
  if (existing) return existing;
  await ensureInit();
  const language = await loadLanguage(lang);
  const parser = new Parser();
  parser.setLanguage(language);
  parserCache.set(lang, parser);
  return parser;
}
11550
// Parses `filePath` with the appropriate tree-sitter grammar.
// Returns Ok({ tree, language, source, filePath }) on success, or Err with
// one of three codes: "UNSUPPORTED_LANGUAGE" (extension not in
// EXTENSION_MAP), "FILE_NOT_FOUND" (readFileContent failed), or
// "PARSE_FAILED" (tree-sitter threw during parsing).
async function parseFile(filePath) {
  const lang = detectLanguage(filePath);
  if (!lang) {
    return Err({
      code: "UNSUPPORTED_LANGUAGE",
      message: `Unsupported file extension: ${filePath}`
    });
  }
  const contentResult = await readFileContent(filePath);
  if (!contentResult.ok) {
    return Err({
      code: "FILE_NOT_FOUND",
      message: `Cannot read file: ${filePath}`
    });
  }
  try {
    // getParser caches one parser per language, so repeated parses are cheap.
    const parser = await getParser(lang);
    const tree = parser.parse(contentResult.value);
    return Ok({ tree, language: lang, source: contentResult.value, filePath });
  } catch (e) {
    return Err({
      code: "PARSE_FAILED",
      message: `Tree-sitter parse failed for ${filePath}: ${e.message}`
    });
  }
}
11576
// Drops all cached parsers and clears the init flag so the next getParser
// call re-runs Parser.init() and reloads grammars from scratch.
function resetParserCache() {
  parserCache.clear();
  initialized = false;
}
11580
// Per-language map from top-level tree-sitter node types to the outline
// symbol kind they are reported as. Kinds "import" are recognized here but
// filtered out by extractSymbols.
var TOP_LEVEL_TYPES = {
  typescript: {
    function_declaration: "function",
    class_declaration: "class",
    interface_declaration: "interface",
    type_alias_declaration: "type",
    lexical_declaration: "variable",
    variable_declaration: "variable",
    export_statement: "export",
    import_statement: "import",
    enum_declaration: "type"
  },
  javascript: {
    function_declaration: "function",
    class_declaration: "class",
    lexical_declaration: "variable",
    variable_declaration: "variable",
    export_statement: "export",
    import_statement: "import"
  },
  python: {
    function_definition: "function",
    class_definition: "class",
    assignment: "variable",
    import_statement: "import",
    import_from_statement: "import"
  }
};
// Node types treated as class members when extracting methods.
var METHOD_TYPES = {
  typescript: ["method_definition", "public_field_definition"],
  javascript: ["method_definition"],
  python: ["function_definition"]
};
// Node types accepted as a name-bearing identifier by findIdentifier.
var IDENTIFIER_TYPES = /* @__PURE__ */ new Set(["identifier", "property_identifier", "type_identifier"]);
11614
// Finds the identifier node naming `node`: the "name" field when present,
// otherwise the first child whose type is in IDENTIFIER_TYPES; null if none.
function findIdentifier(node) {
  const named = node.childForFieldName("name");
  if (named) return named;
  for (const child of node.children) {
    if (IDENTIFIER_TYPES.has(child.type)) return child;
  }
  return null;
}
11617
// Extracts the declared name from a lexical/variable declaration by
// inspecting its first variable_declarator child; null when absent.
function getVariableDeclarationName(node) {
  for (const child of node.children) {
    if (child.type !== "variable_declarator") continue;
    const id = findIdentifier(child);
    return id?.text ?? null;
  }
  return null;
}
11623
// Resolves the display name of an export_statement by locating the
// wrapped declaration among its children and delegating to getNodeName.
// Skips the `export` / `default` keywords, comments, and — for consistency
// with processExportStatement's identical filter — stray ";" tokens, so a
// bare semicolon can never be picked up as the declaration.
function getExportName(node, source) {
  const decl = node.children.find(
    (c) => c.type !== "export" && c.type !== "default" && c.type !== ";" && c.type !== "comment"
  );
  return decl ? getNodeName(decl, source) : "<anonymous>";
}
11629
// Returns the left-hand-side text of an assignment node, preferring the
// "left" field, then the first child; "<anonymous>" when neither yields text.
function getAssignmentName(node) {
  let target = node.childForFieldName("left");
  if (target == null) {
    [target] = node.children;
  }
  if (target == null || target.text == null) return "<anonymous>";
  return target.text;
}
11633
// Best-effort name resolution for a top-level node: a direct identifier
// wins; otherwise dispatch on the node type to a specialized strategy;
// "<anonymous>" as the last resort.
function getNodeName(node, source) {
  const id = findIdentifier(node);
  if (id) return id.text;
  switch (node.type) {
    case "lexical_declaration":
    case "variable_declaration":
      return getVariableDeclarationName(node) ?? "<anonymous>";
    case "export_statement":
      return getExportName(node, source);
    case "assignment":
      return getAssignmentName(node);
    default:
      return "<anonymous>";
  }
}
11642
// Returns the trimmed first source line of `node`, used as its one-line
// signature in outlines. Rows are 0-based; missing rows yield "".
function getSignature(node, source) {
  const firstLine = source.split("\n")[node.startPosition.row] ?? "";
  return firstLine.trim();
}
11647
// Collects the method-like members of a class node as symbol records.
// The class body is resolved via the "body" field first, falling back to a
// class_body (JS/TS) or block (Python) child; [] when no body exists.
function extractMethods(classNode, language, source, filePath) {
  const methodTypes = METHOD_TYPES[language] ?? [];
  const body = classNode.childForFieldName("body") ?? classNode.children.find((c) => c.type === "class_body" || c.type === "block");
  if (!body) return [];
  // Emitted line numbers are 1-based (tree-sitter rows are 0-based).
  return body.children.filter((child) => methodTypes.includes(child.type)).map((child) => ({
    name: getNodeName(child, source),
    kind: "method",
    file: filePath,
    line: child.startPosition.row + 1,
    endLine: child.endPosition.row + 1,
    signature: getSignature(child, source)
  }));
}
11660
// Builds a flat symbol record for `node`, converting tree-sitter's
// 0-based rows to 1-based line numbers.
function nodeToSymbol(node, kind, source, filePath) {
  const { startPosition, endPosition } = node;
  return {
    name: getNodeName(node, source),
    kind,
    file: filePath,
    line: startPosition.row + 1,
    endLine: endPosition.row + 1,
    signature: getSignature(node, source)
  };
}
11670
// Converts an export_statement node into an outline symbol.
// When the statement wraps a declaration recognized in `topLevelTypes`,
// the symbol takes the declaration's kind and name (and, for classes, its
// methods as children) while keeping the export statement's position and
// signature. Anything else is reported as a bare "export" symbol.
function processExportStatement(child, topLevelTypes, lang, source, filePath) {
  const declaration = child.children.find(
    (c) => c.type !== "export" && c.type !== "default" && c.type !== ";" && c.type !== "comment"
  );
  const kind = declaration ? topLevelTypes[declaration.type] : void 0;
  if (declaration && kind) {
    // Position/signature from the export statement; name from the wrapped
    // declaration so "export const x = ..." is named "x".
    const sym = nodeToSymbol(child, kind, source, filePath);
    sym.name = getNodeName(declaration, source);
    if (kind === "class") {
      sym.children = extractMethods(declaration, lang, source, filePath);
    }
    return sym;
  }
  return nodeToSymbol(child, "export", source, filePath);
}
11685
// Walks the root node's direct children and emits one outline symbol per
// recognized top-level construct. Export statements are delegated to
// processExportStatement; classes get their methods attached as children;
// imports are deliberately excluded from the outline.
function extractSymbols(rootNode, lang, source, filePath) {
  const topLevelTypes = TOP_LEVEL_TYPES[lang] ?? {};
  const toSymbol = (child) => {
    if (child.type === "export_statement") {
      return processExportStatement(child, topLevelTypes, lang, source, filePath);
    }
    const kind = topLevelTypes[child.type];
    if (!kind || kind === "import") return null;
    const sym = nodeToSymbol(child, kind, source, filePath);
    if (kind === "class") {
      sym.children = extractMethods(child, lang, source, filePath);
    }
    return sym;
  };
  return rootNode.children.map(toSymbol).filter((sym) => sym !== null);
}
11703
// Shapes the outline returned when a file cannot be analyzed: no lines,
// no symbols, and a "[parse-failed]" error marker.
function buildFailedResult(filePath, lang) {
  return {
    file: filePath,
    language: lang,
    totalLines: 0,
    symbols: [],
    error: "[parse-failed]"
  };
}
11706
// Produces a structural outline of `filePath`: language, total line count
// and top-level symbols. Unsupported or unparseable files yield a
// buildFailedResult instead of throwing.
async function getOutline(filePath) {
  const lang = detectLanguage(filePath);
  if (!lang) return buildFailedResult(filePath, "unknown");
  const parsed = await parseFile(filePath);
  if (!parsed.ok) return buildFailedResult(filePath, lang);
  const { tree, source } = parsed.value;
  return {
    file: filePath,
    language: lang,
    totalLines: source.split("\n").length,
    symbols: extractSymbols(tree.rootNode, lang, source, filePath)
  };
}
11716
// Renders an outline as a box-drawing tree:
//   <file> (<totalLines> lines)
//   |-- <signature> :<line>         ("├──" / "└──" for the last symbol)
// with one indented level for class members. Outlines carrying an `error`
// render as "<file> <error>" instead.
function formatOutline(outline) {
  if (outline.error) {
    return `${outline.file} ${outline.error}`;
  }
  const lines = [`${outline.file} (${outline.totalLines} lines)`];
  const last = outline.symbols.length - 1;
  outline.symbols.forEach((sym, i) => {
    // "└──" closes the tree on the final symbol, "├──" otherwise.
    const prefix = i === last ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
    lines.push(`${prefix} ${sym.signature} :${sym.line}`);
    if (sym.children) {
      const childLast = sym.children.length - 1;
      sym.children.forEach((child, j) => {
        // Continue the parent's vertical rule unless the parent was last.
        const childConnector = i === last ? " " : "\u2502 ";
        const childPrefix = j === childLast ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
        lines.push(`${childConnector}${childPrefix} ${child.signature} :${child.line}`);
      });
    }
  });
  return lines.join("\n");
}
11736
// Builds a recursive glob under `directory` (backslashes normalized to
// "/"): the caller-supplied file glob when given, otherwise a brace
// pattern covering every extension in EXTENSION_MAP.
function buildGlob(directory, fileGlob) {
  const dir = directory.replaceAll("\\", "/");
  if (fileGlob) return `${dir}/**/${fileGlob}`;
  const extList = Object.keys(EXTENSION_MAP)
    .map((ext) => ext.slice(1))
    .join(",");
  return `${dir}/**/*.{${extList}}`;
}
11744
// Case-insensitive substring match of `query` within `name`.
function matchesQuery(name, query) {
  const haystack = name.toLowerCase();
  const needle = query.toLowerCase();
  return haystack.includes(needle);
}
11747
// Flattens one level of nesting: each symbol immediately followed by its
// children. Symbols nest at most one level deep (class -> methods), so no
// deeper traversal is needed.
function flattenSymbols(symbols) {
  return symbols.flatMap((sym) => (sym.children ? [sym, ...sym.children] : [sym]));
}
11757
// Searches for symbols whose name contains `query` (case-insensitive)
// across all parseable files under `directory`, optionally narrowed by
// `fileGlob`. Returns { query, matches, skipped }: each match pairs the
// symbol with its signature as context; `skipped` lists files with
// unsupported extensions or parse failures. Glob errors are swallowed and
// treated as "no files found".
async function searchSymbols(query, directory, fileGlob) {
  const pattern = buildGlob(directory, fileGlob);
  let files;
  try {
    files = await findFiles(pattern, directory);
  } catch {
    files = [];
  }
  const matches = [];
  const skipped = [];
  // Files are processed sequentially; parsers are cached per language, so
  // only the first file of each language pays the grammar-load cost.
  for (const file of files) {
    const lang = detectLanguage(file);
    if (!lang) {
      skipped.push(file);
      continue;
    }
    const outline = await getOutline(file);
    if (outline.error) {
      skipped.push(file);
      continue;
    }
    const allSymbols = flattenSymbols(outline.symbols);
    for (const sym of allSymbols) {
      if (matchesQuery(sym.name, query)) {
        matches.push({
          symbol: sym,
          context: sym.signature
        });
      }
    }
  }
  return { query, matches, skipped };
}
11790
// Depth-first lookup of the first symbol (or nested child) whose name
// equals `name` exactly; null when absent.
function findSymbolInList(symbols, name) {
  for (const sym of symbols) {
    if (sym.name === name) return sym;
    const fromChildren = sym.children ? findSymbolInList(sym.children, name) : null;
    if (fromChildren) return fromChildren;
  }
  return null;
}
11800
// Returns lines startLine..endLine (1-based, inclusive) of `source`,
// clamping both bounds to the available range.
function extractLines(source, startLine, endLine) {
  const allLines = source.split("\n");
  const from = Math.max(0, startLine - 1);
  const to = Math.min(allLines.length, endLine);
  return allLines.slice(from, to).join("\n");
}
11806
// Shapes the degraded result returned when a symbol cannot be resolved
// structurally: the raw `content` is presented as the whole range, flagged
// with `fallback: true` and a warning marker. Empty content yields a 0..0
// range.
function buildFallbackResult(filePath, symbolName, content, language) {
  const hasContent = Boolean(content);
  return {
    file: filePath,
    symbolName,
    startLine: hasContent ? 1 : 0,
    endLine: hasContent ? content.split("\n").length : 0,
    content,
    language,
    fallback: true,
    warning: "[fallback: raw content]"
  };
}
11819
// Reads a file's content, returning "" instead of an error result.
async function readContentSafe(filePath) {
  const result = await readFileContent(filePath);
  if (!result.ok) return "";
  return result.value;
}
11823
// Extracts the source text of `symbolName` from `filePath`.
// On success, returns the symbol's exact 1-based line range and content
// with `fallback: false`. When the language is unsupported, the file
// cannot be parsed, or the symbol is not found, degrades to
// buildFallbackResult carrying the whole raw content.
// The file is read and parsed exactly once; previously it was parsed twice
// (once inside getOutline, once again for content extraction) and re-read
// on the not-found path.
async function unfoldSymbol(filePath, symbolName) {
  const lang = detectLanguage(filePath);
  if (!lang) {
    const raw = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, raw, "unknown");
  }
  // Single parse: outline symbols and extracted content both derive from
  // this one result.
  const parseResult = await parseFile(filePath);
  if (!parseResult.ok) {
    const raw = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, raw, lang);
  }
  const { tree, source } = parseResult.value;
  const symbols = extractSymbols(tree.rootNode, lang, source, filePath);
  const symbol = findSymbolInList(symbols, symbolName);
  if (!symbol) {
    // Symbol absent: fall back to the full content we already hold.
    return buildFallbackResult(filePath, symbolName, source, lang);
  }
  return {
    file: filePath,
    symbolName,
    startLine: symbol.line,
    endLine: symbol.endLine,
    content: extractLines(source, symbol.line, symbol.endLine),
    language: lang,
    fallback: false
  };
}
11864
// Extracts an explicit 1-based, inclusive line range from `filePath`.
// Both ends are clamped to the file — endLine to the last line, and
// startLine to at least 1 — so the reported range always agrees with the
// returned content (previously a startLine of 0 or below was echoed back
// unclamped even though extraction clamped it internally).
// Unreadable files yield an empty fallback result instead of throwing.
async function unfoldRange(filePath, startLine, endLine) {
  const lang = detectLanguage(filePath) ?? "unknown";
  const contentResult = await readFileContent(filePath);
  if (!contentResult.ok) {
    return {
      file: filePath,
      startLine: 0,
      endLine: 0,
      content: "",
      language: lang,
      fallback: true,
      warning: "[fallback: raw content]"
    };
  }
  const totalLines = contentResult.value.split("\n").length;
  const clampedStart = Math.max(1, startLine);
  const clampedEnd = Math.min(endLine, totalLines);
  return {
    file: filePath,
    startLine: clampedStart,
    endLine: clampedEnd,
    content: extractLines(contentResult.value, clampedStart, clampedEnd),
    language: lang,
    fallback: false
  };
}
11890
// Library version string, exported as part of the public API surface.
var VERSION = "0.15.0";
10402
11891
 
10403
11892
  export {
10404
11893
  ArchMetricCategorySchema,
@@ -10516,7 +12005,7 @@ export {
10516
12005
  NoOpSink,
10517
12006
  syncConstraintNodes,
10518
12007
  detectStaleConstraints,
10519
- resolveThresholds,
12008
+ resolveThresholds2 as resolveThresholds,
10520
12009
  FailureEntrySchema,
10521
12010
  HandoffSchema,
10522
12011
  GateResultSchema,
@@ -10540,14 +12029,19 @@ export {
10540
12029
  updateSessionIndex,
10541
12030
  loadState,
10542
12031
  saveState,
12032
+ parseFrontmatter,
12033
+ extractIndexEntry,
10543
12034
  clearLearningsCache,
10544
12035
  appendLearning,
10545
12036
  parseDateFromEntry,
10546
12037
  analyzeLearningPatterns,
10547
12038
  loadBudgetedLearnings,
12039
+ loadIndexEntries,
10548
12040
  loadRelevantLearnings,
10549
12041
  archiveLearnings,
10550
12042
  pruneLearnings,
12043
+ promoteSessionLearnings,
12044
+ countLearningEntries,
10551
12045
  clearFailuresCache,
10552
12046
  appendFailure,
10553
12047
  loadFailures,
@@ -10558,6 +12052,16 @@ export {
10558
12052
  writeSessionSummary,
10559
12053
  loadSessionSummary,
10560
12054
  listActiveSessions,
12055
+ readSessionSections,
12056
+ readSessionSection,
12057
+ appendSessionEntry,
12058
+ updateSessionEntryStatus,
12059
+ archiveSession,
12060
+ SkillEventSchema,
12061
+ clearEventHashCache,
12062
+ emitEvent,
12063
+ loadEvents,
12064
+ formatEventTimeline,
10561
12065
  executeWorkflow,
10562
12066
  runPipeline,
10563
12067
  runMultiTurnPipeline,
@@ -10574,6 +12078,8 @@ export {
10574
12078
  pathTraversalRules,
10575
12079
  networkRules,
10576
12080
  deserializationRules,
12081
+ agentConfigRules,
12082
+ mcpRules,
10577
12083
  nodeRules,
10578
12084
  expressRules,
10579
12085
  reactRules,
@@ -10607,10 +12113,13 @@ export {
10607
12113
  isSmallSuggestion,
10608
12114
  formatGitHubComment,
10609
12115
  formatGitHubSummary,
12116
+ checkEvidenceCoverage,
12117
+ tagUncitedFindings,
10610
12118
  runReviewPipeline,
10611
12119
  parseRoadmap,
10612
12120
  serializeRoadmap,
10613
12121
  syncRoadmap,
12122
+ applySyncChanges,
10614
12123
  InteractionTypeSchema,
10615
12124
  QuestionSchema,
10616
12125
  ConfirmationSchema,
@@ -10624,5 +12133,15 @@ export {
10624
12133
  readCheckState,
10625
12134
  spawnBackgroundCheck,
10626
12135
  getUpdateNotification,
12136
+ EXTENSION_MAP,
12137
+ detectLanguage,
12138
+ getParser,
12139
+ parseFile,
12140
+ resetParserCache,
12141
+ getOutline,
12142
+ formatOutline,
12143
+ searchSymbols,
12144
+ unfoldSymbol,
12145
+ unfoldRange,
10627
12146
  VERSION
10628
12147
  };