@harness-engineering/cli 1.13.0 → 1.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (367) hide show
  1. package/dist/agents/skills/claude-code/add-harness-component/skill.yaml +1 -0
  2. package/dist/agents/skills/claude-code/align-documentation/skill.yaml +1 -0
  3. package/dist/agents/skills/claude-code/check-mechanical-constraints/skill.yaml +1 -0
  4. package/dist/agents/skills/claude-code/cleanup-dead-code/skill.yaml +1 -0
  5. package/dist/agents/skills/claude-code/detect-doc-drift/skill.yaml +1 -0
  6. package/dist/agents/skills/claude-code/enforce-architecture/skill.yaml +1 -0
  7. package/dist/agents/skills/claude-code/harness-accessibility/skill.yaml +1 -0
  8. package/dist/agents/skills/claude-code/harness-api-design/SKILL.md +304 -0
  9. package/dist/agents/skills/claude-code/harness-api-design/skill.yaml +74 -0
  10. package/dist/agents/skills/claude-code/harness-architecture-advisor/skill.yaml +1 -0
  11. package/dist/agents/skills/claude-code/harness-auth/SKILL.md +279 -0
  12. package/dist/agents/skills/claude-code/harness-auth/skill.yaml +81 -0
  13. package/dist/agents/skills/claude-code/harness-autopilot/skill.yaml +1 -0
  14. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +39 -0
  15. package/dist/agents/skills/claude-code/harness-brainstorming/skill.yaml +1 -0
  16. package/dist/agents/skills/claude-code/harness-caching/SKILL.md +309 -0
  17. package/dist/agents/skills/claude-code/harness-caching/skill.yaml +73 -0
  18. package/dist/agents/skills/claude-code/harness-chaos/SKILL.md +295 -0
  19. package/dist/agents/skills/claude-code/harness-chaos/skill.yaml +72 -0
  20. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +44 -0
  21. package/dist/agents/skills/claude-code/harness-code-review/skill.yaml +1 -0
  22. package/dist/agents/skills/claude-code/harness-codebase-cleanup/skill.yaml +1 -0
  23. package/dist/agents/skills/claude-code/harness-compliance/SKILL.md +303 -0
  24. package/dist/agents/skills/claude-code/harness-compliance/skill.yaml +78 -0
  25. package/dist/agents/skills/claude-code/harness-containerization/SKILL.md +284 -0
  26. package/dist/agents/skills/claude-code/harness-containerization/skill.yaml +80 -0
  27. package/dist/agents/skills/claude-code/harness-data-pipeline/SKILL.md +274 -0
  28. package/dist/agents/skills/claude-code/harness-data-pipeline/skill.yaml +81 -0
  29. package/dist/agents/skills/claude-code/harness-data-validation/SKILL.md +343 -0
  30. package/dist/agents/skills/claude-code/harness-data-validation/skill.yaml +75 -0
  31. package/dist/agents/skills/claude-code/harness-database/SKILL.md +258 -0
  32. package/dist/agents/skills/claude-code/harness-database/skill.yaml +80 -0
  33. package/dist/agents/skills/claude-code/harness-debugging/skill.yaml +1 -0
  34. package/dist/agents/skills/claude-code/harness-dependency-health/skill.yaml +1 -0
  35. package/dist/agents/skills/claude-code/harness-deployment/SKILL.md +255 -0
  36. package/dist/agents/skills/claude-code/harness-deployment/skill.yaml +77 -0
  37. package/dist/agents/skills/claude-code/harness-design/skill.yaml +1 -0
  38. package/dist/agents/skills/claude-code/harness-design-mobile/skill.yaml +1 -0
  39. package/dist/agents/skills/claude-code/harness-design-system/skill.yaml +1 -0
  40. package/dist/agents/skills/claude-code/harness-design-web/skill.yaml +1 -0
  41. package/dist/agents/skills/claude-code/harness-diagnostics/skill.yaml +1 -0
  42. package/dist/agents/skills/claude-code/harness-docs-pipeline/skill.yaml +1 -0
  43. package/dist/agents/skills/claude-code/harness-dx/SKILL.md +276 -0
  44. package/dist/agents/skills/claude-code/harness-dx/skill.yaml +76 -0
  45. package/dist/agents/skills/claude-code/harness-e2e/SKILL.md +245 -0
  46. package/dist/agents/skills/claude-code/harness-e2e/skill.yaml +78 -0
  47. package/dist/agents/skills/claude-code/harness-event-driven/SKILL.md +280 -0
  48. package/dist/agents/skills/claude-code/harness-event-driven/skill.yaml +77 -0
  49. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +44 -0
  50. package/dist/agents/skills/claude-code/harness-execution/skill.yaml +1 -0
  51. package/dist/agents/skills/claude-code/harness-feature-flags/SKILL.md +287 -0
  52. package/dist/agents/skills/claude-code/harness-feature-flags/skill.yaml +74 -0
  53. package/dist/agents/skills/claude-code/harness-git-workflow/skill.yaml +1 -0
  54. package/dist/agents/skills/claude-code/harness-hotspot-detector/skill.yaml +1 -0
  55. package/dist/agents/skills/claude-code/harness-i18n/skill.yaml +1 -0
  56. package/dist/agents/skills/claude-code/harness-i18n-process/skill.yaml +1 -0
  57. package/dist/agents/skills/claude-code/harness-i18n-workflow/skill.yaml +1 -0
  58. package/dist/agents/skills/claude-code/harness-impact-analysis/skill.yaml +1 -0
  59. package/dist/agents/skills/claude-code/harness-incident-response/SKILL.md +223 -0
  60. package/dist/agents/skills/claude-code/harness-incident-response/skill.yaml +78 -0
  61. package/dist/agents/skills/claude-code/harness-infrastructure-as-code/SKILL.md +279 -0
  62. package/dist/agents/skills/claude-code/harness-infrastructure-as-code/skill.yaml +80 -0
  63. package/dist/agents/skills/claude-code/harness-integration-test/SKILL.md +271 -0
  64. package/dist/agents/skills/claude-code/harness-integration-test/skill.yaml +73 -0
  65. package/dist/agents/skills/claude-code/harness-integrity/skill.yaml +1 -0
  66. package/dist/agents/skills/claude-code/harness-knowledge-mapper/skill.yaml +1 -0
  67. package/dist/agents/skills/claude-code/harness-load-testing/SKILL.md +274 -0
  68. package/dist/agents/skills/claude-code/harness-load-testing/skill.yaml +79 -0
  69. package/dist/agents/skills/claude-code/harness-ml-ops/SKILL.md +341 -0
  70. package/dist/agents/skills/claude-code/harness-ml-ops/skill.yaml +79 -0
  71. package/dist/agents/skills/claude-code/harness-mobile-patterns/SKILL.md +326 -0
  72. package/dist/agents/skills/claude-code/harness-mobile-patterns/skill.yaml +82 -0
  73. package/dist/agents/skills/claude-code/harness-mutation-test/SKILL.md +251 -0
  74. package/dist/agents/skills/claude-code/harness-mutation-test/skill.yaml +70 -0
  75. package/dist/agents/skills/claude-code/harness-observability/SKILL.md +283 -0
  76. package/dist/agents/skills/claude-code/harness-observability/skill.yaml +78 -0
  77. package/dist/agents/skills/claude-code/harness-onboarding/skill.yaml +1 -0
  78. package/dist/agents/skills/claude-code/harness-parallel-agents/skill.yaml +1 -0
  79. package/dist/agents/skills/claude-code/harness-perf/skill.yaml +1 -0
  80. package/dist/agents/skills/claude-code/harness-perf-tdd/skill.yaml +1 -0
  81. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +39 -0
  82. package/dist/agents/skills/claude-code/harness-planning/skill.yaml +1 -0
  83. package/dist/agents/skills/claude-code/harness-pre-commit-review/skill.yaml +1 -0
  84. package/dist/agents/skills/claude-code/harness-product-spec/SKILL.md +285 -0
  85. package/dist/agents/skills/claude-code/harness-product-spec/skill.yaml +72 -0
  86. package/dist/agents/skills/claude-code/harness-property-test/SKILL.md +281 -0
  87. package/dist/agents/skills/claude-code/harness-property-test/skill.yaml +71 -0
  88. package/dist/agents/skills/claude-code/harness-refactoring/skill.yaml +1 -0
  89. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +3 -3
  90. package/dist/agents/skills/claude-code/harness-release-readiness/skill.yaml +1 -0
  91. package/dist/agents/skills/claude-code/harness-resilience/SKILL.md +255 -0
  92. package/dist/agents/skills/claude-code/harness-resilience/skill.yaml +76 -0
  93. package/dist/agents/skills/claude-code/harness-roadmap/skill.yaml +1 -0
  94. package/dist/agents/skills/claude-code/harness-secrets/SKILL.md +293 -0
  95. package/dist/agents/skills/claude-code/harness-secrets/skill.yaml +76 -0
  96. package/dist/agents/skills/claude-code/harness-security-review/skill.yaml +1 -0
  97. package/dist/agents/skills/claude-code/harness-security-scan/skill.yaml +1 -0
  98. package/dist/agents/skills/claude-code/harness-skill-authoring/skill.yaml +1 -0
  99. package/dist/agents/skills/claude-code/harness-soundness-review/skill.yaml +1 -0
  100. package/dist/agents/skills/claude-code/harness-sql-review/SKILL.md +315 -0
  101. package/dist/agents/skills/claude-code/harness-sql-review/skill.yaml +74 -0
  102. package/dist/agents/skills/claude-code/harness-state-management/skill.yaml +1 -0
  103. package/dist/agents/skills/claude-code/harness-tdd/skill.yaml +1 -0
  104. package/dist/agents/skills/claude-code/harness-test-advisor/skill.yaml +1 -0
  105. package/dist/agents/skills/claude-code/harness-test-data/SKILL.md +268 -0
  106. package/dist/agents/skills/claude-code/harness-test-data/skill.yaml +74 -0
  107. package/dist/agents/skills/claude-code/harness-ux-copy/SKILL.md +271 -0
  108. package/dist/agents/skills/claude-code/harness-ux-copy/skill.yaml +77 -0
  109. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +35 -0
  110. package/dist/agents/skills/claude-code/harness-verification/skill.yaml +1 -0
  111. package/dist/agents/skills/claude-code/harness-verify/skill.yaml +1 -0
  112. package/dist/agents/skills/claude-code/harness-visual-regression/SKILL.md +257 -0
  113. package/dist/agents/skills/claude-code/harness-visual-regression/skill.yaml +74 -0
  114. package/dist/agents/skills/claude-code/initialize-harness-project/SKILL.md +11 -3
  115. package/dist/agents/skills/claude-code/initialize-harness-project/skill.yaml +1 -0
  116. package/dist/agents/skills/claude-code/validate-context-engineering/skill.yaml +1 -0
  117. package/dist/agents/skills/gemini-cli/add-harness-component/skill.yaml +1 -0
  118. package/dist/agents/skills/gemini-cli/align-documentation/skill.yaml +1 -0
  119. package/dist/agents/skills/gemini-cli/check-mechanical-constraints/skill.yaml +1 -0
  120. package/dist/agents/skills/gemini-cli/cleanup-dead-code/skill.yaml +1 -0
  121. package/dist/agents/skills/gemini-cli/detect-doc-drift/skill.yaml +1 -0
  122. package/dist/agents/skills/gemini-cli/enforce-architecture/skill.yaml +1 -0
  123. package/dist/agents/skills/gemini-cli/harness-accessibility/skill.yaml +1 -0
  124. package/dist/agents/skills/gemini-cli/harness-api-design/SKILL.md +304 -0
  125. package/dist/agents/skills/gemini-cli/harness-api-design/skill.yaml +74 -0
  126. package/dist/agents/skills/gemini-cli/harness-architecture-advisor/skill.yaml +1 -0
  127. package/dist/agents/skills/gemini-cli/harness-auth/SKILL.md +279 -0
  128. package/dist/agents/skills/gemini-cli/harness-auth/skill.yaml +81 -0
  129. package/dist/agents/skills/gemini-cli/harness-autopilot/skill.yaml +1 -0
  130. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +39 -0
  131. package/dist/agents/skills/gemini-cli/harness-brainstorming/skill.yaml +1 -0
  132. package/dist/agents/skills/gemini-cli/harness-caching/SKILL.md +309 -0
  133. package/dist/agents/skills/gemini-cli/harness-caching/skill.yaml +73 -0
  134. package/dist/agents/skills/gemini-cli/harness-chaos/SKILL.md +295 -0
  135. package/dist/agents/skills/gemini-cli/harness-chaos/skill.yaml +72 -0
  136. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +44 -0
  137. package/dist/agents/skills/gemini-cli/harness-code-review/skill.yaml +1 -0
  138. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/skill.yaml +1 -0
  139. package/dist/agents/skills/gemini-cli/harness-compliance/SKILL.md +303 -0
  140. package/dist/agents/skills/gemini-cli/harness-compliance/skill.yaml +78 -0
  141. package/dist/agents/skills/gemini-cli/harness-containerization/SKILL.md +284 -0
  142. package/dist/agents/skills/gemini-cli/harness-containerization/skill.yaml +80 -0
  143. package/dist/agents/skills/gemini-cli/harness-data-pipeline/SKILL.md +274 -0
  144. package/dist/agents/skills/gemini-cli/harness-data-pipeline/skill.yaml +81 -0
  145. package/dist/agents/skills/gemini-cli/harness-data-validation/SKILL.md +343 -0
  146. package/dist/agents/skills/gemini-cli/harness-data-validation/skill.yaml +75 -0
  147. package/dist/agents/skills/gemini-cli/harness-database/SKILL.md +258 -0
  148. package/dist/agents/skills/gemini-cli/harness-database/skill.yaml +80 -0
  149. package/dist/agents/skills/gemini-cli/harness-debugging/skill.yaml +1 -0
  150. package/dist/agents/skills/gemini-cli/harness-dependency-health/skill.yaml +1 -0
  151. package/dist/agents/skills/gemini-cli/harness-deployment/SKILL.md +255 -0
  152. package/dist/agents/skills/gemini-cli/harness-deployment/skill.yaml +77 -0
  153. package/dist/agents/skills/gemini-cli/harness-design/skill.yaml +1 -0
  154. package/dist/agents/skills/gemini-cli/harness-design-mobile/skill.yaml +1 -0
  155. package/dist/agents/skills/gemini-cli/harness-design-system/skill.yaml +1 -0
  156. package/dist/agents/skills/gemini-cli/harness-design-web/skill.yaml +1 -0
  157. package/dist/agents/skills/gemini-cli/harness-diagnostics/skill.yaml +1 -0
  158. package/dist/agents/skills/gemini-cli/harness-docs-pipeline/skill.yaml +1 -0
  159. package/dist/agents/skills/gemini-cli/harness-dx/SKILL.md +276 -0
  160. package/dist/agents/skills/gemini-cli/harness-dx/skill.yaml +76 -0
  161. package/dist/agents/skills/gemini-cli/harness-e2e/SKILL.md +245 -0
  162. package/dist/agents/skills/gemini-cli/harness-e2e/skill.yaml +78 -0
  163. package/dist/agents/skills/gemini-cli/harness-event-driven/SKILL.md +280 -0
  164. package/dist/agents/skills/gemini-cli/harness-event-driven/skill.yaml +77 -0
  165. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +44 -0
  166. package/dist/agents/skills/gemini-cli/harness-execution/skill.yaml +1 -0
  167. package/dist/agents/skills/gemini-cli/harness-feature-flags/SKILL.md +287 -0
  168. package/dist/agents/skills/gemini-cli/harness-feature-flags/skill.yaml +74 -0
  169. package/dist/agents/skills/gemini-cli/harness-git-workflow/skill.yaml +1 -0
  170. package/dist/agents/skills/gemini-cli/harness-hotspot-detector/skill.yaml +1 -0
  171. package/dist/agents/skills/gemini-cli/harness-i18n/skill.yaml +1 -0
  172. package/dist/agents/skills/gemini-cli/harness-i18n-process/skill.yaml +1 -0
  173. package/dist/agents/skills/gemini-cli/harness-i18n-workflow/skill.yaml +1 -0
  174. package/dist/agents/skills/gemini-cli/harness-impact-analysis/skill.yaml +1 -0
  175. package/dist/agents/skills/gemini-cli/harness-incident-response/SKILL.md +223 -0
  176. package/dist/agents/skills/gemini-cli/harness-incident-response/skill.yaml +78 -0
  177. package/dist/agents/skills/gemini-cli/harness-infrastructure-as-code/SKILL.md +279 -0
  178. package/dist/agents/skills/gemini-cli/harness-infrastructure-as-code/skill.yaml +80 -0
  179. package/dist/agents/skills/gemini-cli/harness-integration-test/SKILL.md +271 -0
  180. package/dist/agents/skills/gemini-cli/harness-integration-test/skill.yaml +73 -0
  181. package/dist/agents/skills/gemini-cli/harness-integrity/skill.yaml +1 -0
  182. package/dist/agents/skills/gemini-cli/harness-knowledge-mapper/skill.yaml +1 -0
  183. package/dist/agents/skills/gemini-cli/harness-load-testing/SKILL.md +274 -0
  184. package/dist/agents/skills/gemini-cli/harness-load-testing/skill.yaml +79 -0
  185. package/dist/agents/skills/gemini-cli/harness-ml-ops/SKILL.md +341 -0
  186. package/dist/agents/skills/gemini-cli/harness-ml-ops/skill.yaml +79 -0
  187. package/dist/agents/skills/gemini-cli/harness-mobile-patterns/SKILL.md +326 -0
  188. package/dist/agents/skills/gemini-cli/harness-mobile-patterns/skill.yaml +82 -0
  189. package/dist/agents/skills/gemini-cli/harness-mutation-test/SKILL.md +251 -0
  190. package/dist/agents/skills/gemini-cli/harness-mutation-test/skill.yaml +70 -0
  191. package/dist/agents/skills/gemini-cli/harness-observability/SKILL.md +283 -0
  192. package/dist/agents/skills/gemini-cli/harness-observability/skill.yaml +78 -0
  193. package/dist/agents/skills/gemini-cli/harness-onboarding/skill.yaml +1 -0
  194. package/dist/agents/skills/gemini-cli/harness-parallel-agents/skill.yaml +1 -0
  195. package/dist/agents/skills/gemini-cli/harness-perf/skill.yaml +1 -0
  196. package/dist/agents/skills/gemini-cli/harness-perf-tdd/skill.yaml +1 -0
  197. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +39 -0
  198. package/dist/agents/skills/gemini-cli/harness-planning/skill.yaml +1 -0
  199. package/dist/agents/skills/gemini-cli/harness-pre-commit-review/skill.yaml +1 -0
  200. package/dist/agents/skills/gemini-cli/harness-product-spec/SKILL.md +285 -0
  201. package/dist/agents/skills/gemini-cli/harness-product-spec/skill.yaml +72 -0
  202. package/dist/agents/skills/gemini-cli/harness-property-test/SKILL.md +281 -0
  203. package/dist/agents/skills/gemini-cli/harness-property-test/skill.yaml +71 -0
  204. package/dist/agents/skills/gemini-cli/harness-refactoring/skill.yaml +1 -0
  205. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +3 -3
  206. package/dist/agents/skills/gemini-cli/harness-release-readiness/skill.yaml +1 -0
  207. package/dist/agents/skills/gemini-cli/harness-resilience/SKILL.md +255 -0
  208. package/dist/agents/skills/gemini-cli/harness-resilience/skill.yaml +76 -0
  209. package/dist/agents/skills/gemini-cli/harness-roadmap/skill.yaml +1 -0
  210. package/dist/agents/skills/gemini-cli/harness-secrets/SKILL.md +293 -0
  211. package/dist/agents/skills/gemini-cli/harness-secrets/skill.yaml +76 -0
  212. package/dist/agents/skills/gemini-cli/harness-security-review/SKILL.md +240 -0
  213. package/dist/agents/skills/gemini-cli/harness-security-review/skill.yaml +1 -0
  214. package/dist/agents/skills/gemini-cli/harness-security-scan/skill.yaml +1 -0
  215. package/dist/agents/skills/gemini-cli/harness-skill-authoring/skill.yaml +1 -0
  216. package/dist/agents/skills/gemini-cli/harness-soundness-review/skill.yaml +1 -0
  217. package/dist/agents/skills/gemini-cli/harness-sql-review/SKILL.md +315 -0
  218. package/dist/agents/skills/gemini-cli/harness-sql-review/skill.yaml +74 -0
  219. package/dist/agents/skills/gemini-cli/harness-state-management/skill.yaml +1 -0
  220. package/dist/agents/skills/gemini-cli/harness-tdd/skill.yaml +1 -0
  221. package/dist/agents/skills/gemini-cli/harness-test-advisor/skill.yaml +1 -0
  222. package/dist/agents/skills/gemini-cli/harness-test-data/SKILL.md +268 -0
  223. package/dist/agents/skills/gemini-cli/harness-test-data/skill.yaml +74 -0
  224. package/dist/agents/skills/gemini-cli/harness-ux-copy/SKILL.md +271 -0
  225. package/dist/agents/skills/gemini-cli/harness-ux-copy/skill.yaml +77 -0
  226. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +35 -0
  227. package/dist/agents/skills/gemini-cli/harness-verification/skill.yaml +1 -0
  228. package/dist/agents/skills/gemini-cli/harness-verify/skill.yaml +1 -0
  229. package/dist/agents/skills/gemini-cli/harness-visual-regression/SKILL.md +257 -0
  230. package/dist/agents/skills/gemini-cli/harness-visual-regression/skill.yaml +74 -0
  231. package/dist/agents/skills/gemini-cli/initialize-harness-project/SKILL.md +11 -3
  232. package/dist/agents/skills/gemini-cli/initialize-harness-project/skill.yaml +1 -0
  233. package/dist/agents/skills/gemini-cli/validate-context-engineering/skill.yaml +1 -0
  234. package/dist/agents-md-YTYQDA3P.js +8 -0
  235. package/dist/{architecture-ESOOE26S.js → architecture-JQZYM4US.js} +4 -4
  236. package/dist/bin/harness-mcp.js +16 -15
  237. package/dist/bin/harness.js +31 -30
  238. package/dist/{check-phase-gate-S2MZKLFQ.js → check-phase-gate-L3RADYWO.js} +4 -3
  239. package/dist/{chunk-WPPDRIJL.js → chunk-3C2MLBPJ.js} +4 -4
  240. package/dist/chunk-6KTUUFRN.js +217 -0
  241. package/dist/{chunk-MI5XJQDY.js → chunk-7IP4JIFL.js} +24 -10
  242. package/dist/{chunk-C2ERUR3L.js → chunk-7MJAPE3Z.js} +165 -49
  243. package/dist/{chunk-KELT6K6M.js → chunk-ABQHQ6I5.js} +1861 -1418
  244. package/dist/{chunk-L2KLU56K.js → chunk-AOZRDOIP.js} +2 -2
  245. package/dist/{chunk-QPEH2QPG.js → chunk-DBSOCI3G.js} +53 -54
  246. package/dist/{chunk-MHBMTPW7.js → chunk-ERS5EVUZ.js} +9 -0
  247. package/dist/{chunk-JSTQ3AWB.js → chunk-FIAPHX37.js} +1 -1
  248. package/dist/{chunk-2YPZKGAG.js → chunk-FTMXDOR6.js} +1 -1
  249. package/dist/{chunk-72GHBOL2.js → chunk-GZKSBLQL.js} +1 -1
  250. package/dist/{chunk-K6XAPGML.js → chunk-H7Y5CKTM.js} +1 -1
  251. package/dist/{chunk-HD4IBGLA.js → chunk-N5G5QMS3.js} +24 -1
  252. package/dist/{chunk-LD3DKUK5.js → chunk-NLVUVUGD.js} +1 -1
  253. package/dist/{chunk-3KOLLWWE.js → chunk-O5OJVPL6.js} +26 -211
  254. package/dist/{chunk-NKDM3FMH.js → chunk-OD3S2NHN.js} +1 -1
  255. package/dist/{chunk-5VY23YK3.js → chunk-OSXBPAMK.js} +2 -2
  256. package/dist/{chunk-MACVXDZK.js → chunk-OXLLOSSR.js} +45 -47
  257. package/dist/{chunk-GNGELAXY.js → chunk-RCWZBSK5.js} +2 -2
  258. package/dist/{chunk-PSNN4LWX.js → chunk-S2FXOWOR.js} +3 -3
  259. package/dist/{chunk-VUCPTQ6G.js → chunk-SD3SQOZ2.js} +1 -1
  260. package/dist/{chunk-7PZWR4LI.js → chunk-TPOTOBR7.js} +9 -9
  261. package/dist/{chunk-RZSUJBZZ.js → chunk-XKECDXJS.js} +452 -353
  262. package/dist/{chunk-VRFZWGMS.js → chunk-XYLGHKG6.js} +5 -1
  263. package/dist/{chunk-6N4R6FVX.js → chunk-YBJ262QL.js} +1 -1
  264. package/dist/{chunk-2VU4MFM3.js → chunk-YPYGXRDR.js} +7 -7
  265. package/dist/{chunk-Q6AB7W5Z.js → chunk-YQ6KC6TE.js} +1 -1
  266. package/dist/{chunk-7KQSUZVG.js → chunk-YZD2MRNQ.js} +1528 -1010
  267. package/dist/ci-workflow-EQZFVX3P.js +8 -0
  268. package/dist/{create-skill-WPXHSLX2.js → create-skill-XSWHMSM5.js} +2 -2
  269. package/dist/{dist-M6BQODWC.js → dist-B26DFXMP.js} +573 -480
  270. package/dist/{dist-L7LAAQAS.js → dist-DZ63LLUD.js} +1 -1
  271. package/dist/{dist-WF4C7A4A.js → dist-HWXF2C3R.js} +18 -2
  272. package/dist/{dist-D4RYGUZE.js → dist-USY2C5JL.js} +3 -1
  273. package/dist/{docs-BPYCN2DR.js → docs-7ECGYMAV.js} +5 -3
  274. package/dist/engine-EG4EH4IX.js +8 -0
  275. package/dist/{entropy-4VDVV5CR.js → entropy-5USWKLVS.js} +3 -3
  276. package/dist/{feedback-63QB5RCA.js → feedback-UTBXZZHF.js} +1 -1
  277. package/dist/{generate-agent-definitions-QABOJG56.js → generate-agent-definitions-3PM5EU7V.js} +5 -5
  278. package/dist/{glob-helper-5OHBUQAI.js → glob-helper-R5FXNUPS.js} +1 -1
  279. package/dist/{graph-loader-KO4GJ5N2.js → graph-loader-2M2HXDQI.js} +1 -1
  280. package/dist/index.d.ts +183 -17
  281. package/dist/index.js +32 -30
  282. package/dist/loader-ZPALXIVR.js +10 -0
  283. package/dist/mcp-362EZHF4.js +35 -0
  284. package/dist/{performance-26BH47O4.js → performance-OQAFMJUD.js} +3 -3
  285. package/dist/{review-pipeline-GHR3WFBI.js → review-pipeline-C4GCFVGP.js} +1 -1
  286. package/dist/runtime-7YLVK453.js +9 -0
  287. package/dist/{security-UQFUZXEN.js → security-PZOX7AQS.js} +1 -1
  288. package/dist/skill-executor-XZLYZYAK.js +8 -0
  289. package/dist/templates/axum/Cargo.toml.hbs +8 -0
  290. package/dist/templates/axum/src/main.rs +12 -0
  291. package/dist/templates/axum/template.json +16 -0
  292. package/dist/templates/django/manage.py.hbs +19 -0
  293. package/dist/templates/django/requirements.txt.hbs +1 -0
  294. package/dist/templates/django/src/settings.py.hbs +44 -0
  295. package/dist/templates/django/src/urls.py +6 -0
  296. package/dist/templates/django/src/wsgi.py.hbs +9 -0
  297. package/dist/templates/django/template.json +21 -0
  298. package/dist/templates/express/package.json.hbs +15 -0
  299. package/dist/templates/express/src/app.ts +12 -0
  300. package/dist/templates/express/src/lib/.gitkeep +0 -0
  301. package/dist/templates/express/template.json +16 -0
  302. package/dist/templates/fastapi/requirements.txt.hbs +2 -0
  303. package/dist/templates/fastapi/src/main.py +8 -0
  304. package/dist/templates/fastapi/template.json +20 -0
  305. package/dist/templates/gin/go.mod.hbs +5 -0
  306. package/dist/templates/gin/main.go +15 -0
  307. package/dist/templates/gin/template.json +19 -0
  308. package/dist/templates/go-base/.golangci.yml +16 -0
  309. package/dist/templates/go-base/AGENTS.md.hbs +35 -0
  310. package/dist/templates/go-base/go.mod.hbs +3 -0
  311. package/dist/templates/go-base/harness.config.json.hbs +17 -0
  312. package/dist/templates/go-base/main.go +7 -0
  313. package/dist/templates/go-base/template.json +14 -0
  314. package/dist/templates/java-base/AGENTS.md.hbs +35 -0
  315. package/dist/templates/java-base/checkstyle.xml +20 -0
  316. package/dist/templates/java-base/harness.config.json.hbs +16 -0
  317. package/dist/templates/java-base/pom.xml.hbs +39 -0
  318. package/dist/templates/java-base/src/main/java/App.java.hbs +5 -0
  319. package/dist/templates/java-base/template.json +13 -0
  320. package/dist/templates/nestjs/nest-cli.json +5 -0
  321. package/dist/templates/nestjs/package.json.hbs +18 -0
  322. package/dist/templates/nestjs/src/app.module.ts +8 -0
  323. package/dist/templates/nestjs/src/lib/.gitkeep +0 -0
  324. package/dist/templates/nestjs/src/main.ts +11 -0
  325. package/dist/templates/nestjs/template.json +16 -0
  326. package/dist/templates/nextjs/template.json +15 -1
  327. package/dist/templates/python-base/.python-version +1 -0
  328. package/dist/templates/python-base/AGENTS.md.hbs +32 -0
  329. package/dist/templates/python-base/harness.config.json.hbs +16 -0
  330. package/dist/templates/python-base/pyproject.toml.hbs +18 -0
  331. package/dist/templates/python-base/ruff.toml +5 -0
  332. package/dist/templates/python-base/src/__init__.py +0 -0
  333. package/dist/templates/python-base/template.json +13 -0
  334. package/dist/templates/react-vite/index.html +12 -0
  335. package/dist/templates/react-vite/package.json.hbs +18 -0
  336. package/dist/templates/react-vite/src/App.tsx +7 -0
  337. package/dist/templates/react-vite/src/lib/.gitkeep +0 -0
  338. package/dist/templates/react-vite/src/main.tsx +9 -0
  339. package/dist/templates/react-vite/template.json +19 -0
  340. package/dist/templates/react-vite/vite.config.ts +6 -0
  341. package/dist/templates/rust-base/AGENTS.md.hbs +35 -0
  342. package/dist/templates/rust-base/Cargo.toml.hbs +6 -0
  343. package/dist/templates/rust-base/clippy.toml +2 -0
  344. package/dist/templates/rust-base/harness.config.json.hbs +17 -0
  345. package/dist/templates/rust-base/src/main.rs +3 -0
  346. package/dist/templates/rust-base/template.json +14 -0
  347. package/dist/templates/spring-boot/pom.xml.hbs +50 -0
  348. package/dist/templates/spring-boot/src/main/java/Application.java.hbs +19 -0
  349. package/dist/templates/spring-boot/template.json +15 -0
  350. package/dist/templates/vue/index.html +12 -0
  351. package/dist/templates/vue/package.json.hbs +16 -0
  352. package/dist/templates/vue/src/App.vue +7 -0
  353. package/dist/templates/vue/src/lib/.gitkeep +0 -0
  354. package/dist/templates/vue/src/main.ts +4 -0
  355. package/dist/templates/vue/template.json +19 -0
  356. package/dist/templates/vue/vite.config.ts +6 -0
  357. package/dist/{validate-N7QJOKFZ.js → validate-FD3Z6VJD.js} +4 -4
  358. package/dist/validate-cross-check-WNJM6H2D.js +8 -0
  359. package/package.json +6 -6
  360. package/dist/agents-md-P2RHSUV7.js +0 -8
  361. package/dist/ci-workflow-4NYBUG6R.js +0 -8
  362. package/dist/engine-LXLIWQQ3.js +0 -8
  363. package/dist/loader-Z2IT7QX3.js +0 -10
  364. package/dist/mcp-KQHEL5IF.js +0 -34
  365. package/dist/runtime-PDWD7UIK.js +0 -9
  366. package/dist/skill-executor-RG45LUO5.js +0 -8
  367. package/dist/validate-cross-check-EDQ5QGTM.js +0 -8
@@ -1,29 +1,26 @@
1
1
  import {
2
2
  Err,
3
- Ok
4
- } from "./chunk-MHBMTPW7.js";
3
+ Ok,
4
+ SESSION_SECTION_NAMES
5
+ } from "./chunk-ERS5EVUZ.js";
5
6
 
6
- // ../core/dist/chunk-ZHGBWFYD.mjs
7
+ // ../core/dist/chunk-BQUWXBGR.mjs
7
8
  import { z } from "zod";
8
- import { relative as relative2 } from "path";
9
9
  import { createHash } from "crypto";
10
10
  import { minimatch } from "minimatch";
11
11
  import { access, constants, readFile } from "fs";
12
12
  import { promisify } from "util";
13
+ import { relative } from "path";
13
14
  import { glob } from "glob";
14
- import { dirname, resolve, relative } from "path";
15
- import { relative as relative3 } from "path";
15
+ import { dirname, resolve } from "path";
16
16
  import { readFileSync, writeFileSync, renameSync, mkdirSync, existsSync } from "fs";
17
17
  import { randomBytes } from "crypto";
18
18
  import { join, dirname as dirname2 } from "path";
19
- import { relative as relative4 } from "path";
20
19
  import { readFile as readFile2 } from "fs/promises";
21
- import { relative as relative5 } from "path";
22
- import { relative as relative6 } from "path";
23
20
  import { readFile as readFile3, readdir } from "fs/promises";
24
- import { join as join2, relative as relative7 } from "path";
21
+ import { join as join2 } from "path";
25
22
  import { readFile as readFile4, readdir as readdir2 } from "fs/promises";
26
- import { join as join3, relative as relative8, dirname as dirname3, resolve as resolve2 } from "path";
23
+ import { join as join3, dirname as dirname3, resolve as resolve2 } from "path";
27
24
  var ArchMetricCategorySchema = z.enum([
28
25
  "circular-deps",
29
26
  "layer-violations",
@@ -106,8 +103,7 @@ var ConstraintRuleSchema = z.object({
106
103
  // forward-compat for governs edges
107
104
  });
108
105
  function violationId(relativePath, category, normalizedDetail) {
109
- const path20 = relativePath.replace(/\\/g, "/");
110
- const input = `${path20}:${category}:${normalizedDetail}`;
106
+ const input = `${relativePath}:${category}:${normalizedDetail}`;
111
107
  return createHash("sha256").update(input).digest("hex");
112
108
  }
113
109
  function constraintRuleId(category, scope, description) {
@@ -139,17 +135,17 @@ function resolveFileToLayer(file, layers) {
139
135
  }
140
136
  var accessAsync = promisify(access);
141
137
  var readFileAsync = promisify(readFile);
142
- async function fileExists(path20) {
138
+ async function fileExists(path22) {
143
139
  try {
144
- await accessAsync(path20, constants.F_OK);
140
+ await accessAsync(path22, constants.F_OK);
145
141
  return true;
146
142
  } catch {
147
143
  return false;
148
144
  }
149
145
  }
150
- async function readFileContent(path20) {
146
+ async function readFileContent(path22) {
151
147
  try {
152
- const content = await readFileAsync(path20, "utf-8");
148
+ const content = await readFileAsync(path22, "utf-8");
153
149
  return Ok(content);
154
150
  } catch (error) {
155
151
  return Err(error);
@@ -158,6 +154,9 @@ async function readFileContent(path20) {
158
154
  async function findFiles(pattern, cwd = process.cwd()) {
159
155
  return glob(pattern, { cwd, absolute: true });
160
156
  }
157
+ function relativePosix(from, to) {
158
+ return relative(from, to).replaceAll("\\", "/");
159
+ }
161
160
  function resolveImportPath(importSource, fromFile, _rootDir) {
162
161
  if (!importSource.startsWith(".") && !importSource.startsWith("/")) {
163
162
  return null;
@@ -209,8 +208,8 @@ async function buildDependencyGraph(files, parser, graphDependencyData) {
209
208
  function checkLayerViolations(graph, layers, rootDir) {
210
209
  const violations = [];
211
210
  for (const edge of graph.edges) {
212
- const fromRelative = relative(rootDir, edge.from);
213
- const toRelative = relative(rootDir, edge.to);
211
+ const fromRelative = relativePosix(rootDir, edge.from);
212
+ const toRelative = relativePosix(rootDir, edge.to);
214
213
  const fromLayer = resolveFileToLayer(fromRelative, layers);
215
214
  const toLayer = resolveFileToLayer(toRelative, layers);
216
215
  if (!fromLayer || !toLayer) continue;
@@ -292,65 +291,71 @@ async function validateDependencies(config) {
292
291
  graph: graphResult.value
293
292
  });
294
293
  }
295
- function tarjanSCC(graph) {
296
- const nodeMap = /* @__PURE__ */ new Map();
297
- const stack = [];
298
- const sccs = [];
299
- let index = 0;
294
// Builds a Map from each graph node to the list of its out-neighbors.
// Edges pointing at nodes outside `graph.nodes` are dropped; membership is
// tested via a Set so the pass stays O(nodes + edges).
function buildAdjacencyList(graph) {
  const adjacency = /* @__PURE__ */ new Map();
  const knownNodes = new Set(graph.nodes);
  for (const node of graph.nodes) {
    adjacency.set(node, []);
  }
  for (const edge of graph.edges) {
    const outgoing = adjacency.get(edge.from);
    if (outgoing && knownNodes.has(edge.to)) {
      outgoing.push(edge.to);
    }
  }
  return adjacency;
}
308
// True when a strongly connected component represents an actual cycle:
// either it has two or more members, or it is a single node with a self-edge.
function isCyclicSCC(scc, adjacency) {
  if (scc.length === 0) return false;
  if (scc.length > 1) return true;
  const [onlyNode] = scc;
  const outgoing = adjacency.get(onlyNode) ?? [];
  return outgoing.includes(onlyNode);
}
317
// Tarjan helper: walks each neighbor of `node`, recursing into unvisited
// ones (via strongConnectImpl) and folding the neighbor's lowlink/index back
// into `node`'s lowlink. Mutates nodeMap entries in place.
function processNeighbors(node, neighbors, nodeMap, stack, adjacency, sccs, indexRef) {
  for (const neighbor of neighbors) {
    const visited = nodeMap.get(neighbor);
    if (!visited) {
      // Tree edge: recurse first, then take the smaller lowlink.
      strongConnectImpl(neighbor, nodeMap, stack, adjacency, sccs, indexRef);
      const current = nodeMap.get(node);
      current.lowlink = Math.min(current.lowlink, nodeMap.get(neighbor).lowlink);
    } else if (visited.onStack) {
      // Back edge to a node still on the stack: use its discovery index.
      const current = nodeMap.get(node);
      current.lowlink = Math.min(current.lowlink, visited.index);
    }
  }
}
331
// Core of Tarjan's SCC algorithm for one node: assigns discovery index and
// lowlink, pushes the node, explores neighbors, and — when this node is the
// root of its component — pops the stack down to it. Only cyclic components
// (see isCyclicSCC) are recorded in `sccs`.
function strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef) {
  nodeMap.set(node, { index: indexRef.value, lowlink: indexRef.value, onStack: true });
  indexRef.value += 1;
  stack.push(node);
  processNeighbors(node, adjacency.get(node) ?? [], nodeMap, stack, adjacency, sccs, indexRef);
  const data = nodeMap.get(node);
  if (data.lowlink !== data.index) return;
  // Root of an SCC: pop every member off the stack.
  const component = [];
  let popped;
  do {
    popped = stack.pop();
    nodeMap.get(popped).onStack = false;
    component.push(popped);
  } while (popped !== node);
  if (isCyclicSCC(component, adjacency)) {
    sccs.push(component);
  }
}
350
+ function tarjanSCC(graph) {
351
+ const nodeMap = /* @__PURE__ */ new Map();
352
+ const stack = [];
353
+ const sccs = [];
354
+ const indexRef = { value: 0 };
355
+ const adjacency = buildAdjacencyList(graph);
351
356
  for (const node of graph.nodes) {
352
357
  if (!nodeMap.has(node)) {
353
- strongConnect(node);
358
+ strongConnectImpl(node, nodeMap, stack, adjacency, sccs, indexRef);
354
359
  }
355
360
  }
356
361
  return sccs;
@@ -436,8 +441,8 @@ var CircularDepsCollector = class {
436
441
  }
437
442
  const { cycles, largestCycle } = result.value;
438
443
  const violations = cycles.map((cycle) => {
439
- const cyclePath = cycle.cycle.map((f) => relative2(rootDir, f)).join(" -> ");
440
- const firstFile = relative2(rootDir, cycle.cycle[0]);
444
+ const cyclePath = cycle.cycle.map((f) => relativePosix(rootDir, f)).join(" -> ");
445
+ const firstFile = relativePosix(rootDir, cycle.cycle[0]);
441
446
  return {
442
447
  id: violationId(firstFile, this.category, cyclePath),
443
448
  file: firstFile,
@@ -499,8 +504,8 @@ var LayerViolationCollector = class {
499
504
  (v) => v.reason === "WRONG_LAYER"
500
505
  );
501
506
  const violations = layerViolations.map((v) => {
502
- const relFile = relative3(rootDir, v.file);
503
- const relImport = relative3(rootDir, v.imports);
507
+ const relFile = relativePosix(rootDir, v.file);
508
+ const relImport = relativePosix(rootDir, v.imports);
504
509
  const detail = `${v.fromLayer} -> ${v.toLayer}: ${relFile} imports ${relImport}`;
505
510
  return {
506
511
  id: violationId(relFile, this.category, detail),
@@ -609,6 +614,31 @@ function aggregateByCategory(results) {
609
614
  }
610
615
  return map;
611
616
  }
617
// Splits `violations` against the baseline: ids already in the baseline go to
// `preExisting` (ids only); everything else is returned whole in `newViolations`.
function classifyViolations(violations, baselineViolationIds) {
  const preExisting = violations
    .filter((violation) => baselineViolationIds.has(violation.id))
    .map((violation) => violation.id);
  const newViolations = violations.filter(
    (violation) => !baselineViolationIds.has(violation.id)
  );
  return { newViolations, preExisting };
}
629
// Ids recorded in the baseline category that no longer occur in the current
// run. A missing baseline category means nothing could have been resolved.
function findResolvedViolations(baselineCategory, currentViolationIds) {
  if (!baselineCategory) {
    return [];
  }
  const resolved = [];
  for (const id of baselineCategory.violationIds) {
    if (!currentViolationIds.has(id)) {
      resolved.push(id);
    }
  }
  return resolved;
}
633
// Violation ids from baseline categories that were never visited this run:
// the whole category disappeared, so all of its ids count as resolved.
function collectOrphanedBaselineViolations(baseline, visitedCategories) {
  return Object.entries(baseline.metrics)
    .filter(([category, data]) => !visitedCategories.has(category) && Boolean(data))
    .flatMap(([, data]) => data.violationIds);
}
612
642
  function diff(current, baseline) {
613
643
  const aggregated = aggregateByCategory(current);
614
644
  const newViolations = [];
@@ -621,21 +651,11 @@ function diff(current, baseline) {
621
651
  const baselineCategory = baseline.metrics[category];
622
652
  const baselineViolationIds = new Set(baselineCategory?.violationIds ?? []);
623
653
  const baselineValue = baselineCategory?.value ?? 0;
624
- for (const violation of agg.violations) {
625
- if (baselineViolationIds.has(violation.id)) {
626
- preExisting.push(violation.id);
627
- } else {
628
- newViolations.push(violation);
629
- }
630
- }
654
+ const classified = classifyViolations(agg.violations, baselineViolationIds);
655
+ newViolations.push(...classified.newViolations);
656
+ preExisting.push(...classified.preExisting);
631
657
  const currentViolationIds = new Set(agg.violations.map((v) => v.id));
632
- if (baselineCategory) {
633
- for (const id of baselineCategory.violationIds) {
634
- if (!currentViolationIds.has(id)) {
635
- resolvedViolations.push(id);
636
- }
637
- }
638
- }
658
+ resolvedViolations.push(...findResolvedViolations(baselineCategory, currentViolationIds));
639
659
  if (baselineCategory && agg.value > baselineValue) {
640
660
  regressions.push({
641
661
  category,
@@ -645,16 +665,9 @@ function diff(current, baseline) {
645
665
  });
646
666
  }
647
667
  }
648
- for (const [category, baselineCategory] of Object.entries(baseline.metrics)) {
649
- if (!visitedCategories.has(category) && baselineCategory) {
650
- for (const id of baselineCategory.violationIds) {
651
- resolvedViolations.push(id);
652
- }
653
- }
654
- }
655
- const passed = newViolations.length === 0 && regressions.length === 0;
668
+ resolvedViolations.push(...collectOrphanedBaselineViolations(baseline, visitedCategories));
656
669
  return {
657
- passed,
670
+ passed: newViolations.length === 0 && regressions.length === 0,
658
671
  newViolations,
659
672
  resolvedViolations,
660
673
  preExisting,
@@ -669,22 +682,22 @@ var DEFAULT_THRESHOLDS = {
669
682
  fileLength: { info: 300 },
670
683
  hotspotPercentile: { error: 95 }
671
684
  };
685
// Line-based heuristics for locating function definitions. Each regex
// captures: [1] the function name, [2] the raw parameter-list text.
// Fix: the bare "name(params) {" method pattern previously also matched
// control-flow statements (`if (x) {`, `for (...) {`, `while`, `switch`,
// `catch`), miscounting them as functions; a negative lookahead excludes
// those keywords.
var FUNCTION_PATTERNS = [
  // function declarations: function name(params) {
  /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
  // method declarations: name(params) { — but not control-flow keywords
  /^\s*(?:async\s+)?(?!(?:if|for|while|switch|catch)\b)(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
  // arrow functions assigned to const/let/var: const name = (params) =>
  /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
  // arrow functions assigned to const/let/var with single param: const name = param =>
  /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
];
672
695
  function extractFunctions(content) {
673
696
  const functions = [];
674
697
  const lines = content.split("\n");
675
- const patterns = [
676
- // function declarations: function name(params) {
677
- /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
678
- // method declarations: name(params) {
679
- /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
680
- // arrow functions assigned to const/let/var: const name = (params) =>
681
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
682
- // arrow functions assigned to const/let/var with single param: const name = param =>
683
- /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
684
- ];
685
698
  for (let i = 0; i < lines.length; i++) {
686
699
  const line = lines[i];
687
- for (const pattern of patterns) {
700
+ for (const pattern of FUNCTION_PATTERNS) {
688
701
  const match = line.match(pattern);
689
702
  if (match) {
690
703
  const name = match[1] ?? "anonymous";
@@ -773,26 +786,155 @@ function computeNestingDepth(body) {
773
786
  }
774
787
  return maxDepth;
775
788
  }
776
- async function detectComplexityViolations(snapshot, config, graphData) {
777
- const violations = [];
778
- const thresholds = {
789
// Merges user-supplied thresholds over DEFAULT_THRESHOLDS per metric.
// `undefined` user values are stripped first (stripUndefined) so they cannot
// clobber a default during the spread merge. Absent user config returns a
// shallow copy of the defaults.
function resolveThresholds(config) {
  const userThresholds = config?.thresholds;
  if (!userThresholds) {
    return { ...DEFAULT_THRESHOLDS };
  }
  const merge = (metric) => ({
    ...DEFAULT_THRESHOLDS[metric],
    ...stripUndefined(userThresholds[metric])
  });
  return {
    cyclomaticComplexity: merge("cyclomaticComplexity"),
    nestingDepth: merge("nestingDepth"),
    functionLength: merge("functionLength"),
    parameterCount: merge("parameterCount"),
    fileLength: merge("fileLength")
  };
}
812
// Shallow copy of `obj` without keys whose value is `undefined`; nullish
// input yields {}. Lets user config spread over defaults without an explicit
// `undefined` erasing a default value.
function stripUndefined(obj) {
  if (!obj) {
    return {};
  }
  const kept = Object.entries(obj).filter(([, value]) => value !== undefined);
  return Object.fromEntries(kept);
}
820
// Tier-3 informational check: flags files longer than `threshold` lines.
// Returns a violation record, or null when the file is within the limit.
function checkFileLengthViolation(filePath, lineCount, threshold) {
  if (lineCount <= threshold) {
    return null;
  }
  const violation = {
    file: filePath,
    function: "<file>",
    line: 1,
    metric: "fileLength",
    value: lineCount,
    threshold,
    tier: 3,
    severity: "info",
    message: `File has ${lineCount} lines (threshold: ${threshold})`
  };
  return violation;
}
834
// Cyclomatic-complexity check for one function record. Above
// `thresholds.error` produces a tier-1 error, above `thresholds.warn` a
// tier-2 warning, otherwise null. Complexity comes from the module-level
// computeCyclomaticComplexity helper.
function checkCyclomaticComplexity(filePath, fn, thresholds) {
  const complexity = computeCyclomaticComplexity(fn.body);
  let level = null;
  if (complexity > thresholds.error) {
    level = { threshold: thresholds.error, tier: 1, severity: "error", label: "error" };
  } else if (complexity > thresholds.warn) {
    level = { threshold: thresholds.warn, tier: 2, severity: "warning", label: "warning" };
  }
  if (level === null) {
    return null;
  }
  return {
    file: filePath,
    function: fn.name,
    line: fn.line,
    metric: "cyclomaticComplexity",
    value: complexity,
    threshold: level.threshold,
    tier: level.tier,
    severity: level.severity,
    message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (${level.label} threshold: ${level.threshold})`
  };
}
864
// Tier-2 warning when a function's maximum nesting depth (from the
// module-level computeNestingDepth) exceeds `threshold`; null otherwise.
function checkNestingDepth(filePath, fn, threshold) {
  const depth = computeNestingDepth(fn.body);
  if (depth <= threshold) {
    return null;
  }
  return {
    file: filePath,
    function: fn.name,
    line: fn.line,
    metric: "nestingDepth",
    value: depth,
    threshold,
    tier: 2,
    severity: "warning",
    message: `Function "${fn.name}" has nesting depth of ${depth} (threshold: ${threshold})`
  };
}
879
// Tier-2 warning when a function spans more than `threshold` lines
// (inclusive count: endLine - startLine + 1); null when within the limit.
function checkFunctionLength(filePath, fn, threshold) {
  const lineSpan = fn.endLine - fn.startLine + 1;
  if (lineSpan <= threshold) {
    return null;
  }
  return {
    file: filePath,
    function: fn.name,
    line: fn.line,
    metric: "functionLength",
    value: lineSpan,
    threshold,
    tier: 2,
    severity: "warning",
    message: `Function "${fn.name}" is ${lineSpan} lines long (threshold: ${threshold})`
  };
}
894
// Tier-2 warning when a function declares more than `threshold` parameters;
// null when within the limit.
function checkParameterCount(filePath, fn, threshold) {
  if (fn.params <= threshold) {
    return null;
  }
  return {
    file: filePath,
    function: fn.name,
    line: fn.line,
    metric: "parameterCount",
    value: fn.params,
    threshold,
    tier: 2,
    severity: "warning",
    message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${threshold})`
  };
}
908
// Tier-1 error when this function's hotspot score exceeds the 95th-percentile
// cutoff in `graphData`; null when it has no hotspot entry or sits at/below
// the cutoff.
function checkHotspot(filePath, fn, graphData) {
  const hotspot = graphData.hotspots.find(
    (h) => h.file === filePath && h.function === fn.name
  );
  if (!hotspot) {
    return null;
  }
  if (hotspot.hotspotScore <= graphData.percentile95Score) {
    return null;
  }
  return {
    file: filePath,
    function: fn.name,
    line: fn.line,
    metric: "hotspotScore",
    value: hotspot.hotspotScore,
    threshold: graphData.percentile95Score,
    tier: 1,
    severity: "error",
    message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
  };
}
923
// Runs every per-function metric check for `fn` and returns the non-null
// violations. The hotspot check is only attempted when graph data exists.
function collectFunctionViolations(filePath, fn, thresholds, graphData) {
  const results = [
    checkCyclomaticComplexity(filePath, fn, thresholds.cyclomaticComplexity),
    checkNestingDepth(filePath, fn, thresholds.nestingDepth.warn),
    checkFunctionLength(filePath, fn, thresholds.functionLength.warn),
    checkParameterCount(filePath, fn, thresholds.parameterCount.warn)
  ];
  if (graphData) {
    results.push(checkHotspot(filePath, fn, graphData));
  }
  return results.filter((violation) => violation !== null);
}
935
+ async function detectComplexityViolations(snapshot, config, graphData) {
936
+ const violations = [];
937
+ const thresholds = resolveThresholds(config);
796
938
  let totalFunctions = 0;
797
939
  for (const file of snapshot.files) {
798
940
  let content;
@@ -802,107 +944,16 @@ async function detectComplexityViolations(snapshot, config, graphData) {
802
944
  continue;
803
945
  }
804
946
  const lines = content.split("\n");
805
- if (lines.length > thresholds.fileLength.info) {
806
- violations.push({
807
- file: file.path,
808
- function: "<file>",
809
- line: 1,
810
- metric: "fileLength",
811
- value: lines.length,
812
- threshold: thresholds.fileLength.info,
813
- tier: 3,
814
- severity: "info",
815
- message: `File has ${lines.length} lines (threshold: ${thresholds.fileLength.info})`
816
- });
817
- }
947
+ const fileLenViolation = checkFileLengthViolation(
948
+ file.path,
949
+ lines.length,
950
+ thresholds.fileLength.info
951
+ );
952
+ if (fileLenViolation) violations.push(fileLenViolation);
818
953
  const functions = extractFunctions(content);
819
954
  totalFunctions += functions.length;
820
955
  for (const fn of functions) {
821
- const complexity = computeCyclomaticComplexity(fn.body);
822
- if (complexity > thresholds.cyclomaticComplexity.error) {
823
- violations.push({
824
- file: file.path,
825
- function: fn.name,
826
- line: fn.line,
827
- metric: "cyclomaticComplexity",
828
- value: complexity,
829
- threshold: thresholds.cyclomaticComplexity.error,
830
- tier: 1,
831
- severity: "error",
832
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.cyclomaticComplexity.error})`
833
- });
834
- } else if (complexity > thresholds.cyclomaticComplexity.warn) {
835
- violations.push({
836
- file: file.path,
837
- function: fn.name,
838
- line: fn.line,
839
- metric: "cyclomaticComplexity",
840
- value: complexity,
841
- threshold: thresholds.cyclomaticComplexity.warn,
842
- tier: 2,
843
- severity: "warning",
844
- message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.cyclomaticComplexity.warn})`
845
- });
846
- }
847
- const nestingDepth = computeNestingDepth(fn.body);
848
- if (nestingDepth > thresholds.nestingDepth.warn) {
849
- violations.push({
850
- file: file.path,
851
- function: fn.name,
852
- line: fn.line,
853
- metric: "nestingDepth",
854
- value: nestingDepth,
855
- threshold: thresholds.nestingDepth.warn,
856
- tier: 2,
857
- severity: "warning",
858
- message: `Function "${fn.name}" has nesting depth of ${nestingDepth} (threshold: ${thresholds.nestingDepth.warn})`
859
- });
860
- }
861
- const fnLength = fn.endLine - fn.startLine + 1;
862
- if (fnLength > thresholds.functionLength.warn) {
863
- violations.push({
864
- file: file.path,
865
- function: fn.name,
866
- line: fn.line,
867
- metric: "functionLength",
868
- value: fnLength,
869
- threshold: thresholds.functionLength.warn,
870
- tier: 2,
871
- severity: "warning",
872
- message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${thresholds.functionLength.warn})`
873
- });
874
- }
875
- if (fn.params > thresholds.parameterCount.warn) {
876
- violations.push({
877
- file: file.path,
878
- function: fn.name,
879
- line: fn.line,
880
- metric: "parameterCount",
881
- value: fn.params,
882
- threshold: thresholds.parameterCount.warn,
883
- tier: 2,
884
- severity: "warning",
885
- message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${thresholds.parameterCount.warn})`
886
- });
887
- }
888
- if (graphData) {
889
- const hotspot = graphData.hotspots.find(
890
- (h) => h.file === file.path && h.function === fn.name
891
- );
892
- if (hotspot && hotspot.hotspotScore > graphData.percentile95Score) {
893
- violations.push({
894
- file: file.path,
895
- function: fn.name,
896
- line: fn.line,
897
- metric: "hotspotScore",
898
- value: hotspot.hotspotScore,
899
- threshold: graphData.percentile95Score,
900
- tier: 1,
901
- severity: "error",
902
- message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
903
- });
904
- }
905
- }
956
+ violations.push(...collectFunctionViolations(file.path, fn, thresholds, graphData));
906
957
  }
907
958
  }
908
959
  const errorCount = violations.filter((v) => v.severity === "error").length;
@@ -980,7 +1031,7 @@ var ComplexityCollector = class {
980
1031
  (v) => v.severity === "error" || v.severity === "warning"
981
1032
  );
982
1033
  const violations = filtered.map((v) => {
983
- const relFile = relative4(rootDir, v.file);
1034
+ const relFile = relativePosix(rootDir, v.file);
984
1035
  const idDetail = `${v.metric}:${v.function}`;
985
1036
  return {
986
1037
  id: violationId(relFile, this.category, idDetail),
@@ -1202,7 +1253,7 @@ var CouplingCollector = class {
1202
1253
  (v) => v.severity === "error" || v.severity === "warning"
1203
1254
  );
1204
1255
  const violations = filtered.map((v) => {
1205
- const relFile = relative5(rootDir, v.file);
1256
+ const relFile = relativePosix(rootDir, v.file);
1206
1257
  const idDetail = `${v.metric}`;
1207
1258
  return {
1208
1259
  id: violationId(relFile, this.category, idDetail),
@@ -1266,8 +1317,8 @@ var ForbiddenImportCollector = class {
1266
1317
  (v) => v.reason === "FORBIDDEN_IMPORT"
1267
1318
  );
1268
1319
  const violations = forbidden.map((v) => {
1269
- const relFile = relative6(rootDir, v.file);
1270
- const relImport = relative6(rootDir, v.imports);
1320
+ const relFile = relativePosix(rootDir, v.file);
1321
+ const relImport = relativePosix(rootDir, v.imports);
1271
1322
  const detail = `forbidden import: ${relFile} -> ${relImport}`;
1272
1323
  return {
1273
1324
  id: violationId(relFile, this.category, detail),
@@ -1319,10 +1370,10 @@ async function discoverModules(rootDir) {
1319
1370
  }
1320
1371
  }
1321
1372
  modules.push({
1322
- modulePath: relative7(rootDir, dir),
1373
+ modulePath: relativePosix(rootDir, dir),
1323
1374
  fileCount: tsFiles.length,
1324
1375
  totalLoc,
1325
- files: tsFiles.map((f) => relative7(rootDir, f))
1376
+ files: tsFiles.map((f) => relativePosix(rootDir, f))
1326
1377
  });
1327
1378
  }
1328
1379
  for (const sub of subdirs) {
@@ -1494,7 +1545,7 @@ var DepDepthCollector = class {
1494
1545
  }
1495
1546
  const moduleMap = /* @__PURE__ */ new Map();
1496
1547
  for (const file of allFiles) {
1497
- const relDir = relative8(rootDir, dirname3(file));
1548
+ const relDir = relativePosix(rootDir, dirname3(file));
1498
1549
  if (!moduleMap.has(relDir)) moduleMap.set(relDir, []);
1499
1550
  moduleMap.get(relDir).push(file);
1500
1551
  }
@@ -1743,19 +1794,18 @@ var archMatchers = {
1743
1794
  // ../core/dist/index.mjs
1744
1795
  import { join as join4, dirname as dirname4 } from "path";
1745
1796
  import { minimatch as minimatch2 } from "minimatch";
1746
- import { basename, relative as relative9 } from "path";
1747
- import { join as join22, basename as basename2, relative as relative22 } from "path";
1748
- import { relative as relative32, basename as basename3, dirname as dirname22 } from "path";
1797
+ import { basename } from "path";
1798
+ import { join as join22, basename as basename2 } from "path";
1799
+ import { basename as basename3, dirname as dirname22 } from "path";
1749
1800
  import { z as z2 } from "zod";
1750
1801
  import * as fs from "fs/promises";
1751
1802
  import * as fs2 from "fs/promises";
1752
1803
  import { parse } from "@typescript-eslint/typescript-estree";
1753
- import { join as join32, resolve as resolve3, relative as relative42 } from "path";
1804
+ import { join as join32, resolve as resolve3 } from "path";
1754
1805
  import { minimatch as minimatch22 } from "minimatch";
1755
1806
  import { dirname as dirname32, resolve as resolve22 } from "path";
1756
1807
  import { dirname as dirname42, resolve as resolve32 } from "path";
1757
1808
  import { minimatch as minimatch3 } from "minimatch";
1758
- import { relative as relative52 } from "path";
1759
1809
  import { readdirSync, statSync } from "fs";
1760
1810
  import { join as join42 } from "path";
1761
1811
  import * as fs3 from "fs";
@@ -1791,24 +1841,28 @@ import * as path9 from "path";
1791
1841
  import { execSync as execSync2 } from "child_process";
1792
1842
  import * as fs13 from "fs";
1793
1843
  import * as path10 from "path";
1794
- import * as fs15 from "fs/promises";
1795
- import { z as z5 } from "zod";
1796
1844
  import * as fs14 from "fs";
1797
1845
  import * as path11 from "path";
1846
+ import * as fs15 from "fs";
1798
1847
  import * as path12 from "path";
1848
+ import * as fs17 from "fs/promises";
1849
+ import { z as z5 } from "zod";
1850
+ import * as fs16 from "fs";
1799
1851
  import * as path13 from "path";
1800
1852
  import * as path14 from "path";
1801
1853
  import * as path15 from "path";
1802
- import * as fs16 from "fs";
1803
1854
  import * as path16 from "path";
1804
- import { z as z6 } from "zod";
1805
- import * as fs17 from "fs/promises";
1806
1855
  import * as path17 from "path";
1807
- import * as fs18 from "fs/promises";
1856
+ import * as fs18 from "fs";
1808
1857
  import * as path18 from "path";
1809
- import * as ejs from "ejs";
1810
- import * as fs19 from "fs";
1858
+ import { z as z6 } from "zod";
1859
+ import * as fs19 from "fs/promises";
1811
1860
  import * as path19 from "path";
1861
+ import * as fs20 from "fs/promises";
1862
+ import * as path20 from "path";
1863
+ import * as ejs from "ejs";
1864
+ import * as fs21 from "fs";
1865
+ import * as path21 from "path";
1812
1866
  import * as os from "os";
1813
1867
  import { spawn } from "child_process";
1814
1868
  async function validateFileStructure(projectPath, conventions) {
@@ -1846,15 +1900,15 @@ function validateConfig(data, schema) {
1846
1900
  let message = "Configuration validation failed";
1847
1901
  const suggestions = [];
1848
1902
  if (firstError) {
1849
- const path20 = firstError.path.join(".");
1850
- const pathDisplay = path20 ? ` at "${path20}"` : "";
1903
+ const path22 = firstError.path.join(".");
1904
+ const pathDisplay = path22 ? ` at "${path22}"` : "";
1851
1905
  if (firstError.code === "invalid_type") {
1852
1906
  const received = firstError.received;
1853
1907
  const expected = firstError.expected;
1854
1908
  if (received === "undefined") {
1855
1909
  code = "MISSING_FIELD";
1856
1910
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
1857
- suggestions.push(`Field "${path20}" is required and must be of type "${expected}"`);
1911
+ suggestions.push(`Field "${path22}" is required and must be of type "${expected}"`);
1858
1912
  } else {
1859
1913
  code = "INVALID_TYPE";
1860
1914
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -2001,6 +2055,43 @@ function extractMarkdownLinks(content) {
2001
2055
  }
2002
2056
  return links;
2003
2057
  }
2058
// A trimmed line ends a section description when it opens new markdown
// structure: a heading, a list item, or a fenced code block.
function isDescriptionTerminator(trimmed) {
  const terminators = ["#", "-", "*", "```"];
  return terminators.some((prefix) => trimmed.startsWith(prefix));
}
2061
// Collects the leading free-text paragraph of a markdown section body:
// blank lines before the paragraph are skipped; the first blank line after
// it, or any structural line (heading/list/code fence), ends it. Returns
// the collected lines joined with spaces, or undefined when none exist.
function extractDescription(sectionLines) {
  const collected = [];
  for (const rawLine of sectionLines) {
    const trimmed = rawLine.trim();
    if (trimmed === "") {
      if (collected.length > 0) {
        break;
      }
      continue;
    }
    if (isDescriptionTerminator(trimmed)) {
      break;
    }
    collected.push(trimmed);
  }
  if (collected.length === 0) {
    return undefined;
  }
  return collected.join(" ");
}
+ }
2074
// Assembles the public record for one parsed markdown section: extracts the
// section body's links (rebasing their line numbers onto the whole document;
// `exists` starts false until validated elsewhere) and attaches the leading
// description paragraph when one is present.
function buildAgentMapSection(section, lines) {
  const endIndex = section.endIndex ?? lines.length;
  const bodyLines = lines.slice(section.startIndex + 1, endIndex);
  const links = extractMarkdownLinks(bodyLines.join("\n")).map((link) => ({
    ...link,
    line: link.line + section.startIndex + 1,
    exists: false
  }));
  const record = {
    title: section.title,
    level: section.level,
    line: section.line,
    links
  };
  const description = extractDescription(bodyLines);
  if (description) {
    record.description = description;
  }
  return record;
}
2004
2095
  function extractSections(content) {
2005
2096
  const lines = content.split("\n");
2006
2097
  const sections = [];
@@ -2013,7 +2104,6 @@ function extractSections(content) {
2013
2104
  title: match[2].trim(),
2014
2105
  level: match[1].length,
2015
2106
  line: i + 1,
2016
- // 1-indexed
2017
2107
  startIndex: i
2018
2108
  });
2019
2109
  }
@@ -2025,62 +2115,29 @@ function extractSections(content) {
2025
2115
  currentSection.endIndex = nextSection ? nextSection.startIndex : lines.length;
2026
2116
  }
2027
2117
  }
2028
- return sections.map((section) => {
2029
- const endIndex = section.endIndex ?? lines.length;
2030
- const sectionLines = lines.slice(section.startIndex + 1, endIndex);
2031
- const sectionContent = sectionLines.join("\n");
2032
- const links = extractMarkdownLinks(sectionContent).map((link) => ({
2033
- ...link,
2034
- line: link.line + section.startIndex + 1,
2035
- // Adjust line number
2036
- exists: false
2037
- // Will be set later by validateAgentsMap
2038
- }));
2039
- const descriptionLines = [];
2040
- for (const line of sectionLines) {
2041
- const trimmed = line.trim();
2042
- if (trimmed === "") {
2043
- if (descriptionLines.length > 0) break;
2044
- continue;
2045
- }
2046
- if (trimmed.startsWith("#")) break;
2047
- if (trimmed.startsWith("-") || trimmed.startsWith("*")) break;
2048
- if (trimmed.startsWith("```")) break;
2049
- descriptionLines.push(trimmed);
2050
- }
2051
- const result = {
2052
- title: section.title,
2053
- level: section.level,
2054
- line: section.line,
2055
- links
2056
- };
2057
- if (descriptionLines.length > 0) {
2058
- result.description = descriptionLines.join(" ");
2059
- }
2060
- return result;
2061
- });
2118
+ return sections.map((section) => buildAgentMapSection(section, lines));
2062
2119
  }
2063
// Links that are not local filesystem paths — web URLs, in-page anchors, and
// mailto addresses — are never checked against the file tree.
function isExternalLink(path22) {
  const externalPrefixes = ["http://", "https://", "#", "mailto:"];
  return externalPrefixes.some((prefix) => path22.startsWith(prefix));
}
2066
2123
// Relative links (starting with ".") are resolved against `baseDir` (the
// directory of the markdown file); all other paths pass through untouched.
function resolveLinkPath(linkPath, baseDir) {
  if (linkPath.startsWith(".")) {
    return join4(baseDir, linkPath);
  }
  return linkPath;
}
2069
- async function validateAgentsMap(path20 = "./AGENTS.md") {
2070
- const contentResult = await readFileContent(path20);
2126
+ async function validateAgentsMap(path22 = "./AGENTS.md") {
2127
+ const contentResult = await readFileContent(path22);
2071
2128
  if (!contentResult.ok) {
2072
2129
  return Err(
2073
2130
  createError(
2074
2131
  "PARSE_ERROR",
2075
2132
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
2076
- { path: path20 },
2133
+ { path: path22 },
2077
2134
  ["Ensure the file exists", "Check file permissions"]
2078
2135
  )
2079
2136
  );
2080
2137
  }
2081
2138
  const content = contentResult.value;
2082
2139
  const sections = extractSections(content);
2083
- const baseDir = dirname4(path20);
2140
+ const baseDir = dirname4(path22);
2084
2141
  const sectionTitles = sections.map((s) => s.title);
2085
2142
  const missingSections = REQUIRED_SECTIONS.filter(
2086
2143
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -2157,7 +2214,7 @@ async function checkDocCoverage(domain, options = {}) {
2157
2214
  try {
2158
2215
  const sourceFiles = await findFiles("**/*.{ts,js,tsx,jsx}", sourceDir);
2159
2216
  const filteredSourceFiles = sourceFiles.filter((file) => {
2160
- const relativePath = relative9(sourceDir, file);
2217
+ const relativePath = relativePosix(sourceDir, file);
2161
2218
  return !excludePatterns.some((pattern) => {
2162
2219
  return minimatch2(relativePath, pattern, { dot: true }) || minimatch2(file, pattern, { dot: true });
2163
2220
  });
@@ -2180,7 +2237,7 @@ async function checkDocCoverage(domain, options = {}) {
2180
2237
  const undocumented = [];
2181
2238
  const gaps = [];
2182
2239
  for (const sourceFile of filteredSourceFiles) {
2183
- const relativePath = relative9(sourceDir, sourceFile);
2240
+ const relativePath = relativePosix(sourceDir, sourceFile);
2184
2241
  const fileName = basename(sourceFile);
2185
2242
  const isDocumented = documentedPaths.has(relativePath) || documentedPaths.has(fileName) || documentedPaths.has(`src/${relativePath}`);
2186
2243
  if (isDocumented) {
@@ -2214,8 +2271,8 @@ async function checkDocCoverage(domain, options = {}) {
2214
2271
  );
2215
2272
  }
2216
2273
  }
2217
- function suggestFix(path20, existingFiles) {
2218
- const targetName = basename2(path20).toLowerCase();
2274
+ function suggestFix(path22, existingFiles) {
2275
+ const targetName = basename2(path22).toLowerCase();
2219
2276
  const similar = existingFiles.find((file) => {
2220
2277
  const fileName = basename2(file).toLowerCase();
2221
2278
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -2223,7 +2280,7 @@ function suggestFix(path20, existingFiles) {
2223
2280
  if (similar) {
2224
2281
  return `Did you mean "${similar}"?`;
2225
2282
  }
2226
- return `Create the file "${path20}" or remove the link`;
2283
+ return `Create the file "${path22}" or remove the link`;
2227
2284
  }
2228
2285
  async function validateKnowledgeMap(rootDir = process.cwd()) {
2229
2286
  const agentsPath = join22(rootDir, "AGENTS.md");
@@ -2237,7 +2294,7 @@ async function validateKnowledgeMap(rootDir = process.cwd()) {
2237
2294
  totalLinks: agentsTotalLinks
2238
2295
  } = agentsResult.value;
2239
2296
  const existingFiles = await findFiles("**/*", rootDir);
2240
- const relativeExistingFiles = existingFiles.map((f) => relative22(rootDir, f));
2297
+ const relativeExistingFiles = existingFiles.map((f) => relativePosix(rootDir, f));
2241
2298
  const brokenLinks = agentsBrokenLinks.map((link) => {
2242
2299
  const section = sections.find(
2243
2300
  (s) => s.links.some((l) => l.path === link.path && l.line === link.line)
@@ -2275,7 +2332,7 @@ var DEFAULT_SECTIONS = [
2275
2332
  function groupByDirectory(files, rootDir) {
2276
2333
  const groups = /* @__PURE__ */ new Map();
2277
2334
  for (const file of files) {
2278
- const relativePath = relative32(rootDir, file);
2335
+ const relativePath = relativePosix(rootDir, file);
2279
2336
  const dir = dirname22(relativePath);
2280
2337
  if (!groups.has(dir)) {
2281
2338
  groups.set(dir, []);
@@ -2331,7 +2388,7 @@ async function generateAgentsMap(config, graphSections) {
2331
2388
  allFiles.push(...files);
2332
2389
  }
2333
2390
  const filteredFiles = allFiles.filter((file) => {
2334
- const relativePath = relative32(rootDir, file);
2391
+ const relativePath = relativePosix(rootDir, file);
2335
2392
  return !matchesExcludePattern(relativePath, excludePaths);
2336
2393
  });
2337
2394
  lines.push("## Repository Structure");
@@ -2359,11 +2416,11 @@ async function generateAgentsMap(config, graphSections) {
2359
2416
  }
2360
2417
  const sectionFiles = await findFiles(section.pattern, rootDir);
2361
2418
  const filteredSectionFiles = sectionFiles.filter((file) => {
2362
- const relativePath = relative32(rootDir, file);
2419
+ const relativePath = relativePosix(rootDir, file);
2363
2420
  return !matchesExcludePattern(relativePath, excludePaths);
2364
2421
  });
2365
2422
  for (const file of filteredSectionFiles.slice(0, 20)) {
2366
- lines.push(formatFileLink(relative32(rootDir, file)));
2423
+ lines.push(formatFileLink(relativePosix(rootDir, file)));
2367
2424
  }
2368
2425
  if (filteredSectionFiles.length > 20) {
2369
2426
  lines.push(`- _... and ${filteredSectionFiles.length - 20} more files_`);
@@ -2566,8 +2623,8 @@ function createBoundaryValidator(schema, name) {
2566
2623
  return Ok(result.data);
2567
2624
  }
2568
2625
  const suggestions = result.error.issues.map((issue) => {
2569
- const path20 = issue.path.join(".");
2570
- return path20 ? `${path20}: ${issue.message}` : issue.message;
2626
+ const path22 = issue.path.join(".");
2627
+ return path22 ? `${path22}: ${issue.message}` : issue.message;
2571
2628
  });
2572
2629
  return Err(
2573
2630
  createError(
@@ -2777,175 +2834,183 @@ function stringArraysEqual(a, b) {
2777
2834
  const sortedB = [...b].sort();
2778
2835
  return sortedA.every((val, i) => val === sortedB[i]);
2779
2836
  }
2780
- function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
2781
- const config = { ...localConfig };
2782
- const contributions = {};
2783
- const conflicts = [];
2784
- if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
2785
- const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
2786
- const mergedLayers = [...localLayers];
2787
- const contributedLayerNames = [];
2788
- for (const bundleLayer of bundleConstraints.layers) {
2789
- const existing = localLayers.find((l) => l.name === bundleLayer.name);
2790
- if (!existing) {
2791
- mergedLayers.push(bundleLayer);
2792
- contributedLayerNames.push(bundleLayer.name);
2793
- } else {
2794
- const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
2795
- if (!same) {
2796
- conflicts.push({
2797
- section: "layers",
2798
- key: bundleLayer.name,
2799
- localValue: existing,
2800
- packageValue: bundleLayer,
2801
- description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
2802
- });
2803
- }
2837
+ function mergeLayers(localConfig, bundleLayers, config, contributions, conflicts) {
2838
+ const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
2839
+ const mergedLayers = [...localLayers];
2840
+ const contributedLayerNames = [];
2841
+ for (const bundleLayer of bundleLayers) {
2842
+ const existing = localLayers.find((l) => l.name === bundleLayer.name);
2843
+ if (!existing) {
2844
+ mergedLayers.push(bundleLayer);
2845
+ contributedLayerNames.push(bundleLayer.name);
2846
+ } else {
2847
+ const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
2848
+ if (!same) {
2849
+ conflicts.push({
2850
+ section: "layers",
2851
+ key: bundleLayer.name,
2852
+ localValue: existing,
2853
+ packageValue: bundleLayer,
2854
+ description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
2855
+ });
2856
+ }
2857
+ }
2858
+ }
2859
+ config.layers = mergedLayers;
2860
+ if (contributedLayerNames.length > 0) contributions.layers = contributedLayerNames;
2861
+ }
2862
+ function mergeForbiddenImports(localConfig, bundleRules, config, contributions, conflicts) {
2863
+ const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
2864
+ const mergedFI = [...localFI];
2865
+ const contributedFromKeys = [];
2866
+ for (const bundleRule of bundleRules) {
2867
+ const existing = localFI.find((r) => r.from === bundleRule.from);
2868
+ if (!existing) {
2869
+ const entry = { from: bundleRule.from, disallow: bundleRule.disallow };
2870
+ if (bundleRule.message !== void 0) entry.message = bundleRule.message;
2871
+ mergedFI.push(entry);
2872
+ contributedFromKeys.push(bundleRule.from);
2873
+ } else {
2874
+ if (!stringArraysEqual(existing.disallow, bundleRule.disallow)) {
2875
+ conflicts.push({
2876
+ section: "forbiddenImports",
2877
+ key: bundleRule.from,
2878
+ localValue: existing,
2879
+ packageValue: bundleRule,
2880
+ description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
2881
+ });
2804
2882
  }
2805
2883
  }
2806
- config.layers = mergedLayers;
2807
- if (contributedLayerNames.length > 0) {
2808
- contributions.layers = contributedLayerNames;
2884
+ }
2885
+ config.forbiddenImports = mergedFI;
2886
+ if (contributedFromKeys.length > 0) contributions.forbiddenImports = contributedFromKeys;
2887
+ }
2888
+ function mergeBoundaries(localConfig, bundleBoundaries, config, contributions) {
2889
+ const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
2890
+ const localSchemas = new Set(localBoundaries.requireSchema ?? []);
2891
+ const newSchemas = [];
2892
+ for (const schema of bundleBoundaries.requireSchema ?? []) {
2893
+ if (!localSchemas.has(schema)) {
2894
+ newSchemas.push(schema);
2895
+ localSchemas.add(schema);
2896
+ }
2897
+ }
2898
+ config.boundaries = { requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas] };
2899
+ if (newSchemas.length > 0) contributions.boundaries = newSchemas;
2900
+ }
2901
+ function mergeArchitecture(localConfig, bundleArch, config, contributions, conflicts) {
2902
+ const localArch = localConfig.architecture ?? { thresholds: {}, modules: {} };
2903
+ const mergedThresholds = { ...localArch.thresholds };
2904
+ const contributedThresholdKeys = [];
2905
+ for (const [category, value] of Object.entries(bundleArch.thresholds ?? {})) {
2906
+ if (!(category in mergedThresholds)) {
2907
+ mergedThresholds[category] = value;
2908
+ contributedThresholdKeys.push(category);
2909
+ } else if (!deepEqual(mergedThresholds[category], value)) {
2910
+ conflicts.push({
2911
+ section: "architecture.thresholds",
2912
+ key: category,
2913
+ localValue: mergedThresholds[category],
2914
+ packageValue: value,
2915
+ description: `Architecture threshold '${category}' already exists locally with a different value`
2916
+ });
2809
2917
  }
2810
2918
  }
2811
- if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
2812
- const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
2813
- const mergedFI = [...localFI];
2814
- const contributedFromKeys = [];
2815
- for (const bundleRule of bundleConstraints.forbiddenImports) {
2816
- const existing = localFI.find((r) => r.from === bundleRule.from);
2817
- if (!existing) {
2818
- const entry = {
2819
- from: bundleRule.from,
2820
- disallow: bundleRule.disallow
2821
- };
2822
- if (bundleRule.message !== void 0) {
2823
- entry.message = bundleRule.message;
2824
- }
2825
- mergedFI.push(entry);
2826
- contributedFromKeys.push(bundleRule.from);
2827
- } else {
2828
- const same = stringArraysEqual(existing.disallow, bundleRule.disallow);
2829
- if (!same) {
2919
+ const mergedModules = { ...localArch.modules };
2920
+ const contributedModuleKeys = [];
2921
+ for (const [modulePath, bundleCategoryMap] of Object.entries(bundleArch.modules ?? {})) {
2922
+ if (!(modulePath in mergedModules)) {
2923
+ mergedModules[modulePath] = bundleCategoryMap;
2924
+ for (const cat of Object.keys(bundleCategoryMap))
2925
+ contributedModuleKeys.push(`${modulePath}:${cat}`);
2926
+ } else {
2927
+ const mergedCategoryMap = { ...mergedModules[modulePath] };
2928
+ for (const [category, value] of Object.entries(bundleCategoryMap)) {
2929
+ if (!(category in mergedCategoryMap)) {
2930
+ mergedCategoryMap[category] = value;
2931
+ contributedModuleKeys.push(`${modulePath}:${category}`);
2932
+ } else if (!deepEqual(mergedCategoryMap[category], value)) {
2830
2933
  conflicts.push({
2831
- section: "forbiddenImports",
2832
- key: bundleRule.from,
2833
- localValue: existing,
2834
- packageValue: bundleRule,
2835
- description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
2934
+ section: "architecture.modules",
2935
+ key: `${modulePath}:${category}`,
2936
+ localValue: mergedCategoryMap[category],
2937
+ packageValue: value,
2938
+ description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
2836
2939
  });
2837
2940
  }
2838
2941
  }
2942
+ mergedModules[modulePath] = mergedCategoryMap;
2943
+ }
2944
+ }
2945
+ config.architecture = { ...localArch, thresholds: mergedThresholds, modules: mergedModules };
2946
+ if (contributedThresholdKeys.length > 0)
2947
+ contributions["architecture.thresholds"] = contributedThresholdKeys;
2948
+ if (contributedModuleKeys.length > 0)
2949
+ contributions["architecture.modules"] = contributedModuleKeys;
2950
+ }
2951
+ function mergeSecurityRules(localConfig, bundleRules, config, contributions, conflicts) {
2952
+ const localSecurity = localConfig.security ?? { rules: {} };
2953
+ const localRules = localSecurity.rules ?? {};
2954
+ const mergedRules = { ...localRules };
2955
+ const contributedRuleIds = [];
2956
+ for (const [ruleId, severity] of Object.entries(bundleRules)) {
2957
+ if (!(ruleId in mergedRules)) {
2958
+ mergedRules[ruleId] = severity;
2959
+ contributedRuleIds.push(ruleId);
2960
+ } else if (mergedRules[ruleId] !== severity) {
2961
+ conflicts.push({
2962
+ section: "security.rules",
2963
+ key: ruleId,
2964
+ localValue: mergedRules[ruleId],
2965
+ packageValue: severity,
2966
+ description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
2967
+ });
2839
2968
  }
2840
- config.forbiddenImports = mergedFI;
2841
- if (contributedFromKeys.length > 0) {
2842
- contributions.forbiddenImports = contributedFromKeys;
2843
- }
2969
+ }
2970
+ config.security = { ...localSecurity, rules: mergedRules };
2971
+ if (contributedRuleIds.length > 0) contributions["security.rules"] = contributedRuleIds;
2972
+ }
2973
+ function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
2974
+ const config = { ...localConfig };
2975
+ const contributions = {};
2976
+ const conflicts = [];
2977
+ if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
2978
+ mergeLayers(localConfig, bundleConstraints.layers, config, contributions, conflicts);
2979
+ }
2980
+ if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
2981
+ mergeForbiddenImports(
2982
+ localConfig,
2983
+ bundleConstraints.forbiddenImports,
2984
+ config,
2985
+ contributions,
2986
+ conflicts
2987
+ );
2844
2988
  }
2845
2989
  if (bundleConstraints.boundaries) {
2846
- const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
2847
- const localSchemas = new Set(localBoundaries.requireSchema ?? []);
2848
- const bundleSchemas = bundleConstraints.boundaries.requireSchema ?? [];
2849
- const newSchemas = [];
2850
- for (const schema of bundleSchemas) {
2851
- if (!localSchemas.has(schema)) {
2852
- newSchemas.push(schema);
2853
- localSchemas.add(schema);
2854
- }
2855
- }
2856
- config.boundaries = {
2857
- requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas]
2858
- };
2859
- if (newSchemas.length > 0) {
2860
- contributions.boundaries = newSchemas;
2861
- }
2990
+ mergeBoundaries(
2991
+ localConfig,
2992
+ bundleConstraints.boundaries,
2993
+ config,
2994
+ contributions
2995
+ );
2862
2996
  }
2863
2997
  if (bundleConstraints.architecture) {
2864
- const localArch = localConfig.architecture ?? {
2865
- thresholds: {},
2866
- modules: {}
2867
- };
2868
- const mergedThresholds = { ...localArch.thresholds };
2869
- const contributedThresholdKeys = [];
2870
- const bundleThresholds = bundleConstraints.architecture.thresholds ?? {};
2871
- for (const [category, value] of Object.entries(bundleThresholds)) {
2872
- if (!(category in mergedThresholds)) {
2873
- mergedThresholds[category] = value;
2874
- contributedThresholdKeys.push(category);
2875
- } else if (!deepEqual(mergedThresholds[category], value)) {
2876
- conflicts.push({
2877
- section: "architecture.thresholds",
2878
- key: category,
2879
- localValue: mergedThresholds[category],
2880
- packageValue: value,
2881
- description: `Architecture threshold '${category}' already exists locally with a different value`
2882
- });
2883
- }
2884
- }
2885
- const mergedModules = { ...localArch.modules };
2886
- const contributedModuleKeys = [];
2887
- const bundleModules = bundleConstraints.architecture.modules ?? {};
2888
- for (const [modulePath, bundleCategoryMap] of Object.entries(bundleModules)) {
2889
- if (!(modulePath in mergedModules)) {
2890
- mergedModules[modulePath] = bundleCategoryMap;
2891
- for (const cat of Object.keys(bundleCategoryMap)) {
2892
- contributedModuleKeys.push(`${modulePath}:${cat}`);
2893
- }
2894
- } else {
2895
- const localCategoryMap = mergedModules[modulePath];
2896
- const mergedCategoryMap = { ...localCategoryMap };
2897
- for (const [category, value] of Object.entries(bundleCategoryMap)) {
2898
- if (!(category in mergedCategoryMap)) {
2899
- mergedCategoryMap[category] = value;
2900
- contributedModuleKeys.push(`${modulePath}:${category}`);
2901
- } else if (!deepEqual(mergedCategoryMap[category], value)) {
2902
- conflicts.push({
2903
- section: "architecture.modules",
2904
- key: `${modulePath}:${category}`,
2905
- localValue: mergedCategoryMap[category],
2906
- packageValue: value,
2907
- description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
2908
- });
2909
- }
2910
- }
2911
- mergedModules[modulePath] = mergedCategoryMap;
2912
- }
2913
- }
2914
- config.architecture = {
2915
- ...localArch,
2916
- thresholds: mergedThresholds,
2917
- modules: mergedModules
2918
- };
2919
- if (contributedThresholdKeys.length > 0) {
2920
- contributions["architecture.thresholds"] = contributedThresholdKeys;
2921
- }
2922
- if (contributedModuleKeys.length > 0) {
2923
- contributions["architecture.modules"] = contributedModuleKeys;
2924
- }
2998
+ mergeArchitecture(
2999
+ localConfig,
3000
+ bundleConstraints.architecture,
3001
+ config,
3002
+ contributions,
3003
+ conflicts
3004
+ );
2925
3005
  }
2926
3006
  if (bundleConstraints.security?.rules) {
2927
- const localSecurity = localConfig.security ?? { rules: {} };
2928
- const localRules = localSecurity.rules ?? {};
2929
- const mergedRules = { ...localRules };
2930
- const contributedRuleIds = [];
2931
- for (const [ruleId, severity] of Object.entries(bundleConstraints.security.rules)) {
2932
- if (!(ruleId in mergedRules)) {
2933
- mergedRules[ruleId] = severity;
2934
- contributedRuleIds.push(ruleId);
2935
- } else if (mergedRules[ruleId] !== severity) {
2936
- conflicts.push({
2937
- section: "security.rules",
2938
- key: ruleId,
2939
- localValue: mergedRules[ruleId],
2940
- packageValue: severity,
2941
- description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
2942
- });
2943
- }
2944
- }
2945
- config.security = { ...localSecurity, rules: mergedRules };
2946
- if (contributedRuleIds.length > 0) {
2947
- contributions["security.rules"] = contributedRuleIds;
2948
- }
3007
+ mergeSecurityRules(
3008
+ localConfig,
3009
+ bundleConstraints.security.rules,
3010
+ config,
3011
+ contributions,
3012
+ conflicts
3013
+ );
2949
3014
  }
2950
3015
  return { config, contributions, conflicts };
2951
3016
  }
@@ -3094,14 +3159,84 @@ function walk(node, visitor) {
3094
3159
  }
3095
3160
  }
3096
3161
  }
3162
+ function makeLocation(node) {
3163
+ return {
3164
+ file: "",
3165
+ line: node.loc?.start.line ?? 0,
3166
+ column: node.loc?.start.column ?? 0
3167
+ };
3168
+ }
3169
+ function processImportSpecifiers(importDecl, imp) {
3170
+ for (const spec of importDecl.specifiers) {
3171
+ if (spec.type === "ImportDefaultSpecifier") {
3172
+ imp.default = spec.local.name;
3173
+ } else if (spec.type === "ImportNamespaceSpecifier") {
3174
+ imp.namespace = spec.local.name;
3175
+ } else if (spec.type === "ImportSpecifier") {
3176
+ imp.specifiers.push(spec.local.name);
3177
+ if (spec.importKind === "type") {
3178
+ imp.kind = "type";
3179
+ }
3180
+ }
3181
+ }
3182
+ }
3183
+ function getExportedName(exported) {
3184
+ return exported.type === "Identifier" ? exported.name : String(exported.value);
3185
+ }
3186
+ function processReExportSpecifiers(exportDecl, exports) {
3187
+ for (const spec of exportDecl.specifiers) {
3188
+ if (spec.type !== "ExportSpecifier") continue;
3189
+ exports.push({
3190
+ name: getExportedName(spec.exported),
3191
+ type: "named",
3192
+ location: makeLocation(exportDecl),
3193
+ isReExport: true,
3194
+ source: exportDecl.source.value
3195
+ });
3196
+ }
3197
+ }
3198
+ function processExportDeclaration(exportDecl, exports) {
3199
+ const decl = exportDecl.declaration;
3200
+ if (!decl) return;
3201
+ if (decl.type === "VariableDeclaration") {
3202
+ for (const declarator of decl.declarations) {
3203
+ if (declarator.id.type === "Identifier") {
3204
+ exports.push({
3205
+ name: declarator.id.name,
3206
+ type: "named",
3207
+ location: makeLocation(decl),
3208
+ isReExport: false
3209
+ });
3210
+ }
3211
+ }
3212
+ } else if ((decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") && decl.id) {
3213
+ exports.push({
3214
+ name: decl.id.name,
3215
+ type: "named",
3216
+ location: makeLocation(decl),
3217
+ isReExport: false
3218
+ });
3219
+ }
3220
+ }
3221
+ function processExportListSpecifiers(exportDecl, exports) {
3222
+ for (const spec of exportDecl.specifiers) {
3223
+ if (spec.type !== "ExportSpecifier") continue;
3224
+ exports.push({
3225
+ name: getExportedName(spec.exported),
3226
+ type: "named",
3227
+ location: makeLocation(exportDecl),
3228
+ isReExport: false
3229
+ });
3230
+ }
3231
+ }
3097
3232
  var TypeScriptParser = class {
3098
3233
  name = "typescript";
3099
3234
  extensions = [".ts", ".tsx", ".mts", ".cts"];
3100
- async parseFile(path20) {
3101
- const contentResult = await readFileContent(path20);
3235
+ async parseFile(path22) {
3236
+ const contentResult = await readFileContent(path22);
3102
3237
  if (!contentResult.ok) {
3103
3238
  return Err(
3104
- createParseError("NOT_FOUND", `File not found: ${path20}`, { path: path20 }, [
3239
+ createParseError("NOT_FOUND", `File not found: ${path22}`, { path: path22 }, [
3105
3240
  "Check that the file exists",
3106
3241
  "Verify the path is correct"
3107
3242
  ])
@@ -3111,7 +3246,7 @@ var TypeScriptParser = class {
3111
3246
  const ast = parse(contentResult.value, {
3112
3247
  loc: true,
3113
3248
  range: true,
3114
- jsx: path20.endsWith(".tsx"),
3249
+ jsx: path22.endsWith(".tsx"),
3115
3250
  errorOnUnknownASTType: false
3116
3251
  });
3117
3252
  return Ok({
@@ -3122,7 +3257,7 @@ var TypeScriptParser = class {
3122
3257
  } catch (e) {
3123
3258
  const error = e;
3124
3259
  return Err(
3125
- createParseError("SYNTAX_ERROR", `Failed to parse ${path20}: ${error.message}`, { path: path20 }, [
3260
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path22}: ${error.message}`, { path: path22 }, [
3126
3261
  "Check for syntax errors in the file",
3127
3262
  "Ensure valid TypeScript syntax"
3128
3263
  ])
@@ -3138,26 +3273,12 @@ var TypeScriptParser = class {
3138
3273
  const imp = {
3139
3274
  source: importDecl.source.value,
3140
3275
  specifiers: [],
3141
- location: {
3142
- file: "",
3143
- line: importDecl.loc?.start.line ?? 0,
3144
- column: importDecl.loc?.start.column ?? 0
3145
- },
3276
+ location: makeLocation(importDecl),
3146
3277
  kind: importDecl.importKind === "type" ? "type" : "value"
3147
3278
  };
3148
- for (const spec of importDecl.specifiers) {
3149
- if (spec.type === "ImportDefaultSpecifier") {
3150
- imp.default = spec.local.name;
3151
- } else if (spec.type === "ImportNamespaceSpecifier") {
3152
- imp.namespace = spec.local.name;
3153
- } else if (spec.type === "ImportSpecifier") {
3154
- imp.specifiers.push(spec.local.name);
3155
- if (spec.importKind === "type") {
3156
- imp.kind = "type";
3157
- }
3158
- }
3159
- }
3279
+ processImportSpecifiers(importDecl, imp);
3160
3280
  imports.push(imp);
3281
+ return;
3161
3282
  }
3162
3283
  if (node.type === "ImportExpression") {
3163
3284
  const importExpr = node;
@@ -3165,11 +3286,7 @@ var TypeScriptParser = class {
3165
3286
  imports.push({
3166
3287
  source: importExpr.source.value,
3167
3288
  specifiers: [],
3168
- location: {
3169
- file: "",
3170
- line: importExpr.loc?.start.line ?? 0,
3171
- column: importExpr.loc?.start.column ?? 0
3172
- },
3289
+ location: makeLocation(importExpr),
3173
3290
  kind: "value"
3174
3291
  });
3175
3292
  }
@@ -3184,97 +3301,29 @@ var TypeScriptParser = class {
3184
3301
  if (node.type === "ExportNamedDeclaration") {
3185
3302
  const exportDecl = node;
3186
3303
  if (exportDecl.source) {
3187
- for (const spec of exportDecl.specifiers) {
3188
- if (spec.type === "ExportSpecifier") {
3189
- const exported = spec.exported;
3190
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
3191
- exports.push({
3192
- name,
3193
- type: "named",
3194
- location: {
3195
- file: "",
3196
- line: exportDecl.loc?.start.line ?? 0,
3197
- column: exportDecl.loc?.start.column ?? 0
3198
- },
3199
- isReExport: true,
3200
- source: exportDecl.source.value
3201
- });
3202
- }
3203
- }
3304
+ processReExportSpecifiers(exportDecl, exports);
3204
3305
  return;
3205
3306
  }
3206
- if (exportDecl.declaration) {
3207
- const decl = exportDecl.declaration;
3208
- if (decl.type === "VariableDeclaration") {
3209
- for (const declarator of decl.declarations) {
3210
- if (declarator.id.type === "Identifier") {
3211
- exports.push({
3212
- name: declarator.id.name,
3213
- type: "named",
3214
- location: {
3215
- file: "",
3216
- line: decl.loc?.start.line ?? 0,
3217
- column: decl.loc?.start.column ?? 0
3218
- },
3219
- isReExport: false
3220
- });
3221
- }
3222
- }
3223
- } else if (decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") {
3224
- if (decl.id) {
3225
- exports.push({
3226
- name: decl.id.name,
3227
- type: "named",
3228
- location: {
3229
- file: "",
3230
- line: decl.loc?.start.line ?? 0,
3231
- column: decl.loc?.start.column ?? 0
3232
- },
3233
- isReExport: false
3234
- });
3235
- }
3236
- }
3237
- }
3238
- for (const spec of exportDecl.specifiers) {
3239
- if (spec.type === "ExportSpecifier") {
3240
- const exported = spec.exported;
3241
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
3242
- exports.push({
3243
- name,
3244
- type: "named",
3245
- location: {
3246
- file: "",
3247
- line: exportDecl.loc?.start.line ?? 0,
3248
- column: exportDecl.loc?.start.column ?? 0
3249
- },
3250
- isReExport: false
3251
- });
3252
- }
3253
- }
3307
+ processExportDeclaration(exportDecl, exports);
3308
+ processExportListSpecifiers(exportDecl, exports);
3309
+ return;
3254
3310
  }
3255
3311
  if (node.type === "ExportDefaultDeclaration") {
3256
3312
  const exportDecl = node;
3257
3313
  exports.push({
3258
3314
  name: "default",
3259
3315
  type: "default",
3260
- location: {
3261
- file: "",
3262
- line: exportDecl.loc?.start.line ?? 0,
3263
- column: exportDecl.loc?.start.column ?? 0
3264
- },
3316
+ location: makeLocation(exportDecl),
3265
3317
  isReExport: false
3266
3318
  });
3319
+ return;
3267
3320
  }
3268
3321
  if (node.type === "ExportAllDeclaration") {
3269
3322
  const exportDecl = node;
3270
3323
  exports.push({
3271
3324
  name: exportDecl.exported?.name ?? "*",
3272
3325
  type: "namespace",
3273
- location: {
3274
- file: "",
3275
- line: exportDecl.loc?.start.line ?? 0,
3276
- column: exportDecl.loc?.start.column ?? 0
3277
- },
3326
+ location: makeLocation(exportDecl),
3278
3327
  isReExport: true,
3279
3328
  source: exportDecl.source.value
3280
3329
  });
@@ -3286,10 +3335,27 @@ var TypeScriptParser = class {
3286
3335
  return Ok({ available: true, version: "7.0.0" });
3287
3336
  }
3288
3337
  };
3338
+ function collectFieldEntries(rootDir, field) {
3339
+ if (typeof field === "string") return [resolve3(rootDir, field)];
3340
+ if (typeof field === "object" && field !== null) {
3341
+ return Object.values(field).filter((v) => typeof v === "string").map((v) => resolve3(rootDir, v));
3342
+ }
3343
+ return [];
3344
+ }
3345
+ function extractPackageEntries(rootDir, pkg) {
3346
+ const entries = [];
3347
+ entries.push(...collectFieldEntries(rootDir, pkg["exports"]));
3348
+ if (entries.length === 0 && typeof pkg["main"] === "string") {
3349
+ entries.push(resolve3(rootDir, pkg["main"]));
3350
+ }
3351
+ if (pkg["bin"]) {
3352
+ entries.push(...collectFieldEntries(rootDir, pkg["bin"]));
3353
+ }
3354
+ return entries;
3355
+ }
3289
3356
  async function resolveEntryPoints(rootDir, explicitEntries) {
3290
3357
  if (explicitEntries && explicitEntries.length > 0) {
3291
- const resolved = explicitEntries.map((e) => resolve3(rootDir, e));
3292
- return Ok(resolved);
3358
+ return Ok(explicitEntries.map((e) => resolve3(rootDir, e)));
3293
3359
  }
3294
3360
  const pkgPath = join32(rootDir, "package.json");
3295
3361
  if (await fileExists(pkgPath)) {
@@ -3297,38 +3363,8 @@ async function resolveEntryPoints(rootDir, explicitEntries) {
3297
3363
  if (pkgContent.ok) {
3298
3364
  try {
3299
3365
  const pkg = JSON.parse(pkgContent.value);
3300
- const entries = [];
3301
- if (pkg["exports"]) {
3302
- const exports = pkg["exports"];
3303
- if (typeof exports === "string") {
3304
- entries.push(resolve3(rootDir, exports));
3305
- } else if (typeof exports === "object" && exports !== null) {
3306
- for (const value of Object.values(exports)) {
3307
- if (typeof value === "string") {
3308
- entries.push(resolve3(rootDir, value));
3309
- }
3310
- }
3311
- }
3312
- }
3313
- const main = pkg["main"];
3314
- if (typeof main === "string" && entries.length === 0) {
3315
- entries.push(resolve3(rootDir, main));
3316
- }
3317
- const bin = pkg["bin"];
3318
- if (bin) {
3319
- if (typeof bin === "string") {
3320
- entries.push(resolve3(rootDir, bin));
3321
- } else if (typeof bin === "object") {
3322
- for (const value of Object.values(bin)) {
3323
- if (typeof value === "string") {
3324
- entries.push(resolve3(rootDir, value));
3325
- }
3326
- }
3327
- }
3328
- }
3329
- if (entries.length > 0) {
3330
- return Ok(entries);
3331
- }
3366
+ const entries = extractPackageEntries(rootDir, pkg);
3367
+ if (entries.length > 0) return Ok(entries);
3332
3368
  } catch {
3333
3369
  }
3334
3370
  }
@@ -3402,66 +3438,49 @@ function extractInlineRefs(content) {
3402
3438
  }
3403
3439
  return refs;
3404
3440
  }
3405
- async function parseDocumentationFile(path20) {
3406
- const contentResult = await readFileContent(path20);
3441
+ async function parseDocumentationFile(path22) {
3442
+ const contentResult = await readFileContent(path22);
3407
3443
  if (!contentResult.ok) {
3408
3444
  return Err(
3409
3445
  createEntropyError(
3410
3446
  "PARSE_ERROR",
3411
- `Failed to read documentation file: ${path20}`,
3412
- { file: path20 },
3447
+ `Failed to read documentation file: ${path22}`,
3448
+ { file: path22 },
3413
3449
  ["Check that the file exists"]
3414
3450
  )
3415
3451
  );
3416
3452
  }
3417
3453
  const content = contentResult.value;
3418
- const type = path20.endsWith(".md") ? "markdown" : "text";
3454
+ const type = path22.endsWith(".md") ? "markdown" : "text";
3419
3455
  return Ok({
3420
- path: path20,
3456
+ path: path22,
3421
3457
  type,
3422
3458
  content,
3423
3459
  codeBlocks: extractCodeBlocks(content),
3424
3460
  inlineRefs: extractInlineRefs(content)
3425
3461
  });
3426
3462
  }
3463
+ function makeInternalSymbol(name, type, line) {
3464
+ return { name, type, line, references: 0, calledBy: [] };
3465
+ }
3466
+ function extractSymbolsFromNode(node) {
3467
+ const line = node.loc?.start?.line || 0;
3468
+ if (node.type === "FunctionDeclaration" && node.id?.name) {
3469
+ return [makeInternalSymbol(node.id.name, "function", line)];
3470
+ }
3471
+ if (node.type === "VariableDeclaration") {
3472
+ return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
3473
+ }
3474
+ if (node.type === "ClassDeclaration" && node.id?.name) {
3475
+ return [makeInternalSymbol(node.id.name, "class", line)];
3476
+ }
3477
+ return [];
3478
+ }
3427
3479
  function extractInternalSymbols(ast) {
3428
- const symbols = [];
3429
3480
  const body = ast.body;
3430
- if (!body?.body) return symbols;
3431
- for (const node of body.body) {
3432
- if (node.type === "FunctionDeclaration" && node.id?.name) {
3433
- symbols.push({
3434
- name: node.id.name,
3435
- type: "function",
3436
- line: node.loc?.start?.line || 0,
3437
- references: 0,
3438
- calledBy: []
3439
- });
3440
- }
3441
- if (node.type === "VariableDeclaration") {
3442
- for (const decl of node.declarations || []) {
3443
- if (decl.id?.name) {
3444
- symbols.push({
3445
- name: decl.id.name,
3446
- type: "variable",
3447
- line: node.loc?.start?.line || 0,
3448
- references: 0,
3449
- calledBy: []
3450
- });
3451
- }
3452
- }
3453
- }
3454
- if (node.type === "ClassDeclaration" && node.id?.name) {
3455
- symbols.push({
3456
- name: node.id.name,
3457
- type: "class",
3458
- line: node.loc?.start?.line || 0,
3459
- references: 0,
3460
- calledBy: []
3461
- });
3462
- }
3463
- }
3464
- return symbols;
3481
+ if (!body?.body) return [];
3482
+ const nodes = body.body;
3483
+ return nodes.flatMap(extractSymbolsFromNode);
3465
3484
  }
3466
3485
  function extractJSDocComments(ast) {
3467
3486
  const comments = [];
@@ -3548,7 +3567,7 @@ async function buildSnapshot(config) {
3548
3567
  sourceFilePaths.push(...files2);
3549
3568
  }
3550
3569
  sourceFilePaths = sourceFilePaths.filter((f) => {
3551
- const rel = relative42(rootDir, f);
3570
+ const rel = relativePosix(rootDir, f);
3552
3571
  return !excludePatterns.some((p) => minimatch22(rel, p));
3553
3572
  });
3554
3573
  const files = [];
@@ -3599,27 +3618,34 @@ async function buildSnapshot(config) {
3599
3618
  buildTime
3600
3619
  });
3601
3620
  }
3602
- function levenshteinDistance(a, b) {
3621
+ function initLevenshteinMatrix(aLen, bLen) {
3603
3622
  const matrix = [];
3604
- for (let i = 0; i <= b.length; i++) {
3623
+ for (let i = 0; i <= bLen; i++) {
3605
3624
  matrix[i] = [i];
3606
3625
  }
3607
- for (let j = 0; j <= a.length; j++) {
3608
- const row = matrix[0];
3609
- if (row) {
3610
- row[j] = j;
3626
+ const firstRow = matrix[0];
3627
+ if (firstRow) {
3628
+ for (let j = 0; j <= aLen; j++) {
3629
+ firstRow[j] = j;
3611
3630
  }
3612
3631
  }
3632
+ return matrix;
3633
+ }
3634
+ function computeLevenshteinCell(row, prevRow, j, charsMatch) {
3635
+ if (charsMatch) {
3636
+ row[j] = prevRow[j - 1] ?? 0;
3637
+ } else {
3638
+ row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
3639
+ }
3640
+ }
3641
+ function levenshteinDistance(a, b) {
3642
+ const matrix = initLevenshteinMatrix(a.length, b.length);
3613
3643
  for (let i = 1; i <= b.length; i++) {
3614
3644
  for (let j = 1; j <= a.length; j++) {
3615
3645
  const row = matrix[i];
3616
3646
  const prevRow = matrix[i - 1];
3617
3647
  if (!row || !prevRow) continue;
3618
- if (b.charAt(i - 1) === a.charAt(j - 1)) {
3619
- row[j] = prevRow[j - 1] ?? 0;
3620
- } else {
3621
- row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
3622
- }
3648
+ computeLevenshteinCell(row, prevRow, j, b.charAt(i - 1) === a.charAt(j - 1));
3623
3649
  }
3624
3650
  }
3625
3651
  const lastRow = matrix[b.length];
@@ -3902,32 +3928,27 @@ function findDeadExports(snapshot, usageMap, reachability) {
3902
3928
  }
3903
3929
  return deadExports;
3904
3930
  }
3905
- function countLinesFromAST(ast) {
3906
- if (ast.body && Array.isArray(ast.body)) {
3907
- let maxLine = 0;
3908
- const traverse = (node) => {
3909
- if (node && typeof node === "object") {
3910
- const n = node;
3911
- if (n.loc?.end?.line && n.loc.end.line > maxLine) {
3912
- maxLine = n.loc.end.line;
3913
- }
3914
- for (const key of Object.keys(node)) {
3915
- const value = node[key];
3916
- if (Array.isArray(value)) {
3917
- for (const item of value) {
3918
- traverse(item);
3919
- }
3920
- } else if (value && typeof value === "object") {
3921
- traverse(value);
3922
- }
3923
- }
3931
+ function findMaxLineInNode(node) {
3932
+ if (!node || typeof node !== "object") return 0;
3933
+ const n = node;
3934
+ let maxLine = n.loc?.end?.line ?? 0;
3935
+ for (const key of Object.keys(node)) {
3936
+ const value = node[key];
3937
+ if (Array.isArray(value)) {
3938
+ for (const item of value) {
3939
+ maxLine = Math.max(maxLine, findMaxLineInNode(item));
3924
3940
  }
3925
- };
3926
- traverse(ast);
3927
- if (maxLine > 0) return maxLine;
3928
- return Math.max(ast.body.length * 3, 1);
3941
+ } else if (value && typeof value === "object") {
3942
+ maxLine = Math.max(maxLine, findMaxLineInNode(value));
3943
+ }
3929
3944
  }
3930
- return 1;
3945
+ return maxLine;
3946
+ }
3947
+ function countLinesFromAST(ast) {
3948
+ if (!ast.body || !Array.isArray(ast.body)) return 1;
3949
+ const maxLine = findMaxLineInNode(ast);
3950
+ if (maxLine > 0) return maxLine;
3951
+ return Math.max(ast.body.length * 3, 1);
3931
3952
  }
3932
3953
  function findDeadFiles(snapshot, reachability) {
3933
3954
  const deadFiles = [];
@@ -4072,133 +4093,149 @@ async function detectDeadCode(snapshot, graphDeadCodeData) {
4072
4093
  return Ok(report);
4073
4094
  }
4074
4095
  function fileMatchesPattern(filePath, pattern, rootDir) {
4075
- const relativePath = relative52(rootDir, filePath);
4096
+ const relativePath = relativePosix(rootDir, filePath);
4076
4097
  return minimatch3(relativePath, pattern);
4077
4098
  }
4078
- function checkConfigPattern(pattern, file, rootDir) {
4099
+ var CONVENTION_DESCRIPTIONS = {
4100
+ camelCase: "camelCase (e.g., myFunction)",
4101
+ PascalCase: "PascalCase (e.g., MyClass)",
4102
+ UPPER_SNAKE: "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)",
4103
+ "kebab-case": "kebab-case (e.g., my-component)"
4104
+ };
4105
+ function checkMustExport(rule, file, message) {
4106
+ if (rule.type !== "must-export") return [];
4079
4107
  const matches = [];
4080
- const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
4081
- if (!fileMatches) {
4082
- return matches;
4083
- }
4084
- const rule = pattern.rule;
4085
- switch (rule.type) {
4086
- case "must-export": {
4087
- for (const name of rule.names) {
4088
- const hasExport = file.exports.some((e) => e.name === name);
4089
- if (!hasExport) {
4090
- matches.push({
4091
- line: 1,
4092
- message: pattern.message || `Missing required export: "${name}"`,
4093
- suggestion: `Add export for "${name}"`
4094
- });
4095
- }
4096
- }
4097
- break;
4098
- }
4099
- case "must-export-default": {
4100
- const hasDefault = file.exports.some((e) => e.type === "default");
4101
- if (!hasDefault) {
4102
- matches.push({
4103
- line: 1,
4104
- message: pattern.message || "File must have a default export",
4105
- suggestion: "Add a default export"
4106
- });
4107
- }
4108
- break;
4109
- }
4110
- case "no-export": {
4111
- for (const name of rule.names) {
4112
- const exp = file.exports.find((e) => e.name === name);
4113
- if (exp) {
4114
- matches.push({
4115
- line: exp.location.line,
4116
- message: pattern.message || `Forbidden export: "${name}"`,
4117
- suggestion: `Remove export "${name}"`
4118
- });
4119
- }
4120
- }
4121
- break;
4108
+ for (const name of rule.names) {
4109
+ if (!file.exports.some((e) => e.name === name)) {
4110
+ matches.push({
4111
+ line: 1,
4112
+ message: message || `Missing required export: "${name}"`,
4113
+ suggestion: `Add export for "${name}"`
4114
+ });
4122
4115
  }
4123
- case "must-import": {
4124
- const hasImport = file.imports.some(
4125
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
4126
- );
4127
- if (!hasImport) {
4128
- matches.push({
4129
- line: 1,
4130
- message: pattern.message || `Missing required import from "${rule.from}"`,
4131
- suggestion: `Add import from "${rule.from}"`
4132
- });
4116
+ }
4117
+ return matches;
4118
+ }
4119
+ function checkMustExportDefault(_rule, file, message) {
4120
+ if (!file.exports.some((e) => e.type === "default")) {
4121
+ return [
4122
+ {
4123
+ line: 1,
4124
+ message: message || "File must have a default export",
4125
+ suggestion: "Add a default export"
4133
4126
  }
4134
- break;
4127
+ ];
4128
+ }
4129
+ return [];
4130
+ }
4131
+ function checkNoExport(rule, file, message) {
4132
+ if (rule.type !== "no-export") return [];
4133
+ const matches = [];
4134
+ for (const name of rule.names) {
4135
+ const exp = file.exports.find((e) => e.name === name);
4136
+ if (exp) {
4137
+ matches.push({
4138
+ line: exp.location.line,
4139
+ message: message || `Forbidden export: "${name}"`,
4140
+ suggestion: `Remove export "${name}"`
4141
+ });
4135
4142
  }
4136
- case "no-import": {
4137
- const forbiddenImport = file.imports.find(
4138
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
4139
- );
4140
- if (forbiddenImport) {
4141
- matches.push({
4142
- line: forbiddenImport.location.line,
4143
- message: pattern.message || `Forbidden import from "${rule.from}"`,
4144
- suggestion: `Remove import from "${rule.from}"`
4145
- });
4143
+ }
4144
+ return matches;
4145
+ }
4146
+ function checkMustImport(rule, file, message) {
4147
+ if (rule.type !== "must-import") return [];
4148
+ const hasImport = file.imports.some(
4149
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
4150
+ );
4151
+ if (!hasImport) {
4152
+ return [
4153
+ {
4154
+ line: 1,
4155
+ message: message || `Missing required import from "${rule.from}"`,
4156
+ suggestion: `Add import from "${rule.from}"`
4146
4157
  }
4147
- break;
4148
- }
4149
- case "naming": {
4150
- const regex = new RegExp(rule.match);
4151
- for (const exp of file.exports) {
4152
- if (!regex.test(exp.name)) {
4153
- let expected = "";
4154
- switch (rule.convention) {
4155
- case "camelCase":
4156
- expected = "camelCase (e.g., myFunction)";
4157
- break;
4158
- case "PascalCase":
4159
- expected = "PascalCase (e.g., MyClass)";
4160
- break;
4161
- case "UPPER_SNAKE":
4162
- expected = "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)";
4163
- break;
4164
- case "kebab-case":
4165
- expected = "kebab-case (e.g., my-component)";
4166
- break;
4167
- }
4168
- matches.push({
4169
- line: exp.location.line,
4170
- message: pattern.message || `"${exp.name}" does not follow ${rule.convention} convention`,
4171
- suggestion: `Rename to follow ${expected}`
4172
- });
4173
- }
4158
+ ];
4159
+ }
4160
+ return [];
4161
+ }
4162
+ function checkNoImport(rule, file, message) {
4163
+ if (rule.type !== "no-import") return [];
4164
+ const forbiddenImport = file.imports.find(
4165
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
4166
+ );
4167
+ if (forbiddenImport) {
4168
+ return [
4169
+ {
4170
+ line: forbiddenImport.location.line,
4171
+ message: message || `Forbidden import from "${rule.from}"`,
4172
+ suggestion: `Remove import from "${rule.from}"`
4174
4173
  }
4175
- break;
4174
+ ];
4175
+ }
4176
+ return [];
4177
+ }
4178
+ function checkNaming(rule, file, message) {
4179
+ if (rule.type !== "naming") return [];
4180
+ const regex = new RegExp(rule.match);
4181
+ const matches = [];
4182
+ for (const exp of file.exports) {
4183
+ if (!regex.test(exp.name)) {
4184
+ const expected = CONVENTION_DESCRIPTIONS[rule.convention] ?? rule.convention;
4185
+ matches.push({
4186
+ line: exp.location.line,
4187
+ message: message || `"${exp.name}" does not follow ${rule.convention} convention`,
4188
+ suggestion: `Rename to follow ${expected}`
4189
+ });
4176
4190
  }
4177
- case "max-exports": {
4178
- if (file.exports.length > rule.count) {
4179
- matches.push({
4180
- line: 1,
4181
- message: pattern.message || `File has ${file.exports.length} exports, max is ${rule.count}`,
4182
- suggestion: `Split into multiple files or reduce exports to ${rule.count}`
4183
- });
4191
+ }
4192
+ return matches;
4193
+ }
4194
+ function checkMaxExports(rule, file, message) {
4195
+ if (rule.type !== "max-exports") return [];
4196
+ if (file.exports.length > rule.count) {
4197
+ return [
4198
+ {
4199
+ line: 1,
4200
+ message: message || `File has ${file.exports.length} exports, max is ${rule.count}`,
4201
+ suggestion: `Split into multiple files or reduce exports to ${rule.count}`
4184
4202
  }
4185
- break;
4186
- }
4187
- case "max-lines": {
4188
- break;
4189
- }
4190
- case "require-jsdoc": {
4191
- if (file.jsDocComments.length === 0 && file.exports.length > 0) {
4192
- matches.push({
4193
- line: 1,
4194
- message: pattern.message || "Exported symbols require JSDoc documentation",
4195
- suggestion: "Add JSDoc comments to exports"
4196
- });
4203
+ ];
4204
+ }
4205
+ return [];
4206
+ }
4207
+ function checkMaxLines(_rule, _file, _message) {
4208
+ return [];
4209
+ }
4210
+ function checkRequireJsdoc(_rule, file, message) {
4211
+ if (file.jsDocComments.length === 0 && file.exports.length > 0) {
4212
+ return [
4213
+ {
4214
+ line: 1,
4215
+ message: message || "Exported symbols require JSDoc documentation",
4216
+ suggestion: "Add JSDoc comments to exports"
4197
4217
  }
4198
- break;
4199
- }
4218
+ ];
4200
4219
  }
4201
- return matches;
4220
+ return [];
4221
+ }
4222
+ var RULE_CHECKERS = {
4223
+ "must-export": checkMustExport,
4224
+ "must-export-default": checkMustExportDefault,
4225
+ "no-export": checkNoExport,
4226
+ "must-import": checkMustImport,
4227
+ "no-import": checkNoImport,
4228
+ naming: checkNaming,
4229
+ "max-exports": checkMaxExports,
4230
+ "max-lines": checkMaxLines,
4231
+ "require-jsdoc": checkRequireJsdoc
4232
+ };
4233
+ function checkConfigPattern(pattern, file, rootDir) {
4234
+ const fileMatches = pattern.files.some((glob2) => fileMatchesPattern(file.path, glob2, rootDir));
4235
+ if (!fileMatches) return [];
4236
+ const checker = RULE_CHECKERS[pattern.rule.type];
4237
+ if (!checker) return [];
4238
+ return checker(pattern.rule, file, pattern.message);
4202
4239
  }
4203
4240
  async function detectPatternViolations(snapshot, config) {
4204
4241
  const violations = [];
@@ -4704,17 +4741,35 @@ function createUnusedImportFixes(deadCodeReport) {
4704
4741
  reversible: true
4705
4742
  }));
4706
4743
  }
4744
+ var EXPORT_TYPE_KEYWORD = {
4745
+ class: "class",
4746
+ function: "function",
4747
+ variable: "const",
4748
+ type: "type",
4749
+ interface: "interface",
4750
+ enum: "enum"
4751
+ };
4752
+ function getExportKeyword(exportType) {
4753
+ return EXPORT_TYPE_KEYWORD[exportType] ?? "enum";
4754
+ }
4755
+ function getDefaultExportKeyword(exportType) {
4756
+ if (exportType === "class" || exportType === "function") return exportType;
4757
+ return "";
4758
+ }
4707
4759
  function createDeadExportFixes(deadCodeReport) {
4708
- return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
4709
- type: "dead-exports",
4710
- file: exp.file,
4711
- description: `Remove export keyword from ${exp.name} (${exp.reason})`,
4712
- action: "replace",
4713
- oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
4714
- newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
4715
- safe: true,
4716
- reversible: true
4717
- }));
4760
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => {
4761
+ const keyword = exp.isDefault ? getDefaultExportKeyword(exp.type) : getExportKeyword(exp.type);
4762
+ return {
4763
+ type: "dead-exports",
4764
+ file: exp.file,
4765
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
4766
+ action: "replace",
4767
+ oldContent: exp.isDefault ? `export default ${keyword} ${exp.name}` : `export ${keyword} ${exp.name}`,
4768
+ newContent: `${keyword} ${exp.name}`,
4769
+ safe: true,
4770
+ reversible: true
4771
+ };
4772
+ });
4718
4773
  }
4719
4774
  function createCommentedCodeFixes(blocks) {
4720
4775
  return blocks.map((block) => ({
@@ -4889,53 +4944,80 @@ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
4889
4944
  "dead-internal"
4890
4945
  ]);
4891
4946
  var idCounter = 0;
4947
+ var DEAD_CODE_FIX_ACTIONS = {
4948
+ "dead-export": "Remove export keyword",
4949
+ "dead-file": "Delete file",
4950
+ "commented-code": "Delete commented block",
4951
+ "unused-import": "Remove import"
4952
+ };
4953
+ function classifyDeadCode(input) {
4954
+ if (input.isPublicApi) {
4955
+ return {
4956
+ safety: "unsafe",
4957
+ safetyReason: "Public API export may have external consumers",
4958
+ suggestion: "Deprecate before removing"
4959
+ };
4960
+ }
4961
+ const fixAction = DEAD_CODE_FIX_ACTIONS[input.type];
4962
+ if (fixAction) {
4963
+ return {
4964
+ safety: "safe",
4965
+ safetyReason: "zero importers, non-public",
4966
+ fixAction,
4967
+ suggestion: fixAction
4968
+ };
4969
+ }
4970
+ if (input.type === "orphaned-dep") {
4971
+ return {
4972
+ safety: "probably-safe",
4973
+ safetyReason: "No imports found, but needs install+test verification",
4974
+ fixAction: "Remove from package.json",
4975
+ suggestion: "Remove from package.json"
4976
+ };
4977
+ }
4978
+ return {
4979
+ safety: "unsafe",
4980
+ safetyReason: "Unknown dead code type",
4981
+ suggestion: "Manual review required"
4982
+ };
4983
+ }
4984
+ function classifyArchitecture(input) {
4985
+ if (input.type === "import-ordering") {
4986
+ return {
4987
+ safety: "safe",
4988
+ safetyReason: "Mechanical reorder, no semantic change",
4989
+ fixAction: "Reorder imports",
4990
+ suggestion: "Reorder imports"
4991
+ };
4992
+ }
4993
+ if (input.type === "forbidden-import" && input.hasAlternative) {
4994
+ return {
4995
+ safety: "probably-safe",
4996
+ safetyReason: "Alternative configured, needs typecheck+test",
4997
+ fixAction: "Replace with configured alternative",
4998
+ suggestion: "Replace with configured alternative"
4999
+ };
5000
+ }
5001
+ return {
5002
+ safety: "unsafe",
5003
+ safetyReason: `${input.type} requires structural changes`,
5004
+ suggestion: "Restructure code to fix violation"
5005
+ };
5006
+ }
4892
5007
  function classifyFinding(input) {
4893
5008
  idCounter++;
4894
5009
  const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
4895
- let safety;
4896
- let safetyReason;
4897
- let fixAction;
4898
- let suggestion;
5010
+ let classification;
4899
5011
  if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
4900
- safety = "unsafe";
4901
- safetyReason = `${input.type} requires human judgment`;
4902
- suggestion = "Review and refactor manually";
5012
+ classification = {
5013
+ safety: "unsafe",
5014
+ safetyReason: `${input.type} requires human judgment`,
5015
+ suggestion: "Review and refactor manually"
5016
+ };
4903
5017
  } else if (input.concern === "dead-code") {
4904
- if (input.isPublicApi) {
4905
- safety = "unsafe";
4906
- safetyReason = "Public API export may have external consumers";
4907
- suggestion = "Deprecate before removing";
4908
- } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
4909
- safety = "safe";
4910
- safetyReason = "zero importers, non-public";
4911
- fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
4912
- suggestion = fixAction;
4913
- } else if (input.type === "orphaned-dep") {
4914
- safety = "probably-safe";
4915
- safetyReason = "No imports found, but needs install+test verification";
4916
- fixAction = "Remove from package.json";
4917
- suggestion = fixAction;
4918
- } else {
4919
- safety = "unsafe";
4920
- safetyReason = "Unknown dead code type";
4921
- suggestion = "Manual review required";
4922
- }
5018
+ classification = classifyDeadCode(input);
4923
5019
  } else {
4924
- if (input.type === "import-ordering") {
4925
- safety = "safe";
4926
- safetyReason = "Mechanical reorder, no semantic change";
4927
- fixAction = "Reorder imports";
4928
- suggestion = fixAction;
4929
- } else if (input.type === "forbidden-import" && input.hasAlternative) {
4930
- safety = "probably-safe";
4931
- safetyReason = "Alternative configured, needs typecheck+test";
4932
- fixAction = "Replace with configured alternative";
4933
- suggestion = fixAction;
4934
- } else {
4935
- safety = "unsafe";
4936
- safetyReason = `${input.type} requires structural changes`;
4937
- suggestion = "Restructure code to fix violation";
4938
- }
5020
+ classification = classifyArchitecture(input);
4939
5021
  }
4940
5022
  return {
4941
5023
  id,
@@ -4944,11 +5026,11 @@ function classifyFinding(input) {
4944
5026
  ...input.line !== void 0 ? { line: input.line } : {},
4945
5027
  type: input.type,
4946
5028
  description: input.description,
4947
- safety,
4948
- safetyReason,
5029
+ safety: classification.safety,
5030
+ safetyReason: classification.safetyReason,
4949
5031
  hotspotDowngraded: false,
4950
- ...fixAction !== void 0 ? { fixAction } : {},
4951
- suggestion
5032
+ ...classification.fixAction !== void 0 ? { fixAction: classification.fixAction } : {},
5033
+ suggestion: classification.suggestion
4952
5034
  };
4953
5035
  }
4954
5036
  function applyHotspotDowngrade(finding, hotspot) {
@@ -5232,43 +5314,57 @@ var BenchmarkRunner = class {
5232
5314
  };
5233
5315
  }
5234
5316
  }
5317
+ /**
5318
+ * Extract a BenchmarkResult from a single assertion with benchmark data.
5319
+ */
5320
+ parseBenchAssertion(assertion, file) {
5321
+ if (!assertion.benchmark) return null;
5322
+ const bench = assertion.benchmark;
5323
+ return {
5324
+ name: assertion.fullName || assertion.title || "unknown",
5325
+ file: file.replace(process.cwd() + "/", ""),
5326
+ opsPerSec: Math.round(bench.hz || 0),
5327
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
5328
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
5329
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
5330
+ };
5331
+ }
5332
+ /**
5333
+ * Extract JSON from output that may contain non-JSON preamble.
5334
+ */
5335
+ extractJson(output) {
5336
+ const jsonStart = output.indexOf("{");
5337
+ const jsonEnd = output.lastIndexOf("}");
5338
+ if (jsonStart === -1 || jsonEnd === -1) return null;
5339
+ return JSON.parse(output.slice(jsonStart, jsonEnd + 1));
5340
+ }
5235
5341
  /**
5236
5342
  * Parse vitest bench JSON reporter output into BenchmarkResult[].
5237
5343
  * Vitest bench JSON output contains testResults with benchmark data.
5238
5344
  */
5239
- parseVitestBenchOutput(output) {
5345
+ collectAssertionResults(testResults) {
5240
5346
  const results = [];
5241
- try {
5242
- const jsonStart = output.indexOf("{");
5243
- const jsonEnd = output.lastIndexOf("}");
5244
- if (jsonStart === -1 || jsonEnd === -1) return results;
5245
- const jsonStr = output.slice(jsonStart, jsonEnd + 1);
5246
- const parsed = JSON.parse(jsonStr);
5247
- if (parsed.testResults) {
5248
- for (const testResult of parsed.testResults) {
5249
- const file = testResult.name || testResult.filepath || "";
5250
- if (testResult.assertionResults) {
5251
- for (const assertion of testResult.assertionResults) {
5252
- if (assertion.benchmark) {
5253
- const bench = assertion.benchmark;
5254
- results.push({
5255
- name: assertion.fullName || assertion.title || "unknown",
5256
- file: file.replace(process.cwd() + "/", ""),
5257
- opsPerSec: Math.round(bench.hz || 0),
5258
- meanMs: bench.mean ? bench.mean * 1e3 : 0,
5259
- // p99: use actual p99 if available, otherwise estimate as 1.5× mean
5260
- p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
5261
- marginOfError: bench.rme ? bench.rme / 100 : 0.05
5262
- });
5263
- }
5264
- }
5265
- }
5266
- }
5347
+ for (const testResult of testResults) {
5348
+ const file = testResult.name || testResult.filepath || "";
5349
+ const assertions = testResult.assertionResults ?? [];
5350
+ for (const assertion of assertions) {
5351
+ const result = this.parseBenchAssertion(assertion, file);
5352
+ if (result) results.push(result);
5267
5353
  }
5268
- } catch {
5269
5354
  }
5270
5355
  return results;
5271
5356
  }
5357
+ parseVitestBenchOutput(output) {
5358
+ try {
5359
+ const parsed = this.extractJson(output);
5360
+ if (!parsed) return [];
5361
+ const testResults = parsed.testResults;
5362
+ if (!testResults) return [];
5363
+ return this.collectAssertionResults(testResults);
5364
+ } catch {
5365
+ return [];
5366
+ }
5367
+ }
5272
5368
  };
5273
5369
  var RegressionDetector = class {
5274
5370
  detect(results, baselines, criticalPaths) {
@@ -5560,39 +5656,31 @@ function getFeedbackConfig() {
5560
5656
  function resetFeedbackConfig() {
5561
5657
  feedbackConfig = null;
5562
5658
  }
5659
+ function detectFileStatus(part) {
5660
+ if (/new file mode/.test(part)) return "added";
5661
+ if (/deleted file mode/.test(part)) return "deleted";
5662
+ if (part.includes("rename from")) return "renamed";
5663
+ return "modified";
5664
+ }
5665
+ function parseDiffPart(part) {
5666
+ if (!part.trim()) return null;
5667
+ const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
5668
+ if (!headerMatch || !headerMatch[2]) return null;
5669
+ const additionRegex = /^\+(?!\+\+)/gm;
5670
+ const deletionRegex = /^-(?!--)/gm;
5671
+ return {
5672
+ path: headerMatch[2],
5673
+ status: detectFileStatus(part),
5674
+ additions: (part.match(additionRegex) || []).length,
5675
+ deletions: (part.match(deletionRegex) || []).length
5676
+ };
5677
+ }
5563
5678
  function parseDiff(diff2) {
5564
5679
  try {
5565
5680
  if (!diff2.trim()) {
5566
5681
  return Ok({ diff: diff2, files: [] });
5567
5682
  }
5568
- const files = [];
5569
- const newFileRegex = /new file mode/;
5570
- const deletedFileRegex = /deleted file mode/;
5571
- const additionRegex = /^\+(?!\+\+)/gm;
5572
- const deletionRegex = /^-(?!--)/gm;
5573
- const diffParts = diff2.split(/(?=diff --git)/);
5574
- for (const part of diffParts) {
5575
- if (!part.trim()) continue;
5576
- const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
5577
- if (!headerMatch || !headerMatch[2]) continue;
5578
- const filePath = headerMatch[2];
5579
- let status = "modified";
5580
- if (newFileRegex.test(part)) {
5581
- status = "added";
5582
- } else if (deletedFileRegex.test(part)) {
5583
- status = "deleted";
5584
- } else if (part.includes("rename from")) {
5585
- status = "renamed";
5586
- }
5587
- const additions = (part.match(additionRegex) || []).length;
5588
- const deletions = (part.match(deletionRegex) || []).length;
5589
- files.push({
5590
- path: filePath,
5591
- status,
5592
- additions,
5593
- deletions
5594
- });
5595
- }
5683
+ const files = diff2.split(/(?=diff --git)/).map(parseDiffPart).filter((f) => f !== null);
5596
5684
  return Ok({ diff: diff2, files });
5597
5685
  } catch (error) {
5598
5686
  return Err({
@@ -5756,107 +5844,123 @@ var ChecklistBuilder = class {
5756
5844
  this.graphImpactData = graphImpactData;
5757
5845
  return this;
5758
5846
  }
5759
- async run(changes) {
5760
- const startTime = Date.now();
5847
+ /**
5848
+ * Build a single harness check item with or without graph data.
5849
+ */
5850
+ buildHarnessCheckItem(id, check, fallbackDetails, graphItemBuilder) {
5851
+ if (this.graphHarnessData && graphItemBuilder) {
5852
+ return graphItemBuilder();
5853
+ }
5854
+ return {
5855
+ id,
5856
+ category: "harness",
5857
+ check,
5858
+ passed: true,
5859
+ severity: "info",
5860
+ details: fallbackDetails
5861
+ };
5862
+ }
5863
+ /**
5864
+ * Build all harness check items based on harnessOptions and graph data.
5865
+ */
5866
+ buildHarnessItems() {
5867
+ if (!this.harnessOptions) return [];
5761
5868
  const items = [];
5762
- if (this.harnessOptions) {
5763
- if (this.harnessOptions.context !== false) {
5764
- if (this.graphHarnessData) {
5765
- items.push({
5766
- id: "harness-context",
5767
- category: "harness",
5768
- check: "Context validation",
5769
- passed: this.graphHarnessData.graphExists && this.graphHarnessData.nodeCount > 0,
5770
- severity: "info",
5771
- details: this.graphHarnessData.graphExists ? `Graph loaded: ${this.graphHarnessData.nodeCount} nodes, ${this.graphHarnessData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5772
- });
5773
- } else {
5774
- items.push({
5869
+ const graphData = this.graphHarnessData;
5870
+ if (this.harnessOptions.context !== false) {
5871
+ items.push(
5872
+ this.buildHarnessCheckItem(
5873
+ "harness-context",
5874
+ "Context validation",
5875
+ "Harness context validation not yet integrated (run with graph for real checks)",
5876
+ graphData ? () => ({
5775
5877
  id: "harness-context",
5776
5878
  category: "harness",
5777
5879
  check: "Context validation",
5778
- passed: true,
5779
- severity: "info",
5780
- details: "Harness context validation not yet integrated (run with graph for real checks)"
5781
- });
5782
- }
5783
- }
5784
- if (this.harnessOptions.constraints !== false) {
5785
- if (this.graphHarnessData) {
5786
- const violations = this.graphHarnessData.constraintViolations;
5787
- items.push({
5788
- id: "harness-constraints",
5789
- category: "harness",
5790
- check: "Constraint validation",
5791
- passed: violations === 0,
5792
- severity: violations > 0 ? "error" : "info",
5793
- details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5794
- });
5795
- } else {
5796
- items.push({
5797
- id: "harness-constraints",
5798
- category: "harness",
5799
- check: "Constraint validation",
5800
- passed: true,
5880
+ passed: graphData.graphExists && graphData.nodeCount > 0,
5801
5881
  severity: "info",
5802
- details: "Harness constraint validation not yet integrated (run with graph for real checks)"
5803
- });
5804
- }
5805
- }
5806
- if (this.harnessOptions.entropy !== false) {
5807
- if (this.graphHarnessData) {
5808
- const issues = this.graphHarnessData.unreachableNodes + this.graphHarnessData.undocumentedFiles;
5809
- items.push({
5810
- id: "harness-entropy",
5811
- category: "harness",
5812
- check: "Entropy detection",
5813
- passed: issues === 0,
5814
- severity: issues > 0 ? "warning" : "info",
5815
- details: issues === 0 ? "No entropy issues detected" : `${this.graphHarnessData.unreachableNodes} unreachable node(s), ${this.graphHarnessData.undocumentedFiles} undocumented file(s)`
5816
- });
5817
- } else {
5818
- items.push({
5819
- id: "harness-entropy",
5820
- category: "harness",
5821
- check: "Entropy detection",
5822
- passed: true,
5823
- severity: "info",
5824
- details: "Harness entropy detection not yet integrated (run with graph for real checks)"
5825
- });
5826
- }
5827
- }
5882
+ details: graphData.graphExists ? `Graph loaded: ${graphData.nodeCount} nodes, ${graphData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
5883
+ }) : void 0
5884
+ )
5885
+ );
5886
+ }
5887
+ if (this.harnessOptions.constraints !== false) {
5888
+ items.push(
5889
+ this.buildHarnessCheckItem(
5890
+ "harness-constraints",
5891
+ "Constraint validation",
5892
+ "Harness constraint validation not yet integrated (run with graph for real checks)",
5893
+ graphData ? () => {
5894
+ const violations = graphData.constraintViolations;
5895
+ return {
5896
+ id: "harness-constraints",
5897
+ category: "harness",
5898
+ check: "Constraint validation",
5899
+ passed: violations === 0,
5900
+ severity: violations > 0 ? "error" : "info",
5901
+ details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
5902
+ };
5903
+ } : void 0
5904
+ )
5905
+ );
5906
+ }
5907
+ if (this.harnessOptions.entropy !== false) {
5908
+ items.push(
5909
+ this.buildHarnessCheckItem(
5910
+ "harness-entropy",
5911
+ "Entropy detection",
5912
+ "Harness entropy detection not yet integrated (run with graph for real checks)",
5913
+ graphData ? () => {
5914
+ const issues = graphData.unreachableNodes + graphData.undocumentedFiles;
5915
+ return {
5916
+ id: "harness-entropy",
5917
+ category: "harness",
5918
+ check: "Entropy detection",
5919
+ passed: issues === 0,
5920
+ severity: issues > 0 ? "warning" : "info",
5921
+ details: issues === 0 ? "No entropy issues detected" : `${graphData.unreachableNodes} unreachable node(s), ${graphData.undocumentedFiles} undocumented file(s)`
5922
+ };
5923
+ } : void 0
5924
+ )
5925
+ );
5926
+ }
5927
+ return items;
5928
+ }
5929
+ /**
5930
+ * Execute a single custom rule and return a ReviewItem.
5931
+ */
5932
+ async executeCustomRule(rule, changes) {
5933
+ try {
5934
+ const result = await rule.check(changes, this.rootDir);
5935
+ const item = {
5936
+ id: rule.id,
5937
+ category: "custom",
5938
+ check: rule.name,
5939
+ passed: result.passed,
5940
+ severity: rule.severity,
5941
+ details: result.details
5942
+ };
5943
+ if (result.suggestion !== void 0) item.suggestion = result.suggestion;
5944
+ if (result.file !== void 0) item.file = result.file;
5945
+ if (result.line !== void 0) item.line = result.line;
5946
+ return item;
5947
+ } catch (error) {
5948
+ return {
5949
+ id: rule.id,
5950
+ category: "custom",
5951
+ check: rule.name,
5952
+ passed: false,
5953
+ severity: "error",
5954
+ details: `Rule execution failed: ${String(error)}`
5955
+ };
5828
5956
  }
5957
+ }
5958
+ async run(changes) {
5959
+ const startTime = Date.now();
5960
+ const items = [];
5961
+ items.push(...this.buildHarnessItems());
5829
5962
  for (const rule of this.customRules) {
5830
- try {
5831
- const result = await rule.check(changes, this.rootDir);
5832
- const item = {
5833
- id: rule.id,
5834
- category: "custom",
5835
- check: rule.name,
5836
- passed: result.passed,
5837
- severity: rule.severity,
5838
- details: result.details
5839
- };
5840
- if (result.suggestion !== void 0) {
5841
- item.suggestion = result.suggestion;
5842
- }
5843
- if (result.file !== void 0) {
5844
- item.file = result.file;
5845
- }
5846
- if (result.line !== void 0) {
5847
- item.line = result.line;
5848
- }
5849
- items.push(item);
5850
- } catch (error) {
5851
- items.push({
5852
- id: rule.id,
5853
- category: "custom",
5854
- check: rule.name,
5855
- passed: false,
5856
- severity: "error",
5857
- details: `Rule execution failed: ${String(error)}`
5858
- });
5859
- }
5963
+ items.push(await this.executeCustomRule(rule, changes));
5860
5964
  }
5861
5965
  if (this.diffOptions) {
5862
5966
  const diffResult = await analyzeDiff(changes, this.diffOptions, this.graphImpactData);
@@ -5871,7 +5975,6 @@ var ChecklistBuilder = class {
5871
5975
  const checklist = {
5872
5976
  items,
5873
5977
  passed: failed === 0,
5874
- // Pass if no failed items
5875
5978
  summary: {
5876
5979
  total: items.length,
5877
5980
  passed,
@@ -6255,7 +6358,7 @@ function detectStaleConstraints(store, windowDays = 30, category) {
6255
6358
  staleConstraints.sort((a, b) => b.daysSinceLastViolation - a.daysSinceLastViolation);
6256
6359
  return { staleConstraints, totalConstraints, windowDays };
6257
6360
  }
6258
- function resolveThresholds(scope, config) {
6361
+ function resolveThresholds2(scope, config) {
6259
6362
  const projectThresholds = {};
6260
6363
  for (const [key, val] of Object.entries(config.thresholds)) {
6261
6364
  projectThresholds[key] = typeof val === "object" && val !== null && !Array.isArray(val) ? { ...val } : val;
@@ -6385,6 +6488,8 @@ var INDEX_FILE = "index.json";
6385
6488
  var SESSIONS_DIR = "sessions";
6386
6489
  var SESSION_INDEX_FILE = "index.md";
6387
6490
  var SUMMARY_FILE = "summary.md";
6491
+ var SESSION_STATE_FILE = "session-state.json";
6492
+ var ARCHIVE_DIR = "archive";
6388
6493
  var STREAMS_DIR = "streams";
6389
6494
  var STREAM_NAME_REGEX = /^[a-z0-9][a-z0-9._-]*$/;
6390
6495
  function streamsDir(projectPath) {
@@ -7261,6 +7366,134 @@ function listActiveSessions(projectPath) {
7261
7366
  );
7262
7367
  }
7263
7368
  }
7369
+ function emptySections() {
7370
+ const sections = {};
7371
+ for (const name of SESSION_SECTION_NAMES) {
7372
+ sections[name] = [];
7373
+ }
7374
+ return sections;
7375
+ }
7376
+ async function loadSessionState(projectPath, sessionSlug) {
7377
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
7378
+ if (!dirResult.ok) return dirResult;
7379
+ const sessionDir = dirResult.value;
7380
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
7381
+ if (!fs14.existsSync(filePath)) {
7382
+ return Ok(emptySections());
7383
+ }
7384
+ try {
7385
+ const raw = fs14.readFileSync(filePath, "utf-8");
7386
+ const parsed = JSON.parse(raw);
7387
+ const sections = emptySections();
7388
+ for (const name of SESSION_SECTION_NAMES) {
7389
+ if (Array.isArray(parsed[name])) {
7390
+ sections[name] = parsed[name];
7391
+ }
7392
+ }
7393
+ return Ok(sections);
7394
+ } catch (error) {
7395
+ return Err(
7396
+ new Error(
7397
+ `Failed to load session state: ${error instanceof Error ? error.message : String(error)}`
7398
+ )
7399
+ );
7400
+ }
7401
+ }
7402
+ async function saveSessionState(projectPath, sessionSlug, sections) {
7403
+ const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
7404
+ if (!dirResult.ok) return dirResult;
7405
+ const sessionDir = dirResult.value;
7406
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
7407
+ try {
7408
+ fs14.writeFileSync(filePath, JSON.stringify(sections, null, 2));
7409
+ return Ok(void 0);
7410
+ } catch (error) {
7411
+ return Err(
7412
+ new Error(
7413
+ `Failed to save session state: ${error instanceof Error ? error.message : String(error)}`
7414
+ )
7415
+ );
7416
+ }
7417
+ }
7418
+ async function readSessionSections(projectPath, sessionSlug) {
7419
+ return loadSessionState(projectPath, sessionSlug);
7420
+ }
7421
+ async function readSessionSection(projectPath, sessionSlug, section) {
7422
+ const result = await loadSessionState(projectPath, sessionSlug);
7423
+ if (!result.ok) return result;
7424
+ return Ok(result.value[section]);
7425
+ }
7426
+ async function appendSessionEntry(projectPath, sessionSlug, section, authorSkill, content) {
7427
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
7428
+ if (!loadResult.ok) return loadResult;
7429
+ const sections = loadResult.value;
7430
+ const entry = {
7431
+ id: generateEntryId(),
7432
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7433
+ authorSkill,
7434
+ content,
7435
+ status: "active"
7436
+ };
7437
+ sections[section].push(entry);
7438
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
7439
+ if (!saveResult.ok) return saveResult;
7440
+ return Ok(entry);
7441
+ }
7442
+ async function updateSessionEntryStatus(projectPath, sessionSlug, section, entryId, newStatus) {
7443
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
7444
+ if (!loadResult.ok) return loadResult;
7445
+ const sections = loadResult.value;
7446
+ const entry = sections[section].find((e) => e.id === entryId);
7447
+ if (!entry) {
7448
+ return Err(new Error(`Entry '${entryId}' not found in section '${section}'`));
7449
+ }
7450
+ entry.status = newStatus;
7451
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
7452
+ if (!saveResult.ok) return saveResult;
7453
+ return Ok(entry);
7454
+ }
7455
+ function generateEntryId() {
7456
+ const timestamp = Date.now().toString(36);
7457
+ const random = Math.random().toString(36).substring(2, 8);
7458
+ return `${timestamp}-${random}`;
7459
+ }
7460
+ async function archiveSession(projectPath, sessionSlug) {
7461
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
7462
+ if (!dirResult.ok) return dirResult;
7463
+ const sessionDir = dirResult.value;
7464
+ if (!fs15.existsSync(sessionDir)) {
7465
+ return Err(new Error(`Session '${sessionSlug}' not found at ${sessionDir}`));
7466
+ }
7467
+ const archiveBase = path12.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
7468
+ try {
7469
+ fs15.mkdirSync(archiveBase, { recursive: true });
7470
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
7471
+ let archiveName = `${sessionSlug}-${date}`;
7472
+ let counter = 1;
7473
+ while (fs15.existsSync(path12.join(archiveBase, archiveName))) {
7474
+ archiveName = `${sessionSlug}-${date}-${counter}`;
7475
+ counter++;
7476
+ }
7477
+ const dest = path12.join(archiveBase, archiveName);
7478
+ try {
7479
+ fs15.renameSync(sessionDir, dest);
7480
+ } catch (renameErr) {
7481
+ if (renameErr instanceof Error && "code" in renameErr && renameErr.code === "EXDEV") {
7482
+ fs15.cpSync(sessionDir, dest, { recursive: true });
7483
+ fs15.rmSync(sessionDir, { recursive: true });
7484
+ } else {
7485
+ throw renameErr;
7486
+ }
7487
+ }
7488
+ return Ok(void 0);
7489
+ } catch (error) {
7490
+ return Err(
7491
+ new Error(
7492
+ `Failed to archive session: ${error instanceof Error ? error.message : String(error)}`
7493
+ )
7494
+ );
7495
+ }
7496
+ }
7264
7497
  async function executeWorkflow(workflow, executor) {
7265
7498
  const stepResults = [];
7266
7499
  const startTime = Date.now();
@@ -7482,11 +7715,11 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
7482
7715
  }
7483
7716
  function detectStack(projectRoot) {
7484
7717
  const stacks = [];
7485
- const pkgJsonPath = path11.join(projectRoot, "package.json");
7486
- if (fs14.existsSync(pkgJsonPath)) {
7718
+ const pkgJsonPath = path13.join(projectRoot, "package.json");
7719
+ if (fs16.existsSync(pkgJsonPath)) {
7487
7720
  stacks.push("node");
7488
7721
  try {
7489
- const pkgJson = JSON.parse(fs14.readFileSync(pkgJsonPath, "utf-8"));
7722
+ const pkgJson = JSON.parse(fs16.readFileSync(pkgJsonPath, "utf-8"));
7490
7723
  const allDeps = {
7491
7724
  ...pkgJson.dependencies,
7492
7725
  ...pkgJson.devDependencies
@@ -7501,13 +7734,13 @@ function detectStack(projectRoot) {
7501
7734
  } catch {
7502
7735
  }
7503
7736
  }
7504
- const goModPath = path11.join(projectRoot, "go.mod");
7505
- if (fs14.existsSync(goModPath)) {
7737
+ const goModPath = path13.join(projectRoot, "go.mod");
7738
+ if (fs16.existsSync(goModPath)) {
7506
7739
  stacks.push("go");
7507
7740
  }
7508
- const requirementsPath = path11.join(projectRoot, "requirements.txt");
7509
- const pyprojectPath = path11.join(projectRoot, "pyproject.toml");
7510
- if (fs14.existsSync(requirementsPath) || fs14.existsSync(pyprojectPath)) {
7741
+ const requirementsPath = path13.join(projectRoot, "requirements.txt");
7742
+ const pyprojectPath = path13.join(projectRoot, "pyproject.toml");
7743
+ if (fs16.existsSync(requirementsPath) || fs16.existsSync(pyprojectPath)) {
7511
7744
  stacks.push("python");
7512
7745
  }
7513
7746
  return stacks;
@@ -7910,7 +8143,7 @@ var SecurityScanner = class {
7910
8143
  }
7911
8144
  async scanFile(filePath) {
7912
8145
  if (!this.config.enabled) return [];
7913
- const content = await fs15.readFile(filePath, "utf-8");
8146
+ const content = await fs17.readFile(filePath, "utf-8");
7914
8147
  return this.scanContent(content, filePath, 1);
7915
8148
  }
7916
8149
  async scanFiles(filePaths) {
@@ -7943,238 +8176,276 @@ var ALL_CHECKS = [
7943
8176
  "phase-gate",
7944
8177
  "arch"
7945
8178
  ];
7946
- async function runSingleCheck(name, projectRoot, config) {
7947
- const start = Date.now();
8179
+ async function runValidateCheck(projectRoot, config) {
7948
8180
  const issues = [];
7949
- try {
7950
- switch (name) {
7951
- case "validate": {
7952
- const agentsPath = path12.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7953
- const result = await validateAgentsMap(agentsPath);
7954
- if (!result.ok) {
7955
- issues.push({ severity: "error", message: result.error.message });
7956
- } else if (!result.value.valid) {
7957
- if (result.value.errors) {
7958
- for (const err of result.value.errors) {
7959
- issues.push({ severity: "error", message: err.message });
7960
- }
7961
- }
7962
- for (const section of result.value.missingSections) {
7963
- issues.push({ severity: "warning", message: `Missing section: ${section}` });
7964
- }
7965
- for (const link of result.value.brokenLinks) {
7966
- issues.push({
7967
- severity: "warning",
7968
- message: `Broken link: ${link.text} \u2192 ${link.path}`,
7969
- file: link.path
7970
- });
7971
- }
7972
- }
7973
- break;
8181
+ const agentsPath = path14.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8182
+ const result = await validateAgentsMap(agentsPath);
8183
+ if (!result.ok) {
8184
+ issues.push({ severity: "error", message: result.error.message });
8185
+ } else if (!result.value.valid) {
8186
+ if (result.value.errors) {
8187
+ for (const err of result.value.errors) {
8188
+ issues.push({ severity: "error", message: err.message });
7974
8189
  }
7975
- case "deps": {
7976
- const rawLayers = config.layers;
7977
- if (rawLayers && rawLayers.length > 0) {
7978
- const parser = new TypeScriptParser();
7979
- const layers = rawLayers.map(
7980
- (l) => defineLayer(
7981
- l.name,
7982
- Array.isArray(l.patterns) ? l.patterns : [l.pattern],
7983
- l.allowedDependencies
7984
- )
7985
- );
7986
- const result = await validateDependencies({
7987
- layers,
7988
- rootDir: projectRoot,
7989
- parser
7990
- });
7991
- if (!result.ok) {
7992
- issues.push({ severity: "error", message: result.error.message });
7993
- } else if (result.value.violations.length > 0) {
7994
- for (const v of result.value.violations) {
7995
- issues.push({
7996
- severity: "error",
7997
- message: `${v.reason}: ${v.file} imports ${v.imports} (${v.fromLayer} \u2192 ${v.toLayer})`,
7998
- file: v.file,
7999
- line: v.line
8000
- });
8001
- }
8002
- }
8003
- }
8004
- break;
8190
+ }
8191
+ for (const section of result.value.missingSections) {
8192
+ issues.push({ severity: "warning", message: `Missing section: ${section}` });
8193
+ }
8194
+ for (const link of result.value.brokenLinks) {
8195
+ issues.push({
8196
+ severity: "warning",
8197
+ message: `Broken link: ${link.text} \u2192 ${link.path}`,
8198
+ file: link.path
8199
+ });
8200
+ }
8201
+ }
8202
+ return issues;
8203
+ }
8204
+ async function runDepsCheck(projectRoot, config) {
8205
+ const issues = [];
8206
+ const rawLayers = config.layers;
8207
+ if (rawLayers && rawLayers.length > 0) {
8208
+ const parser = new TypeScriptParser();
8209
+ const layers = rawLayers.map(
8210
+ (l) => defineLayer(
8211
+ l.name,
8212
+ Array.isArray(l.patterns) ? l.patterns : [l.pattern],
8213
+ l.allowedDependencies
8214
+ )
8215
+ );
8216
+ const result = await validateDependencies({
8217
+ layers,
8218
+ rootDir: projectRoot,
8219
+ parser
8220
+ });
8221
+ if (!result.ok) {
8222
+ issues.push({ severity: "error", message: result.error.message });
8223
+ } else if (result.value.violations.length > 0) {
8224
+ for (const v of result.value.violations) {
8225
+ issues.push({
8226
+ severity: "error",
8227
+ message: `${v.reason}: ${v.file} imports ${v.imports} (${v.fromLayer} \u2192 ${v.toLayer})`,
8228
+ file: v.file,
8229
+ line: v.line
8230
+ });
8005
8231
  }
8006
- case "docs": {
8007
- const docsDir = path12.join(projectRoot, config.docsDir ?? "docs");
8008
- const entropyConfig = config.entropy || {};
8009
- const result = await checkDocCoverage("project", {
8010
- docsDir,
8011
- sourceDir: projectRoot,
8012
- excludePatterns: entropyConfig.excludePatterns || [
8013
- "**/node_modules/**",
8014
- "**/dist/**",
8015
- "**/*.test.ts",
8016
- "**/fixtures/**"
8017
- ]
8232
+ }
8233
+ }
8234
+ return issues;
8235
+ }
8236
+ async function runDocsCheck(projectRoot, config) {
8237
+ const issues = [];
8238
+ const docsDir = path14.join(projectRoot, config.docsDir ?? "docs");
8239
+ const entropyConfig = config.entropy || {};
8240
+ const result = await checkDocCoverage("project", {
8241
+ docsDir,
8242
+ sourceDir: projectRoot,
8243
+ excludePatterns: entropyConfig.excludePatterns || [
8244
+ "**/node_modules/**",
8245
+ "**/dist/**",
8246
+ "**/*.test.ts",
8247
+ "**/fixtures/**"
8248
+ ]
8249
+ });
8250
+ if (!result.ok) {
8251
+ issues.push({ severity: "warning", message: result.error.message });
8252
+ } else if (result.value.gaps.length > 0) {
8253
+ for (const gap of result.value.gaps) {
8254
+ issues.push({
8255
+ severity: "warning",
8256
+ message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
8257
+ file: gap.file
8258
+ });
8259
+ }
8260
+ }
8261
+ return issues;
8262
+ }
8263
+ async function runEntropyCheck(projectRoot, config) {
8264
+ const issues = [];
8265
+ const entropyConfig = config.entropy || {};
8266
+ const perfConfig = config.performance || {};
8267
+ const entryPoints = entropyConfig.entryPoints ?? perfConfig.entryPoints;
8268
+ const analyzer = new EntropyAnalyzer({
8269
+ rootDir: projectRoot,
8270
+ ...entryPoints ? { entryPoints } : {},
8271
+ analyze: { drift: true, deadCode: true, patterns: false }
8272
+ });
8273
+ const result = await analyzer.analyze();
8274
+ if (!result.ok) {
8275
+ issues.push({ severity: "warning", message: result.error.message });
8276
+ } else {
8277
+ const report = result.value;
8278
+ if (report.drift) {
8279
+ for (const drift of report.drift.drifts) {
8280
+ issues.push({
8281
+ severity: "warning",
8282
+ message: `Doc drift (${drift.type}): ${drift.details}`,
8283
+ file: drift.docFile,
8284
+ line: drift.line
8018
8285
  });
8019
- if (!result.ok) {
8020
- issues.push({ severity: "warning", message: result.error.message });
8021
- } else if (result.value.gaps.length > 0) {
8022
- for (const gap of result.value.gaps) {
8023
- issues.push({
8024
- severity: "warning",
8025
- message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
8026
- file: gap.file
8027
- });
8028
- }
8029
- }
8030
- break;
8031
8286
  }
8032
- case "entropy": {
8033
- const analyzer = new EntropyAnalyzer({
8034
- rootDir: projectRoot,
8035
- analyze: { drift: true, deadCode: true, patterns: false }
8287
+ }
8288
+ if (report.deadCode) {
8289
+ for (const dead of report.deadCode.deadExports) {
8290
+ issues.push({
8291
+ severity: "warning",
8292
+ message: `Dead export: ${dead.name}`,
8293
+ file: dead.file,
8294
+ line: dead.line
8036
8295
  });
8037
- const result = await analyzer.analyze();
8038
- if (!result.ok) {
8039
- issues.push({ severity: "warning", message: result.error.message });
8040
- } else {
8041
- const report = result.value;
8042
- if (report.drift) {
8043
- for (const drift of report.drift.drifts) {
8044
- issues.push({
8045
- severity: "warning",
8046
- message: `Doc drift (${drift.type}): ${drift.details}`,
8047
- file: drift.docFile,
8048
- line: drift.line
8049
- });
8050
- }
8051
- }
8052
- if (report.deadCode) {
8053
- for (const dead of report.deadCode.deadExports) {
8054
- issues.push({
8055
- severity: "warning",
8056
- message: `Dead export: ${dead.name}`,
8057
- file: dead.file,
8058
- line: dead.line
8059
- });
8060
- }
8061
- }
8062
- }
8063
- break;
8064
8296
  }
8065
- case "security": {
8066
- const securityConfig = parseSecurityConfig(config.security);
8067
- if (!securityConfig.enabled) break;
8068
- const scanner = new SecurityScanner(securityConfig);
8069
- scanner.configureForProject(projectRoot);
8070
- const { glob: globFn } = await import("glob");
8071
- const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
8072
- cwd: projectRoot,
8073
- ignore: securityConfig.exclude ?? [
8074
- "**/node_modules/**",
8075
- "**/dist/**",
8076
- "**/*.test.ts",
8077
- "**/fixtures/**"
8078
- ],
8079
- absolute: true
8297
+ }
8298
+ }
8299
+ return issues;
8300
+ }
8301
+ async function runSecurityCheck(projectRoot, config) {
8302
+ const issues = [];
8303
+ const securityConfig = parseSecurityConfig(config.security);
8304
+ if (!securityConfig.enabled) return issues;
8305
+ const scanner = new SecurityScanner(securityConfig);
8306
+ scanner.configureForProject(projectRoot);
8307
+ const { glob: globFn } = await import("glob");
8308
+ const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
8309
+ cwd: projectRoot,
8310
+ ignore: securityConfig.exclude ?? [
8311
+ "**/node_modules/**",
8312
+ "**/dist/**",
8313
+ "**/*.test.ts",
8314
+ "**/fixtures/**"
8315
+ ],
8316
+ absolute: true
8317
+ });
8318
+ const scanResult = await scanner.scanFiles(sourceFiles);
8319
+ for (const finding of scanResult.findings) {
8320
+ issues.push({
8321
+ severity: finding.severity === "info" ? "warning" : finding.severity,
8322
+ message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
8323
+ file: finding.file,
8324
+ line: finding.line
8325
+ });
8326
+ }
8327
+ return issues;
8328
+ }
8329
+ async function runPerfCheck(projectRoot, config) {
8330
+ const issues = [];
8331
+ const perfConfig = config.performance || {};
8332
+ const entryPoints = perfConfig.entryPoints;
8333
+ const perfAnalyzer = new EntropyAnalyzer({
8334
+ rootDir: projectRoot,
8335
+ ...entryPoints ? { entryPoints } : {},
8336
+ analyze: {
8337
+ complexity: perfConfig.complexity || true,
8338
+ coupling: perfConfig.coupling || true,
8339
+ sizeBudget: perfConfig.sizeBudget || false
8340
+ }
8341
+ });
8342
+ const perfResult = await perfAnalyzer.analyze();
8343
+ if (!perfResult.ok) {
8344
+ issues.push({ severity: "warning", message: perfResult.error.message });
8345
+ } else {
8346
+ const perfReport = perfResult.value;
8347
+ if (perfReport.complexity) {
8348
+ for (const v of perfReport.complexity.violations) {
8349
+ issues.push({
8350
+ severity: v.severity === "info" ? "warning" : v.severity,
8351
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
8352
+ file: v.file,
8353
+ line: v.line
8080
8354
  });
8081
- const scanResult = await scanner.scanFiles(sourceFiles);
8082
- for (const finding of scanResult.findings) {
8083
- issues.push({
8084
- severity: finding.severity === "info" ? "warning" : finding.severity,
8085
- message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
8086
- file: finding.file,
8087
- line: finding.line
8088
- });
8089
- }
8090
- break;
8091
8355
  }
8092
- case "perf": {
8093
- const perfConfig = config.performance || {};
8094
- const perfAnalyzer = new EntropyAnalyzer({
8095
- rootDir: projectRoot,
8096
- analyze: {
8097
- complexity: perfConfig.complexity || true,
8098
- coupling: perfConfig.coupling || true,
8099
- sizeBudget: perfConfig.sizeBudget || false
8100
- }
8356
+ }
8357
+ if (perfReport.coupling) {
8358
+ for (const v of perfReport.coupling.violations) {
8359
+ issues.push({
8360
+ severity: v.severity === "info" ? "warning" : v.severity,
8361
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
8362
+ file: v.file
8101
8363
  });
8102
- const perfResult = await perfAnalyzer.analyze();
8103
- if (!perfResult.ok) {
8104
- issues.push({ severity: "warning", message: perfResult.error.message });
8105
- } else {
8106
- const perfReport = perfResult.value;
8107
- if (perfReport.complexity) {
8108
- for (const v of perfReport.complexity.violations) {
8109
- issues.push({
8110
- severity: v.severity === "info" ? "warning" : v.severity,
8111
- message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
8112
- file: v.file,
8113
- line: v.line
8114
- });
8115
- }
8116
- }
8117
- if (perfReport.coupling) {
8118
- for (const v of perfReport.coupling.violations) {
8119
- issues.push({
8120
- severity: v.severity === "info" ? "warning" : v.severity,
8121
- message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
8122
- file: v.file
8123
- });
8124
- }
8125
- }
8126
- }
8127
- break;
8128
8364
  }
8129
- case "phase-gate": {
8130
- const phaseGates = config.phaseGates;
8131
- if (!phaseGates?.enabled) {
8132
- break;
8133
- }
8365
+ }
8366
+ }
8367
+ return issues;
8368
+ }
8369
+ async function runPhaseGateCheck(_projectRoot, config) {
8370
+ const issues = [];
8371
+ const phaseGates = config.phaseGates;
8372
+ if (!phaseGates?.enabled) {
8373
+ return issues;
8374
+ }
8375
+ issues.push({
8376
+ severity: "warning",
8377
+ message: "Phase gate is enabled but requires CLI context. Run `harness check-phase-gate` separately for full validation."
8378
+ });
8379
+ return issues;
8380
+ }
8381
+ async function runArchCheck(projectRoot, config) {
8382
+ const issues = [];
8383
+ const rawArchConfig = config.architecture;
8384
+ const archConfig = ArchConfigSchema.parse(rawArchConfig ?? {});
8385
+ if (!archConfig.enabled) return issues;
8386
+ const results = await runAll(archConfig, projectRoot);
8387
+ const baselineManager = new ArchBaselineManager(projectRoot, archConfig.baselinePath);
8388
+ const baseline = baselineManager.load();
8389
+ if (baseline) {
8390
+ const diffResult = diff(results, baseline);
8391
+ if (!diffResult.passed) {
8392
+ for (const v of diffResult.newViolations) {
8134
8393
  issues.push({
8135
- severity: "warning",
8136
- message: "Phase gate is enabled but requires CLI context. Run `harness check-phase-gate` separately for full validation."
8394
+ severity: v.severity,
8395
+ message: `[${v.category || "arch"}] NEW: ${v.detail}`,
8396
+ file: v.file
8137
8397
  });
8138
- break;
8139
8398
  }
8140
- case "arch": {
8141
- const rawArchConfig = config.architecture;
8142
- const archConfig = ArchConfigSchema.parse(rawArchConfig ?? {});
8143
- if (!archConfig.enabled) break;
8144
- const results = await runAll(archConfig, projectRoot);
8145
- const baselineManager = new ArchBaselineManager(projectRoot, archConfig.baselinePath);
8146
- const baseline = baselineManager.load();
8147
- if (baseline) {
8148
- const diffResult = diff(results, baseline);
8149
- if (!diffResult.passed) {
8150
- for (const v of diffResult.newViolations) {
8151
- issues.push({
8152
- severity: v.severity,
8153
- message: `[${v.category || "arch"}] NEW: ${v.detail}`,
8154
- file: v.file
8155
- });
8156
- }
8157
- for (const r of diffResult.regressions) {
8158
- issues.push({
8159
- severity: "error",
8160
- message: `[${r.category}] REGRESSION: ${r.currentValue} > ${r.baselineValue} (delta: ${r.delta})`
8161
- });
8162
- }
8163
- }
8164
- } else {
8165
- for (const result of results) {
8166
- for (const v of result.violations) {
8167
- issues.push({
8168
- severity: v.severity,
8169
- message: `[${result.category}] ${v.detail}`,
8170
- file: v.file
8171
- });
8172
- }
8173
- }
8174
- }
8175
- break;
8399
+ for (const r of diffResult.regressions) {
8400
+ issues.push({
8401
+ severity: "error",
8402
+ message: `[${r.category}] REGRESSION: ${r.currentValue} > ${r.baselineValue} (delta: ${r.delta})`
8403
+ });
8176
8404
  }
8177
8405
  }
8406
+ } else {
8407
+ for (const result of results) {
8408
+ for (const v of result.violations) {
8409
+ issues.push({
8410
+ severity: v.severity,
8411
+ message: `[${result.category}] ${v.detail}`,
8412
+ file: v.file
8413
+ });
8414
+ }
8415
+ }
8416
+ }
8417
+ return issues;
8418
+ }
8419
+ async function runSingleCheck(name, projectRoot, config) {
8420
+ const start = Date.now();
8421
+ const issues = [];
8422
+ try {
8423
+ switch (name) {
8424
+ case "validate":
8425
+ issues.push(...await runValidateCheck(projectRoot, config));
8426
+ break;
8427
+ case "deps":
8428
+ issues.push(...await runDepsCheck(projectRoot, config));
8429
+ break;
8430
+ case "docs":
8431
+ issues.push(...await runDocsCheck(projectRoot, config));
8432
+ break;
8433
+ case "entropy":
8434
+ issues.push(...await runEntropyCheck(projectRoot, config));
8435
+ break;
8436
+ case "security":
8437
+ issues.push(...await runSecurityCheck(projectRoot, config));
8438
+ break;
8439
+ case "perf":
8440
+ issues.push(...await runPerfCheck(projectRoot, config));
8441
+ break;
8442
+ case "phase-gate":
8443
+ issues.push(...await runPhaseGateCheck(projectRoot, config));
8444
+ break;
8445
+ case "arch":
8446
+ issues.push(...await runArchCheck(projectRoot, config));
8447
+ break;
8448
+ }
8178
8449
  } catch (error) {
8179
8450
  issues.push({
8180
8451
  severity: "error",
@@ -8251,7 +8522,7 @@ async function runMechanicalChecks(options) {
8251
8522
  };
8252
8523
  if (!skip.includes("validate")) {
8253
8524
  try {
8254
- const agentsPath = path13.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8525
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8255
8526
  const result = await validateAgentsMap(agentsPath);
8256
8527
  if (!result.ok) {
8257
8528
  statuses.validate = "fail";
@@ -8288,7 +8559,7 @@ async function runMechanicalChecks(options) {
8288
8559
  statuses.validate = "fail";
8289
8560
  findings.push({
8290
8561
  tool: "validate",
8291
- file: path13.join(projectRoot, "AGENTS.md"),
8562
+ file: path15.join(projectRoot, "AGENTS.md"),
8292
8563
  message: err instanceof Error ? err.message : String(err),
8293
8564
  severity: "error"
8294
8565
  });
@@ -8352,7 +8623,7 @@ async function runMechanicalChecks(options) {
8352
8623
  (async () => {
8353
8624
  const localFindings = [];
8354
8625
  try {
8355
- const docsDir = path13.join(projectRoot, config.docsDir ?? "docs");
8626
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
8356
8627
  const result = await checkDocCoverage("project", { docsDir });
8357
8628
  if (!result.ok) {
8358
8629
  statuses["check-docs"] = "warn";
@@ -8379,7 +8650,7 @@ async function runMechanicalChecks(options) {
8379
8650
  statuses["check-docs"] = "warn";
8380
8651
  localFindings.push({
8381
8652
  tool: "check-docs",
8382
- file: path13.join(projectRoot, "docs"),
8653
+ file: path15.join(projectRoot, "docs"),
8383
8654
  message: err instanceof Error ? err.message : String(err),
8384
8655
  severity: "warning"
8385
8656
  });
@@ -8528,18 +8799,18 @@ function computeContextBudget(diffLines) {
8528
8799
  return diffLines;
8529
8800
  }
8530
8801
  function isWithinProject(absPath, projectRoot) {
8531
- const resolvedRoot = path14.resolve(projectRoot) + path14.sep;
8532
- const resolvedPath = path14.resolve(absPath);
8533
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path14.resolve(projectRoot);
8802
+ const resolvedRoot = path16.resolve(projectRoot) + path16.sep;
8803
+ const resolvedPath = path16.resolve(absPath);
8804
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path16.resolve(projectRoot);
8534
8805
  }
8535
8806
  async function readContextFile(projectRoot, filePath, reason) {
8536
- const absPath = path14.isAbsolute(filePath) ? filePath : path14.join(projectRoot, filePath);
8807
+ const absPath = path16.isAbsolute(filePath) ? filePath : path16.join(projectRoot, filePath);
8537
8808
  if (!isWithinProject(absPath, projectRoot)) return null;
8538
8809
  const result = await readFileContent(absPath);
8539
8810
  if (!result.ok) return null;
8540
8811
  const content = result.value;
8541
8812
  const lines = content.split("\n").length;
8542
- const relPath = path14.isAbsolute(filePath) ? path14.relative(projectRoot, filePath) : filePath;
8813
+ const relPath = path16.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
8543
8814
  return { path: relPath, content, reason, lines };
8544
8815
  }
8545
8816
  function extractImportSources2(content) {
@@ -8554,18 +8825,18 @@ function extractImportSources2(content) {
8554
8825
  }
8555
8826
  async function resolveImportPath2(projectRoot, fromFile, importSource) {
8556
8827
  if (!importSource.startsWith(".")) return null;
8557
- const fromDir = path14.dirname(path14.join(projectRoot, fromFile));
8558
- const basePath = path14.resolve(fromDir, importSource);
8828
+ const fromDir = path16.dirname(path16.join(projectRoot, fromFile));
8829
+ const basePath = path16.resolve(fromDir, importSource);
8559
8830
  if (!isWithinProject(basePath, projectRoot)) return null;
8560
- const relBase = path14.relative(projectRoot, basePath);
8831
+ const relBase = relativePosix(projectRoot, basePath);
8561
8832
  const candidates = [
8562
8833
  relBase + ".ts",
8563
8834
  relBase + ".tsx",
8564
8835
  relBase + ".mts",
8565
- path14.join(relBase, "index.ts")
8836
+ path16.join(relBase, "index.ts")
8566
8837
  ];
8567
8838
  for (const candidate of candidates) {
8568
- const absCandidate = path14.join(projectRoot, candidate);
8839
+ const absCandidate = path16.join(projectRoot, candidate);
8569
8840
  if (await fileExists(absCandidate)) {
8570
8841
  return candidate;
8571
8842
  }
@@ -8573,10 +8844,10 @@ async function resolveImportPath2(projectRoot, fromFile, importSource) {
8573
8844
  return null;
8574
8845
  }
8575
8846
  async function findTestFiles(projectRoot, sourceFile) {
8576
- const baseName = path14.basename(sourceFile, path14.extname(sourceFile));
8847
+ const baseName = path16.basename(sourceFile, path16.extname(sourceFile));
8577
8848
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
8578
8849
  const results = await findFiles(pattern, projectRoot);
8579
- return results.map((f) => path14.relative(projectRoot, f));
8850
+ return results.map((f) => relativePosix(projectRoot, f));
8580
8851
  }
8581
8852
  async function gatherImportContext(projectRoot, changedFiles, budget) {
8582
8853
  const contextFiles = [];
@@ -8858,101 +9129,102 @@ function findMissingJsDoc(bundle) {
8858
9129
  }
8859
9130
  return missing;
8860
9131
  }
8861
- function runComplianceAgent(bundle) {
9132
/**
 * Compliance check: when the project's convention rules mention JSDoc,
 * emit one "important" finding per exported symbol that lacks a JSDoc block.
 * @param {object} bundle - Review bundle (changed files, change type, ...).
 * @param {Array<{text: string, source: string}>} rules - Extracted convention rules.
 * @returns {Array<object>} Findings; empty when no JSDoc rule exists.
 */
function checkMissingJsDoc(bundle, rules) {
  const jsDocRule = rules.find((rule) => rule.text.toLowerCase().includes("jsdoc"));
  if (!jsDocRule) return [];
  const findings = [];
  for (const entry of findMissingJsDoc(bundle)) {
    findings.push({
      id: makeFindingId("compliance", entry.file, entry.line, `Missing JSDoc ${entry.exportName}`),
      file: entry.file,
      lineRange: [entry.line, entry.line],
      domain: "compliance",
      severity: "important",
      title: `Missing JSDoc on exported \`${entry.exportName}\``,
      rationale: `Convention requires all exports to have JSDoc comments (from ${jsDocRule.source}).`,
      suggestion: `Add a JSDoc comment above the export of \`${entry.exportName}\`.`,
      evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${jsDocRule.text}"`],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
9149
/**
 * Compliance check for feature changes: suggest linking a spec/design doc
 * when the review bundle carries no spec or convention context.
 * @param {object} bundle - Review bundle with contextFiles and changedFiles.
 * @returns {Array<object>} Zero or one "suggestion" finding.
 */
function checkFeatureSpec(bundle) {
  const specPresent = bundle.contextFiles.some(
    (ctx) => ctx.reason === "spec" || ctx.reason === "convention"
  );
  if (specPresent) return [];
  if (bundle.changedFiles.length === 0) return [];
  // Anchor the bundle-level observation on the first changed file.
  const [firstFile] = bundle.changedFiles;
  const finding = {
    id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "No spec/design doc found for feature change",
    rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
    evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
    validatedBy: "heuristic"
  };
  return [finding];
}
9169
/**
 * Compliance check for bugfix changes: suggest supplying commit history
 * when none was provided, since history helps verify the root cause.
 * @param {object} bundle - Review bundle with commitHistory and changedFiles.
 * @returns {Array<object>} Zero or one "suggestion" finding.
 */
function checkBugfixHistory(bundle) {
  if (bundle.commitHistory.length > 0) return [];
  if (bundle.changedFiles.length === 0) return [];
  // Anchor the bundle-level observation on the first changed file.
  const [firstFile] = bundle.changedFiles;
  const finding = {
    id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "Bugfix without commit history context",
    rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
    evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
    validatedBy: "heuristic"
  };
  return [finding];
}
9186
/**
 * Dispatch change-type-specific compliance checks. Only "feature" and
 * "bugfix" have dedicated checks; every other change type yields nothing.
 * @param {object} bundle - Review bundle carrying `changeType`.
 * @returns {Array<object>} Findings from the matching check, or [].
 */
function checkChangeTypeSpecific(bundle) {
  if (bundle.changeType === "feature") {
    return checkFeatureSpec(bundle);
  }
  if (bundle.changeType === "bugfix") {
    return checkBugfixHistory(bundle);
  }
  return [];
}
9196
/**
 * Compliance check: when a convention rule mentions a "result type", flag
 * changed files that use try/catch without referencing a Result type.
 * Detection is a cheap substring heuristic over file contents.
 * @param {object} bundle - Review bundle with changedFiles and changeType.
 * @param {Array<{text: string, source: string}>} rules - Convention rules.
 * @returns {Array<object>} One "suggestion" finding per offending file.
 */
function checkResultTypeConvention(bundle, rules) {
  const resultTypeRule = rules.find((rule) => rule.text.toLowerCase().includes("result type"));
  if (!resultTypeRule) return [];
  const offenders = bundle.changedFiles.filter((cf) => {
    const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
    const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
    return hasTryCatch && !usesResult;
  });
  return offenders.map((cf) => ({
    id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
    file: cf.path,
    lineRange: [1, cf.lines],
    domain: "compliance",
    severity: "suggestion",
    title: "Fallible operation uses try/catch instead of Result type",
    rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
    suggestion: "Refactor error handling to use the Result type pattern.",
    evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${resultTypeRule.text}"`],
    validatedBy: "heuristic"
  }));
}
9220
/**
 * Run every compliance sub-check against the review bundle and collect
 * their findings in order: JSDoc coverage, change-type-specific checks,
 * then the Result-type convention check.
 * @param {object} bundle - Review bundle.
 * @returns {Array<object>} Combined compliance findings.
 */
function runComplianceAgent(bundle) {
  const rules = extractConventionRules(bundle);
  const findings = [];
  findings.push(...checkMissingJsDoc(bundle, rules));
  findings.push(...checkChangeTypeSpecific(bundle));
  findings.push(...checkResultTypeConvention(bundle, rules));
  return findings;
}
8956
9228
  var BUG_DETECTION_DESCRIPTOR = {
8957
9229
  domain: "bug",
8958
9230
  tier: "strong",
@@ -9223,31 +9495,32 @@ var ARCHITECTURE_DESCRIPTOR = {
9223
9495
  ]
9224
9496
  };
9225
9497
  var LARGE_FILE_THRESHOLD = 300;
9498
/**
 * Heuristic: does a check-deps output line describe a layer violation?
 * Case-insensitive match on either keyword.
 * @param {string} line - One line of check-deps output.
 * @returns {boolean} True when the line mentions "violation" or "layer".
 */
function isViolationLine(line) {
  const lower = line.toLowerCase();
  return ["violation", "layer"].some((needle) => lower.includes(needle));
}
9502
/**
 * Build a critical architecture finding from one check-deps violation line.
 * Tries to extract "path.ts:NN" from the line; falls back to the supplied
 * path and line 1 when no location is embedded.
 * @param {string} line - The raw check-deps output line.
 * @param {string} fallbackPath - File to attribute when the line names none.
 * @returns {object} A "critical" architecture finding.
 */
function createLayerViolationFinding(line, fallbackPath) {
  const match = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
  let file = fallbackPath;
  let lineNum = 1;
  if (match) {
    file = match[1];
    if (match[2]) {
      lineNum = parseInt(match[2], 10);
    }
  }
  const trimmed = line.trim();
  return {
    id: makeFindingId("arch", file, lineNum, "layer violation"),
    file,
    lineRange: [lineNum, lineNum],
    domain: "architecture",
    severity: "critical",
    title: "Layer boundary violation detected by check-deps",
    rationale: `Architectural layer violation: ${trimmed}. Imports must flow in the correct direction per the project's layer definitions.`,
    suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
    evidence: [trimmed],
    validatedBy: "heuristic"
  };
}
9226
9519
/**
 * Scan the captured check-deps output (if present in the bundle's context
 * files) and turn each violation-looking line into an architecture finding.
 * @param {object} bundle - Review bundle with contextFiles and changedFiles.
 * @returns {Array<object>} Layer-violation findings; [] without check-deps output.
 */
function detectLayerViolations(bundle) {
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
  if (!checkDepsFile) return [];
  const fallbackPath = bundle.changedFiles[0]?.path ?? "unknown";
  const findings = [];
  for (const line of checkDepsFile.content.split("\n")) {
    if (isViolationLine(line)) {
      findings.push(createLayerViolationFinding(line, fallbackPath));
    }
  }
  return findings;
}
9252
9525
  function detectLargeFiles(bundle) {
9253
9526
  const findings = [];
@@ -9269,45 +9542,61 @@ function detectLargeFiles(bundle) {
9269
9542
  }
9270
9543
  return findings;
9271
9544
  }
9545
/**
 * Collect the relative import specifiers of a file, with their leading
 * "./" or one "../" prefix stripped (a deliberately rough normalization
 * used for base-name matching).
 * @param {string} content - Source text to scan.
 * @returns {Set<string>} Normalized relative import specifiers.
 */
function extractRelativeImports(content) {
  const relatives = new Set();
  for (const match of content.matchAll(/import\s+.*?from\s+['"]([^'"]+)['"]/g)) {
    const source = match[1];
    if (!source.startsWith(".")) continue;
    relatives.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
  }
  return relatives;
}
9557
/**
 * Strip directories and a TS/JS extension from a path, leaving the bare
 * base name (non-TS/JS extensions are kept).
 * @param {string} filePath - Slash-separated file path.
 * @returns {string} Base name without .ts/.tsx/.js/.jsx extension.
 */
function fileBaseName(filePath) {
  const withoutDirs = filePath.replace(/.*\//, "");
  return withoutDirs.replace(/\.(ts|tsx|js|jsx)$/, "");
}
9560
/**
 * Look for a potential circular import between a changed file and one of
 * its context files: the context file must import something matching a
 * changed file's base name WHILE the changed file also imports the context
 * file (by base name, per `fileImports`).
 * @param {object} ctxFile - Context file ({ path, content }).
 * @param {string} changedFilePath - Path of the changed file under review.
 * @param {Set<string>} changedPaths - Paths of all changed files.
 * @param {Set<string>} fileImports - Normalized relative imports of the changed file.
 * @returns {object|null} An "important" architecture finding, or null.
 */
function findCircularImportInCtxFile(ctxFile, changedFilePath, changedPaths, fileImports) {
  // The context file's base name — and whether the changed file imports it —
  // do not vary per regex hit; the original recomputed both inside the inner
  // loop. Hoist them and bail out early when no back-edge can exist.
  const ctxBaseName = fileBaseName(ctxFile.path);
  if (!fileImports.has(ctxBaseName)) return null;
  const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
  let ctxMatch;
  while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
    const ctxSource = ctxMatch[1];
    if (!ctxSource.startsWith(".")) continue;
    for (const changedPath of changedPaths) {
      if (!ctxSource.includes(fileBaseName(changedPath))) continue;
      return {
        id: makeFindingId("arch", changedFilePath, 1, `circular ${ctxFile.path}`),
        file: changedFilePath,
        lineRange: [1, 1],
        domain: "architecture",
        severity: "important",
        title: `Potential circular import between ${changedFilePath} and ${ctxFile.path}`,
        rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
        suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
        evidence: [
          `${changedFilePath} imports from a module that also imports from ${changedFilePath}`
        ],
        validatedBy: "heuristic"
      };
    }
  }
  return null;
}
9272
9589
  function detectCircularImports(bundle) {
9273
9590
  const findings = [];
9274
9591
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
9592
+ const relevantCtxFiles = bundle.contextFiles.filter(
9593
+ (f) => f.reason === "import" || f.reason === "graph-dependency"
9594
+ );
9275
9595
  for (const cf of bundle.changedFiles) {
9276
- const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
9277
- let match;
9278
- const imports = /* @__PURE__ */ new Set();
9279
- while ((match = importRegex.exec(cf.content)) !== null) {
9280
- const source = match[1];
9281
- if (source.startsWith(".")) {
9282
- imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
9283
- }
9284
- }
9285
- for (const ctxFile of bundle.contextFiles) {
9286
- if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
9287
- const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
9288
- let ctxMatch;
9289
- while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
9290
- const ctxSource = ctxMatch[1];
9291
- if (ctxSource.startsWith(".")) {
9292
- for (const changedPath of changedPaths) {
9293
- const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
9294
- if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
9295
- findings.push({
9296
- id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
9297
- file: cf.path,
9298
- lineRange: [1, 1],
9299
- domain: "architecture",
9300
- severity: "important",
9301
- title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
9302
- rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
9303
- suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
9304
- evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
9305
- validatedBy: "heuristic"
9306
- });
9307
- }
9308
- }
9309
- }
9310
- }
9596
+ const imports = extractRelativeImports(cf.content);
9597
+ for (const ctxFile of relevantCtxFiles) {
9598
+ const finding = findCircularImportInCtxFile(ctxFile, cf.path, changedPaths, imports);
9599
+ if (finding) findings.push(finding);
9311
9600
  }
9312
9601
  }
9313
9602
  return findings;
@@ -9368,7 +9657,7 @@ function normalizePath(filePath, projectRoot) {
9368
9657
  let normalized = filePath;
9369
9658
  normalized = normalized.replace(/\\/g, "/");
9370
9659
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
9371
- if (path15.isAbsolute(normalized)) {
9660
+ if (path17.isAbsolute(normalized)) {
9372
9661
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
9373
9662
  if (normalized.startsWith(root)) {
9374
9663
  normalized = normalized.slice(root.length);
@@ -9393,12 +9682,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
9393
9682
  while ((match = importRegex.exec(content)) !== null) {
9394
9683
  const importPath = match[1];
9395
9684
  if (!importPath.startsWith(".")) continue;
9396
- const dir = path15.dirname(current.file);
9397
- let resolved = path15.join(dir, importPath).replace(/\\/g, "/");
9685
+ const dir = path17.dirname(current.file);
9686
+ let resolved = path17.join(dir, importPath).replace(/\\/g, "/");
9398
9687
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
9399
9688
  resolved += ".ts";
9400
9689
  }
9401
- resolved = path15.normalize(resolved).replace(/\\/g, "/");
9690
+ resolved = path17.normalize(resolved).replace(/\\/g, "/");
9402
9691
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
9403
9692
  queue.push({ file: resolved, depth: current.depth + 1 });
9404
9693
  }
@@ -9415,7 +9704,7 @@ async function validateFindings(options) {
9415
9704
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
9416
9705
  continue;
9417
9706
  }
9418
- const absoluteFile = path15.isAbsolute(finding.file) ? finding.file : path15.join(projectRoot, finding.file).replace(/\\/g, "/");
9707
+ const absoluteFile = path17.isAbsolute(finding.file) ? finding.file : path17.join(projectRoot, finding.file).replace(/\\/g, "/");
9419
9708
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
9420
9709
  continue;
9421
9710
  }
@@ -9470,6 +9759,28 @@ async function validateFindings(options) {
9470
9759
  function rangesOverlap(a, b, gap) {
9471
9760
  return a[0] <= b[1] + gap && b[0] <= a[1] + gap;
9472
9761
  }
9762
/**
 * Of two optional strings, return the longer one (ties favor the first);
 * when either is missing, return whichever is present.
 * @param {string|undefined} a - First candidate.
 * @param {string|undefined} b - Second candidate.
 * @returns {string|undefined} The longer candidate, or the sole one.
 */
function pickLongest(a, b) {
  if (!a || !b) return a ?? b;
  return b.length > a.length ? b : a;
}
9766
/**
 * Compose the title for two merged findings: the higher-severity finding
 * (ties favor `a`) supplies the base title, with any existing "[...]"
 * prefix stripped and replaced by the sorted, comma-joined domain list.
 * @param {object} a - First finding.
 * @param {object} b - Second finding.
 * @param {Set<string>} domains - Union of both findings' domains.
 * @returns {{title: string, primaryFinding: object}} Title plus the finding it came from.
 */
function buildMergedTitle(a, b, domains) {
  const aOutranks = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity];
  const primaryFinding = aOutranks ? a : b;
  const sortedDomains = [...domains].sort();
  const strippedTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
  return {
    title: `[${sortedDomains.join(", ")}] ${strippedTitle}`,
    primaryFinding
  };
}
+ }
9772
+ function mergeSecurityFields(merged, primary, a, b) {
9773
+ const cweId = primary.cweId ?? a.cweId ?? b.cweId;
9774
+ const owaspCategory = primary.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
9775
+ const confidence = primary.confidence ?? a.confidence ?? b.confidence;
9776
+ const remediation = pickLongest(a.remediation, b.remediation);
9777
+ const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
9778
+ if (cweId !== void 0) merged.cweId = cweId;
9779
+ if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
9780
+ if (confidence !== void 0) merged.confidence = confidence;
9781
+ if (remediation !== void 0) merged.remediation = remediation;
9782
+ if (mergedRefs.length > 0) merged.references = mergedRefs;
9783
+ }
9473
9784
  function mergeFindings(a, b) {
9474
9785
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
9475
9786
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
@@ -9479,18 +9790,12 @@ function mergeFindings(a, b) {
9479
9790
  Math.min(a.lineRange[0], b.lineRange[0]),
9480
9791
  Math.max(a.lineRange[1], b.lineRange[1])
9481
9792
  ];
9482
- const domains = /* @__PURE__ */ new Set();
9483
- domains.add(a.domain);
9484
- domains.add(b.domain);
9485
- const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
9486
- const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
9487
- const domainList = [...domains].sort().join(", ");
9488
- const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
9489
- const title = `[${domainList}] ${cleanTitle}`;
9793
+ const domains = /* @__PURE__ */ new Set([a.domain, b.domain]);
9794
+ const suggestion = pickLongest(a.suggestion, b.suggestion);
9795
+ const { title, primaryFinding } = buildMergedTitle(a, b, domains);
9490
9796
  const merged = {
9491
9797
  id: primaryFinding.id,
9492
9798
  file: a.file,
9493
- // same file for all merged findings
9494
9799
  lineRange,
9495
9800
  domain: primaryFinding.domain,
9496
9801
  severity: highestSeverity,
@@ -9502,16 +9807,7 @@ function mergeFindings(a, b) {
9502
9807
  if (suggestion !== void 0) {
9503
9808
  merged.suggestion = suggestion;
9504
9809
  }
9505
- const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
9506
- const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
9507
- const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
9508
- const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
9509
- const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
9510
- if (cweId !== void 0) merged.cweId = cweId;
9511
- if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
9512
- if (confidence !== void 0) merged.confidence = confidence;
9513
- if (remediation !== void 0) merged.remediation = remediation;
9514
- if (mergedRefs.length > 0) merged.references = mergedRefs;
9810
+ mergeSecurityFields(merged, primaryFinding, a, b);
9515
9811
  return merged;
9516
9812
  }
9517
9813
  function deduplicateFindings(options) {
@@ -9675,6 +9971,17 @@ function formatTerminalOutput(options) {
9675
9971
  if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
9676
9972
  sections.push(` Found ${issueCount} issue(s): ${parts.join(", ")}.`);
9677
9973
  }
9974
+ if (options.evidenceCoverage) {
9975
+ const ec = options.evidenceCoverage;
9976
+ sections.push("");
9977
+ sections.push("## Evidence Coverage\n");
9978
+ sections.push(` Evidence entries: ${ec.totalEntries}`);
9979
+ sections.push(
9980
+ ` Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
9981
+ );
9982
+ sections.push(` Uncited findings: ${ec.uncitedCount} (flagged as [UNVERIFIED])`);
9983
+ sections.push(` Coverage: ${ec.coveragePercentage}%`);
9984
+ }
9678
9985
  return sections.join("\n");
9679
9986
  }
9680
9987
  var SMALL_SUGGESTION_LINE_LIMIT = 10;
@@ -9749,8 +10056,105 @@ function formatGitHubSummary(options) {
9749
10056
  const assessment = determineAssessment(findings);
9750
10057
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
9751
10058
  sections.push(`## Assessment: ${assessmentLabel}`);
10059
+ if (options.evidenceCoverage) {
10060
+ const ec = options.evidenceCoverage;
10061
+ sections.push("");
10062
+ sections.push("## Evidence Coverage\n");
10063
+ sections.push(`- Evidence entries: ${ec.totalEntries}`);
10064
+ sections.push(
10065
+ `- Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
10066
+ );
10067
+ sections.push(`- Uncited findings: ${ec.uncitedCount} (flagged as \\[UNVERIFIED\\])`);
10068
+ sections.push(`- Coverage: ${ec.coveragePercentage}%`);
10069
+ }
9752
10070
  return sections.join("\n");
9753
10071
  }
10072
// Evidence references come in three shapes, tried most-specific first:
// "file.ext:10-20 ...", "file.ext:10 ...", or "file.ext ...".
var FILE_LINE_RANGE_PATTERN = /^([\w./@-]+\.\w+):(\d+)-(\d+)/;
var FILE_LINE_PATTERN = /^([\w./@-]+\.\w+):(\d+)/;
var FILE_ONLY_PATTERN = /^([\w./@-]+\.\w+)\s/;
/**
 * Parse an evidence entry's content into a structured file/line reference.
 * @param {string} content - Raw evidence text (leading/trailing space ignored).
 * @returns {{file: string, lineStart?: number, lineEnd?: number}|null}
 *   Parsed reference, or null when no pattern matches.
 */
function parseEvidenceRef(content) {
  const text = content.trim();
  const range = FILE_LINE_RANGE_PATTERN.exec(text);
  if (range !== null) {
    return {
      file: range[1],
      lineStart: parseInt(range[2], 10),
      lineEnd: parseInt(range[3], 10)
    };
  }
  const single = FILE_LINE_PATTERN.exec(text);
  if (single !== null) {
    return {
      file: single[1],
      lineStart: parseInt(single[2], 10)
    };
  }
  const bare = FILE_ONLY_PATTERN.exec(text);
  return bare !== null ? { file: bare[1] } : null;
}
10098
/**
 * Decide whether an evidence reference supports a finding. The file must
 * match; a file-only reference then matches anything in that file, a
 * single-line reference must fall inside the finding's range, and a line
 * range must overlap it.
 * @param {{file: string, lineStart?: number, lineEnd?: number}} ref
 * @param {{file: string, lineRange: [number, number]}} finding
 * @returns {boolean} True when the evidence covers the finding.
 */
function evidenceMatchesFinding(ref, finding) {
  if (ref.file !== finding.file) return false;
  if (ref.lineStart === void 0) return true;
  const [findStart, findEnd] = finding.lineRange;
  if (ref.lineEnd === void 0) {
    return ref.lineStart >= findStart && ref.lineStart <= findEnd;
  }
  return ref.lineStart <= findEnd && ref.lineEnd >= findStart;
}
10107
/**
 * Compute evidence-coverage statistics for a set of findings against the
 * session's active evidence entries. With no findings, coverage is 100%.
 * @param {Array<object>} findings - Validated review findings.
 * @param {Array<{status: string, content: string}>} evidenceEntries - Session evidence.
 * @returns {object} { totalEntries, findingsWithEvidence, uncitedCount,
 *   uncitedFindings, coveragePercentage }.
 */
function checkEvidenceCoverage(findings, evidenceEntries) {
  const activeEvidence = evidenceEntries.filter((entry) => entry.status === "active");
  if (findings.length === 0) {
    return {
      totalEntries: activeEvidence.length,
      findingsWithEvidence: 0,
      uncitedCount: 0,
      uncitedFindings: [],
      coveragePercentage: 100
    };
  }
  // Only entries that parse into a file/line reference can cite a finding.
  const refs = activeEvidence
    .map((entry) => parseEvidenceRef(entry.content))
    .filter((ref) => ref !== null);
  const uncitedFindings = [];
  let findingsWithEvidence = 0;
  for (const finding of findings) {
    const cited = refs.some((ref) => evidenceMatchesFinding(ref, finding));
    if (cited) {
      findingsWithEvidence += 1;
    } else {
      uncitedFindings.push(finding.title);
    }
  }
  return {
    totalEntries: activeEvidence.length,
    findingsWithEvidence,
    uncitedCount: findings.length - findingsWithEvidence,
    uncitedFindings,
    coveragePercentage: Math.round(findingsWithEvidence / findings.length * 100)
  };
}
10143
/**
 * Prefix "[UNVERIFIED] " onto the title of every finding not covered by an
 * active, parseable evidence reference. Mutates the findings in place and
 * returns the same array; already-tagged titles are left untouched.
 * @param {Array<object>} findings - Findings to tag (mutated).
 * @param {Array<{status: string, content: string}>} evidenceEntries - Session evidence.
 * @returns {Array<object>} The same findings array.
 */
function tagUncitedFindings(findings, evidenceEntries) {
  const refs = [];
  for (const entry of evidenceEntries) {
    if (entry.status !== "active") continue;
    const ref = parseEvidenceRef(entry.content);
    if (ref !== null) refs.push(ref);
  }
  for (const finding of findings) {
    const cited = refs.some((ref) => evidenceMatchesFinding(ref, finding));
    if (cited || finding.title.startsWith("[UNVERIFIED]")) continue;
    finding.title = `[UNVERIFIED] ${finding.title}`;
  }
  return findings;
}
9754
10158
  async function runReviewPipeline(options) {
9755
10159
  const {
9756
10160
  projectRoot,
@@ -9762,7 +10166,8 @@ async function runReviewPipeline(options) {
9762
10166
  conventionFiles,
9763
10167
  checkDepsOutput,
9764
10168
  config = {},
9765
- commitHistory
10169
+ commitHistory,
10170
+ sessionSlug
9766
10171
  } = options;
9767
10172
  if (flags.ci && prMetadata) {
9768
10173
  const eligibility = checkEligibility(prMetadata, true);
@@ -9858,13 +10263,25 @@ async function runReviewPipeline(options) {
9858
10263
  projectRoot,
9859
10264
  fileContents
9860
10265
  });
10266
+ let evidenceCoverage;
10267
+ if (sessionSlug) {
10268
+ try {
10269
+ const evidenceResult = await readSessionSection(projectRoot, sessionSlug, "evidence");
10270
+ if (evidenceResult.ok) {
10271
+ evidenceCoverage = checkEvidenceCoverage(validatedFindings, evidenceResult.value);
10272
+ tagUncitedFindings(validatedFindings, evidenceResult.value);
10273
+ }
10274
+ } catch {
10275
+ }
10276
+ }
9861
10277
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
9862
10278
  const strengths = [];
9863
10279
  const assessment = determineAssessment(dedupedFindings);
9864
10280
  const exitCode = getExitCode(assessment);
9865
10281
  const terminalOutput = formatTerminalOutput({
9866
10282
  findings: dedupedFindings,
9867
- strengths
10283
+ strengths,
10284
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
9868
10285
  });
9869
10286
  let githubComments = [];
9870
10287
  if (flags.comment) {
@@ -9879,7 +10296,8 @@ async function runReviewPipeline(options) {
9879
10296
  terminalOutput,
9880
10297
  githubComments,
9881
10298
  exitCode,
9882
- ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
10299
+ ...mechanicalResult != null ? { mechanicalResult } : {},
10300
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
9883
10301
  };
9884
10302
  }
9885
10303
  var VALID_STATUSES = /* @__PURE__ */ new Set([
@@ -9979,13 +10397,29 @@ function parseFeatures(sectionBody) {
9979
10397
  }
9980
10398
  return Ok(features);
9981
10399
  }
9982
- function parseFeatureFields(name, body) {
10400
/**
 * Extract "- **Key:** value" bullet fields from a markdown feature body.
 * @param {string} body - Markdown text of one feature section.
 * @returns {Map<string, string>} Field name -> raw value, in document order.
 */
function extractFieldMap(body) {
  const fieldMap = new Map();
  for (const match of body.matchAll(/^- \*\*(.+?):\*\* (.+)$/gm)) {
    fieldMap.set(match[1], match[2]);
  }
  return fieldMap;
}
10409
/**
 * Read a comma-separated list field from the field map, trying each key in
 * order and using the first one present. An em-dash or "none" value (or an
 * absent field) yields an empty list.
 * @param {Map<string, string>} fieldMap - Parsed feature fields.
 * @param {...string} keys - Field names to try, in priority order.
 * @returns {string[]} Trimmed list items.
 */
function parseListField(fieldMap, ...keys) {
  const firstKey = keys.find((key) => fieldMap.get(key) !== void 0);
  const raw = firstKey === void 0 ? EM_DASH : fieldMap.get(firstKey);
  if (raw === EM_DASH || raw === "none") return [];
  return raw.split(",").map((item) => item.trim());
}
10421
+ function parseFeatureFields(name, body) {
10422
+ const fieldMap = extractFieldMap(body);
9989
10423
  const statusRaw = fieldMap.get("Status");
9990
10424
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
9991
10425
  return Err(
@@ -9994,15 +10428,17 @@ function parseFeatureFields(name, body) {
9994
10428
  )
9995
10429
  );
9996
10430
  }
9997
- const status = statusRaw;
9998
10431
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
9999
- const spec = specRaw === EM_DASH ? null : specRaw;
10000
- const plansRaw = fieldMap.get("Plans") ?? fieldMap.get("Plan") ?? EM_DASH;
10001
- const plans = plansRaw === EM_DASH || plansRaw === "none" ? [] : plansRaw.split(",").map((p) => p.trim());
10002
- const blockedByRaw = fieldMap.get("Blocked by") ?? fieldMap.get("Blockers") ?? EM_DASH;
10003
- const blockedBy = blockedByRaw === EM_DASH || blockedByRaw === "none" ? [] : blockedByRaw.split(",").map((b) => b.trim());
10004
- const summary = fieldMap.get("Summary") ?? "";
10005
- return Ok({ name, status, spec, plans, blockedBy, summary });
10432
+ const plans = parseListField(fieldMap, "Plans", "Plan");
10433
+ const blockedBy = parseListField(fieldMap, "Blocked by", "Blockers");
10434
+ return Ok({
10435
+ name,
10436
+ status: statusRaw,
10437
+ spec: specRaw === EM_DASH ? null : specRaw,
10438
+ plans,
10439
+ blockedBy,
10440
+ summary: fieldMap.get("Summary") ?? ""
10441
+ });
10006
10442
  }
10007
10443
  var EM_DASH2 = "\u2014";
10008
10444
  function serializeRoadmap(roadmap) {
@@ -10062,10 +10498,10 @@ function inferStatus(feature, projectPath, allFeatures) {
10062
10498
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
10063
10499
  const useRootState = featuresWithPlans.length <= 1;
10064
10500
  if (useRootState) {
10065
- const rootStatePath = path16.join(projectPath, ".harness", "state.json");
10066
- if (fs16.existsSync(rootStatePath)) {
10501
+ const rootStatePath = path18.join(projectPath, ".harness", "state.json");
10502
+ if (fs18.existsSync(rootStatePath)) {
10067
10503
  try {
10068
- const raw = fs16.readFileSync(rootStatePath, "utf-8");
10504
+ const raw = fs18.readFileSync(rootStatePath, "utf-8");
10069
10505
  const state = JSON.parse(raw);
10070
10506
  if (state.progress) {
10071
10507
  for (const status of Object.values(state.progress)) {
@@ -10076,16 +10512,16 @@ function inferStatus(feature, projectPath, allFeatures) {
10076
10512
  }
10077
10513
  }
10078
10514
  }
10079
- const sessionsDir = path16.join(projectPath, ".harness", "sessions");
10080
- if (fs16.existsSync(sessionsDir)) {
10515
+ const sessionsDir = path18.join(projectPath, ".harness", "sessions");
10516
+ if (fs18.existsSync(sessionsDir)) {
10081
10517
  try {
10082
- const sessionDirs = fs16.readdirSync(sessionsDir, { withFileTypes: true });
10518
+ const sessionDirs = fs18.readdirSync(sessionsDir, { withFileTypes: true });
10083
10519
  for (const entry of sessionDirs) {
10084
10520
  if (!entry.isDirectory()) continue;
10085
- const autopilotPath = path16.join(sessionsDir, entry.name, "autopilot-state.json");
10086
- if (!fs16.existsSync(autopilotPath)) continue;
10521
+ const autopilotPath = path18.join(sessionsDir, entry.name, "autopilot-state.json");
10522
+ if (!fs18.existsSync(autopilotPath)) continue;
10087
10523
  try {
10088
- const raw = fs16.readFileSync(autopilotPath, "utf-8");
10524
+ const raw = fs18.readFileSync(autopilotPath, "utf-8");
10089
10525
  const autopilot = JSON.parse(raw);
10090
10526
  if (!autopilot.phases) continue;
10091
10527
  const linkedPhases = autopilot.phases.filter(
@@ -10165,10 +10601,10 @@ var ProjectScanner = class {
10165
10601
  this.rootDir = rootDir;
10166
10602
  }
10167
10603
  async scan() {
10168
- let projectName = path17.basename(this.rootDir);
10604
+ let projectName = path19.basename(this.rootDir);
10169
10605
  try {
10170
- const pkgPath = path17.join(this.rootDir, "package.json");
10171
- const pkgRaw = await fs17.readFile(pkgPath, "utf-8");
10606
+ const pkgPath = path19.join(this.rootDir, "package.json");
10607
+ const pkgRaw = await fs19.readFile(pkgPath, "utf-8");
10172
10608
  const pkg = JSON.parse(pkgRaw);
10173
10609
  if (pkg.name) projectName = pkg.name;
10174
10610
  } catch {
@@ -10281,13 +10717,13 @@ var BlueprintGenerator = class {
10281
10717
  styles: STYLES,
10282
10718
  scripts: SCRIPTS
10283
10719
  });
10284
- await fs18.mkdir(options.outputDir, { recursive: true });
10285
- await fs18.writeFile(path18.join(options.outputDir, "index.html"), html);
10720
+ await fs20.mkdir(options.outputDir, { recursive: true });
10721
+ await fs20.writeFile(path20.join(options.outputDir, "index.html"), html);
10286
10722
  }
10287
10723
  };
10288
10724
  function getStatePath() {
10289
10725
  const home = process.env["HOME"] || os.homedir();
10290
- return path19.join(home, ".harness", "update-check.json");
10726
+ return path21.join(home, ".harness", "update-check.json");
10291
10727
  }
10292
10728
  function isUpdateCheckEnabled(configInterval) {
10293
10729
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -10300,7 +10736,7 @@ function shouldRunCheck(state, intervalMs) {
10300
10736
  }
10301
10737
  function readCheckState() {
10302
10738
  try {
10303
- const raw = fs19.readFileSync(getStatePath(), "utf-8");
10739
+ const raw = fs21.readFileSync(getStatePath(), "utf-8");
10304
10740
  const parsed = JSON.parse(raw);
10305
10741
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
10306
10742
  const state = parsed;
@@ -10317,7 +10753,7 @@ function readCheckState() {
10317
10753
  }
10318
10754
  function spawnBackgroundCheck(currentVersion) {
10319
10755
  const statePath = getStatePath();
10320
- const stateDir = path19.dirname(statePath);
10756
+ const stateDir = path21.dirname(statePath);
10321
10757
  const script = `
10322
10758
  const { execSync } = require('child_process');
10323
10759
  const fs = require('fs');
@@ -10369,7 +10805,7 @@ function getUpdateNotification(currentVersion) {
10369
10805
  return `Update available: v${currentVersion} -> v${state.latestVersion}
10370
10806
  Run "harness update" to upgrade.`;
10371
10807
  }
10372
- var VERSION = "0.11.0";
10808
+ var VERSION = "0.14.0";
10373
10809
 
10374
10810
  export {
10375
10811
  ArchMetricCategorySchema,
@@ -10487,7 +10923,7 @@ export {
10487
10923
  NoOpSink,
10488
10924
  syncConstraintNodes,
10489
10925
  detectStaleConstraints,
10490
- resolveThresholds,
10926
+ resolveThresholds2 as resolveThresholds,
10491
10927
  FailureEntrySchema,
10492
10928
  HandoffSchema,
10493
10929
  GateResultSchema,
@@ -10529,6 +10965,11 @@ export {
10529
10965
  writeSessionSummary,
10530
10966
  loadSessionSummary,
10531
10967
  listActiveSessions,
10968
+ readSessionSections,
10969
+ readSessionSection,
10970
+ appendSessionEntry,
10971
+ updateSessionEntryStatus,
10972
+ archiveSession,
10532
10973
  executeWorkflow,
10533
10974
  runPipeline,
10534
10975
  runMultiTurnPipeline,
@@ -10578,6 +11019,8 @@ export {
10578
11019
  isSmallSuggestion,
10579
11020
  formatGitHubComment,
10580
11021
  formatGitHubSummary,
11022
+ checkEvidenceCoverage,
11023
+ tagUncitedFindings,
10581
11024
  runReviewPipeline,
10582
11025
  parseRoadmap,
10583
11026
  serializeRoadmap,