@harness-engineering/cli 1.6.2 → 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (180) hide show
  1. package/dist/agents/personas/documentation-maintainer.yaml +3 -1
  2. package/dist/agents/personas/performance-guardian.yaml +23 -0
  3. package/dist/agents/personas/planner.yaml +27 -0
  4. package/dist/agents/personas/verifier.yaml +30 -0
  5. package/dist/agents/skills/claude-code/align-documentation/SKILL.md +13 -0
  6. package/dist/agents/skills/claude-code/cleanup-dead-code/SKILL.md +25 -1
  7. package/dist/agents/skills/claude-code/cleanup-dead-code/skill.yaml +5 -2
  8. package/dist/agents/skills/claude-code/detect-doc-drift/SKILL.md +12 -0
  9. package/dist/agents/skills/claude-code/enforce-architecture/SKILL.md +67 -1
  10. package/dist/agents/skills/claude-code/enforce-architecture/skill.yaml +5 -2
  11. package/dist/agents/skills/claude-code/harness-accessibility/SKILL.md +281 -0
  12. package/dist/agents/skills/claude-code/harness-accessibility/skill.yaml +51 -0
  13. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +119 -72
  14. package/dist/agents/skills/claude-code/harness-autopilot/skill.yaml +4 -2
  15. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +76 -4
  16. package/dist/agents/skills/claude-code/harness-brainstorming/skill.yaml +2 -0
  17. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +487 -234
  18. package/dist/agents/skills/claude-code/harness-code-review/skill.yaml +15 -2
  19. package/dist/agents/skills/claude-code/harness-codebase-cleanup/SKILL.md +226 -0
  20. package/dist/agents/skills/claude-code/harness-codebase-cleanup/skill.yaml +64 -0
  21. package/dist/agents/skills/claude-code/harness-dependency-health/SKILL.md +35 -6
  22. package/dist/agents/skills/claude-code/harness-dependency-health/skill.yaml +1 -1
  23. package/dist/agents/skills/claude-code/harness-design/SKILL.md +265 -0
  24. package/dist/agents/skills/claude-code/harness-design/skill.yaml +53 -0
  25. package/dist/agents/skills/claude-code/harness-design-mobile/SKILL.md +336 -0
  26. package/dist/agents/skills/claude-code/harness-design-mobile/skill.yaml +49 -0
  27. package/dist/agents/skills/claude-code/harness-design-system/SKILL.md +282 -0
  28. package/dist/agents/skills/claude-code/harness-design-system/skill.yaml +50 -0
  29. package/dist/agents/skills/claude-code/harness-design-web/SKILL.md +360 -0
  30. package/dist/agents/skills/claude-code/harness-design-web/skill.yaml +52 -0
  31. package/dist/agents/skills/claude-code/harness-docs-pipeline/SKILL.md +460 -0
  32. package/dist/agents/skills/claude-code/harness-docs-pipeline/skill.yaml +69 -0
  33. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +73 -8
  34. package/dist/agents/skills/claude-code/harness-execution/skill.yaml +1 -0
  35. package/dist/agents/skills/claude-code/harness-hotspot-detector/SKILL.md +32 -6
  36. package/dist/agents/skills/claude-code/harness-hotspot-detector/skill.yaml +1 -1
  37. package/dist/agents/skills/claude-code/harness-i18n/SKILL.md +484 -0
  38. package/dist/agents/skills/claude-code/harness-i18n/skill.yaml +54 -0
  39. package/dist/agents/skills/claude-code/harness-i18n-process/SKILL.md +388 -0
  40. package/dist/agents/skills/claude-code/harness-i18n-process/skill.yaml +43 -0
  41. package/dist/agents/skills/claude-code/harness-i18n-workflow/SKILL.md +512 -0
  42. package/dist/agents/skills/claude-code/harness-i18n-workflow/skill.yaml +53 -0
  43. package/dist/agents/skills/claude-code/harness-impact-analysis/SKILL.md +51 -6
  44. package/dist/agents/skills/claude-code/harness-integrity/SKILL.md +35 -1
  45. package/dist/agents/skills/claude-code/harness-knowledge-mapper/SKILL.md +46 -5
  46. package/dist/agents/skills/claude-code/harness-knowledge-mapper/skill.yaml +1 -1
  47. package/dist/agents/skills/claude-code/harness-onboarding/SKILL.md +19 -1
  48. package/dist/agents/skills/claude-code/harness-perf/SKILL.md +37 -8
  49. package/dist/agents/skills/claude-code/harness-perf/skill.yaml +3 -0
  50. package/dist/agents/skills/claude-code/harness-perf-tdd/SKILL.md +17 -4
  51. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +57 -3
  52. package/dist/agents/skills/claude-code/harness-planning/skill.yaml +2 -0
  53. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +29 -9
  54. package/dist/agents/skills/claude-code/harness-roadmap/SKILL.md +562 -0
  55. package/dist/agents/skills/claude-code/harness-roadmap/skill.yaml +43 -0
  56. package/dist/agents/skills/claude-code/harness-security-review/SKILL.md +36 -2
  57. package/dist/agents/skills/claude-code/harness-security-review/skill.yaml +8 -6
  58. package/dist/agents/skills/claude-code/harness-security-scan/skill.yaml +1 -1
  59. package/dist/agents/skills/claude-code/harness-soundness-review/SKILL.md +1267 -0
  60. package/dist/agents/skills/claude-code/harness-soundness-review/skill.yaml +48 -0
  61. package/dist/agents/skills/claude-code/harness-test-advisor/SKILL.md +35 -6
  62. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +66 -0
  63. package/dist/agents/skills/claude-code/harness-verification/skill.yaml +1 -0
  64. package/dist/agents/skills/claude-code/harness-verify/SKILL.md +37 -0
  65. package/dist/agents/skills/claude-code/initialize-harness-project/SKILL.md +15 -1
  66. package/dist/agents/skills/claude-code/validate-context-engineering/SKILL.md +12 -0
  67. package/dist/agents/skills/gemini-cli/harness-accessibility/SKILL.md +281 -0
  68. package/dist/agents/skills/gemini-cli/harness-accessibility/skill.yaml +51 -0
  69. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +119 -72
  70. package/dist/agents/skills/gemini-cli/harness-autopilot/skill.yaml +4 -2
  71. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/SKILL.md +226 -0
  72. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/skill.yaml +64 -0
  73. package/dist/agents/skills/gemini-cli/harness-dependency-health/SKILL.md +35 -6
  74. package/dist/agents/skills/gemini-cli/harness-dependency-health/skill.yaml +1 -1
  75. package/dist/agents/skills/gemini-cli/harness-design/SKILL.md +265 -0
  76. package/dist/agents/skills/gemini-cli/harness-design/skill.yaml +53 -0
  77. package/dist/agents/skills/gemini-cli/harness-design-mobile/SKILL.md +336 -0
  78. package/dist/agents/skills/gemini-cli/harness-design-mobile/skill.yaml +49 -0
  79. package/dist/agents/skills/gemini-cli/harness-design-system/SKILL.md +282 -0
  80. package/dist/agents/skills/gemini-cli/harness-design-system/skill.yaml +50 -0
  81. package/dist/agents/skills/gemini-cli/harness-design-web/SKILL.md +360 -0
  82. package/dist/agents/skills/gemini-cli/harness-design-web/skill.yaml +52 -0
  83. package/dist/agents/skills/gemini-cli/harness-docs-pipeline/SKILL.md +460 -0
  84. package/dist/agents/skills/gemini-cli/harness-docs-pipeline/skill.yaml +69 -0
  85. package/dist/agents/skills/gemini-cli/harness-hotspot-detector/SKILL.md +32 -6
  86. package/dist/agents/skills/gemini-cli/harness-hotspot-detector/skill.yaml +1 -1
  87. package/dist/agents/skills/gemini-cli/harness-i18n/SKILL.md +484 -0
  88. package/dist/agents/skills/gemini-cli/harness-i18n/skill.yaml +54 -0
  89. package/dist/agents/skills/gemini-cli/harness-i18n-process/SKILL.md +388 -0
  90. package/dist/agents/skills/gemini-cli/harness-i18n-process/skill.yaml +43 -0
  91. package/dist/agents/skills/gemini-cli/harness-i18n-workflow/SKILL.md +512 -0
  92. package/dist/agents/skills/gemini-cli/harness-i18n-workflow/skill.yaml +53 -0
  93. package/dist/agents/skills/gemini-cli/harness-impact-analysis/SKILL.md +51 -6
  94. package/dist/agents/skills/gemini-cli/harness-knowledge-mapper/SKILL.md +46 -5
  95. package/dist/agents/skills/gemini-cli/harness-knowledge-mapper/skill.yaml +1 -1
  96. package/dist/agents/skills/gemini-cli/harness-perf/SKILL.md +37 -8
  97. package/dist/agents/skills/gemini-cli/harness-perf/skill.yaml +3 -0
  98. package/dist/agents/skills/gemini-cli/harness-perf-tdd/SKILL.md +17 -4
  99. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +29 -9
  100. package/dist/agents/skills/gemini-cli/harness-roadmap/SKILL.md +562 -0
  101. package/dist/agents/skills/gemini-cli/harness-roadmap/skill.yaml +43 -0
  102. package/dist/agents/skills/gemini-cli/harness-security-review/skill.yaml +8 -6
  103. package/dist/agents/skills/gemini-cli/harness-security-scan/skill.yaml +1 -1
  104. package/dist/agents/skills/gemini-cli/harness-soundness-review/SKILL.md +1267 -0
  105. package/dist/agents/skills/gemini-cli/harness-soundness-review/skill.yaml +48 -0
  106. package/dist/agents/skills/gemini-cli/harness-test-advisor/SKILL.md +35 -6
  107. package/dist/agents/skills/node_modules/.bin/vitest +2 -2
  108. package/dist/agents/skills/shared/design-knowledge/anti-patterns/color.yaml +106 -0
  109. package/dist/agents/skills/shared/design-knowledge/anti-patterns/layout.yaml +109 -0
  110. package/dist/agents/skills/shared/design-knowledge/anti-patterns/motion.yaml +109 -0
  111. package/dist/agents/skills/shared/design-knowledge/anti-patterns/typography.yaml +112 -0
  112. package/dist/agents/skills/shared/design-knowledge/industries/creative.yaml +80 -0
  113. package/dist/agents/skills/shared/design-knowledge/industries/ecommerce.yaml +80 -0
  114. package/dist/agents/skills/shared/design-knowledge/industries/emerging-tech.yaml +83 -0
  115. package/dist/agents/skills/shared/design-knowledge/industries/fintech.yaml +80 -0
  116. package/dist/agents/skills/shared/design-knowledge/industries/healthcare.yaml +80 -0
  117. package/dist/agents/skills/shared/design-knowledge/industries/lifestyle.yaml +80 -0
  118. package/dist/agents/skills/shared/design-knowledge/industries/saas.yaml +80 -0
  119. package/dist/agents/skills/shared/design-knowledge/industries/services.yaml +80 -0
  120. package/dist/agents/skills/shared/design-knowledge/palettes/curated.yaml +234 -0
  121. package/dist/agents/skills/shared/design-knowledge/platform-rules/android.yaml +125 -0
  122. package/dist/agents/skills/shared/design-knowledge/platform-rules/flutter.yaml +144 -0
  123. package/dist/agents/skills/shared/design-knowledge/platform-rules/ios.yaml +106 -0
  124. package/dist/agents/skills/shared/design-knowledge/platform-rules/web.yaml +102 -0
  125. package/dist/agents/skills/shared/design-knowledge/typography/pairings.yaml +274 -0
  126. package/dist/agents/skills/shared/i18n-knowledge/accessibility/intersection.yaml +142 -0
  127. package/dist/agents/skills/shared/i18n-knowledge/anti-patterns/encoding.yaml +67 -0
  128. package/dist/agents/skills/shared/i18n-knowledge/anti-patterns/formatting.yaml +106 -0
  129. package/dist/agents/skills/shared/i18n-knowledge/anti-patterns/layout.yaml +80 -0
  130. package/dist/agents/skills/shared/i18n-knowledge/anti-patterns/pluralization.yaml +80 -0
  131. package/dist/agents/skills/shared/i18n-knowledge/anti-patterns/string-handling.yaml +106 -0
  132. package/dist/agents/skills/shared/i18n-knowledge/frameworks/android-resources.yaml +47 -0
  133. package/dist/agents/skills/shared/i18n-knowledge/frameworks/apple-strings.yaml +47 -0
  134. package/dist/agents/skills/shared/i18n-knowledge/frameworks/backend-patterns.yaml +50 -0
  135. package/dist/agents/skills/shared/i18n-knowledge/frameworks/flutter-intl.yaml +47 -0
  136. package/dist/agents/skills/shared/i18n-knowledge/frameworks/i18next.yaml +47 -0
  137. package/dist/agents/skills/shared/i18n-knowledge/frameworks/react-intl.yaml +47 -0
  138. package/dist/agents/skills/shared/i18n-knowledge/frameworks/vue-i18n.yaml +47 -0
  139. package/dist/agents/skills/shared/i18n-knowledge/industries/ecommerce.yaml +66 -0
  140. package/dist/agents/skills/shared/i18n-knowledge/industries/fintech.yaml +66 -0
  141. package/dist/agents/skills/shared/i18n-knowledge/industries/gaming.yaml +69 -0
  142. package/dist/agents/skills/shared/i18n-knowledge/industries/healthcare.yaml +66 -0
  143. package/dist/agents/skills/shared/i18n-knowledge/industries/legal.yaml +66 -0
  144. package/dist/agents/skills/shared/i18n-knowledge/locales/ar.yaml +41 -0
  145. package/dist/agents/skills/shared/i18n-knowledge/locales/de.yaml +35 -0
  146. package/dist/agents/skills/shared/i18n-knowledge/locales/en.yaml +32 -0
  147. package/dist/agents/skills/shared/i18n-knowledge/locales/es.yaml +35 -0
  148. package/dist/agents/skills/shared/i18n-knowledge/locales/fi.yaml +35 -0
  149. package/dist/agents/skills/shared/i18n-knowledge/locales/fr.yaml +35 -0
  150. package/dist/agents/skills/shared/i18n-knowledge/locales/he.yaml +41 -0
  151. package/dist/agents/skills/shared/i18n-knowledge/locales/hi.yaml +35 -0
  152. package/dist/agents/skills/shared/i18n-knowledge/locales/it.yaml +32 -0
  153. package/dist/agents/skills/shared/i18n-knowledge/locales/ja.yaml +38 -0
  154. package/dist/agents/skills/shared/i18n-knowledge/locales/ko.yaml +38 -0
  155. package/dist/agents/skills/shared/i18n-knowledge/locales/nl.yaml +32 -0
  156. package/dist/agents/skills/shared/i18n-knowledge/locales/pl.yaml +35 -0
  157. package/dist/agents/skills/shared/i18n-knowledge/locales/pt.yaml +32 -0
  158. package/dist/agents/skills/shared/i18n-knowledge/locales/ru.yaml +35 -0
  159. package/dist/agents/skills/shared/i18n-knowledge/locales/sv.yaml +32 -0
  160. package/dist/agents/skills/shared/i18n-knowledge/locales/th.yaml +35 -0
  161. package/dist/agents/skills/shared/i18n-knowledge/locales/tr.yaml +35 -0
  162. package/dist/agents/skills/shared/i18n-knowledge/locales/zh-Hans.yaml +38 -0
  163. package/dist/agents/skills/shared/i18n-knowledge/locales/zh-Hant.yaml +35 -0
  164. package/dist/agents/skills/shared/i18n-knowledge/mcp-interop/i18next-mcp.yaml +56 -0
  165. package/dist/agents/skills/shared/i18n-knowledge/mcp-interop/lingo-dev.yaml +56 -0
  166. package/dist/agents/skills/shared/i18n-knowledge/mcp-interop/lokalise.yaml +60 -0
  167. package/dist/agents/skills/shared/i18n-knowledge/mcp-interop/tolgee.yaml +60 -0
  168. package/dist/agents/skills/shared/i18n-knowledge/testing/locale-testing.yaml +107 -0
  169. package/dist/agents/skills/shared/i18n-knowledge/testing/pseudo-localization.yaml +86 -0
  170. package/dist/bin/harness.js +64 -4
  171. package/dist/{chunk-UDWGSL3T.js → chunk-3JWCBVUZ.js} +3 -3
  172. package/dist/{chunk-IUFFBBYV.js → chunk-LNI4T7R6.js} +179 -61
  173. package/dist/{chunk-USEYPS7F.js → chunk-SJECMKSS.js} +2250 -40
  174. package/dist/{dist-4MYPT3OE.js → dist-BDO5GFEM.js} +295 -14
  175. package/dist/{dist-RBZXXJHG.js → dist-NT3GXHQZ.js} +95 -1
  176. package/dist/index.d.ts +266 -7
  177. package/dist/index.js +7 -3
  178. package/dist/validate-cross-check-2OPGCGGU.js +7 -0
  179. package/package.json +7 -7
  180. package/dist/validate-cross-check-CPEPNLOD.js +0 -7
@@ -46,7 +46,7 @@ import { dirname as dirname6, basename as basename4, join as join5 } from "path"
46
46
  import { z } from "zod";
47
47
  import { readFileSync, writeFileSync, mkdirSync, existsSync } from "fs";
48
48
  import { join as join6, dirname as dirname7 } from "path";
49
- import { execFileSync, execSync } from "child_process";
49
+ import { execFileSync } from "child_process";
50
50
  import * as fs2 from "fs";
51
51
  import * as path from "path";
52
52
  import { appendFileSync, writeFileSync as writeFileSync2, existsSync as existsSync2, mkdirSync as mkdirSync2 } from "fs";
@@ -54,16 +54,26 @@ import { dirname as dirname8 } from "path";
54
54
  import { z as z2 } from "zod";
55
55
  import * as fs4 from "fs";
56
56
  import * as path3 from "path";
57
- import { execSync as execSync3 } from "child_process";
57
+ import { execSync as execSync2 } from "child_process";
58
58
  import * as fs3 from "fs";
59
59
  import * as path2 from "path";
60
- import { execSync as execSync2 } from "child_process";
60
+ import { execSync } from "child_process";
61
61
  import { z as z3 } from "zod";
62
62
  import * as fs6 from "fs/promises";
63
63
  import { z as z4 } from "zod";
64
64
  import * as fs5 from "fs";
65
65
  import * as path4 from "path";
66
66
  import * as path5 from "path";
67
+ import * as path6 from "path";
68
+ import * as path7 from "path";
69
+ import * as path8 from "path";
70
+ import * as fs7 from "fs";
71
+ import * as path9 from "path";
72
+ import { z as z5 } from "zod";
73
+ import * as fs8 from "fs";
74
+ import * as path10 from "path";
75
+ import * as os from "os";
76
+ import { spawn } from "child_process";
67
77
  function createError(code, message, details = {}, suggestions = []) {
68
78
  return { code, message, details, suggestions };
69
79
  }
@@ -72,17 +82,17 @@ function createEntropyError(code, message, details = {}, suggestions = []) {
72
82
  }
73
83
  var accessAsync = promisify(access);
74
84
  var readFileAsync = promisify(readFile);
75
- async function fileExists(path6) {
85
+ async function fileExists(path11) {
76
86
  try {
77
- await accessAsync(path6, constants.F_OK);
87
+ await accessAsync(path11, constants.F_OK);
78
88
  return true;
79
89
  } catch {
80
90
  return false;
81
91
  }
82
92
  }
83
- async function readFileContent(path6) {
93
+ async function readFileContent(path11) {
84
94
  try {
85
- const content = await readFileAsync(path6, "utf-8");
95
+ const content = await readFileAsync(path11, "utf-8");
86
96
  return Ok(content);
87
97
  } catch (error) {
88
98
  return Err(error);
@@ -126,15 +136,15 @@ function validateConfig(data, schema) {
126
136
  let message = "Configuration validation failed";
127
137
  const suggestions = [];
128
138
  if (firstError) {
129
- const path6 = firstError.path.join(".");
130
- const pathDisplay = path6 ? ` at "${path6}"` : "";
139
+ const path11 = firstError.path.join(".");
140
+ const pathDisplay = path11 ? ` at "${path11}"` : "";
131
141
  if (firstError.code === "invalid_type") {
132
142
  const received = firstError.received;
133
143
  const expected = firstError.expected;
134
144
  if (received === "undefined") {
135
145
  code = "MISSING_FIELD";
136
146
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
137
- suggestions.push(`Field "${path6}" is required and must be of type "${expected}"`);
147
+ suggestions.push(`Field "${path11}" is required and must be of type "${expected}"`);
138
148
  } else {
139
149
  code = "INVALID_TYPE";
140
150
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -340,30 +350,30 @@ function extractSections(content) {
340
350
  return result;
341
351
  });
342
352
  }
343
- function isExternalLink(path6) {
344
- return path6.startsWith("http://") || path6.startsWith("https://") || path6.startsWith("#") || path6.startsWith("mailto:");
353
+ function isExternalLink(path11) {
354
+ return path11.startsWith("http://") || path11.startsWith("https://") || path11.startsWith("#") || path11.startsWith("mailto:");
345
355
  }
346
356
  function resolveLinkPath(linkPath, baseDir) {
347
357
  return linkPath.startsWith(".") ? join(baseDir, linkPath) : linkPath;
348
358
  }
349
- async function validateAgentsMap(path6 = "./AGENTS.md") {
359
+ async function validateAgentsMap(path11 = "./AGENTS.md") {
350
360
  console.warn(
351
361
  "[harness] validateAgentsMap() is deprecated. Use graph-based validation via Assembler.checkCoverage() from @harness-engineering/graph"
352
362
  );
353
- const contentResult = await readFileContent(path6);
363
+ const contentResult = await readFileContent(path11);
354
364
  if (!contentResult.ok) {
355
365
  return Err(
356
366
  createError(
357
367
  "PARSE_ERROR",
358
368
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
359
- { path: path6 },
369
+ { path: path11 },
360
370
  ["Ensure the file exists", "Check file permissions"]
361
371
  )
362
372
  );
363
373
  }
364
374
  const content = contentResult.value;
365
375
  const sections = extractSections(content);
366
- const baseDir = dirname(path6);
376
+ const baseDir = dirname(path11);
367
377
  const sectionTitles = sections.map((s) => s.title);
368
378
  const missingSections = REQUIRED_SECTIONS.filter(
369
379
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -499,8 +509,8 @@ async function checkDocCoverage(domain, options = {}) {
499
509
  );
500
510
  }
501
511
  }
502
- function suggestFix(path6, existingFiles) {
503
- const targetName = basename2(path6).toLowerCase();
512
+ function suggestFix(path11, existingFiles) {
513
+ const targetName = basename2(path11).toLowerCase();
504
514
  const similar = existingFiles.find((file) => {
505
515
  const fileName = basename2(file).toLowerCase();
506
516
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -508,7 +518,7 @@ function suggestFix(path6, existingFiles) {
508
518
  if (similar) {
509
519
  return `Did you mean "${similar}"?`;
510
520
  }
511
- return `Create the file "${path6}" or remove the link`;
521
+ return `Create the file "${path11}" or remove the link`;
512
522
  }
513
523
  async function validateKnowledgeMap(rootDir = process.cwd()) {
514
524
  console.warn(
@@ -1085,8 +1095,8 @@ function createBoundaryValidator(schema, name) {
1085
1095
  return Ok(result.data);
1086
1096
  }
1087
1097
  const suggestions = result.error.issues.map((issue) => {
1088
- const path6 = issue.path.join(".");
1089
- return path6 ? `${path6}: ${issue.message}` : issue.message;
1098
+ const path11 = issue.path.join(".");
1099
+ return path11 ? `${path11}: ${issue.message}` : issue.message;
1090
1100
  });
1091
1101
  return Err(
1092
1102
  createError(
@@ -1148,11 +1158,11 @@ function walk(node, visitor) {
1148
1158
  var TypeScriptParser = class {
1149
1159
  name = "typescript";
1150
1160
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1151
- async parseFile(path6) {
1152
- const contentResult = await readFileContent(path6);
1161
+ async parseFile(path11) {
1162
+ const contentResult = await readFileContent(path11);
1153
1163
  if (!contentResult.ok) {
1154
1164
  return Err(
1155
- createParseError("NOT_FOUND", `File not found: ${path6}`, { path: path6 }, [
1165
+ createParseError("NOT_FOUND", `File not found: ${path11}`, { path: path11 }, [
1156
1166
  "Check that the file exists",
1157
1167
  "Verify the path is correct"
1158
1168
  ])
@@ -1162,7 +1172,7 @@ var TypeScriptParser = class {
1162
1172
  const ast = parse(contentResult.value, {
1163
1173
  loc: true,
1164
1174
  range: true,
1165
- jsx: path6.endsWith(".tsx"),
1175
+ jsx: path11.endsWith(".tsx"),
1166
1176
  errorOnUnknownASTType: false
1167
1177
  });
1168
1178
  return Ok({
@@ -1173,7 +1183,7 @@ var TypeScriptParser = class {
1173
1183
  } catch (e) {
1174
1184
  const error = e;
1175
1185
  return Err(
1176
- createParseError("SYNTAX_ERROR", `Failed to parse ${path6}: ${error.message}`, { path: path6 }, [
1186
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path11}: ${error.message}`, { path: path11 }, [
1177
1187
  "Check for syntax errors in the file",
1178
1188
  "Ensure valid TypeScript syntax"
1179
1189
  ])
@@ -1453,22 +1463,22 @@ function extractInlineRefs(content) {
1453
1463
  }
1454
1464
  return refs;
1455
1465
  }
1456
- async function parseDocumentationFile(path6) {
1457
- const contentResult = await readFileContent(path6);
1466
+ async function parseDocumentationFile(path11) {
1467
+ const contentResult = await readFileContent(path11);
1458
1468
  if (!contentResult.ok) {
1459
1469
  return Err(
1460
1470
  createEntropyError(
1461
1471
  "PARSE_ERROR",
1462
- `Failed to read documentation file: ${path6}`,
1463
- { file: path6 },
1472
+ `Failed to read documentation file: ${path11}`,
1473
+ { file: path11 },
1464
1474
  ["Check that the file exists"]
1465
1475
  )
1466
1476
  );
1467
1477
  }
1468
1478
  const content = contentResult.value;
1469
- const type = path6.endsWith(".md") ? "markdown" : "text";
1479
+ const type = path11.endsWith(".md") ? "markdown" : "text";
1470
1480
  return Ok({
1471
- path: path6,
1481
+ path: path11,
1472
1482
  type,
1473
1483
  content,
1474
1484
  codeBlocks: extractCodeBlocks(content),
@@ -3143,6 +3153,40 @@ function createUnusedImportFixes(deadCodeReport) {
3143
3153
  reversible: true
3144
3154
  }));
3145
3155
  }
3156
+ function createDeadExportFixes(deadCodeReport) {
3157
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
3158
+ type: "dead-exports",
3159
+ file: exp.file,
3160
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3161
+ action: "replace",
3162
+ oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3163
+ newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3164
+ safe: true,
3165
+ reversible: true
3166
+ }));
3167
+ }
3168
+ function createCommentedCodeFixes(blocks) {
3169
+ return blocks.map((block) => ({
3170
+ type: "commented-code",
3171
+ file: block.file,
3172
+ description: `Remove commented-out code block (lines ${block.startLine}-${block.endLine})`,
3173
+ action: "replace",
3174
+ oldContent: block.content,
3175
+ newContent: "",
3176
+ safe: true,
3177
+ reversible: true
3178
+ }));
3179
+ }
3180
+ function createOrphanedDepFixes(deps) {
3181
+ return deps.map((dep) => ({
3182
+ type: "orphaned-deps",
3183
+ file: dep.packageJsonPath,
3184
+ description: `Remove orphaned dependency: ${dep.name}`,
3185
+ action: "replace",
3186
+ safe: true,
3187
+ reversible: true
3188
+ }));
3189
+ }
3146
3190
  function createFixes(deadCodeReport, config) {
3147
3191
  const fullConfig = { ...DEFAULT_FIX_CONFIG, ...config };
3148
3192
  const fixes = [];
@@ -3152,6 +3196,9 @@ function createFixes(deadCodeReport, config) {
3152
3196
  if (fullConfig.fixTypes.includes("unused-imports")) {
3153
3197
  fixes.push(...createUnusedImportFixes(deadCodeReport));
3154
3198
  }
3199
+ if (fullConfig.fixTypes.includes("dead-exports")) {
3200
+ fixes.push(...createDeadExportFixes(deadCodeReport));
3201
+ }
3155
3202
  return fixes;
3156
3203
  }
3157
3204
  function previewFix(fix) {
@@ -3271,6 +3318,129 @@ async function applyFixes(fixes, config) {
3271
3318
  }
3272
3319
  });
3273
3320
  }
3321
+ function createForbiddenImportFixes(violations) {
3322
+ return violations.filter((v) => v.alternative !== void 0).map((v) => ({
3323
+ type: "forbidden-import-replacement",
3324
+ file: v.file,
3325
+ description: `Replace forbidden import '${v.forbiddenImport}' with '${v.alternative}'`,
3326
+ action: "replace",
3327
+ line: v.line,
3328
+ oldContent: `from '${v.forbiddenImport}'`,
3329
+ newContent: `from '${v.alternative}'`,
3330
+ safe: true,
3331
+ reversible: true
3332
+ }));
3333
+ }
3334
+ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
3335
+ "upward-dependency",
3336
+ "skip-layer-dependency",
3337
+ "circular-dependency",
3338
+ "dead-internal"
3339
+ ]);
3340
+ var idCounter = 0;
3341
+ function classifyFinding(input) {
3342
+ idCounter++;
3343
+ const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
3344
+ let safety;
3345
+ let safetyReason;
3346
+ let fixAction;
3347
+ let suggestion;
3348
+ if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
3349
+ safety = "unsafe";
3350
+ safetyReason = `${input.type} requires human judgment`;
3351
+ suggestion = "Review and refactor manually";
3352
+ } else if (input.concern === "dead-code") {
3353
+ if (input.isPublicApi) {
3354
+ safety = "unsafe";
3355
+ safetyReason = "Public API export may have external consumers";
3356
+ suggestion = "Deprecate before removing";
3357
+ } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
3358
+ safety = "safe";
3359
+ safetyReason = "zero importers, non-public";
3360
+ fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
3361
+ suggestion = fixAction;
3362
+ } else if (input.type === "orphaned-dep") {
3363
+ safety = "probably-safe";
3364
+ safetyReason = "No imports found, but needs install+test verification";
3365
+ fixAction = "Remove from package.json";
3366
+ suggestion = fixAction;
3367
+ } else {
3368
+ safety = "unsafe";
3369
+ safetyReason = "Unknown dead code type";
3370
+ suggestion = "Manual review required";
3371
+ }
3372
+ } else {
3373
+ if (input.type === "import-ordering") {
3374
+ safety = "safe";
3375
+ safetyReason = "Mechanical reorder, no semantic change";
3376
+ fixAction = "Reorder imports";
3377
+ suggestion = fixAction;
3378
+ } else if (input.type === "forbidden-import" && input.hasAlternative) {
3379
+ safety = "probably-safe";
3380
+ safetyReason = "Alternative configured, needs typecheck+test";
3381
+ fixAction = "Replace with configured alternative";
3382
+ suggestion = fixAction;
3383
+ } else {
3384
+ safety = "unsafe";
3385
+ safetyReason = `${input.type} requires structural changes`;
3386
+ suggestion = "Restructure code to fix violation";
3387
+ }
3388
+ }
3389
+ return {
3390
+ id,
3391
+ concern: input.concern,
3392
+ file: input.file,
3393
+ ...input.line !== void 0 ? { line: input.line } : {},
3394
+ type: input.type,
3395
+ description: input.description,
3396
+ safety,
3397
+ safetyReason,
3398
+ hotspotDowngraded: false,
3399
+ ...fixAction !== void 0 ? { fixAction } : {},
3400
+ suggestion
3401
+ };
3402
+ }
3403
+ function applyHotspotDowngrade(finding, hotspot) {
3404
+ if (finding.safety !== "safe") return finding;
3405
+ const churn = hotspot.churnMap.get(finding.file) ?? 0;
3406
+ if (churn >= hotspot.topPercentileThreshold) {
3407
+ return {
3408
+ ...finding,
3409
+ safety: "probably-safe",
3410
+ safetyReason: `${finding.safetyReason}; downgraded due to high churn (${churn} commits)`,
3411
+ hotspotDowngraded: true
3412
+ };
3413
+ }
3414
+ return finding;
3415
+ }
3416
+ function deduplicateCleanupFindings(findings) {
3417
+ const byFileAndLine = /* @__PURE__ */ new Map();
3418
+ for (const f of findings) {
3419
+ const key = `${f.file}:${f.line ?? "none"}`;
3420
+ const group = byFileAndLine.get(key) ?? [];
3421
+ group.push(f);
3422
+ byFileAndLine.set(key, group);
3423
+ }
3424
+ const result = [];
3425
+ for (const group of byFileAndLine.values()) {
3426
+ if (group.length === 1) {
3427
+ result.push(group[0]);
3428
+ continue;
3429
+ }
3430
+ const deadCode = group.find((f) => f.concern === "dead-code");
3431
+ const arch = group.find((f) => f.concern === "architecture");
3432
+ if (deadCode && arch) {
3433
+ result.push({
3434
+ ...deadCode,
3435
+ description: `${deadCode.description} (also violates architecture: ${arch.type})`,
3436
+ suggestion: deadCode.fixAction ? `${deadCode.fixAction} (resolves both dead code and architecture violation)` : deadCode.suggestion
3437
+ });
3438
+ } else {
3439
+ result.push(...group);
3440
+ }
3441
+ }
3442
+ return result;
3443
+ }
3274
3444
  var MustExportRuleSchema = z.object({
3275
3445
  type: z.literal("must-export"),
3276
3446
  names: z.array(z.string())
@@ -3492,7 +3662,7 @@ var BenchmarkRunner = class {
3492
3662
  }
3493
3663
  args.push("--reporter=json");
3494
3664
  try {
3495
- const rawOutput = execSync(`npx ${args.join(" ")}`, {
3665
+ const rawOutput = execFileSync("npx", args, {
3496
3666
  cwd,
3497
3667
  encoding: "utf-8",
3498
3668
  timeout,
@@ -3726,11 +3896,17 @@ function generateId() {
3726
3896
  if (typeof globalThis !== "undefined" && "crypto" in globalThis && typeof globalThis.crypto.randomUUID === "function") {
3727
3897
  return globalThis.crypto.randomUUID();
3728
3898
  }
3729
- return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, function(c) {
3730
- const r = Math.random() * 16 | 0;
3731
- const v = c === "x" ? r : r & 3 | 8;
3732
- return v.toString(16);
3733
- });
3899
+ if (typeof globalThis.crypto?.getRandomValues !== "function") {
3900
+ throw new Error(
3901
+ "No cryptographic random source available \u2014 requires Node.js 15+ or a browser with Web Crypto API"
3902
+ );
3903
+ }
3904
+ const bytes = new Uint8Array(16);
3905
+ globalThis.crypto.getRandomValues(bytes);
3906
+ bytes[6] = bytes[6] & 15 | 64;
3907
+ bytes[8] = bytes[8] & 63 | 128;
3908
+ const hex = [...bytes].map((b) => b.toString(16).padStart(2, "0")).join("");
3909
+ return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;
3734
3910
  }
3735
3911
  var NoOpExecutor = class {
3736
3912
  name = "noop";
@@ -4612,7 +4788,7 @@ function getCurrentBranch(projectPath) {
4612
4788
  return cached.branch;
4613
4789
  }
4614
4790
  try {
4615
- const branch = execSync2("git rev-parse --abbrev-ref HEAD", {
4791
+ const branch = execSync("git rev-parse --abbrev-ref HEAD", {
4616
4792
  cwd: projectPath,
4617
4793
  stdio: "pipe"
4618
4794
  }).toString().trim();
@@ -5068,10 +5244,21 @@ async function runMechanicalGate(projectPath) {
5068
5244
  }
5069
5245
  }
5070
5246
  const results = [];
5247
+ const SAFE_GATE_COMMAND = /^(?:npm|pnpm|yarn)\s+(?:test|run\s+[\w.-]+|run-script\s+[\w.-]+)$|^go\s+(?:test|build|vet|fmt)\s+[\w./ -]+$|^(?:python|python3)\s+-m\s+[\w.-]+$|^make\s+[\w.-]+$|^cargo\s+(?:test|build|check|clippy)(?:\s+[\w./ -]+)?$|^(?:gradle|mvn)\s+[\w:.-]+$/;
5071
5248
  for (const check of checks) {
5249
+ if (!SAFE_GATE_COMMAND.test(check.command)) {
5250
+ results.push({
5251
+ name: check.name,
5252
+ passed: false,
5253
+ command: check.command,
5254
+ output: `Blocked: command does not match safe gate pattern. Allowed prefixes: npm, npx, pnpm, yarn, go, python, python3, make, cargo, gradle, mvn`,
5255
+ duration: 0
5256
+ });
5257
+ continue;
5258
+ }
5072
5259
  const start = Date.now();
5073
5260
  try {
5074
- execSync3(check.command, {
5261
+ execSync2(check.command, {
5075
5262
  cwd: projectPath,
5076
5263
  stdio: "pipe",
5077
5264
  timeout: 12e4
@@ -5727,6 +5914,7 @@ var SecurityScanner = class {
5727
5914
  if (resolved === "off") continue;
5728
5915
  for (let i = 0; i < lines.length; i++) {
5729
5916
  const line = lines[i] ?? "";
5917
+ if (line.includes("harness-ignore") && line.includes(rule.id)) continue;
5730
5918
  for (const pattern of rule.patterns) {
5731
5919
  pattern.lastIndex = 0;
5732
5920
  if (pattern.test(line)) {
@@ -6025,6 +6213,1981 @@ async function runCIChecks(input) {
6025
6213
  return Err(error instanceof Error ? error : new Error(String(error)));
6026
6214
  }
6027
6215
  }
6216
/**
 * Runs the fast, deterministic "mechanical" review checks and aggregates
 * their findings:
 *  - validate:      AGENTS.md map validation (errors fail the check)
 *  - check-deps:    architecture layer dependency validation
 *  - check-docs:    documentation coverage (warnings only)
 *  - security-scan: pattern-based security scan of changed files
 *
 * @param options - { projectRoot, config, skip?, changedFiles? }; names in
 *   `skip` disable the corresponding check (its status stays "skip").
 * @returns Ok({ pass, stopPipeline, findings, checks }) where `pass` is
 *   false when any error-severity finding exists and `stopPipeline` is true
 *   when validate or check-deps failed.
 *
 * Fix vs. previous version: the validate/check-docs catch handlers now
 * report the path that was actually checked instead of hard-coding
 * "AGENTS.md" / "docs" and ignoring config overrides.
 */
async function runMechanicalChecks(options) {
  const { projectRoot, config, skip = [], changedFiles } = options;
  const findings = [];
  // Every check starts as "skip" and is flipped only when it actually runs.
  const statuses = {
    validate: "skip",
    "check-deps": "skip",
    "check-docs": "skip",
    "security-scan": "skip"
  };
  if (!skip.includes("validate")) {
    // Tracked outside the try so the catch handler reports the real path
    // (falls back to the default if config access itself threw).
    let agentsPath = path6.join(projectRoot, "AGENTS.md");
    try {
      agentsPath = path6.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
      const result = await validateAgentsMap(agentsPath);
      if (!result.ok) {
        statuses.validate = "fail";
        findings.push({
          tool: "validate",
          file: agentsPath,
          message: result.error.message,
          severity: "error"
        });
      } else if (!result.value.valid) {
        statuses.validate = "fail";
        if (result.value.errors) {
          for (const err of result.value.errors) {
            findings.push({
              tool: "validate",
              file: agentsPath,
              message: err.message,
              severity: "error"
            });
          }
        }
        // Missing sections are warnings: the map parsed but is incomplete.
        for (const section of result.value.missingSections) {
          findings.push({
            tool: "validate",
            file: agentsPath,
            message: `Missing section: ${section}`,
            severity: "warning"
          });
        }
      } else {
        statuses.validate = "pass";
      }
    } catch (err) {
      statuses.validate = "fail";
      findings.push({
        tool: "validate",
        file: agentsPath,
        message: err instanceof Error ? err.message : String(err),
        severity: "error"
      });
    }
  }
  if (!skip.includes("check-deps")) {
    try {
      const rawLayers = config.layers;
      if (rawLayers && rawLayers.length > 0) {
        const parser = new TypeScriptParser();
        const layers = rawLayers.map(
          (l) => defineLayer(
            l.name,
            Array.isArray(l.patterns) ? l.patterns : [l.pattern],
            l.allowedDependencies
          )
        );
        const result = await validateDependencies({
          layers,
          rootDir: projectRoot,
          parser
        });
        if (!result.ok) {
          statuses["check-deps"] = "fail";
          findings.push({
            tool: "check-deps",
            file: projectRoot,
            message: result.error.message,
            severity: "error"
          });
        } else if (result.value.violations.length > 0) {
          statuses["check-deps"] = "fail";
          for (const v of result.value.violations) {
            findings.push({
              tool: "check-deps",
              file: v.file,
              line: v.line,
              message: `Layer violation: ${v.fromLayer} -> ${v.toLayer}: ${v.reason}`,
              severity: "error"
            });
          }
        } else {
          statuses["check-deps"] = "pass";
        }
      } else {
        // No layer config means nothing to violate.
        statuses["check-deps"] = "pass";
      }
    } catch (err) {
      statuses["check-deps"] = "fail";
      findings.push({
        tool: "check-deps",
        file: projectRoot,
        message: err instanceof Error ? err.message : String(err),
        severity: "error"
      });
    }
  }
  if (!skip.includes("check-docs")) {
    // Same pattern as validate: remember the resolved docs dir for the catch.
    let docsDir = path6.join(projectRoot, "docs");
    try {
      docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
      const result = await checkDocCoverage("project", { docsDir });
      if (!result.ok) {
        statuses["check-docs"] = "warn";
        findings.push({
          tool: "check-docs",
          file: docsDir,
          message: result.error.message,
          severity: "warning"
        });
      } else if (result.value.gaps && result.value.gaps.length > 0) {
        statuses["check-docs"] = "warn";
        for (const gap of result.value.gaps) {
          findings.push({
            tool: "check-docs",
            file: gap.file,
            message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
            severity: "warning"
          });
        }
      } else {
        statuses["check-docs"] = "pass";
      }
    } catch (err) {
      // Doc coverage never blocks the pipeline; failures downgrade to warn.
      statuses["check-docs"] = "warn";
      findings.push({
        tool: "check-docs",
        file: docsDir,
        message: err instanceof Error ? err.message : String(err),
        severity: "warning"
      });
    }
  }
  if (!skip.includes("security-scan")) {
    try {
      const securityConfig = parseSecurityConfig(config.security);
      if (!securityConfig.enabled) {
        statuses["security-scan"] = "skip";
      } else {
        const scanner = new SecurityScanner(securityConfig);
        scanner.configureForProject(projectRoot);
        const filesToScan = changedFiles ?? [];
        const scanResult = await scanner.scanFiles(filesToScan);
        if (scanResult.findings.length > 0) {
          statuses["security-scan"] = "warn";
          for (const f of scanResult.findings) {
            findings.push({
              tool: "security-scan",
              file: f.file,
              line: f.line,
              ruleId: f.ruleId,
              message: f.message,
              // The aggregate model has no "info" level; fold it into warning.
              severity: f.severity === "info" ? "warning" : f.severity
            });
          }
        } else {
          statuses["security-scan"] = "pass";
        }
      }
    } catch (err) {
      statuses["security-scan"] = "warn";
      findings.push({
        tool: "security-scan",
        file: projectRoot,
        message: err instanceof Error ? err.message : String(err),
        severity: "warning"
      });
    }
  }
  const hasErrors = findings.some((f) => f.severity === "error");
  // Only structural failures stop the pipeline; docs/security only warn.
  const stopPipeline = statuses.validate === "fail" || statuses["check-deps"] === "fail";
  return Ok({
    pass: !hasErrors,
    stopPipeline,
    findings,
    checks: {
      validate: statuses.validate,
      checkDeps: statuses["check-deps"],
      checkDocs: statuses["check-docs"],
      securityScan: statuses["security-scan"]
    }
  });
}
6407
/**
 * Index of mechanical findings used to suppress overlapping agent findings.
 * Findings are bucketed by file for O(1) file lookup; a finding without a
 * line number is file-level and covers every range in that file.
 */
var ExclusionSet = class {
  /** Findings bucketed by file path for O(1) file lookup */
  byFile;
  allFindings;
  constructor(findings) {
    this.allFindings = [...findings];
    this.byFile = new Map();
    for (const finding of findings) {
      const bucket = this.byFile.get(finding.file);
      if (bucket === void 0) {
        this.byFile.set(finding.file, [finding]);
      } else {
        bucket.push(finding);
      }
    }
  }
  /**
   * Returns true if any mechanical finding covers the given file + line range.
   * A finding covers [startLine, endLine] when its file matches and it either
   * has no line (file-level finding) or its line falls inside the inclusive
   * range.
   */
  isExcluded(file, lineRange) {
    const candidates = this.byFile.get(file);
    if (candidates === void 0) return false;
    const [startLine, endLine] = lineRange;
    for (const finding of candidates) {
      if (finding.line === void 0) return true;
      if (finding.line >= startLine && finding.line <= endLine) return true;
    }
    return false;
  }
  /** Number of findings in the set */
  get size() {
    return this.allFindings.length;
  }
  /** Returns a copy of all findings */
  getFindings() {
    return [...this.allFindings];
  }
};
/** Convenience factory wrapping the ExclusionSet constructor. */
function buildExclusionSet(findings) {
  return new ExclusionSet(findings);
}
6452
// Conventional-commit prefixes mapped to change types; checked in order.
var PREFIX_PATTERNS = [
  { pattern: /^(feat|feature)(\([^)]*\))?:/i, type: "feature" },
  { pattern: /^(fix|bugfix)(\([^)]*\))?:/i, type: "bugfix" },
  { pattern: /^refactor(\([^)]*\))?:/i, type: "refactor" },
  { pattern: /^docs?(\([^)]*\))?:/i, type: "docs" }
];
// Filename conventions used by the diff-shape heuristics below.
var TEST_FILE_PATTERN = /\.(test|spec)\.(ts|tsx|js|jsx|mts|cts)$/;
var MD_FILE_PATTERN = /\.md$/;
/**
 * Classifies a change as "feature" | "bugfix" | "refactor" | "docs".
 *
 * Precedence: explicit commit-message prefix, then markdown-only diffs
 * (docs), then new non-test files (feature), then a tiny diff (< 20 lines)
 * that adds a test file (bugfix). Defaults to "feature".
 */
function detectChangeType(commitMessage, diff) {
  const message = commitMessage.trim();
  const prefixed = PREFIX_PATTERNS.find(({ pattern }) => pattern.test(message));
  if (prefixed) {
    return prefixed.type;
  }
  const onlyMarkdown = diff.changedFiles.length > 0 && diff.changedFiles.every((f) => MD_FILE_PATTERN.test(f));
  if (onlyMarkdown) {
    return "docs";
  }
  if (diff.newFiles.some((f) => !TEST_FILE_PATTERN.test(f))) {
    return "feature";
  }
  const addsTestFile = diff.newFiles.some((f) => TEST_FILE_PATTERN.test(f));
  if (diff.totalDiffLines < 20 && addsTestFile) {
    return "bugfix";
  }
  return "feature";
}
6480
// The review domains every change is scoped for, in processing order.
var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
// File-path keywords suggesting a dependency is security-sensitive.
var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
/**
 * Context-line budget for a review: diffs under 20 lines get 3x their size
 * (so tiny changes still carry useful context); larger diffs get 1x.
 */
function computeContextBudget(diffLines) {
  return diffLines < 20 ? diffLines * 3 : diffLines;
}
6486
/**
 * True when absPath resolves to a location inside projectRoot (or is the
 * root itself). Guards context gathering against escaping the project tree.
 */
function isWithinProject(absPath, projectRoot) {
  const root = path7.resolve(projectRoot);
  const target = path7.resolve(absPath);
  return target === root || target.startsWith(root + path7.sep);
}
/**
 * Reads one file into a context-file record `{ path, content, reason, lines }`
 * with a project-relative path. Returns null for paths outside the project
 * or files that cannot be read.
 */
async function readContextFile(projectRoot, filePath, reason) {
  const absPath = path7.isAbsolute(filePath) ? filePath : path7.join(projectRoot, filePath);
  if (!isWithinProject(absPath, projectRoot)) return null;
  const read = await readFileContent(absPath);
  if (!read.ok) return null;
  const content = read.value;
  const relPath = path7.isAbsolute(filePath) ? path7.relative(projectRoot, filePath) : filePath;
  return { path: relPath, content, reason, lines: content.split("\n").length };
}
6501
/**
 * Collects module specifiers from ES `import` statements and CommonJS
 * `require(...)` calls in the given source text, in order of appearance.
 */
function extractImportSources(content) {
  const importRegex = /(?:import\s+(?:.*?\s+from\s+)?['"]([^'"]+)['"]|require\(\s*['"]([^'"]+)['"]\s*\))/g;
  const sources = [];
  for (const match of content.matchAll(importRegex)) {
    // Group 1 is the ES-import specifier, group 2 the require() specifier.
    const specifier = match[1] ?? match[2];
    if (specifier) sources.push(specifier);
  }
  return sources;
}
6511
+ async function resolveImportPath2(projectRoot, fromFile, importSource) {
6512
+ if (!importSource.startsWith(".")) return null;
6513
+ const fromDir = path7.dirname(path7.join(projectRoot, fromFile));
6514
+ const basePath = path7.resolve(fromDir, importSource);
6515
+ if (!isWithinProject(basePath, projectRoot)) return null;
6516
+ const relBase = path7.relative(projectRoot, basePath);
6517
+ const candidates = [
6518
+ relBase + ".ts",
6519
+ relBase + ".tsx",
6520
+ relBase + ".mts",
6521
+ path7.join(relBase, "index.ts")
6522
+ ];
6523
+ for (const candidate of candidates) {
6524
+ const absCandidate = path7.join(projectRoot, candidate);
6525
+ if (await fileExists(absCandidate)) {
6526
+ return candidate;
6527
+ }
6528
+ }
6529
+ return null;
6530
+ }
6531
+ async function findTestFiles(projectRoot, sourceFile) {
6532
+ const baseName = path7.basename(sourceFile, path7.extname(sourceFile));
6533
+ const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
6534
+ const results = await findFiles(pattern, projectRoot);
6535
+ return results.map((f) => path7.relative(projectRoot, f));
6536
+ }
6537
/**
 * Gathers context files reachable via the static imports of the changed
 * files, stopping once `budget` total context lines have been collected.
 * Changed files and files already gathered are visited at most once.
 */
async function gatherImportContext(projectRoot, changedFiles, budget) {
  const gathered = [];
  let totalLines = 0;
  const visited = new Set(changedFiles.map((cf) => cf.path));
  for (const changed of changedFiles) {
    if (totalLines >= budget) break;
    for (const specifier of extractImportSources(changed.content)) {
      if (totalLines >= budget) break;
      const resolved = await resolveImportPath2(projectRoot, changed.path, specifier);
      if (resolved === null || visited.has(resolved)) continue;
      visited.add(resolved);
      const contextFile = await readContextFile(projectRoot, resolved, "import");
      if (contextFile !== null) {
        gathered.push(contextFile);
        totalLines += contextFile.lines;
      }
    }
  }
  return gathered;
}
6559
/**
 * Gathers context files from the dependency graph of each changed file,
 * capped at `budget` total lines. Graph lookups that throw are skipped
 * silently — context gathering is best-effort.
 */
async function gatherGraphDependencyContext(projectRoot, changedFilePaths, graph, budget) {
  const gathered = [];
  let totalLines = 0;
  const visited = new Set(changedFilePaths);
  for (const filePath of changedFilePaths) {
    if (totalLines >= budget) break;
    let dependencies;
    try {
      dependencies = await graph.getDependencies(filePath);
    } catch {
      continue;
    }
    for (const dependency of dependencies) {
      if (totalLines >= budget) break;
      if (visited.has(dependency)) continue;
      visited.add(dependency);
      const contextFile = await readContextFile(projectRoot, dependency, "graph-dependency");
      if (contextFile !== null) {
        gathered.push(contextFile);
        totalLines += contextFile.lines;
      }
    }
  }
  return gathered;
}
6584
/**
 * Gathers test files related to the changed files — via graph impact
 * analysis when a graph is available, otherwise by filename convention.
 * Test context is intentionally not counted against the line budget.
 */
async function gatherTestContext(projectRoot, changedFilePaths, graph) {
  const collected = [];
  const visited = new Set();
  const addTestFile = async (testFile) => {
    if (visited.has(testFile)) return;
    visited.add(testFile);
    const contextFile = await readContextFile(projectRoot, testFile, "test");
    if (contextFile) collected.push(contextFile);
  };
  for (const filePath of changedFilePaths) {
    if (graph) {
      let impact;
      try {
        impact = await graph.getImpact(filePath);
      } catch {
        // Best-effort: files unknown to the graph contribute no tests.
        continue;
      }
      for (const testFile of impact.tests) {
        await addTestFile(testFile);
      }
    } else {
      for (const testFile of await findTestFiles(projectRoot, filePath)) {
        await addTestFile(testFile);
      }
    }
  }
  return collected;
}
6615
/**
 * Context for the compliance agent: the project's convention documents
 * (CLAUDE.md / AGENTS.md by default). Changed files are ignored here —
 * compliance reviews against conventions, not dependencies.
 */
async function scopeComplianceContext(projectRoot, _changedFiles, options) {
  const conventionPaths = options.conventionFiles ?? ["CLAUDE.md", "AGENTS.md"];
  const contextFiles = [];
  for (const conventionPath of conventionPaths) {
    const contextFile = await readContextFile(projectRoot, conventionPath, "convention");
    if (contextFile) contextFiles.push(contextFile);
  }
  return contextFiles;
}
6624
/**
 * Context for the bug-detection agent: dependencies of the changed files
 * (graph-based when a graph is available, import-based otherwise) followed
 * by their related test files.
 */
async function scopeBugContext(projectRoot, changedFiles, budget, options) {
  const changedPaths = changedFiles.map((cf) => cf.path);
  const dependencies = options.graph ? await gatherGraphDependencyContext(projectRoot, changedPaths, options.graph, budget) : await gatherImportContext(projectRoot, changedFiles, budget);
  const tests = await gatherTestContext(projectRoot, changedPaths, options.graph);
  return [...dependencies, ...tests];
}
6643
/**
 * Context for the security agent. With a dependency graph, all dependencies
 * of the changed files are collected, security-suspicious paths (matching
 * SECURITY_PATTERNS) are read first, and reading stops when the line budget
 * is exhausted. Without a graph, falls back to import-based context.
 *
 * Fix vs. previous version: the budget check re-summed every gathered
 * context file with `reduce` on each loop iteration (accidental O(n^2));
 * a running total is kept instead. Same files gathered, same order.
 */
async function scopeSecurityContext(projectRoot, changedFiles, budget, options) {
  const contextFiles = [];
  const changedPaths = changedFiles.map((f) => f.path);
  if (options.graph) {
    const allPaths = [];
    for (const filePath of changedPaths) {
      try {
        const deps = await options.graph.getDependencies(filePath);
        allPaths.push(...deps);
      } catch {
        // Best-effort: files unknown to the graph contribute no deps.
        continue;
      }
    }
    // Read security-suspicious paths first so they land before the budget
    // runs out; ties keep their original relative order in practice.
    const uniquePaths = [...new Set(allPaths)];
    const securityFirst = uniquePaths.sort((a, b) => {
      const aMatch = SECURITY_PATTERNS.test(a) ? 0 : 1;
      const bMatch = SECURITY_PATTERNS.test(b) ? 0 : 1;
      return aMatch - bMatch;
    });
    let linesGathered = 0;
    for (const depPath of securityFirst) {
      if (linesGathered >= budget) break;
      const cf = await readContextFile(projectRoot, depPath, "graph-dependency");
      if (cf) {
        contextFiles.push(cf);
        linesGathered += cf.lines;
      }
    }
  } else {
    const deps = await gatherImportContext(projectRoot, changedFiles, budget);
    contextFiles.push(...deps);
  }
  return contextFiles;
}
6673
/**
 * Context for the architecture agent. With a graph: the impacted code of
 * each changed file, capped by the line budget. Without a graph: import
 * context plus (when provided) the raw check-deps output as a synthetic
 * "convention" context entry.
 */
async function scopeArchitectureContext(projectRoot, changedFiles, budget, options) {
  const contextFiles = [];
  const changedPaths = changedFiles.map((cf) => cf.path);
  if (!options.graph) {
    contextFiles.push(...await gatherImportContext(projectRoot, changedFiles, budget));
    if (options.checkDepsOutput) {
      contextFiles.push({
        path: "harness-check-deps-output",
        content: options.checkDepsOutput,
        lines: options.checkDepsOutput.split("\n").length,
        reason: "convention"
      });
    }
    return contextFiles;
  }
  let linesGathered = 0;
  for (const filePath of changedPaths) {
    if (linesGathered >= budget) break;
    let impact;
    try {
      impact = await options.graph.getImpact(filePath);
    } catch {
      continue;
    }
    for (const codePath of impact.code) {
      if (linesGathered >= budget) break;
      const contextFile = await readContextFile(projectRoot, codePath, "graph-impact");
      if (contextFile) {
        contextFiles.push(contextFile);
        linesGathered += contextFile.lines;
      }
    }
  }
  return contextFiles;
}
6709
/**
 * Builds one review bundle per domain (compliance, bug, security,
 * architecture). Each bundle carries the changed files plus domain-scoped
 * context, capped by a budget derived from the diff size.
 */
async function scopeContext(options) {
  const { projectRoot, diff, commitMessage } = options;
  const changeType = detectChangeType(commitMessage, diff);
  const budget = computeContextBudget(diff.totalDiffLines);
  const changedFiles = [];
  for (const filePath of diff.changedFiles) {
    const contextFile = await readContextFile(projectRoot, filePath, "changed");
    if (contextFile) changedFiles.push(contextFile);
  }
  // One scoper per domain; all close over the shared changed-file list.
  const scopers = {
    compliance: () => scopeComplianceContext(projectRoot, changedFiles, options),
    bug: () => scopeBugContext(projectRoot, changedFiles, budget, options),
    security: () => scopeSecurityContext(projectRoot, changedFiles, budget, options),
    architecture: () => scopeArchitectureContext(projectRoot, changedFiles, budget, options)
  };
  const bundles = [];
  for (const domain of ALL_DOMAINS) {
    const contextFiles = await scopers[domain]();
    bundles.push({
      domain,
      changeType,
      changedFiles: [...changedFiles],
      contextFiles,
      commitHistory: options.commitHistory ?? [],
      diffLines: diff.totalDiffLines,
      contextLines: contextFiles.reduce((sum, f) => sum + f.lines, 0)
    });
  }
  return bundles;
}
6740
// Severity helpers: numeric rank for comparisons, order/labels for display.
var SEVERITY_RANK = {
  suggestion: 0,
  important: 1,
  critical: 2
};
var SEVERITY_ORDER = ["critical", "important", "suggestion"];
var SEVERITY_LABELS = {
  critical: "Critical",
  important: "Important",
  suggestion: "Suggestion"
};
// Relative ordering of finding validation sources.
var VALIDATED_BY_RANK = {
  mechanical: 0,
  heuristic: 1,
  graph: 2
};
/**
 * Builds a stable, human-readable finding id from domain, location, and a
 * sanitized slice of the title (first 20 chars, alphanumerics only).
 */
function makeFindingId(domain, file, line, title) {
  const titleSlug = title.slice(0, 20).replace(/[^a-zA-Z0-9]/g, "");
  const fileSlug = file.replace(/[^a-zA-Z0-9]/g, "-");
  return `${domain}-${fileSlug}-${line}-${titleSlug}`;
}
6760
// Agent descriptor for the compliance domain (standard-tier model).
var COMPLIANCE_DESCRIPTOR = {
  domain: "compliance",
  tier: "standard",
  displayName: "Compliance",
  focusAreas: [
    "Spec alignment \u2014 implementation matches design doc",
    "API surface \u2014 new public interfaces are minimal and well-named",
    "Backward compatibility \u2014 no breaking changes without migration path",
    "Convention adherence \u2014 project conventions from CLAUDE.md/AGENTS.md followed",
    "Documentation completeness \u2014 all public interfaces documented"
  ]
};
/**
 * Extracts convention rules from "convention" context files: every bullet
 * line ("- " or "* ") becomes one rule paired with its source file path.
 */
function extractConventionRules(bundle) {
  const rules = [];
  for (const file of bundle.contextFiles) {
    if (file.reason !== "convention") continue;
    for (const rawLine of file.content.split("\n")) {
      const trimmed = rawLine.trim();
      const isBullet = trimmed.startsWith("- ") || trimmed.startsWith("* ");
      if (isBullet) {
        rules.push({ text: trimmed.slice(2).trim(), source: file.path });
      }
    }
  }
  return rules;
}
6786
/**
 * Heuristically finds exported declarations in the changed files that lack
 * an immediately preceding JSDoc comment (detected as the nearest non-blank
 * line above the export ending with a block-comment terminator).
 */
function findMissingJsDoc(bundle) {
  const missing = [];
  for (const changed of bundle.changedFiles) {
    const lines = changed.content.split("\n");
    lines.forEach((line, index) => {
      const exportMatch = line.match(
        /export\s+(?:async\s+)?(?:function|const|class|interface|type)\s+(\w+)/
      );
      if (!exportMatch) return;
      // Walk upward past blank lines; only a block-comment close directly
      // above the export counts as documentation.
      let documented = false;
      for (let j = index - 1; j >= 0; j--) {
        const above = lines[j].trim();
        if (above === "") continue;
        documented = above.endsWith("*/");
        break;
      }
      if (!documented) {
        missing.push({
          file: changed.path,
          line: index + 1,
          exportName: exportMatch[1]
        });
      }
    });
  }
  return missing;
}
6817
/**
 * Compliance review agent (heuristic). Checks the bundle against extracted
 * convention rules (JSDoc requirement, Result-type requirement) and
 * change-type expectations (spec context for features, commit history for
 * bugfixes). Returns a findings array; produces nothing for refactor/docs
 * change types beyond the convention-rule checks.
 *
 * Fix vs. previous version: the JSDoc convention rule was looked up with
 * `rules.find(...)` twice per missing export inside the loop; it is now
 * resolved once up front (loop-invariant). Output is unchanged.
 */
function runComplianceAgent(bundle) {
  const findings = [];
  const rules = extractConventionRules(bundle);
  const jsDocRule = rules.find((r) => r.text.toLowerCase().includes("jsdoc"));
  if (jsDocRule) {
    for (const m of findMissingJsDoc(bundle)) {
      findings.push({
        id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
        file: m.file,
        lineRange: [m.line, m.line],
        domain: "compliance",
        severity: "important",
        title: `Missing JSDoc on exported \`${m.exportName}\``,
        rationale: `Convention requires all exports to have JSDoc comments (from ${jsDocRule.source ?? "conventions"}).`,
        suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
        evidence: [
          `changeType: ${bundle.changeType}`,
          `Convention rule: "${jsDocRule.text ?? ""}"`
        ],
        validatedBy: "heuristic"
      });
    }
  }
  switch (bundle.changeType) {
    case "feature": {
      // Features should come with a spec or at least convention context.
      const hasSpecContext = bundle.contextFiles.some(
        (f) => f.reason === "spec" || f.reason === "convention"
      );
      if (!hasSpecContext && bundle.changedFiles.length > 0) {
        const firstFile = bundle.changedFiles[0];
        findings.push({
          id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
          file: firstFile.path,
          lineRange: [1, 1],
          domain: "compliance",
          severity: "suggestion",
          title: "No spec/design doc found for feature change",
          rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
          evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
          validatedBy: "heuristic"
        });
      }
      break;
    }
    case "bugfix": {
      // Bugfix review benefits from history to check root cause vs. symptom.
      if (bundle.commitHistory.length === 0 && bundle.changedFiles.length > 0) {
        const firstFile = bundle.changedFiles[0];
        findings.push({
          id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
          file: firstFile.path,
          lineRange: [1, 1],
          domain: "compliance",
          severity: "suggestion",
          title: "Bugfix without commit history context",
          rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
          evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
          validatedBy: "heuristic"
        });
      }
      break;
    }
    // Refactors and docs-only changes get no extra change-type checks.
    case "refactor":
    case "docs":
      break;
  }
  const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
  if (resultTypeRule) {
    for (const cf of bundle.changedFiles) {
      const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
      const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
      if (hasTryCatch && !usesResult) {
        findings.push({
          id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
          file: cf.path,
          lineRange: [1, cf.lines],
          domain: "compliance",
          severity: "suggestion",
          title: "Fallible operation uses try/catch instead of Result type",
          rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
          suggestion: "Refactor error handling to use the Result type pattern.",
          evidence: [
            `changeType: ${bundle.changeType}`,
            `Convention rule: "${resultTypeRule.text}"`
          ],
          validatedBy: "heuristic"
        });
      }
    }
  }
  return findings;
}
6912
// Agent descriptor for the bug-detection domain (strong-tier model).
var BUG_DETECTION_DESCRIPTOR = {
  domain: "bug",
  tier: "strong",
  displayName: "Bug Detection",
  focusAreas: [
    "Edge cases \u2014 boundary conditions, empty input, max values, null, concurrent access",
    "Error handling \u2014 errors handled at appropriate level, no silent swallowing",
    "Logic errors \u2014 off-by-one, incorrect boolean logic, missing early returns",
    "Race conditions \u2014 concurrent access to shared state",
    "Resource leaks \u2014 unclosed handles, missing cleanup in error paths",
    "Type safety \u2014 type mismatches, unsafe casts, missing null checks",
    "Test coverage \u2014 tests for happy path, error paths, and edge cases"
  ]
};
/**
 * Heuristic: flags division by a variable when none of the three preceding
 * lines contains a zero comparison. Lines containing "//" are skipped to
 * reduce false positives from comments.
 */
function detectDivisionByZero(bundle) {
  const findings = [];
  const zeroGuards = ["=== 0", "!== 0", "== 0", "!= 0"];
  for (const changed of bundle.changedFiles) {
    const lines = changed.content.split("\n");
    lines.forEach((line, index) => {
      const dividesByVariable = /[^=!<>]\s*\/\s*[a-zA-Z_]\w*/.test(line) && !line.includes("//");
      if (!dividesByVariable) return;
      const precedingWindow = lines.slice(Math.max(0, index - 3), index).join("\n");
      if (zeroGuards.some((guard) => precedingWindow.includes(guard))) return;
      findings.push({
        id: makeFindingId("bug", changed.path, index + 1, "division by zero"),
        file: changed.path,
        lineRange: [index + 1, index + 1],
        domain: "bug",
        severity: "important",
        title: "Potential division by zero without guard",
        rationale: "Division operation found without a preceding zero check on the divisor. This can cause Infinity or NaN at runtime.",
        suggestion: "Add a check for zero before dividing, or use a safe division utility.",
        evidence: [`Line ${index + 1}: ${line.trim()}`],
        validatedBy: "heuristic"
      });
    });
  }
  return findings;
}
6953
/**
 * Heuristic: flags empty catch blocks — either `catch (e) {}` on a single
 * line, or a catch opener whose very next line is only the closing brace.
 */
function detectEmptyCatch(bundle) {
  const findings = [];
  const inlineEmptyCatch = /catch\s*\([^)]*\)\s*\{\s*\}/;
  const catchOpener = /catch\s*\([^)]*\)\s*\{/;
  for (const changed of bundle.changedFiles) {
    const lines = changed.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      const emptyOnNextLine = catchOpener.test(line) && i + 1 < lines.length && lines[i + 1].trim() === "}";
      if (!inlineEmptyCatch.test(line) && !emptyOnNextLine) continue;
      findings.push({
        id: makeFindingId("bug", changed.path, i + 1, "empty catch block"),
        file: changed.path,
        lineRange: [i + 1, i + 2],
        domain: "bug",
        severity: "important",
        title: "Empty catch block silently swallows error",
        rationale: "Catching an error without handling, logging, or re-throwing it hides failures and makes debugging difficult.",
        suggestion: "Log the error, re-throw it, or handle it explicitly. If intentionally ignoring, add a comment explaining why.",
        evidence: [`Line ${i + 1}: ${line.trim()}`],
        validatedBy: "heuristic"
      });
    }
  }
  return findings;
}
6977
/**
 * Emits a single low-severity finding when the review context contains no
 * test files at all while source files were changed.
 *
 * @param bundle review context bundle with `changedFiles` and `contextFiles`.
 * @returns array with at most one "suggestion" finding.
 */
function detectMissingTests(bundle) {
  const findings = [];
  const testContextPresent = bundle.contextFiles.some((f) => f.reason === "test");
  if (testContextPresent) return findings;
  // Exclude the changed files that are themselves tests.
  const testFilePattern = /\.(test|spec)\.(ts|tsx|js|jsx)$/;
  const sourceFiles = bundle.changedFiles.filter((f) => !testFilePattern.test(f.path));
  if (sourceFiles.length === 0) return findings;
  const [firstFile] = sourceFiles;
  findings.push({
    id: makeFindingId("bug", firstFile.path, 1, "no test files"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "bug",
    severity: "suggestion",
    title: "No test files found for changed source files",
    rationale: "Changed source files should have corresponding test files. No test files were found in the review context.",
    evidence: [`Source files without tests: ${sourceFiles.map((f) => f.path).join(", ")}`],
    validatedBy: "heuristic"
  });
  return findings;
}
7001
/**
 * Aggregates all bug-domain detectors into one findings list.
 *
 * @param bundle review context bundle.
 * @returns concatenated findings from every bug detector.
 */
function runBugDetectionAgent(bundle) {
  return [
    ...detectDivisionByZero(bundle),
    ...detectEmptyCatch(bundle),
    ...detectMissingTests(bundle)
  ];
}
7008
// Descriptor for the security review agent. It is assigned the "strong"
// model tier; focusAreas describe what this domain's review covers.
var SECURITY_DESCRIPTOR = {
  domain: "security",
  tier: "strong",
  displayName: "Security",
  focusAreas: [
    "Input validation \u2014 user input flowing to dangerous sinks (SQL, shell, HTML)",
    "Authorization \u2014 missing auth checks on new/modified endpoints",
    "Data exposure \u2014 sensitive data in logs, error messages, API responses",
    "Authentication bypass \u2014 paths introduced by the change",
    "Insecure defaults \u2014 new configuration options with unsafe defaults",
    "Node.js specific \u2014 prototype pollution, ReDoS, path traversal"
  ]
};
// Matches eval( or new Function( — both execute strings as code (CWE-94).
var EVAL_PATTERN = /\beval\s*\(|new\s+Function\s*\(/;
// Heuristics for hardcoded credentials: either a secret-like identifier
// assigned a quoted value of 8+ chars, or a quoted token with a known
// secret prefix (sk-/pk-/api-/...) followed by 10+ alphanumerics.
var SECRET_PATTERNS = [
  /(?:api[_-]?key|secret|password|token|private[_-]?key)\s*=\s*["'][^"']{8,}/i,
  /["'](?:sk|pk|api|key|secret|token|password)[-_][a-zA-Z0-9]{10,}["']/i
];
// SQL keyword followed by `+` concatenation, or a template literal that
// interpolates into SQL text (CWE-89 heuristic; case-insensitive).
var SQL_CONCAT_PATTERN = /(?:SELECT|INSERT|UPDATE|DELETE|DROP|CREATE|ALTER)\s+.*?\+\s*\w+|`[^`]*\$\{[^}]*\}[^`]*(?:SELECT|INSERT|UPDATE|DELETE|WHERE)/i;
// child_process exec/spawn called with a template literal containing
// ${...} interpolation (CWE-78 heuristic).
var SHELL_EXEC_PATTERN = /(?:exec|execSync|spawn|spawnSync)\s*\(\s*`[^`]*\$\{/;
7028
/**
 * Flags every changed-file line matching EVAL_PATTERN as a critical
 * code-injection risk (CWE-94). The "eval"/"Function" names in the finding
 * text are spelled via interpolation so this file's own source does not
 * match the pattern it scans for.
 *
 * @param bundle review context bundle with `changedFiles`.
 * @returns array of critical security findings.
 */
function detectEvalUsage(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    cf.content.split("\n").forEach((text, idx) => {
      if (!EVAL_PATTERN.test(text)) return;
      const lineNo = idx + 1;
      findings.push({
        id: makeFindingId("security", cf.path, lineNo, "eval usage CWE-94"),
        file: cf.path,
        lineRange: [lineNo, lineNo],
        domain: "security",
        severity: "critical",
        title: `Dangerous ${"eval"}() or new ${"Function"}() usage`,
        rationale: `${"eval"}() and new ${"Function"}() execute arbitrary code. If user input reaches these calls, it enables Remote Code Execution (CWE-94).`,
        suggestion: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
        evidence: [`Line ${lineNo}: ${text.trim()}`],
        validatedBy: "heuristic",
        cweId: "CWE-94",
        owaspCategory: "A03:2021 Injection",
        confidence: "high",
        remediation: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
        references: [
          "https://cwe.mitre.org/data/definitions/94.html",
          "https://owasp.org/Top10/A03_2021-Injection/"
        ]
      });
    });
  }
  return findings;
}
7060
/**
 * Flags lines that look like hardcoded credentials (CWE-798). Text after a
 * `//` line comment is ignored, and at most one finding is reported per
 * line. The secret value itself is never echoed into the finding.
 *
 * @param bundle review context bundle with `changedFiles`.
 * @returns array of critical security findings.
 */
function detectHardcodedSecrets(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    for (const [idx, rawLine] of cf.content.split("\n").entries()) {
      // Strip a trailing line comment so commented-out examples don't match.
      const commentStart = rawLine.indexOf("//");
      const codePart = commentStart === -1 ? rawLine : rawLine.slice(0, commentStart);
      if (!SECRET_PATTERNS.some((pattern) => pattern.test(codePart))) continue;
      const lineNo = idx + 1;
      findings.push({
        id: makeFindingId("security", cf.path, lineNo, "hardcoded secret CWE-798"),
        file: cf.path,
        lineRange: [lineNo, lineNo],
        domain: "security",
        severity: "critical",
        title: "Hardcoded secret or API key detected",
        rationale: "Hardcoded secrets in source code can be extracted from version history even after removal. Use environment variables or a secrets manager (CWE-798).",
        suggestion: "Move the secret to an environment variable and access it via process.env.",
        evidence: [`Line ${lineNo}: [secret detected \u2014 value redacted]`],
        validatedBy: "heuristic",
        cweId: "CWE-798",
        owaspCategory: "A07:2021 Identification and Authentication Failures",
        confidence: "high",
        remediation: "Move the secret to an environment variable and access it via process.env.",
        references: [
          "https://cwe.mitre.org/data/definitions/798.html",
          "https://owasp.org/Top10/A07_2021-Identification_and_Authentication_Failures/"
        ]
      });
    }
  }
  return findings;
}
7096
/**
 * Flags lines where SQL keywords are combined with string concatenation or
 * template-literal interpolation (CWE-89 heuristic).
 *
 * @param bundle review context bundle with `changedFiles`.
 * @returns array of critical security findings.
 */
function detectSqlInjection(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    cf.content.split("\n").forEach((text, idx) => {
      if (!SQL_CONCAT_PATTERN.test(text)) return;
      const lineNo = idx + 1;
      findings.push({
        id: makeFindingId("security", cf.path, lineNo, "SQL injection CWE-89"),
        file: cf.path,
        lineRange: [lineNo, lineNo],
        domain: "security",
        severity: "critical",
        title: "Potential SQL injection via string concatenation",
        rationale: "Building SQL queries with string concatenation or template literals allows attackers to inject malicious SQL (CWE-89).",
        suggestion: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
        evidence: [`Line ${lineNo}: ${text.trim()}`],
        validatedBy: "heuristic",
        cweId: "CWE-89",
        owaspCategory: "A03:2021 Injection",
        confidence: "high",
        remediation: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
        references: [
          "https://cwe.mitre.org/data/definitions/89.html",
          "https://owasp.org/Top10/A03_2021-Injection/"
        ]
      });
    });
  }
  return findings;
}
7128
/**
 * Flags shell exec/spawn calls whose command string is a template literal
 * with interpolation (CWE-78 heuristic).
 *
 * @param bundle review context bundle with `changedFiles`.
 * @returns array of critical security findings.
 */
function detectCommandInjection(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    cf.content.split("\n").forEach((text, idx) => {
      if (!SHELL_EXEC_PATTERN.test(text)) return;
      const lineNo = idx + 1;
      findings.push({
        id: makeFindingId("security", cf.path, lineNo, "command injection CWE-78"),
        file: cf.path,
        lineRange: [lineNo, lineNo],
        domain: "security",
        severity: "critical",
        title: "Potential command injection via shell exec with interpolation",
        rationale: "Using exec/spawn with template literal interpolation allows attackers to inject shell commands (CWE-78).",
        suggestion: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
        evidence: [`Line ${lineNo}: ${text.trim()}`],
        validatedBy: "heuristic",
        cweId: "CWE-78",
        owaspCategory: "A03:2021 Injection",
        confidence: "high",
        remediation: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
        references: [
          "https://cwe.mitre.org/data/definitions/78.html",
          "https://owasp.org/Top10/A03_2021-Injection/"
        ]
      });
    });
  }
  return findings;
}
7160
/**
 * Aggregates all security-domain detectors into one findings list.
 *
 * @param bundle review context bundle.
 * @returns concatenated findings from every security detector.
 */
function runSecurityAgent(bundle) {
  return [
    ...detectEvalUsage(bundle),
    ...detectHardcodedSecrets(bundle),
    ...detectSqlInjection(bundle),
    ...detectCommandInjection(bundle)
  ];
}
7168
// Descriptor for the architecture review agent (standard model tier);
// focusAreas describe what this domain's review covers.
var ARCHITECTURE_DESCRIPTOR = {
  domain: "architecture",
  tier: "standard",
  displayName: "Architecture",
  focusAreas: [
    "Layer compliance \u2014 imports flow in the correct direction per architectural layers",
    "Dependency direction \u2014 modules depend on abstractions, not concretions",
    "Single Responsibility \u2014 each module has one reason to change",
    "Pattern consistency \u2014 code follows established codebase patterns",
    "Separation of concerns \u2014 business logic separated from infrastructure",
    "DRY violations \u2014 duplicated logic that should be extracted (excluding intentional duplication)"
  ]
};
// Changed files longer than this many lines get a Single-Responsibility
// suggestion finding (see detectLargeFiles).
var LARGE_FILE_THRESHOLD = 300;
7182
/**
 * Converts lines of the `harness-check-deps-output` context file that
 * mention "violation" or "layer" into critical architecture findings,
 * extracting a file:line location from the message when possible.
 *
 * @param bundle review context bundle with `contextFiles` and `changedFiles`.
 * @returns array of critical architecture findings (empty when no
 *   check-deps output is present).
 */
function detectLayerViolations(bundle) {
  const findings = [];
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
  if (!checkDepsFile) return findings;
  for (const line of checkDepsFile.content.split("\n")) {
    const lowered = line.toLowerCase();
    if (!lowered.includes("violation") && !lowered.includes("layer")) continue;
    // Best-effort location parse: "in path/to/file.ts:42" or bare "file.ts".
    const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
    const file = fileMatch?.[1] ?? bundle.changedFiles[0]?.path ?? "unknown";
    const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
    findings.push({
      id: makeFindingId("arch", file, lineNum, "layer violation"),
      file,
      lineRange: [lineNum, lineNum],
      domain: "architecture",
      severity: "critical",
      title: "Layer boundary violation detected by check-deps",
      rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
      suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
      evidence: [line.trim()],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
7208
/**
 * Flags changed files whose line count exceeds LARGE_FILE_THRESHOLD as
 * likely Single-Responsibility violations (suggestion severity).
 *
 * @param bundle review context bundle with `changedFiles` ({path, lines}).
 * @returns one suggestion finding per oversized file.
 */
function detectLargeFiles(bundle) {
  return bundle.changedFiles
    .filter((cf) => cf.lines > LARGE_FILE_THRESHOLD)
    .map((cf) => ({
      id: makeFindingId("arch", cf.path, 1, "large file SRP"),
      file: cf.path,
      lineRange: [1, cf.lines],
      domain: "architecture",
      severity: "suggestion",
      title: `Large file (${cf.lines} lines) may violate Single Responsibility`,
      rationale: `Files over ${LARGE_FILE_THRESHOLD} lines often contain multiple responsibilities. Consider splitting into focused modules.`,
      suggestion: "Identify distinct responsibilities and extract them into separate modules.",
      evidence: [`File has ${cf.lines} lines (threshold: ${LARGE_FILE_THRESHOLD})`],
      validatedBy: "heuristic"
    }));
}
7228
// Heuristic circular-import detection. A changed file cf is flagged when a
// context file (included because of an import or graph dependency) has a
// relative import whose specifier contains the basename of some changed
// file, while cf itself imports that context file's basename.
// NOTE(review): matching is by basename substring only, so same-named files
// in different directories can produce false positives, and the inner loop
// over changedPaths can emit duplicate findings for one cf/ctxFile pair —
// presumably deduplicated downstream; confirm against deduplicateFindings.
function detectCircularImports(bundle) {
  const findings = [];
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
  for (const cf of bundle.changedFiles) {
    // Collect cf's relative import specifiers, stripped of one leading
    // "./" or "../" prefix.
    const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
    let match;
    const imports = /* @__PURE__ */ new Set();
    while ((match = importRegex.exec(cf.content)) !== null) {
      const source = match[1];
      if (source.startsWith(".")) {
        imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
      }
    }
    for (const ctxFile of bundle.contextFiles) {
      // Only context files pulled in via import analysis are candidates.
      if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
      const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
      let ctxMatch;
      while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
        const ctxSource = ctxMatch[1];
        if (ctxSource.startsWith(".")) {
          for (const changedPath of changedPaths) {
            // Basename without directory or extension, e.g. "src/a.ts" -> "a".
            const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
            // Cycle heuristic: ctxFile imports something named like a changed
            // file AND cf imports something named like ctxFile.
            if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
              findings.push({
                id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
                file: cf.path,
                lineRange: [1, 1],
                domain: "architecture",
                severity: "important",
                title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
                rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
                suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
                evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
                validatedBy: "heuristic"
              });
            }
          }
        }
      }
    }
  }
  return findings;
}
7271
/**
 * Aggregates all architecture-domain detectors into one findings list.
 *
 * @param bundle review context bundle.
 * @returns concatenated findings from every architecture detector.
 */
function runArchitectureAgent(bundle) {
  return [
    ...detectLayerViolations(bundle),
    ...detectLargeFiles(bundle),
    ...detectCircularImports(bundle)
  ];
}
7278
// Maps each review domain to its descriptor (display name, model tier,
// focus areas).
var AGENT_DESCRIPTORS = {
  compliance: COMPLIANCE_DESCRIPTOR,
  bug: BUG_DETECTION_DESCRIPTOR,
  security: SECURITY_DESCRIPTOR,
  architecture: ARCHITECTURE_DESCRIPTOR
};
// Maps each review domain to the function that produces its findings from a
// context bundle (see runAgent).
var AGENT_RUNNERS = {
  compliance: runComplianceAgent,
  bug: runBugDetectionAgent,
  security: runSecurityAgent,
  architecture: runArchitectureAgent
};
7290
/**
 * Runs the detector set for one bundle's domain and records wall-clock
 * duration. Async to keep a uniform interface for fanOutReview even though
 * the heuristic runners are synchronous.
 *
 * @param bundle review context bundle (bundle.domain selects the runner).
 * @returns {domain, findings, durationMs} result record.
 */
async function runAgent(bundle) {
  const startedAt = Date.now();
  const runner = AGENT_RUNNERS[bundle.domain];
  const findings = runner(bundle);
  return {
    domain: bundle.domain,
    findings,
    durationMs: Date.now() - startedAt
  };
}
7301
/**
 * Runs every domain agent over its context bundle in parallel.
 *
 * @param options {bundles} array of per-domain context bundles.
 * @returns array of agent results, in bundle order.
 */
async function fanOutReview(options) {
  const { bundles } = options;
  if (bundles.length === 0) return [];
  return Promise.all(bundles.map((bundle) => runAgent(bundle)));
}
7307
// One-step severity downgrade applied to findings whose cross-file claims
// could not be validated (see validateFindings); "suggestion" is already
// the floor, so it maps to itself.
var DOWNGRADE_MAP = {
  critical: "important",
  important: "suggestion",
  suggestion: "suggestion"
};
7312
/**
 * Extracts "X.ts affects Y.ts"-style cross-file references from a finding's
 * evidence strings (at most one ref per evidence entry).
 *
 * @param finding finding with an `evidence` string array.
 * @returns array of {from, to} file-path pairs.
 */
function extractCrossFileRefs(finding) {
  const crossFilePattern = /([^\s]+\.(?:ts|tsx|js|jsx))\s+affects\s+([^\s]+\.(?:ts|tsx|js|jsx))/i;
  const refs = [];
  for (const ev of finding.evidence) {
    const m = crossFilePattern.exec(ev);
    if (m !== null) {
      refs.push({ from: m[1], to: m[2] });
    }
  }
  return refs;
}
7323
/**
 * Normalizes a finding's file path to a project-relative form: strips the
 * project root from absolute paths, drops a leading "./", and applies
 * path normalization.
 *
 * @param filePath absolute or relative file path.
 * @param projectRoot absolute project root directory.
 * @returns normalized project-relative path.
 */
function normalizePath(filePath, projectRoot) {
  const rootPrefix = projectRoot.endsWith(path8.sep) ? projectRoot : projectRoot + path8.sep;
  let result = filePath;
  if (path8.isAbsolute(result) && result.startsWith(rootPrefix)) {
    result = result.slice(rootPrefix.length);
  }
  if (result.startsWith("./")) {
    result = result.slice(2);
  }
  return path8.normalize(result);
}
7336
/**
 * Breadth-first walk of relative `import ... from '...'` statements starting
 * at fromFile, up to maxDepth hops. Extensionless specifiers are resolved by
 * appending ".ts". Only files present in fileContents contribute edges.
 *
 * @param fromFile starting path (key into fileContents).
 * @param fileContents Map of path -> file text.
 * @param maxDepth maximum hop count (default 2).
 * @returns Set of reachable paths, excluding fromFile itself.
 */
function followImportChain(fromFile, fileContents, maxDepth = 2) {
  const reached = /* @__PURE__ */ new Set();
  const pending = [{ file: fromFile, depth: 0 }];
  while (pending.length > 0) {
    const { file, depth } = pending.shift();
    if (reached.has(file) || depth > maxDepth) continue;
    reached.add(file);
    const content = fileContents.get(file);
    if (!content) continue;
    const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
    for (let m; (m = importRegex.exec(content)) !== null; ) {
      const spec = m[1];
      if (!spec.startsWith(".")) continue;
      let resolved = path8.join(path8.dirname(file), spec);
      // Heuristic: bare specifiers are assumed to be TypeScript sources.
      if (!/\.(ts|tsx|js|jsx)$/.test(resolved)) {
        resolved += ".ts";
      }
      resolved = path8.normalize(resolved);
      if (!reached.has(resolved) && depth + 1 <= maxDepth) {
        pending.push({ file: resolved, depth: depth + 1 });
      }
    }
  }
  reached.delete(fromFile);
  return reached;
}
7364
// Validation cascade for raw agent findings:
//  1. Drop findings that land in excluded regions (checked against the
//     normalized, raw, and absolute spellings of the path).
//  2. Findings with no cross-file claims pass through unchanged.
//  3. With a dependency graph available, a cross-file finding is kept (as
//     "graph"-validated) only if every claimed edge is reachable —
//     otherwise it is DROPPED entirely. A graph error falls through to the
//     heuristic path below.
//  4. Heuristic fallback: follow relative-import chains (depth 2) through
//     fileContents; if any claimed edge is confirmed, keep as-is, else keep
//     but downgrade severity one step via DOWNGRADE_MAP.
async function validateFindings(options) {
  const { findings, exclusionSet, graph, projectRoot, fileContents } = options;
  const validated = [];
  for (const finding of findings) {
    const normalizedFile = normalizePath(finding.file, projectRoot);
    if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
      continue;
    }
    const absoluteFile = path8.isAbsolute(finding.file) ? finding.file : path8.join(projectRoot, finding.file);
    if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
      continue;
    }
    const crossFileRefs = extractCrossFileRefs(finding);
    if (crossFileRefs.length === 0) {
      // No cross-file claims to verify; accept a shallow copy.
      validated.push({ ...finding });
      continue;
    }
    if (graph) {
      try {
        let allReachable = true;
        for (const ref of crossFileRefs) {
          const reachable = await graph.isReachable(ref.from, ref.to);
          if (!reachable) {
            allReachable = false;
            break;
          }
        }
        if (allReachable) {
          validated.push({ ...finding, validatedBy: "graph" });
        }
        // Note: when the graph refutes the claim the finding is dropped.
        continue;
      } catch {
        // Graph query failed — fall through to the import-chain heuristic.
      }
    }
    {
      let chainValidated = false;
      if (fileContents) {
        for (const ref of crossFileRefs) {
          const normalizedFrom = normalizePath(ref.from, projectRoot);
          const reachable = followImportChain(normalizedFrom, fileContents, 2);
          const normalizedTo = normalizePath(ref.to, projectRoot);
          if (reachable.has(normalizedTo)) {
            chainValidated = true;
            break;
          }
        }
      }
      if (chainValidated) {
        validated.push({ ...finding, validatedBy: "heuristic" });
      } else {
        // Unverified cross-file claim: keep it, but one severity step lower.
        validated.push({
          ...finding,
          severity: DOWNGRADE_MAP[finding.severity],
          validatedBy: "heuristic"
        });
      }
    }
  }
  return validated;
}
7424
/**
 * True when two inclusive [start, end] line ranges touch or come within
 * `gap` lines of each other.
 */
function rangesOverlap(a, b, gap) {
  const disjoint = a[0] > b[1] + gap || b[0] > a[1] + gap;
  return !disjoint;
}
7427
// Merges two findings on the SAME file into one. Tie-breaking rules:
//  - severity/validatedBy: the higher rank wins, ties favor `a`;
//  - rationale/suggestion/remediation: the longer text wins, ties favor `a`;
//  - "primary" finding (source of id, domain, title, CWE/OWASP metadata) is
//    the one with the higher severity, ties favor `a`;
//  - lineRange is the union; evidence and references are set-unioned;
//  - the merged title is prefixed with the sorted list of both domains,
//    replacing any existing "[...]" prefix on the primary title.
function mergeFindings(a, b) {
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
  // Unknown validatedBy values rank as 0.
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
  const longestRationale = a.rationale.length >= b.rationale.length ? a.rationale : b.rationale;
  const evidenceSet = /* @__PURE__ */ new Set([...a.evidence, ...b.evidence]);
  const lineRange = [
    Math.min(a.lineRange[0], b.lineRange[0]),
    Math.max(a.lineRange[1], b.lineRange[1])
  ];
  const domains = /* @__PURE__ */ new Set();
  domains.add(a.domain);
  domains.add(b.domain);
  // Longer suggestion wins when both exist; otherwise whichever is defined.
  const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
  const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
  const domainList = [...domains].sort().join(", ");
  // Strip a prior "[domains] " prefix so repeated merges don't stack them.
  const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
  const title = `[${domainList}] ${cleanTitle}`;
  const merged = {
    id: primaryFinding.id,
    file: a.file,
    // same file for all merged findings
    lineRange,
    domain: primaryFinding.domain,
    severity: highestSeverity,
    title,
    rationale: longestRationale,
    evidence: [...evidenceSet],
    validatedBy: highestValidatedBy
  };
  if (suggestion !== void 0) {
    merged.suggestion = suggestion;
  }
  // Optional security metadata: prefer the primary finding's values, then a's,
  // then b's; only attach keys that ended up defined.
  const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
  const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
  const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
  const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
  const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
  if (cweId !== void 0) merged.cweId = cweId;
  if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
  if (confidence !== void 0) merged.confidence = confidence;
  if (remediation !== void 0) merged.remediation = remediation;
  if (mergedRefs.length > 0) merged.references = mergedRefs;
  return merged;
}
7471
/**
 * Collapses findings that sit on overlapping/nearby line ranges of the same
 * file into single merged findings. Findings are grouped per file, sorted by
 * start line, then greedily clustered: any finding within `lineGap` lines of
 * the current cluster is merged into it via mergeFindings.
 *
 * @param options {findings, lineGap=3}.
 * @returns deduplicated findings (per-file cluster order preserved).
 */
function deduplicateFindings(options) {
  const { findings, lineGap = 3 } = options;
  if (findings.length === 0) return [];
  // Group findings by file.
  const byFile = /* @__PURE__ */ new Map();
  for (const finding of findings) {
    const bucket = byFile.get(finding.file) ?? [];
    bucket.push(finding);
    byFile.set(finding.file, bucket);
  }
  const deduped = [];
  for (const bucket of byFile.values()) {
    const ordered = [...bucket].sort((x, y) => x.lineRange[0] - y.lineRange[0]);
    let current = ordered[0];
    for (const next of ordered.slice(1)) {
      if (rangesOverlap(current.lineRange, next.lineRange, lineGap)) {
        current = mergeFindings(current, next);
      } else {
        deduped.push(current);
        current = next;
      }
    }
    deduped.push(current);
  }
  return deduped;
}
7502
/**
 * Decides whether a PR should be reviewed. Outside CI mode everything is
 * eligible; in CI mode closed/merged/draft PRs, docs-only changes, and PRs
 * already reviewed at the current head SHA are skipped.
 *
 * @param pr PR metadata (state, isDraft, changedFiles, priorReviews, headSha).
 * @param ciMode when false, all PRs are eligible.
 * @returns {eligible} or {eligible: false, reason}.
 */
function checkEligibility(pr, ciMode) {
  if (!ciMode) return { eligible: true };
  if (pr.state === "closed") return { eligible: false, reason: "PR is closed" };
  if (pr.state === "merged") return { eligible: false, reason: "PR is merged" };
  if (pr.isDraft) return { eligible: false, reason: "PR is a draft" };
  const docsOnly = pr.changedFiles.length > 0 && pr.changedFiles.every((f) => f.endsWith(".md"));
  if (docsOnly) {
    return { eligible: false, reason: "Trivial change: documentation only" };
  }
  const priorMatch = pr.priorReviews.find((r) => r.headSha === pr.headSha);
  if (priorMatch) {
    return { eligible: false, reason: `Already reviewed at ${priorMatch.headSha}` };
  }
  return { eligible: true };
}
7524
// Default model name per provider for each abstract tier (fast/standard/
// strong). Used by resolveModelTier when the user config does not override
// a tier.
var DEFAULT_PROVIDER_TIERS = {
  claude: {
    fast: "haiku",
    standard: "sonnet",
    strong: "opus"
  },
  openai: {
    fast: "gpt-4o-mini",
    standard: "gpt-4o",
    strong: "o1"
  },
  gemini: {
    fast: "gemini-flash",
    standard: "gemini-pro",
    strong: "gemini-ultra"
  }
};
7541
/**
 * Resolves an abstract model tier ("fast"/"standard"/"strong") to a concrete
 * model name: user config wins, then the provider's defaults, else undefined.
 *
 * @param tier tier key to resolve.
 * @param config optional per-tier overrides.
 * @param provider optional provider key into DEFAULT_PROVIDER_TIERS.
 * @returns model name, or undefined when nothing resolves.
 */
function resolveModelTier(tier, config, provider) {
  const fromConfig = config?.[tier];
  if (fromConfig !== undefined) return fromConfig;
  if (!provider) return undefined;
  return DEFAULT_PROVIDER_TIERS[provider][tier];
}
7555
/**
 * Maps the worst finding severity to an overall review assessment:
 * critical -> "request-changes", important -> "comment", otherwise
 * (suggestions only, or no findings) -> "approve".
 *
 * @param findings validated/deduplicated findings.
 * @returns "approve" | "comment" | "request-changes".
 */
function determineAssessment(findings) {
  if (findings.length === 0) return "approve";
  const maxSeverity = findings.reduce(
    (worst, f) => SEVERITY_RANK[f.severity] > SEVERITY_RANK[worst] ? f.severity : worst,
    "suggestion"
  );
  if (maxSeverity === "critical") return "request-changes";
  return maxSeverity === "important" ? "comment" : "approve";
}
7572
/**
 * Process exit code for an assessment: 1 only when changes are requested.
 */
function getExitCode(assessment) {
  if (assessment === "request-changes") return 1;
  return 0;
}
7575
/**
 * Renders one finding as an indented multi-line terminal block:
 * header, location, rationale, and (when present) suggestion.
 *
 * @param finding finding to render.
 * @returns newline-joined block string.
 */
function formatFindingBlock(finding) {
  const [startLine, endLine] = finding.lineRange;
  const out = [
    ` [${finding.domain}] ${finding.title}`,
    ` Location: ${finding.file}:L${startLine}-${endLine}`,
    ` Rationale: ${finding.rationale}`
  ];
  if (finding.suggestion) {
    out.push(` Suggestion: ${finding.suggestion}`);
  }
  return out.join("\n");
}
7586
/**
 * Renders the full review result for the terminal: a Strengths section, an
 * Issues section grouped by severity (worst first), a final Assessment line,
 * and a one-line summary of the issue counts.
 *
 * @param options {findings, strengths}.
 * @returns newline-joined report string.
 */
function formatTerminalOutput(options) {
  const { findings, strengths } = options;
  const out = [];
  out.push("## Strengths\n");
  if (strengths.length === 0) {
    out.push(" No specific strengths noted.\n");
  } else {
    for (const strength of strengths) {
      const prefix = strength.file ? `${strength.file}: ` : "";
      out.push(` + ${prefix}${strength.description}`);
    }
    out.push("");
  }
  out.push("## Issues\n");
  let anyIssues = false;
  for (const severity of SEVERITY_ORDER) {
    const group = findings.filter((f) => f.severity === severity);
    if (group.length === 0) continue;
    anyIssues = true;
    out.push(`### ${SEVERITY_LABELS[severity]} (${group.length})\n`);
    for (const finding of group) {
      out.push(formatFindingBlock(finding));
      out.push("");
    }
  }
  if (!anyIssues) {
    out.push(" No issues found.\n");
  }
  const assessment = determineAssessment(findings);
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
  out.push(`## Assessment: ${assessmentLabel}\n`);
  const countBySeverity = (sev) => findings.filter((f) => f.severity === sev).length;
  const criticalCount = countBySeverity("critical");
  const importantCount = countBySeverity("important");
  const suggestionCount = countBySeverity("suggestion");
  if (findings.length === 0) {
    out.push(" No issues found. The changes look good.");
  } else {
    const parts = [];
    if (criticalCount > 0) parts.push(`${criticalCount} critical`);
    if (importantCount > 0) parts.push(`${importantCount} important`);
    if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
    out.push(` Found ${findings.length} issue(s): ${parts.join(", ")}.`);
  }
  return out.join("\n");
}
7634
// A suggestion rendered inline as a GitHub ```suggestion block must be
// shorter than this many lines; longer ones fall back to prose formatting.
var SMALL_SUGGESTION_LINE_LIMIT = 10;
/**
 * Escapes angle brackets so finding text cannot inject HTML into markdown.
 * Ampersands are intentionally left alone.
 */
function sanitizeMarkdown(text) {
  return text.replaceAll("<", "&lt;").replaceAll(">", "&gt;");
}
/**
 * True when a suggestion exists and is short enough for an inline
 * ```suggestion block (fewer than SMALL_SUGGESTION_LINE_LIMIT lines).
 */
function isSmallSuggestion(suggestion) {
  if (!suggestion) return false;
  return suggestion.split("\n").length < SMALL_SUGGESTION_LINE_LIMIT;
}
7643
/**
 * Builds a GitHub PR review comment for one finding. Small suggestions are
 * rendered as an applicable ```suggestion block (the suggestion text itself
 * is NOT sanitized — it must stay valid code); larger or absent suggestions
 * fall back to a prose body.
 *
 * @param finding finding to render.
 * @returns {path, line, side, body} review-comment payload.
 */
function formatGitHubComment(finding) {
  const header = `**${finding.severity.toUpperCase()}** [${finding.domain}] ${sanitizeMarkdown(finding.title)}`;
  let body;
  if (isSmallSuggestion(finding.suggestion)) {
    body = [
      header,
      "",
      sanitizeMarkdown(finding.rationale),
      "",
      "```suggestion",
      finding.suggestion,
      "```"
    ].join("\n");
  } else {
    const pieces = [header, "", `**Rationale:** ${sanitizeMarkdown(finding.rationale)}`];
    if (finding.suggestion) {
      pieces.push("", `**Suggested approach:** ${sanitizeMarkdown(finding.suggestion)}`);
    }
    body = pieces.join("\n");
  }
  return {
    path: finding.file,
    // Anchor the comment on the last line of the finding's range.
    line: finding.lineRange[1],
    side: "RIGHT",
    body
  };
}
7672
/**
 * Renders the review result as a markdown summary for a GitHub PR comment:
 * Strengths, Issues grouped by severity, and the final Assessment heading.
 * All user-derived text passes through sanitizeMarkdown.
 *
 * @param options {findings, strengths}.
 * @returns newline-joined markdown string.
 */
function formatGitHubSummary(options) {
  const { findings, strengths } = options;
  const out = [];
  out.push("## Strengths\n");
  if (strengths.length === 0) {
    out.push("No specific strengths noted.\n");
  } else {
    for (const strength of strengths) {
      const prefix = strength.file ? `**${strength.file}:** ` : "";
      out.push(`- ${prefix}${sanitizeMarkdown(strength.description)}`);
    }
    out.push("");
  }
  out.push("## Issues\n");
  let anyIssues = false;
  for (const severity of SEVERITY_ORDER) {
    const group = findings.filter((f) => f.severity === severity);
    if (group.length === 0) continue;
    anyIssues = true;
    out.push(`### ${SEVERITY_LABELS[severity]} (${group.length})\n`);
    for (const finding of group) {
      const location = `\`${finding.file}:L${finding.lineRange[0]}-${finding.lineRange[1]}\``;
      out.push(`- **${sanitizeMarkdown(finding.title)}** at ${location}`);
      out.push(` ${sanitizeMarkdown(finding.rationale)}`);
      out.push("");
    }
  }
  if (!anyIssues) {
    out.push("No issues found.\n");
  }
  const assessment = determineAssessment(findings);
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
  out.push(`## Assessment: ${assessmentLabel}`);
  return out.join("\n");
}
7708
// End-to-end review pipeline:
//  1. CI eligibility gate (skip closed/merged/draft/docs-only/already-reviewed).
//  2. Mechanical checks (lint/type-style tooling); their findings build the
//     exclusion set, and a stopPipeline signal short-circuits with a
//     request-changes result. Any mechanical failure degrades to an empty
//     exclusion set rather than aborting.
//  3. Context scoping per domain (with a minimal empty-bundle fallback).
//  4. Fan-out to domain agents, then validate, deduplicate, assess, format.
async function runReviewPipeline(options) {
  const {
    projectRoot,
    diff,
    commitMessage,
    flags,
    graph,
    prMetadata,
    conventionFiles,
    checkDepsOutput,
    config = {},
    commitHistory
  } = options;
  // --- 1. Eligibility gate (CI mode only) ---
  if (flags.ci && prMetadata) {
    const eligibility = checkEligibility(prMetadata, true);
    if (!eligibility.eligible) {
      return {
        skipped: true,
        // Only attach skipReason when a reason was given.
        ...eligibility.reason != null ? { skipReason: eligibility.reason } : {},
        stoppedByMechanical: false,
        findings: [],
        strengths: [],
        terminalOutput: `Review skipped: ${eligibility.reason ?? "ineligible"}`,
        githubComments: [],
        exitCode: 0
      };
    }
  }
  // --- 2. Mechanical checks + exclusion set ---
  let mechanicalResult;
  let exclusionSet;
  if (flags.noMechanical) {
    exclusionSet = buildExclusionSet([]);
  } else {
    try {
      const mechResult = await runMechanicalChecks({
        projectRoot,
        config,
        changedFiles: diff.changedFiles
      });
      if (mechResult.ok) {
        mechanicalResult = mechResult.value;
        exclusionSet = buildExclusionSet(mechResult.value.findings);
        if (mechResult.value.stopPipeline) {
          // Hard failure: report the mechanical errors and stop before AI review.
          const mechFindings = mechResult.value.findings.filter((f) => f.severity === "error").map((f) => ` x ${f.tool}: ${f.file}${f.line ? `:${f.line}` : ""} - ${f.message}`).join("\n");
          const terminalOutput2 = [
            "## Strengths\n",
            " No AI review performed (mechanical checks failed).\n",
            "## Issues\n",
            "### Critical (mechanical)\n",
            mechFindings,
            "\n## Assessment: Request Changes\n",
            " Mechanical checks must pass before AI review."
          ].join("\n");
          return {
            skipped: false,
            stoppedByMechanical: true,
            assessment: "request-changes",
            findings: [],
            strengths: [],
            terminalOutput: terminalOutput2,
            githubComments: [],
            exitCode: 1,
            mechanicalResult
          };
        }
      } else {
        // Mechanical tooling reported a non-ok result: proceed without exclusions.
        exclusionSet = buildExclusionSet([]);
      }
    } catch {
      // Mechanical tooling crashed: proceed without exclusions.
      exclusionSet = buildExclusionSet([]);
    }
  }
  // --- 3. Context scoping (with empty-bundle fallback per domain) ---
  let contextBundles;
  try {
    contextBundles = await scopeContext({
      projectRoot,
      diff,
      commitMessage,
      ...graph != null ? { graph } : {},
      ...conventionFiles != null ? { conventionFiles } : {},
      ...checkDepsOutput != null ? { checkDepsOutput } : {},
      ...commitHistory != null ? { commitHistory } : {}
    });
  } catch {
    contextBundles = ["compliance", "bug", "security", "architecture"].map((domain) => ({
      domain,
      changeType: "feature",
      changedFiles: [],
      contextFiles: [],
      commitHistory: [],
      diffLines: diff.totalDiffLines,
      contextLines: 0
    }));
  }
  // --- 4. Fan-out, validate, dedupe, format ---
  const agentResults = await fanOutReview({ bundles: contextBundles });
  const rawFindings = agentResults.flatMap((r) => r.findings);
  const fileContents = /* @__PURE__ */ new Map();
  for (const [file, content] of diff.fileDiffs) {
    fileContents.set(file, content);
  }
  const validatedFindings = await validateFindings({
    findings: rawFindings,
    exclusionSet,
    ...graph != null ? { graph } : {},
    projectRoot,
    fileContents
  });
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
  // NOTE(review): strengths are always empty here — presumably populated by a
  // future/AI-backed path; confirm before relying on the Strengths section.
  const strengths = [];
  const assessment = determineAssessment(dedupedFindings);
  const exitCode = getExitCode(assessment);
  const terminalOutput = formatTerminalOutput({
    findings: dedupedFindings,
    strengths
  });
  let githubComments = [];
  if (flags.comment) {
    githubComments = dedupedFindings.map((f) => formatGitHubComment(f));
  }
  return {
    skipped: false,
    stoppedByMechanical: false,
    assessment,
    findings: dedupedFindings,
    strengths,
    terminalOutput,
    githubComments,
    exitCode,
    // Only attach mechanicalResult when mechanical checks actually ran.
    ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
  };
}
7839
+ var VALID_STATUSES = /* @__PURE__ */ new Set([
7840
+ "backlog",
7841
+ "planned",
7842
+ "in-progress",
7843
+ "done",
7844
+ "blocked"
7845
+ ]);
7846
+ var EM_DASH = "\u2014";
7847
+ function parseRoadmap(markdown) {
7848
+ const fmMatch = markdown.match(/^---\n([\s\S]*?)\n---/);
7849
+ if (!fmMatch) {
7850
+ return Err(new Error("Missing or malformed YAML frontmatter"));
7851
+ }
7852
+ const fmResult = parseFrontmatter(fmMatch[1]);
7853
+ if (!fmResult.ok) return fmResult;
7854
+ const body = markdown.slice(fmMatch[0].length);
7855
+ const milestonesResult = parseMilestones(body);
7856
+ if (!milestonesResult.ok) return milestonesResult;
7857
+ return Ok({
7858
+ frontmatter: fmResult.value,
7859
+ milestones: milestonesResult.value
7860
+ });
7861
+ }
7862
+ function parseFrontmatter(raw) {
7863
+ const lines = raw.split("\n");
7864
+ const map = /* @__PURE__ */ new Map();
7865
+ for (const line of lines) {
7866
+ const idx = line.indexOf(":");
7867
+ if (idx === -1) continue;
7868
+ const key = line.slice(0, idx).trim();
7869
+ const val = line.slice(idx + 1).trim();
7870
+ map.set(key, val);
7871
+ }
7872
+ const project = map.get("project");
7873
+ const versionStr = map.get("version");
7874
+ const lastSynced = map.get("last_synced");
7875
+ const lastManualEdit = map.get("last_manual_edit");
7876
+ if (!project || !versionStr || !lastSynced || !lastManualEdit) {
7877
+ return Err(
7878
+ new Error(
7879
+ "Frontmatter missing required fields: project, version, last_synced, last_manual_edit"
7880
+ )
7881
+ );
7882
+ }
7883
+ const version = parseInt(versionStr, 10);
7884
+ if (isNaN(version)) {
7885
+ return Err(new Error("Frontmatter version must be a number"));
7886
+ }
7887
+ return Ok({ project, version, lastSynced, lastManualEdit });
7888
+ }
7889
+ function parseMilestones(body) {
7890
+ const milestones = [];
7891
+ const h2Pattern = /^## (.+)$/gm;
7892
+ const h2Matches = [];
7893
+ let match;
7894
+ while ((match = h2Pattern.exec(body)) !== null) {
7895
+ h2Matches.push({ heading: match[1], startIndex: match.index });
7896
+ }
7897
+ for (let i = 0; i < h2Matches.length; i++) {
7898
+ const h2 = h2Matches[i];
7899
+ const nextStart = i + 1 < h2Matches.length ? h2Matches[i + 1].startIndex : body.length;
7900
+ const sectionBody = body.slice(h2.startIndex + h2.heading.length + 4, nextStart);
7901
+ const isBacklog = h2.heading === "Backlog";
7902
+ const milestoneName = isBacklog ? "Backlog" : h2.heading.replace(/^Milestone:\s*/, "");
7903
+ const featuresResult = parseFeatures(sectionBody);
7904
+ if (!featuresResult.ok) return featuresResult;
7905
+ milestones.push({
7906
+ name: milestoneName,
7907
+ isBacklog,
7908
+ features: featuresResult.value
7909
+ });
7910
+ }
7911
+ return Ok(milestones);
7912
+ }
7913
+ function parseFeatures(sectionBody) {
7914
+ const features = [];
7915
+ const h3Pattern = /^### Feature: (.+)$/gm;
7916
+ const h3Matches = [];
7917
+ let match;
7918
+ while ((match = h3Pattern.exec(sectionBody)) !== null) {
7919
+ h3Matches.push({ name: match[1], startIndex: match.index });
7920
+ }
7921
+ for (let i = 0; i < h3Matches.length; i++) {
7922
+ const h3 = h3Matches[i];
7923
+ const nextStart = i + 1 < h3Matches.length ? h3Matches[i + 1].startIndex : sectionBody.length;
7924
+ const featureBody = sectionBody.slice(
7925
+ h3.startIndex + `### Feature: ${h3.name}`.length,
7926
+ nextStart
7927
+ );
7928
+ const featureResult = parseFeatureFields(h3.name, featureBody);
7929
+ if (!featureResult.ok) return featureResult;
7930
+ features.push(featureResult.value);
7931
+ }
7932
+ return Ok(features);
7933
+ }
7934
+ function parseFeatureFields(name, body) {
7935
+ const fieldMap = /* @__PURE__ */ new Map();
7936
+ const fieldPattern = /^- \*\*(.+?):\*\* (.+)$/gm;
7937
+ let match;
7938
+ while ((match = fieldPattern.exec(body)) !== null) {
7939
+ fieldMap.set(match[1], match[2]);
7940
+ }
7941
+ const statusRaw = fieldMap.get("Status");
7942
+ if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
7943
+ return Err(
7944
+ new Error(
7945
+ `Feature "${name}" has invalid status: "${statusRaw ?? "(missing)"}". Valid statuses: ${[...VALID_STATUSES].join(", ")}`
7946
+ )
7947
+ );
7948
+ }
7949
+ const status = statusRaw;
7950
+ const specRaw = fieldMap.get("Spec") ?? EM_DASH;
7951
+ const spec = specRaw === EM_DASH ? null : specRaw;
7952
+ const plansRaw = fieldMap.get("Plans") ?? EM_DASH;
7953
+ const plans = plansRaw === EM_DASH ? [] : plansRaw.split(",").map((p) => p.trim());
7954
+ const blockedByRaw = fieldMap.get("Blocked by") ?? EM_DASH;
7955
+ const blockedBy = blockedByRaw === EM_DASH ? [] : blockedByRaw.split(",").map((b) => b.trim());
7956
+ const summary = fieldMap.get("Summary") ?? "";
7957
+ return Ok({ name, status, spec, plans, blockedBy, summary });
7958
+ }
7959
+ var EM_DASH2 = "\u2014";
7960
+ function serializeRoadmap(roadmap) {
7961
+ const lines = [];
7962
+ lines.push("---");
7963
+ lines.push(`project: ${roadmap.frontmatter.project}`);
7964
+ lines.push(`version: ${roadmap.frontmatter.version}`);
7965
+ lines.push(`last_synced: ${roadmap.frontmatter.lastSynced}`);
7966
+ lines.push(`last_manual_edit: ${roadmap.frontmatter.lastManualEdit}`);
7967
+ lines.push("---");
7968
+ lines.push("");
7969
+ lines.push("# Project Roadmap");
7970
+ for (const milestone of roadmap.milestones) {
7971
+ lines.push("");
7972
+ lines.push(serializeMilestoneHeading(milestone));
7973
+ for (const feature of milestone.features) {
7974
+ lines.push("");
7975
+ lines.push(...serializeFeature(feature));
7976
+ }
7977
+ }
7978
+ lines.push("");
7979
+ return lines.join("\n");
7980
+ }
7981
+ function serializeMilestoneHeading(milestone) {
7982
+ return milestone.isBacklog ? "## Backlog" : `## Milestone: ${milestone.name}`;
7983
+ }
7984
+ function serializeFeature(feature) {
7985
+ const spec = feature.spec ?? EM_DASH2;
7986
+ const plans = feature.plans.length > 0 ? feature.plans.join(", ") : EM_DASH2;
7987
+ const blockedBy = feature.blockedBy.length > 0 ? feature.blockedBy.join(", ") : EM_DASH2;
7988
+ return [
7989
+ `### Feature: ${feature.name}`,
7990
+ `- **Status:** ${feature.status}`,
7991
+ `- **Spec:** ${spec}`,
7992
+ `- **Plans:** ${plans}`,
7993
+ `- **Blocked by:** ${blockedBy}`,
7994
+ `- **Summary:** ${feature.summary}`
7995
+ ];
7996
+ }
7997
+ function inferStatus(feature, projectPath, allFeatures) {
7998
+ if (feature.blockedBy.length > 0) {
7999
+ const blockerNotDone = feature.blockedBy.some((blockerName) => {
8000
+ const blocker = allFeatures.find((f) => f.name.toLowerCase() === blockerName.toLowerCase());
8001
+ return !blocker || blocker.status !== "done";
8002
+ });
8003
+ if (blockerNotDone) return "blocked";
8004
+ }
8005
+ if (feature.plans.length === 0) return null;
8006
+ const allTaskStatuses = [];
8007
+ const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
8008
+ const useRootState = featuresWithPlans.length <= 1;
8009
+ if (useRootState) {
8010
+ const rootStatePath = path9.join(projectPath, ".harness", "state.json");
8011
+ if (fs7.existsSync(rootStatePath)) {
8012
+ try {
8013
+ const raw = fs7.readFileSync(rootStatePath, "utf-8");
8014
+ const state = JSON.parse(raw);
8015
+ if (state.progress) {
8016
+ for (const status of Object.values(state.progress)) {
8017
+ allTaskStatuses.push(status);
8018
+ }
8019
+ }
8020
+ } catch {
8021
+ }
8022
+ }
8023
+ }
8024
+ const sessionsDir = path9.join(projectPath, ".harness", "sessions");
8025
+ if (fs7.existsSync(sessionsDir)) {
8026
+ try {
8027
+ const sessionDirs = fs7.readdirSync(sessionsDir, { withFileTypes: true });
8028
+ for (const entry of sessionDirs) {
8029
+ if (!entry.isDirectory()) continue;
8030
+ const autopilotPath = path9.join(sessionsDir, entry.name, "autopilot-state.json");
8031
+ if (!fs7.existsSync(autopilotPath)) continue;
8032
+ try {
8033
+ const raw = fs7.readFileSync(autopilotPath, "utf-8");
8034
+ const autopilot = JSON.parse(raw);
8035
+ if (!autopilot.phases) continue;
8036
+ const linkedPhases = autopilot.phases.filter(
8037
+ (phase) => phase.planPath ? feature.plans.some((p) => p === phase.planPath || phase.planPath.endsWith(p)) : false
8038
+ );
8039
+ if (linkedPhases.length > 0) {
8040
+ for (const phase of linkedPhases) {
8041
+ if (phase.status === "complete") {
8042
+ allTaskStatuses.push("complete");
8043
+ } else if (phase.status === "pending") {
8044
+ allTaskStatuses.push("pending");
8045
+ } else {
8046
+ allTaskStatuses.push("in_progress");
8047
+ }
8048
+ }
8049
+ }
8050
+ } catch {
8051
+ }
8052
+ }
8053
+ } catch {
8054
+ }
8055
+ }
8056
+ if (allTaskStatuses.length === 0) return null;
8057
+ const allComplete = allTaskStatuses.every((s) => s === "complete");
8058
+ if (allComplete) return "done";
8059
+ const anyStarted = allTaskStatuses.some((s) => s === "in_progress" || s === "complete");
8060
+ if (anyStarted) return "in-progress";
8061
+ return null;
8062
+ }
8063
+ function syncRoadmap(options) {
8064
+ const { projectPath, roadmap, forceSync } = options;
8065
+ const isManuallyEdited = new Date(roadmap.frontmatter.lastManualEdit) > new Date(roadmap.frontmatter.lastSynced);
8066
+ const skipOverride = isManuallyEdited && !forceSync;
8067
+ const allFeatures = roadmap.milestones.flatMap((m) => m.features);
8068
+ const changes = [];
8069
+ for (const feature of allFeatures) {
8070
+ if (skipOverride) continue;
8071
+ const inferred = inferStatus(feature, projectPath, allFeatures);
8072
+ if (inferred === null) continue;
8073
+ if (inferred === feature.status) continue;
8074
+ changes.push({
8075
+ feature: feature.name,
8076
+ from: feature.status,
8077
+ to: inferred
8078
+ });
8079
+ }
8080
+ return Ok(changes);
8081
+ }
8082
+ var InteractionTypeSchema = z5.enum(["question", "confirmation", "transition"]);
8083
+ var QuestionSchema = z5.object({
8084
+ text: z5.string(),
8085
+ options: z5.array(z5.string()).optional(),
8086
+ default: z5.string().optional()
8087
+ });
8088
+ var ConfirmationSchema = z5.object({
8089
+ text: z5.string(),
8090
+ context: z5.string()
8091
+ });
8092
+ var TransitionSchema = z5.object({
8093
+ completedPhase: z5.string(),
8094
+ suggestedNext: z5.string(),
8095
+ reason: z5.string(),
8096
+ artifacts: z5.array(z5.string()),
8097
+ requiresConfirmation: z5.boolean(),
8098
+ summary: z5.string()
8099
+ });
8100
+ var EmitInteractionInputSchema = z5.object({
8101
+ path: z5.string(),
8102
+ type: InteractionTypeSchema,
8103
+ stream: z5.string().optional(),
8104
+ question: QuestionSchema.optional(),
8105
+ confirmation: ConfirmationSchema.optional(),
8106
+ transition: TransitionSchema.optional()
8107
+ });
8108
+ function getStatePath() {
8109
+ return path10.join(os.homedir(), ".harness", "update-check.json");
8110
+ }
8111
+ function isUpdateCheckEnabled(configInterval) {
8112
+ if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
8113
+ if (configInterval === 0) return false;
8114
+ return true;
8115
+ }
8116
+ function shouldRunCheck(state, intervalMs) {
8117
+ if (state === null) return true;
8118
+ return state.lastCheckTime + intervalMs <= Date.now();
8119
+ }
8120
+ function readCheckState() {
8121
+ try {
8122
+ const raw = fs8.readFileSync(getStatePath(), "utf-8");
8123
+ const parsed = JSON.parse(raw);
8124
+ if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
8125
+ const state = parsed;
8126
+ return {
8127
+ lastCheckTime: state.lastCheckTime,
8128
+ latestVersion: typeof state.latestVersion === "string" ? state.latestVersion : null,
8129
+ currentVersion: state.currentVersion
8130
+ };
8131
+ }
8132
+ return null;
8133
+ } catch {
8134
+ return null;
8135
+ }
8136
+ }
8137
+ function spawnBackgroundCheck(currentVersion) {
8138
+ const statePath = getStatePath();
8139
+ const stateDir = path10.dirname(statePath);
8140
+ const script = `
8141
+ const { execSync } = require('child_process');
8142
+ const fs = require('fs');
8143
+ const path = require('path');
8144
+ const crypto = require('crypto');
8145
+ try {
8146
+ const latest = execSync('npm view @harness-engineering/cli dist-tags.latest', {
8147
+ encoding: 'utf-8',
8148
+ timeout: 15000,
8149
+ stdio: ['ignore', 'pipe', 'ignore'],
8150
+ }).trim();
8151
+ const stateDir = ${JSON.stringify(stateDir)};
8152
+ const statePath = ${JSON.stringify(statePath)};
8153
+ fs.mkdirSync(stateDir, { recursive: true });
8154
+ const tmpFile = path.join(stateDir, '.update-check-' + crypto.randomBytes(4).toString('hex') + '.tmp');
8155
+ fs.writeFileSync(tmpFile, JSON.stringify({
8156
+ lastCheckTime: Date.now(),
8157
+ latestVersion: latest || null,
8158
+ currentVersion: ${JSON.stringify(currentVersion)},
8159
+ }), { mode: 0o644 });
8160
+ fs.renameSync(tmpFile, statePath);
8161
+ } catch (_) {}
8162
+ `.trim();
8163
+ try {
8164
+ const child = spawn(process.execPath, ["-e", script], {
8165
+ detached: true,
8166
+ stdio: "ignore"
8167
+ });
8168
+ child.unref();
8169
+ } catch {
8170
+ }
8171
+ }
8172
+ function compareVersions(a, b) {
8173
+ const pa = a.split(".").map(Number);
8174
+ const pb = b.split(".").map(Number);
8175
+ for (let i = 0; i < 3; i++) {
8176
+ const na = pa[i] ?? 0;
8177
+ const nb = pb[i] ?? 0;
8178
+ if (na > nb) return 1;
8179
+ if (na < nb) return -1;
8180
+ }
8181
+ return 0;
8182
+ }
8183
+ function getUpdateNotification(currentVersion) {
8184
+ const state = readCheckState();
8185
+ if (!state) return null;
8186
+ if (!state.latestVersion) return null;
8187
+ if (compareVersions(state.latestVersion, currentVersion) <= 0) return null;
8188
+ return `Update available: v${currentVersion} -> v${state.latestVersion}
8189
+ Run "harness update" to upgrade.`;
8190
+ }
6028
8191
  var VERSION = "0.8.0";
6029
8192
 
6030
8193
  export {
@@ -6067,9 +8230,15 @@ export {
6067
8230
  detectSizeBudgetViolations,
6068
8231
  generateSuggestions,
6069
8232
  EntropyAnalyzer,
8233
+ createCommentedCodeFixes,
8234
+ createOrphanedDepFixes,
6070
8235
  createFixes,
6071
8236
  previewFix,
6072
8237
  applyFixes,
8238
+ createForbiddenImportFixes,
8239
+ classifyFinding,
8240
+ applyHotspotDowngrade,
8241
+ deduplicateCleanupFindings,
6073
8242
  PatternConfigSchema,
6074
8243
  EntropyConfigSchema,
6075
8244
  validatePatternConfig,
@@ -6146,5 +8315,46 @@ export {
6146
8315
  goRules,
6147
8316
  SecurityScanner,
6148
8317
  runCIChecks,
8318
+ runMechanicalChecks,
8319
+ ExclusionSet,
8320
+ buildExclusionSet,
8321
+ detectChangeType,
8322
+ scopeContext,
8323
+ COMPLIANCE_DESCRIPTOR,
8324
+ runComplianceAgent,
8325
+ BUG_DETECTION_DESCRIPTOR,
8326
+ runBugDetectionAgent,
8327
+ SECURITY_DESCRIPTOR,
8328
+ runSecurityAgent,
8329
+ ARCHITECTURE_DESCRIPTOR,
8330
+ runArchitectureAgent,
8331
+ AGENT_DESCRIPTORS,
8332
+ fanOutReview,
8333
+ validateFindings,
8334
+ deduplicateFindings,
8335
+ checkEligibility,
8336
+ DEFAULT_PROVIDER_TIERS,
8337
+ resolveModelTier,
8338
+ determineAssessment,
8339
+ getExitCode,
8340
+ formatFindingBlock,
8341
+ formatTerminalOutput,
8342
+ isSmallSuggestion,
8343
+ formatGitHubComment,
8344
+ formatGitHubSummary,
8345
+ runReviewPipeline,
8346
+ parseRoadmap,
8347
+ serializeRoadmap,
8348
+ syncRoadmap,
8349
+ InteractionTypeSchema,
8350
+ QuestionSchema,
8351
+ ConfirmationSchema,
8352
+ TransitionSchema,
8353
+ EmitInteractionInputSchema,
8354
+ isUpdateCheckEnabled,
8355
+ shouldRunCheck,
8356
+ readCheckState,
8357
+ spawnBackgroundCheck,
8358
+ getUpdateNotification,
6149
8359
  VERSION
6150
8360
  };