codeharness 0.17.7 → 0.18.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as published in the public registry.
package/dist/index.js CHANGED
@@ -19,8 +19,8 @@ import {
19
19
  import { Command } from "commander";
20
20
 
21
21
  // src/commands/init.ts
22
- import { existsSync as existsSync6, readFileSync as readFileSync6 } from "fs";
23
- import { join as join6, basename as basename2 } from "path";
22
+ import { existsSync as existsSync7, readFileSync as readFileSync7 } from "fs";
23
+ import { join as join7, basename as basename2 } from "path";
24
24
 
25
25
  // src/lib/output.ts
26
26
  function ok(message, options) {
@@ -865,8 +865,8 @@ function configureHookCoexistence(dir) {
865
865
 
866
866
  // src/lib/bmad.ts
867
867
  import { execFileSync as execFileSync4 } from "child_process";
868
- import { existsSync as existsSync5, readFileSync as readFileSync5 } from "fs";
869
- import { join as join5 } from "path";
868
+ import { existsSync as existsSync6, readFileSync as readFileSync6 } from "fs";
869
+ import { join as join6 } from "path";
870
870
 
871
871
  // src/lib/patch-engine.ts
872
872
  import { readFileSync as readFileSync4, writeFileSync as writeFileSync4 } from "fs";
@@ -935,133 +935,81 @@ function removePatch(filePath, patchName) {
935
935
  }
936
936
 
937
937
  // src/templates/bmad-patches.ts
938
+ import { readFileSync as readFileSync5, existsSync as existsSync5 } from "fs";
939
+ import { join as join5, dirname as dirname2 } from "path";
940
+ import { fileURLToPath } from "url";
941
+ var __dirname = dirname2(fileURLToPath(import.meta.url));
942
+ function readPatchFile(name) {
943
+ const projectRoot = join5(__dirname, "..", "..");
944
+ const projectPath = join5(projectRoot, "patches", `${name}.md`);
945
+ if (existsSync5(projectPath)) {
946
+ return readFileSync5(projectPath, "utf-8").trim();
947
+ }
948
+ const pkgPath = join5(__dirname, "..", "..", "patches", `${name}.md`);
949
+ if (existsSync5(pkgPath)) {
950
+ return readFileSync5(pkgPath, "utf-8").trim();
951
+ }
952
+ return null;
953
+ }
938
954
  function storyVerificationPatch() {
939
- return `## Verification Requirements
955
+ return readPatchFile("story-verification") ?? `## Verification Requirements
940
956
 
941
- - [ ] Showboat proof document created (\`docs/exec-plans/active/<story-key>.proof.md\`)
942
- - [ ] All acceptance criteria verified with real-world evidence
957
+ - [ ] Showboat proof document created (verification/<story-key>-proof.md)
958
+ - [ ] All acceptance criteria verified with real-world evidence via docker exec
943
959
  - [ ] Test coverage meets target (100%)
944
960
 
945
- ### Verification Tags
946
-
947
- For each AC, append a verification tag to indicate how it can be verified:
948
- - \`<!-- verification: cli-verifiable -->\` \u2014 AC can be verified by running CLI commands in a subprocess
949
- - \`<!-- verification: integration-required -->\` \u2014 AC requires integration testing, multi-system interaction, or manual verification
950
-
951
- ACs referencing workflows, sprint planning, user sessions, or external system interactions should be tagged as \`integration-required\`. If no tag is present, a heuristic classifier will attempt to determine verifiability at runtime.
952
-
953
961
  ## Documentation Requirements
954
962
 
955
- - [ ] Relevant AGENTS.md files updated (list modules touched)
956
- - [ ] Exec-plan created in \`docs/exec-plans/active/<story-key>.md\`
963
+ - [ ] Relevant AGENTS.md files updated
964
+ - [ ] Exec-plan created in docs/exec-plans/active/<story-key>.md
957
965
 
958
966
  ## Testing Requirements
959
967
 
960
968
  - [ ] Unit tests written for all new/changed code
961
- - [ ] Integration tests for cross-module interactions
962
969
  - [ ] Coverage target: 100%`;
963
970
  }
964
971
  function devEnforcementPatch() {
965
- return `## Codeharness Enforcement
972
+ return readPatchFile("dev-enforcement") ?? `## Codeharness Enforcement
966
973
 
967
974
  ### Observability Check
968
975
  - [ ] Query VictoriaLogs after test runs to verify telemetry flows
969
- - [ ] Confirm logs, metrics, and traces are being collected
970
976
 
971
977
  ### Documentation Update
972
978
  - [ ] AGENTS.md updated for all changed modules
973
- - [ ] Exec-plan reflects current implementation state
974
979
 
975
980
  ### Test Enforcement
976
- - [ ] All tests pass (\`npm test\` / \`pytest\`)
977
- - [ ] Coverage gate: 100% of new/changed code
978
- - [ ] No skipped or pending tests without justification`;
981
+ - [ ] All tests pass
982
+ - [ ] Coverage gate: 100% of new/changed code`;
979
983
  }
980
984
  function reviewEnforcementPatch() {
981
- return `## Codeharness Review Gates
985
+ return readPatchFile("review-enforcement") ?? `## Codeharness Review Gates
982
986
 
983
987
  ### Verification
984
- - [ ] Showboat proof document exists and passes \`showboat verify\`
988
+ - [ ] Proof document exists and passes codeharness verify
985
989
  - [ ] All acceptance criteria have evidence in proof document
986
990
 
987
- ### Documentation Freshness
988
- - [ ] AGENTS.md is current for all changed modules
989
- - [ ] No stale references to removed or renamed modules
990
-
991
991
  ### Coverage
992
- - [ ] Coverage delta reported (before vs after)
993
- - [ ] No coverage regression in changed files
994
- - [ ] Overall coverage meets project target`;
992
+ - [ ] No coverage regression in changed files`;
995
993
  }
996
994
  function retroEnforcementPatch() {
997
- return `## Codeharness Quality Metrics
995
+ return readPatchFile("retro-enforcement") ?? `## Codeharness Quality Metrics
998
996
 
999
997
  ### Verification Effectiveness
1000
998
  - [ ] How many ACs were caught by verification vs manual review?
1001
- - [ ] Were there any false positives in Showboat proofs?
1002
- - [ ] Time spent on verification vs value delivered
1003
-
1004
- ### Documentation Health
1005
- - [ ] AGENTS.md accuracy grade (A/B/C/D/F)
1006
- - [ ] Exec-plans completeness \u2014 are all active stories documented?
1007
- - [ ] Stale documentation identified and cleaned up
999
+ - [ ] Were there any false positives in proofs?
1008
1000
 
1009
1001
  ### Test Quality
1010
- - [ ] Coverage trend (improving, stable, declining)
1011
- - [ ] Test reliability \u2014 any flaky tests introduced?
1012
- - [ ] Integration test coverage for cross-module interactions`;
1002
+ - [ ] Coverage trend (improving, stable, declining)`;
1013
1003
  }
1014
1004
  function sprintBeadsPatch() {
1015
- return `## Codeharness Backlog Integration
1005
+ return readPatchFile("sprint-planning") ?? `## Codeharness Sprint Planning
1016
1006
 
1017
- ### Pre-Triage Import Verification
1018
- - [ ] Confirm \`codeharness retro-import\` was run for all completed retrospectives
1019
- - [ ] Confirm \`codeharness github-import\` was run to pull labeled GitHub issues
1020
- - [ ] Verify all sources are reflected in beads before starting triage
1021
-
1022
- ### Beads Issue Status
1023
- - [ ] Run \`bd ready\` to display issues ready for development
1024
- - [ ] Review beads issue counts by status (open, in-progress, done)
1025
- - [ ] Verify issues from all sources are visible: retro (\`[gap:retro:...]\`), GitHub (\`[source:github:...]\`), and manual
1026
- - [ ] Verify no blocked issues without documented reason
1027
-
1028
- ### Sprint Readiness
1029
- - [ ] All selected stories have corresponding beads issues
1030
- - [ ] Dependencies between stories are reflected in beads deps
1031
- - [ ] Capacity aligns with estimated story complexity`;
1007
+ - [ ] Review unresolved retrospective action items
1008
+ - [ ] Import from all backlog sources before triage
1009
+ - [ ] Verify story ACs are testable via CLI + Docker`;
1032
1010
  }
1033
1011
  function sprintPlanningRetroPatch() {
1034
- return `## Retrospective Action Items Review
1035
-
1036
- ### Unresolved Action Items from Previous Retrospectives
1037
-
1038
- Before starting sprint planning, review all completed retrospectives for unresolved action items:
1039
-
1040
- 1. **Scan for retrospective files:** Look for all \`epic-N-retrospective.md\` files in \`_bmad-output/implementation-artifacts/\`
1041
- 2. **Import retro findings to beads:** For each retrospective not yet imported, run \`codeharness retro-import --epic N\` to classify findings and create beads issues with \`[gap:retro:epic-N-item-M]\` gap-ids
1042
- 3. **Import GitHub issues to beads:** Run \`codeharness github-import\` to pull labeled issues into beads with \`[source:github:owner/repo#N]\` gap-ids
1043
- 4. **Display combined backlog:** Run \`bd ready\` to present the unified backlog containing retro findings, GitHub issues, and manually created issues
1044
- 5. **Identify unresolved items:** Filter for action items that are NOT marked as completed/done
1045
- 6. **Surface during planning:** Present unresolved items to the team before selecting stories for the sprint
1046
-
1047
- ### Source-Aware Backlog Presentation
1048
-
1049
- When presenting the backlog during triage, issues should be identifiable by source:
1050
-
1051
- - **Retro findings** have gap-ids matching \`[gap:retro:...]\` \u2014 originated from retrospective action items
1052
- - **GitHub issues** have gap-ids matching \`[source:github:...]\` \u2014 imported from GitHub via label query
1053
- - **Manual issues** have no gap-id prefix \u2014 created directly in beads
1054
-
1055
- ### Integration with Sprint Planning
1056
-
1057
- - [ ] All \`epic-N-retrospective.md\` files scanned for action items
1058
- - [ ] \`codeharness retro-import --epic N\` run for each unimported retrospective
1059
- - [ ] \`codeharness github-import\` run to pull labeled GitHub issues
1060
- - [ ] \`bd ready\` run to display combined backlog from all sources
1061
- - [ ] Unresolved action items listed and reviewed
1062
- - [ ] Relevant action items incorporated into sprint goals or new stories
1063
- - [ ] Recurring issues from multiple retros flagged for systemic fixes
1064
- - [ ] All sources (retro, GitHub, manual) triaged uniformly \u2014 no source left unreviewed`;
1012
+ return sprintBeadsPatch();
1065
1013
  }
1066
1014
  var PATCH_TEMPLATES = {
1067
1015
  "story-verification": storyVerificationPatch,
@@ -1090,15 +1038,15 @@ var PATCH_TARGETS = {
1090
1038
  "sprint-retro": "bmm/workflows/4-implementation/sprint-planning/instructions.md"
1091
1039
  };
1092
1040
  function isBmadInstalled(dir) {
1093
- const bmadDir = join5(dir ?? process.cwd(), "_bmad");
1094
- return existsSync5(bmadDir);
1041
+ const bmadDir = join6(dir ?? process.cwd(), "_bmad");
1042
+ return existsSync6(bmadDir);
1095
1043
  }
1096
1044
  function detectBmadVersion(dir) {
1097
1045
  const root = dir ?? process.cwd();
1098
- const moduleYamlPath = join5(root, "_bmad", "core", "module.yaml");
1099
- if (existsSync5(moduleYamlPath)) {
1046
+ const moduleYamlPath = join6(root, "_bmad", "core", "module.yaml");
1047
+ if (existsSync6(moduleYamlPath)) {
1100
1048
  try {
1101
- const content = readFileSync5(moduleYamlPath, "utf-8");
1049
+ const content = readFileSync6(moduleYamlPath, "utf-8");
1102
1050
  const versionMatch = content.match(/version:\s*["']?([^\s"']+)["']?/);
1103
1051
  if (versionMatch) {
1104
1052
  return versionMatch[1];
@@ -1106,17 +1054,17 @@ function detectBmadVersion(dir) {
1106
1054
  } catch {
1107
1055
  }
1108
1056
  }
1109
- const versionFilePath = join5(root, "_bmad", "VERSION");
1110
- if (existsSync5(versionFilePath)) {
1057
+ const versionFilePath = join6(root, "_bmad", "VERSION");
1058
+ if (existsSync6(versionFilePath)) {
1111
1059
  try {
1112
- return readFileSync5(versionFilePath, "utf-8").trim() || null;
1060
+ return readFileSync6(versionFilePath, "utf-8").trim() || null;
1113
1061
  } catch {
1114
1062
  }
1115
1063
  }
1116
- const packageJsonPath = join5(root, "_bmad", "package.json");
1117
- if (existsSync5(packageJsonPath)) {
1064
+ const packageJsonPath = join6(root, "_bmad", "package.json");
1065
+ if (existsSync6(packageJsonPath)) {
1118
1066
  try {
1119
- const pkg = JSON.parse(readFileSync5(packageJsonPath, "utf-8"));
1067
+ const pkg = JSON.parse(readFileSync6(packageJsonPath, "utf-8"));
1120
1068
  return pkg.version ?? null;
1121
1069
  } catch {
1122
1070
  }
@@ -1133,11 +1081,12 @@ function installBmad(dir) {
1133
1081
  patches_applied: []
1134
1082
  };
1135
1083
  }
1136
- const cmdStr = "npx bmad-method install";
1084
+ const cmdStr = "npx bmad-method install --yes --tools claude-code";
1137
1085
  try {
1138
- execFileSync4("npx", ["bmad-method", "install"], {
1086
+ execFileSync4("npx", ["bmad-method", "install", "--yes", "--tools", "claude-code"], {
1139
1087
  stdio: "pipe",
1140
- timeout: 6e4,
1088
+ timeout: 12e4,
1089
+ // 2 min — npx may need to download the package first time
1141
1090
  cwd: root
1142
1091
  });
1143
1092
  } catch (err) {
@@ -1158,8 +1107,8 @@ function applyAllPatches(dir) {
1158
1107
  const root = dir ?? process.cwd();
1159
1108
  const results = [];
1160
1109
  for (const [patchName, relativePath] of Object.entries(PATCH_TARGETS)) {
1161
- const targetFile = join5(root, "_bmad", relativePath);
1162
- if (!existsSync5(targetFile)) {
1110
+ const targetFile = join6(root, "_bmad", relativePath);
1111
+ if (!existsSync6(targetFile)) {
1163
1112
  warn(`Patch target not found: ${relativePath}`);
1164
1113
  results.push({
1165
1114
  patchName,
@@ -1206,12 +1155,12 @@ function applyAllPatches(dir) {
1206
1155
  function detectBmalph(dir) {
1207
1156
  const root = dir ?? process.cwd();
1208
1157
  const files = [];
1209
- const ralphRcPath = join5(root, ".ralph", ".ralphrc");
1210
- if (existsSync5(ralphRcPath)) {
1158
+ const ralphRcPath = join6(root, ".ralph", ".ralphrc");
1159
+ if (existsSync6(ralphRcPath)) {
1211
1160
  files.push(".ralph/.ralphrc");
1212
1161
  }
1213
- const dotRalphDir = join5(root, ".ralph");
1214
- if (existsSync5(dotRalphDir)) {
1162
+ const dotRalphDir = join6(root, ".ralph");
1163
+ if (existsSync6(dotRalphDir)) {
1215
1164
  if (files.length === 0) {
1216
1165
  files.push(".ralph/");
1217
1166
  }
@@ -1226,10 +1175,10 @@ function getStoryFilePath(storyKey) {
1226
1175
  return `_bmad-output/implementation-artifacts/${storyKey}.md`;
1227
1176
  }
1228
1177
  function parseEpicsFile(filePath) {
1229
- if (!existsSync5(filePath)) {
1178
+ if (!existsSync6(filePath)) {
1230
1179
  return [];
1231
1180
  }
1232
- const content = readFileSync5(filePath, "utf-8");
1181
+ const content = readFileSync6(filePath, "utf-8");
1233
1182
  if (!content.trim()) {
1234
1183
  return [];
1235
1184
  }
@@ -1443,12 +1392,12 @@ function getInstallCommand(stack) {
1443
1392
  }
1444
1393
 
1445
1394
  // src/commands/init.ts
1446
- var HARNESS_VERSION = true ? "0.17.7" : "0.0.0-dev";
1395
+ var HARNESS_VERSION = true ? "0.18.1" : "0.0.0-dev";
1447
1396
  function getProjectName(projectDir) {
1448
1397
  try {
1449
- const pkgPath = join6(projectDir, "package.json");
1450
- if (existsSync6(pkgPath)) {
1451
- const pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
1398
+ const pkgPath = join7(projectDir, "package.json");
1399
+ if (existsSync7(pkgPath)) {
1400
+ const pkg = JSON.parse(readFileSync7(pkgPath, "utf-8"));
1452
1401
  if (pkg.name && typeof pkg.name === "string") {
1453
1402
  return pkg.name;
1454
1403
  }
@@ -1558,7 +1507,7 @@ function registerInitCommand(program) {
1558
1507
  }
1559
1508
  };
1560
1509
  const statePath = getStatePath(projectDir);
1561
- if (existsSync6(statePath)) {
1510
+ if (existsSync7(statePath)) {
1562
1511
  try {
1563
1512
  const existingState = readState(projectDir);
1564
1513
  const legacyObsDisabled = existingState.enforcement.observability === false;
@@ -1820,27 +1769,27 @@ function registerInitCommand(program) {
1820
1769
  if (!isJson) {
1821
1770
  ok("State file: .claude/codeharness.local.md created");
1822
1771
  }
1823
- const agentsMdPath = join6(projectDir, "AGENTS.md");
1824
- if (!existsSync6(agentsMdPath)) {
1772
+ const agentsMdPath = join7(projectDir, "AGENTS.md");
1773
+ if (!existsSync7(agentsMdPath)) {
1825
1774
  const agentsMdContent = generateAgentsMdContent(projectDir, stack);
1826
1775
  generateFile(agentsMdPath, agentsMdContent);
1827
1776
  result.documentation.agents_md = "created";
1828
1777
  } else {
1829
1778
  result.documentation.agents_md = "exists";
1830
1779
  }
1831
- const docsDir = join6(projectDir, "docs");
1832
- if (!existsSync6(docsDir)) {
1833
- generateFile(join6(docsDir, "index.md"), generateDocsIndexContent());
1834
- generateFile(join6(docsDir, "exec-plans", "active", ".gitkeep"), "");
1835
- generateFile(join6(docsDir, "exec-plans", "completed", ".gitkeep"), "");
1836
- generateFile(join6(docsDir, "quality", ".gitkeep"), DO_NOT_EDIT_HEADER);
1837
- generateFile(join6(docsDir, "generated", ".gitkeep"), DO_NOT_EDIT_HEADER);
1780
+ const docsDir = join7(projectDir, "docs");
1781
+ if (!existsSync7(docsDir)) {
1782
+ generateFile(join7(docsDir, "index.md"), generateDocsIndexContent());
1783
+ generateFile(join7(docsDir, "exec-plans", "active", ".gitkeep"), "");
1784
+ generateFile(join7(docsDir, "exec-plans", "completed", ".gitkeep"), "");
1785
+ generateFile(join7(docsDir, "quality", ".gitkeep"), DO_NOT_EDIT_HEADER);
1786
+ generateFile(join7(docsDir, "generated", ".gitkeep"), DO_NOT_EDIT_HEADER);
1838
1787
  result.documentation.docs_scaffold = "created";
1839
1788
  } else {
1840
1789
  result.documentation.docs_scaffold = "exists";
1841
1790
  }
1842
- const readmePath = join6(projectDir, "README.md");
1843
- if (!existsSync6(readmePath)) {
1791
+ const readmePath = join7(projectDir, "README.md");
1792
+ if (!existsSync7(readmePath)) {
1844
1793
  let cliHelpOutput = "";
1845
1794
  try {
1846
1795
  const { execFileSync: execFileSync8 } = await import("child_process");
@@ -2047,7 +1996,7 @@ function registerInitCommand(program) {
2047
1996
  }
2048
1997
 
2049
1998
  // src/commands/bridge.ts
2050
- import { existsSync as existsSync7 } from "fs";
1999
+ import { existsSync as existsSync8 } from "fs";
2051
2000
  function registerBridgeCommand(program) {
2052
2001
  program.command("bridge").description("Bridge BMAD epics/stories into beads task store").option("--epics <path>", "Path to BMAD epics markdown file").option("--dry-run", "Parse and display without creating beads issues").action((opts, cmd) => {
2053
2002
  const globalOpts = cmd.optsWithGlobals();
@@ -2059,7 +2008,7 @@ function registerBridgeCommand(program) {
2059
2008
  process.exitCode = 2;
2060
2009
  return;
2061
2010
  }
2062
- if (!existsSync7(epicsPath)) {
2011
+ if (!existsSync8(epicsPath)) {
2063
2012
  fail(`Epics file not found: ${epicsPath}`, { json: isJson });
2064
2013
  process.exitCode = 1;
2065
2014
  return;
@@ -2132,13 +2081,13 @@ function registerBridgeCommand(program) {
2132
2081
 
2133
2082
  // src/commands/run.ts
2134
2083
  import { spawn } from "child_process";
2135
- import { existsSync as existsSync9, mkdirSync as mkdirSync3, readFileSync as readFileSync8, writeFileSync as writeFileSync6 } from "fs";
2136
- import { join as join8, dirname as dirname2 } from "path";
2137
- import { fileURLToPath } from "url";
2084
+ import { existsSync as existsSync10, mkdirSync as mkdirSync3, readFileSync as readFileSync9, writeFileSync as writeFileSync6 } from "fs";
2085
+ import { join as join9, dirname as dirname3 } from "path";
2086
+ import { fileURLToPath as fileURLToPath2 } from "url";
2138
2087
 
2139
2088
  // src/lib/beads-sync.ts
2140
- import { existsSync as existsSync8, readFileSync as readFileSync7, writeFileSync as writeFileSync5 } from "fs";
2141
- import { join as join7 } from "path";
2089
+ import { existsSync as existsSync9, readFileSync as readFileSync8, writeFileSync as writeFileSync5 } from "fs";
2090
+ import { join as join8 } from "path";
2142
2091
  import { parse as parse2 } from "yaml";
2143
2092
  var BEADS_TO_STORY_STATUS = {
2144
2093
  open: "in-progress",
@@ -2169,10 +2118,10 @@ function resolveStoryFilePath(beadsIssue) {
2169
2118
  return trimmed;
2170
2119
  }
2171
2120
  function readStoryFileStatus(filePath) {
2172
- if (!existsSync8(filePath)) {
2121
+ if (!existsSync9(filePath)) {
2173
2122
  return null;
2174
2123
  }
2175
- const content = readFileSync7(filePath, "utf-8");
2124
+ const content = readFileSync8(filePath, "utf-8");
2176
2125
  const match = content.match(/^Status:\s*(.+)$/m);
2177
2126
  if (!match) {
2178
2127
  return null;
@@ -2180,7 +2129,7 @@ function readStoryFileStatus(filePath) {
2180
2129
  return match[1].trim();
2181
2130
  }
2182
2131
  function updateStoryFileStatus(filePath, newStatus) {
2183
- const content = readFileSync7(filePath, "utf-8");
2132
+ const content = readFileSync8(filePath, "utf-8");
2184
2133
  const statusRegex = /^Status:\s*.+$/m;
2185
2134
  if (statusRegex.test(content)) {
2186
2135
  const updated = content.replace(statusRegex, `Status: ${newStatus}`);
@@ -2199,12 +2148,12 @@ function updateStoryFileStatus(filePath, newStatus) {
2199
2148
  var SPRINT_STATUS_PATH = "_bmad-output/implementation-artifacts/sprint-status.yaml";
2200
2149
  function readSprintStatus(dir) {
2201
2150
  const root = dir ?? process.cwd();
2202
- const filePath = join7(root, SPRINT_STATUS_PATH);
2203
- if (!existsSync8(filePath)) {
2151
+ const filePath = join8(root, SPRINT_STATUS_PATH);
2152
+ if (!existsSync9(filePath)) {
2204
2153
  return {};
2205
2154
  }
2206
2155
  try {
2207
- const content = readFileSync7(filePath, "utf-8");
2156
+ const content = readFileSync8(filePath, "utf-8");
2208
2157
  const parsed = parse2(content);
2209
2158
  if (!parsed || typeof parsed !== "object") {
2210
2159
  return {};
@@ -2220,12 +2169,12 @@ function readSprintStatus(dir) {
2220
2169
  }
2221
2170
  function updateSprintStatus(storyKey, newStatus, dir) {
2222
2171
  const root = dir ?? process.cwd();
2223
- const filePath = join7(root, SPRINT_STATUS_PATH);
2224
- if (!existsSync8(filePath)) {
2172
+ const filePath = join8(root, SPRINT_STATUS_PATH);
2173
+ if (!existsSync9(filePath)) {
2225
2174
  warn(`sprint-status.yaml not found at ${filePath}, skipping update`);
2226
2175
  return;
2227
2176
  }
2228
- const content = readFileSync7(filePath, "utf-8");
2177
+ const content = readFileSync8(filePath, "utf-8");
2229
2178
  const keyPattern = new RegExp(`^(\\s*${escapeRegExp(storyKey)}:\\s*)\\S+(.*)$`, "m");
2230
2179
  if (!keyPattern.test(content)) {
2231
2180
  return;
@@ -2252,8 +2201,8 @@ function slugify(title) {
2252
2201
  }
2253
2202
  function appendOnboardingEpicToSprint(stories, dir) {
2254
2203
  const root = dir ?? process.cwd();
2255
- const filePath = join7(root, SPRINT_STATUS_PATH);
2256
- if (!existsSync8(filePath)) {
2204
+ const filePath = join8(root, SPRINT_STATUS_PATH);
2205
+ if (!existsSync9(filePath)) {
2257
2206
  warn(`sprint-status.yaml not found at ${filePath}, cannot append onboarding epic`);
2258
2207
  return { epicNumber: -1, storyKeys: [] };
2259
2208
  }
@@ -2270,7 +2219,7 @@ function appendOnboardingEpicToSprint(stories, dir) {
2270
2219
  }
2271
2220
  lines.push(` epic-${epicNum}-retrospective: optional`);
2272
2221
  lines.push("");
2273
- const content = readFileSync7(filePath, "utf-8");
2222
+ const content = readFileSync8(filePath, "utf-8");
2274
2223
  const updated = content.trimEnd() + "\n" + lines.join("\n");
2275
2224
  writeFileSync5(filePath, updated, "utf-8");
2276
2225
  return { epicNumber: epicNum, storyKeys };
@@ -2305,7 +2254,7 @@ function syncBeadsToStoryFile(beadsId, beadsFns, dir) {
2305
2254
  };
2306
2255
  }
2307
2256
  const storyKey = storyKeyFromPath(storyFilePath);
2308
- const fullPath = join7(root, storyFilePath);
2257
+ const fullPath = join8(root, storyFilePath);
2309
2258
  const currentStoryStatus = readStoryFileStatus(fullPath);
2310
2259
  if (currentStoryStatus === null) {
2311
2260
  return {
@@ -2350,7 +2299,7 @@ function syncBeadsToStoryFile(beadsId, beadsFns, dir) {
2350
2299
  function syncStoryFileToBeads(storyKey, beadsFns, dir) {
2351
2300
  const root = dir ?? process.cwd();
2352
2301
  const storyFilePath = `_bmad-output/implementation-artifacts/${storyKey}.md`;
2353
- const fullPath = join7(root, storyFilePath);
2302
+ const fullPath = join8(root, storyFilePath);
2354
2303
  const currentStoryStatus = readStoryFileStatus(fullPath);
2355
2304
  if (currentStoryStatus === null) {
2356
2305
  return {
@@ -2439,10 +2388,10 @@ function syncClose(beadsId, beadsFns, dir) {
2439
2388
  };
2440
2389
  }
2441
2390
  const storyKey = storyKeyFromPath(storyFilePath);
2442
- const fullPath = join7(root, storyFilePath);
2391
+ const fullPath = join8(root, storyFilePath);
2443
2392
  const previousStatus = readStoryFileStatus(fullPath);
2444
2393
  if (previousStatus === null) {
2445
- if (!existsSync8(fullPath)) {
2394
+ if (!existsSync9(fullPath)) {
2446
2395
  return {
2447
2396
  storyKey,
2448
2397
  beadsId,
@@ -2587,16 +2536,16 @@ function generateRalphPrompt(config) {
2587
2536
  var SPRINT_STATUS_REL = "_bmad-output/implementation-artifacts/sprint-status.yaml";
2588
2537
  var STORY_KEY_PATTERN = /^\d+-\d+-/;
2589
2538
  function resolveRalphPath() {
2590
- const currentFile = fileURLToPath(import.meta.url);
2591
- const currentDir = dirname2(currentFile);
2592
- let root = dirname2(currentDir);
2539
+ const currentFile = fileURLToPath2(import.meta.url);
2540
+ const currentDir = dirname3(currentFile);
2541
+ let root = dirname3(currentDir);
2593
2542
  if (root.endsWith("/src") || root.endsWith("\\src")) {
2594
- root = dirname2(root);
2543
+ root = dirname3(root);
2595
2544
  }
2596
- return join8(root, "ralph", "ralph.sh");
2545
+ return join9(root, "ralph", "ralph.sh");
2597
2546
  }
2598
2547
  function resolvePluginDir() {
2599
- return join8(process.cwd(), ".claude");
2548
+ return join9(process.cwd(), ".claude");
2600
2549
  }
2601
2550
  function countStories(statuses) {
2602
2551
  let total = 0;
@@ -2647,19 +2596,19 @@ function registerRunCommand(program) {
2647
2596
  const isJson = !!globalOpts.json;
2648
2597
  const outputOpts = { json: isJson };
2649
2598
  const ralphPath = resolveRalphPath();
2650
- if (!existsSync9(ralphPath)) {
2599
+ if (!existsSync10(ralphPath)) {
2651
2600
  fail("Ralph loop not found \u2014 reinstall codeharness", outputOpts);
2652
2601
  process.exitCode = 1;
2653
2602
  return;
2654
2603
  }
2655
2604
  const pluginDir = resolvePluginDir();
2656
- if (!existsSync9(pluginDir)) {
2605
+ if (!existsSync10(pluginDir)) {
2657
2606
  fail("Plugin directory not found \u2014 run codeharness init first", outputOpts);
2658
2607
  process.exitCode = 1;
2659
2608
  return;
2660
2609
  }
2661
2610
  const projectDir = process.cwd();
2662
- const sprintStatusPath = join8(projectDir, SPRINT_STATUS_REL);
2611
+ const sprintStatusPath = join9(projectDir, SPRINT_STATUS_REL);
2663
2612
  const statuses = readSprintStatus(projectDir);
2664
2613
  const counts = countStories(statuses);
2665
2614
  if (counts.total === 0) {
@@ -2678,12 +2627,12 @@ function registerRunCommand(program) {
2678
2627
  process.exitCode = 1;
2679
2628
  return;
2680
2629
  }
2681
- const promptFile = join8(projectDir, "ralph", ".harness-prompt.md");
2682
- const flaggedFilePath = join8(projectDir, "ralph", ".flagged_stories");
2630
+ const promptFile = join9(projectDir, "ralph", ".harness-prompt.md");
2631
+ const flaggedFilePath = join9(projectDir, "ralph", ".flagged_stories");
2683
2632
  let flaggedStories;
2684
- if (existsSync9(flaggedFilePath)) {
2633
+ if (existsSync10(flaggedFilePath)) {
2685
2634
  try {
2686
- const flaggedContent = readFileSync8(flaggedFilePath, "utf-8");
2635
+ const flaggedContent = readFileSync9(flaggedFilePath, "utf-8");
2687
2636
  flaggedStories = flaggedContent.split("\n").filter((s) => s.trim().length > 0);
2688
2637
  } catch {
2689
2638
  }
@@ -2694,7 +2643,7 @@ function registerRunCommand(program) {
2694
2643
  flaggedStories
2695
2644
  });
2696
2645
  try {
2697
- mkdirSync3(dirname2(promptFile), { recursive: true });
2646
+ mkdirSync3(dirname3(promptFile), { recursive: true });
2698
2647
  writeFileSync6(promptFile, promptContent, "utf-8");
2699
2648
  } catch (err) {
2700
2649
  const message = err instanceof Error ? err.message : String(err);
@@ -2733,10 +2682,10 @@ function registerRunCommand(program) {
2733
2682
  });
2734
2683
  });
2735
2684
  if (isJson) {
2736
- const statusFile = join8(projectDir, "ralph", "status.json");
2737
- if (existsSync9(statusFile)) {
2685
+ const statusFile = join9(projectDir, "ralph", "status.json");
2686
+ if (existsSync10(statusFile)) {
2738
2687
  try {
2739
- const statusData = JSON.parse(readFileSync8(statusFile, "utf-8"));
2688
+ const statusData = JSON.parse(readFileSync9(statusFile, "utf-8"));
2740
2689
  const finalStatuses = readSprintStatus(projectDir);
2741
2690
  const finalCounts = countStories(finalStatuses);
2742
2691
  jsonOutput({
@@ -2786,11 +2735,11 @@ function registerRunCommand(program) {
2786
2735
  }
2787
2736
 
2788
2737
  // src/commands/verify.ts
2789
- import { existsSync as existsSync13, readFileSync as readFileSync12 } from "fs";
2790
- import { join as join11 } from "path";
2738
+ import { existsSync as existsSync14, readFileSync as readFileSync13 } from "fs";
2739
+ import { join as join12 } from "path";
2791
2740
 
2792
2741
  // src/lib/verify-parser.ts
2793
- import { existsSync as existsSync10, readFileSync as readFileSync9 } from "fs";
2742
+ import { existsSync as existsSync11, readFileSync as readFileSync10 } from "fs";
2794
2743
  var UI_KEYWORDS = [
2795
2744
  "agent-browser",
2796
2745
  "screenshot",
@@ -2860,12 +2809,12 @@ function classifyAC(description) {
2860
2809
  return "general";
2861
2810
  }
2862
2811
  function parseStoryACs(storyFilePath) {
2863
- if (!existsSync10(storyFilePath)) {
2812
+ if (!existsSync11(storyFilePath)) {
2864
2813
  throw new Error(
2865
2814
  `Story file not found: ${storyFilePath}. Ensure the story file exists at the expected path.`
2866
2815
  );
2867
2816
  }
2868
- const content = readFileSync9(storyFilePath, "utf-8");
2817
+ const content = readFileSync10(storyFilePath, "utf-8");
2869
2818
  const lines = content.split("\n");
2870
2819
  let acSectionStart = -1;
2871
2820
  for (let i = 0; i < lines.length; i++) {
@@ -2926,21 +2875,21 @@ function parseStoryACs(storyFilePath) {
2926
2875
 
2927
2876
  // src/lib/verify.ts
2928
2877
  import { execFileSync as execFileSync5 } from "child_process";
2929
- import { existsSync as existsSync12, mkdirSync as mkdirSync5, readFileSync as readFileSync11, writeFileSync as writeFileSync8 } from "fs";
2930
- import { join as join10 } from "path";
2878
+ import { existsSync as existsSync13, mkdirSync as mkdirSync5, readFileSync as readFileSync12, writeFileSync as writeFileSync8 } from "fs";
2879
+ import { join as join11 } from "path";
2931
2880
 
2932
2881
  // src/lib/doc-health.ts
2933
2882
  import { execSync } from "child_process";
2934
2883
  import {
2935
- existsSync as existsSync11,
2884
+ existsSync as existsSync12,
2936
2885
  mkdirSync as mkdirSync4,
2937
- readFileSync as readFileSync10,
2886
+ readFileSync as readFileSync11,
2938
2887
  readdirSync as readdirSync2,
2939
2888
  statSync,
2940
2889
  unlinkSync,
2941
2890
  writeFileSync as writeFileSync7
2942
2891
  } from "fs";
2943
- import { join as join9, relative } from "path";
2892
+ import { join as join10, relative } from "path";
2944
2893
  var DO_NOT_EDIT_HEADER2 = "<!-- DO NOT EDIT MANUALLY";
2945
2894
  var SOURCE_EXTENSIONS = /* @__PURE__ */ new Set([".ts", ".js", ".py"]);
2946
2895
  var DEFAULT_MODULE_THRESHOLD = 3;
@@ -2962,7 +2911,7 @@ function findModules(dir, threshold) {
2962
2911
  let sourceCount = 0;
2963
2912
  const subdirs = [];
2964
2913
  for (const entry of entries) {
2965
- const fullPath = join9(current, entry);
2914
+ const fullPath = join10(current, entry);
2966
2915
  let stat;
2967
2916
  try {
2968
2917
  stat = statSync(fullPath);
@@ -2999,8 +2948,8 @@ function isTestFile(filename) {
2999
2948
  return filename.includes(".test.") || filename.includes(".spec.") || filename.includes("__tests__") || filename.startsWith("test_");
3000
2949
  }
3001
2950
  function isDocStale(docPath, codeDir) {
3002
- if (!existsSync11(docPath)) return true;
3003
- if (!existsSync11(codeDir)) return false;
2951
+ if (!existsSync12(docPath)) return true;
2952
+ if (!existsSync12(codeDir)) return false;
3004
2953
  const docMtime = statSync(docPath).mtime;
3005
2954
  const newestCode = getNewestSourceMtime(codeDir);
3006
2955
  if (newestCode === null) return false;
@@ -3018,7 +2967,7 @@ function getNewestSourceMtime(dir) {
3018
2967
  const dirName = current.split("/").pop() ?? "";
3019
2968
  if (dirName === "node_modules" || dirName === ".git") return;
3020
2969
  for (const entry of entries) {
3021
- const fullPath = join9(current, entry);
2970
+ const fullPath = join10(current, entry);
3022
2971
  let stat;
3023
2972
  try {
3024
2973
  stat = statSync(fullPath);
@@ -3054,7 +3003,7 @@ function getSourceFilesInModule(modulePath) {
3054
3003
  const dirName = current.split("/").pop() ?? "";
3055
3004
  if (dirName === "node_modules" || dirName === ".git" || dirName === "__tests__" || dirName === "dist" || dirName === "coverage" || dirName.startsWith(".") && current !== modulePath) return;
3056
3005
  for (const entry of entries) {
3057
- const fullPath = join9(current, entry);
3006
+ const fullPath = join10(current, entry);
3058
3007
  let stat;
3059
3008
  try {
3060
3009
  stat = statSync(fullPath);
@@ -3075,8 +3024,8 @@ function getSourceFilesInModule(modulePath) {
3075
3024
  return files;
3076
3025
  }
3077
3026
  function getMentionedFilesInAgentsMd(agentsPath) {
3078
- if (!existsSync11(agentsPath)) return [];
3079
- const content = readFileSync10(agentsPath, "utf-8");
3027
+ if (!existsSync12(agentsPath)) return [];
3028
+ const content = readFileSync11(agentsPath, "utf-8");
3080
3029
  const mentioned = /* @__PURE__ */ new Set();
3081
3030
  const filenamePattern = /[\w./-]*[\w-]+\.(?:ts|js|py)\b/g;
3082
3031
  let match;
@@ -3100,12 +3049,12 @@ function checkAgentsMdCompleteness(agentsPath, modulePath) {
3100
3049
  }
3101
3050
  function checkAgentsMdForModule(modulePath, dir) {
3102
3051
  const root = dir ?? process.cwd();
3103
- const fullModulePath = join9(root, modulePath);
3104
- let agentsPath = join9(fullModulePath, "AGENTS.md");
3105
- if (!existsSync11(agentsPath)) {
3106
- agentsPath = join9(root, "AGENTS.md");
3052
+ const fullModulePath = join10(root, modulePath);
3053
+ let agentsPath = join10(fullModulePath, "AGENTS.md");
3054
+ if (!existsSync12(agentsPath)) {
3055
+ agentsPath = join10(root, "AGENTS.md");
3107
3056
  }
3108
- if (!existsSync11(agentsPath)) {
3057
+ if (!existsSync12(agentsPath)) {
3109
3058
  return {
3110
3059
  path: relative(root, agentsPath),
3111
3060
  grade: "missing",
@@ -3136,9 +3085,9 @@ function checkAgentsMdForModule(modulePath, dir) {
3136
3085
  };
3137
3086
  }
3138
3087
  function checkDoNotEditHeaders(docPath) {
3139
- if (!existsSync11(docPath)) return false;
3088
+ if (!existsSync12(docPath)) return false;
3140
3089
  try {
3141
- const content = readFileSync10(docPath, "utf-8");
3090
+ const content = readFileSync11(docPath, "utf-8");
3142
3091
  if (content.length === 0) return false;
3143
3092
  return content.trimStart().startsWith(DO_NOT_EDIT_HEADER2);
3144
3093
  } catch {
@@ -3150,17 +3099,17 @@ function scanDocHealth(dir) {
3150
3099
  const root = dir ?? process.cwd();
3151
3100
  const documents = [];
3152
3101
  const modules = findModules(root);
3153
- const rootAgentsPath = join9(root, "AGENTS.md");
3154
- if (existsSync11(rootAgentsPath)) {
3102
+ const rootAgentsPath = join10(root, "AGENTS.md");
3103
+ if (existsSync12(rootAgentsPath)) {
3155
3104
  if (modules.length > 0) {
3156
3105
  const docMtime = statSync(rootAgentsPath).mtime;
3157
3106
  let allMissing = [];
3158
3107
  let staleModule = "";
3159
3108
  let newestCode = null;
3160
3109
  for (const mod of modules) {
3161
- const fullModPath = join9(root, mod);
3162
- const modAgentsPath = join9(fullModPath, "AGENTS.md");
3163
- if (existsSync11(modAgentsPath)) continue;
3110
+ const fullModPath = join10(root, mod);
3111
+ const modAgentsPath = join10(fullModPath, "AGENTS.md");
3112
+ if (existsSync12(modAgentsPath)) continue;
3164
3113
  const { missing } = checkAgentsMdCompleteness(rootAgentsPath, fullModPath);
3165
3114
  if (missing.length > 0 && staleModule === "") {
3166
3115
  staleModule = mod;
@@ -3208,8 +3157,8 @@ function scanDocHealth(dir) {
3208
3157
  });
3209
3158
  }
3210
3159
  for (const mod of modules) {
3211
- const modAgentsPath = join9(root, mod, "AGENTS.md");
3212
- if (existsSync11(modAgentsPath)) {
3160
+ const modAgentsPath = join10(root, mod, "AGENTS.md");
3161
+ if (existsSync12(modAgentsPath)) {
3213
3162
  const result = checkAgentsMdForModule(mod, root);
3214
3163
  if (result.path !== "AGENTS.md") {
3215
3164
  documents.push(result);
@@ -3217,9 +3166,9 @@ function scanDocHealth(dir) {
3217
3166
  }
3218
3167
  }
3219
3168
  }
3220
- const indexPath = join9(root, "docs", "index.md");
3221
- if (existsSync11(indexPath)) {
3222
- const content = readFileSync10(indexPath, "utf-8");
3169
+ const indexPath = join10(root, "docs", "index.md");
3170
+ if (existsSync12(indexPath)) {
3171
+ const content = readFileSync11(indexPath, "utf-8");
3223
3172
  const hasAbsolutePaths = /https?:\/\/|file:\/\//i.test(content);
3224
3173
  documents.push({
3225
3174
  path: "docs/index.md",
@@ -3229,11 +3178,11 @@ function scanDocHealth(dir) {
3229
3178
  reason: hasAbsolutePaths ? "Contains absolute URLs (may violate NFR25)" : "Uses relative paths"
3230
3179
  });
3231
3180
  }
3232
- const activeDir = join9(root, "docs", "exec-plans", "active");
3233
- if (existsSync11(activeDir)) {
3181
+ const activeDir = join10(root, "docs", "exec-plans", "active");
3182
+ if (existsSync12(activeDir)) {
3234
3183
  const files = readdirSync2(activeDir).filter((f) => f.endsWith(".md"));
3235
3184
  for (const file of files) {
3236
- const filePath = join9(activeDir, file);
3185
+ const filePath = join10(activeDir, file);
3237
3186
  documents.push({
3238
3187
  path: `docs/exec-plans/active/${file}`,
3239
3188
  grade: "fresh",
@@ -3244,11 +3193,11 @@ function scanDocHealth(dir) {
3244
3193
  }
3245
3194
  }
3246
3195
  for (const subdir of ["quality", "generated"]) {
3247
- const dirPath = join9(root, "docs", subdir);
3248
- if (!existsSync11(dirPath)) continue;
3196
+ const dirPath = join10(root, "docs", subdir);
3197
+ if (!existsSync12(dirPath)) continue;
3249
3198
  const files = readdirSync2(dirPath).filter((f) => !f.startsWith("."));
3250
3199
  for (const file of files) {
3251
- const filePath = join9(dirPath, file);
3200
+ const filePath = join10(dirPath, file);
3252
3201
  let stat;
3253
3202
  try {
3254
3203
  stat = statSync(filePath);
@@ -3281,7 +3230,7 @@ function scanDocHealth(dir) {
3281
3230
  }
3282
3231
  function checkAgentsMdLineCount(filePath, docPath, documents) {
3283
3232
  try {
3284
- const content = readFileSync10(filePath, "utf-8");
3233
+ const content = readFileSync11(filePath, "utf-8");
3285
3234
  const lineCount = content.split("\n").length;
3286
3235
  if (lineCount > 100) {
3287
3236
  documents.push({
@@ -3319,15 +3268,15 @@ function checkStoryDocFreshness(storyId, dir) {
3319
3268
  for (const mod of modulesToCheck) {
3320
3269
  const result = checkAgentsMdForModule(mod, root);
3321
3270
  documents.push(result);
3322
- const moduleAgentsPath = join9(root, mod, "AGENTS.md");
3323
- const actualAgentsPath = existsSync11(moduleAgentsPath) ? moduleAgentsPath : join9(root, "AGENTS.md");
3324
- if (existsSync11(actualAgentsPath)) {
3271
+ const moduleAgentsPath = join10(root, mod, "AGENTS.md");
3272
+ const actualAgentsPath = existsSync12(moduleAgentsPath) ? moduleAgentsPath : join10(root, "AGENTS.md");
3273
+ if (existsSync12(actualAgentsPath)) {
3325
3274
  checkAgentsMdLineCount(actualAgentsPath, result.path, documents);
3326
3275
  }
3327
3276
  }
3328
3277
  if (modulesToCheck.length === 0) {
3329
- const rootAgentsPath = join9(root, "AGENTS.md");
3330
- if (existsSync11(rootAgentsPath)) {
3278
+ const rootAgentsPath = join10(root, "AGENTS.md");
3279
+ if (existsSync12(rootAgentsPath)) {
3331
3280
  documents.push({
3332
3281
  path: "AGENTS.md",
3333
3282
  grade: "fresh",
@@ -3365,11 +3314,11 @@ function getRecentlyChangedFiles(dir) {
3365
3314
  }
3366
3315
  function completeExecPlan(storyId, dir) {
3367
3316
  const root = dir ?? process.cwd();
3368
- const activePath = join9(root, "docs", "exec-plans", "active", `${storyId}.md`);
3369
- if (!existsSync11(activePath)) {
3317
+ const activePath = join10(root, "docs", "exec-plans", "active", `${storyId}.md`);
3318
+ if (!existsSync12(activePath)) {
3370
3319
  return null;
3371
3320
  }
3372
- let content = readFileSync10(activePath, "utf-8");
3321
+ let content = readFileSync11(activePath, "utf-8");
3373
3322
  content = content.replace(/^Status:\s*active$/m, "Status: completed");
3374
3323
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
3375
3324
  content = content.replace(
@@ -3377,9 +3326,9 @@ function completeExecPlan(storyId, dir) {
3377
3326
  `$1
3378
3327
  Completed: ${timestamp}`
3379
3328
  );
3380
- const completedDir = join9(root, "docs", "exec-plans", "completed");
3329
+ const completedDir = join10(root, "docs", "exec-plans", "completed");
3381
3330
  mkdirSync4(completedDir, { recursive: true });
3382
- const completedPath = join9(completedDir, `${storyId}.md`);
3331
+ const completedPath = join10(completedDir, `${storyId}.md`);
3383
3332
  writeFileSync7(completedPath, content, "utf-8");
3384
3333
  try {
3385
3334
  unlinkSync(activePath);
@@ -3571,8 +3520,8 @@ function checkPreconditions(dir, storyId) {
3571
3520
  }
3572
3521
  function createProofDocument(storyId, storyTitle, acs, dir) {
3573
3522
  const root = dir ?? process.cwd();
3574
- const verificationDir = join10(root, "verification");
3575
- const screenshotsDir = join10(verificationDir, "screenshots");
3523
+ const verificationDir = join11(root, "verification");
3524
+ const screenshotsDir = join11(verificationDir, "screenshots");
3576
3525
  mkdirSync5(verificationDir, { recursive: true });
3577
3526
  mkdirSync5(screenshotsDir, { recursive: true });
3578
3527
  const criteria = acs.map((ac) => ({
@@ -3586,7 +3535,7 @@ function createProofDocument(storyId, storyTitle, acs, dir) {
3586
3535
  storyTitle,
3587
3536
  acceptanceCriteria: criteria
3588
3537
  });
3589
- const proofPath = join10(verificationDir, `${storyId}-proof.md`);
3538
+ const proofPath = join11(verificationDir, `${storyId}-proof.md`);
3590
3539
  writeFileSync8(proofPath, content, "utf-8");
3591
3540
  return proofPath;
3592
3541
  }
@@ -3620,10 +3569,10 @@ function validateProofQuality(proofPath) {
3620
3569
  otherCount: 0,
3621
3570
  blackBoxPass: false
3622
3571
  };
3623
- if (!existsSync12(proofPath)) {
3572
+ if (!existsSync13(proofPath)) {
3624
3573
  return emptyResult;
3625
3574
  }
3626
- const content = readFileSync11(proofPath, "utf-8");
3575
+ const content = readFileSync12(proofPath, "utf-8");
3627
3576
  const bbEnforcement = checkBlackBoxEnforcement(content);
3628
3577
  function buildResult(base) {
3629
3578
  const basePassed = base.pending === 0 && base.verified > 0;
@@ -3827,8 +3776,8 @@ function verifyRetro(opts, isJson, root) {
3827
3776
  return;
3828
3777
  }
3829
3778
  const retroFile = `epic-${epicNum}-retrospective.md`;
3830
- const retroPath = join11(root, STORY_DIR, retroFile);
3831
- if (!existsSync13(retroPath)) {
3779
+ const retroPath = join12(root, STORY_DIR, retroFile);
3780
+ if (!existsSync14(retroPath)) {
3832
3781
  if (isJson) {
3833
3782
  jsonOutput({ status: "fail", epic: epicNum, retroFile, message: `${retroFile} not found` });
3834
3783
  } else {
@@ -3845,7 +3794,7 @@ function verifyRetro(opts, isJson, root) {
3845
3794
  warn(`Failed to update sprint status: ${message}`);
3846
3795
  }
3847
3796
  if (isJson) {
3848
- jsonOutput({ status: "ok", epic: epicNum, retroFile: join11(STORY_DIR, retroFile) });
3797
+ jsonOutput({ status: "ok", epic: epicNum, retroFile: join12(STORY_DIR, retroFile) });
3849
3798
  } else {
3850
3799
  ok(`Epic ${epicNum} retrospective: marked done`);
3851
3800
  }
@@ -3856,8 +3805,8 @@ function verifyStory(storyId, isJson, root) {
3856
3805
  process.exitCode = 1;
3857
3806
  return;
3858
3807
  }
3859
- const readmePath = join11(root, "README.md");
3860
- if (!existsSync13(readmePath)) {
3808
+ const readmePath = join12(root, "README.md");
3809
+ if (!existsSync14(readmePath)) {
3861
3810
  if (isJson) {
3862
3811
  jsonOutput({ status: "fail", message: "No README.md found \u2014 verification requires user documentation" });
3863
3812
  } else {
@@ -3866,8 +3815,8 @@ function verifyStory(storyId, isJson, root) {
3866
3815
  process.exitCode = 1;
3867
3816
  return;
3868
3817
  }
3869
- const storyFilePath = join11(root, STORY_DIR, `${storyId}.md`);
3870
- if (!existsSync13(storyFilePath)) {
3818
+ const storyFilePath = join12(root, STORY_DIR, `${storyId}.md`);
3819
+ if (!existsSync14(storyFilePath)) {
3871
3820
  fail(`Story file not found: ${storyFilePath}`, { json: isJson });
3872
3821
  process.exitCode = 1;
3873
3822
  return;
@@ -3907,8 +3856,8 @@ function verifyStory(storyId, isJson, root) {
3907
3856
  return;
3908
3857
  }
3909
3858
  const storyTitle = extractStoryTitle(storyFilePath);
3910
- const expectedProofPath = join11(root, "verification", `${storyId}-proof.md`);
3911
- const proofPath = existsSync13(expectedProofPath) ? expectedProofPath : createProofDocument(storyId, storyTitle, acs, root);
3859
+ const expectedProofPath = join12(root, "verification", `${storyId}-proof.md`);
3860
+ const proofPath = existsSync14(expectedProofPath) ? expectedProofPath : createProofDocument(storyId, storyTitle, acs, root);
3912
3861
  const proofQuality = validateProofQuality(proofPath);
3913
3862
  if (!proofQuality.passed) {
3914
3863
  if (isJson) {
@@ -4002,7 +3951,7 @@ function verifyStory(storyId, isJson, root) {
4002
3951
  }
4003
3952
  function extractStoryTitle(filePath) {
4004
3953
  try {
4005
- const content = readFileSync12(filePath, "utf-8");
3954
+ const content = readFileSync13(filePath, "utf-8");
4006
3955
  const match = /^#\s+(.+)$/m.exec(content);
4007
3956
  return match ? match[1] : "Unknown Story";
4008
3957
  } catch {
@@ -4011,14 +3960,14 @@ function extractStoryTitle(filePath) {
4011
3960
  }
4012
3961
 
4013
3962
  // src/lib/onboard-checks.ts
4014
- import { existsSync as existsSync15 } from "fs";
4015
- import { join as join13, dirname as dirname3 } from "path";
4016
- import { fileURLToPath as fileURLToPath2 } from "url";
3963
+ import { existsSync as existsSync16 } from "fs";
3964
+ import { join as join14, dirname as dirname4 } from "path";
3965
+ import { fileURLToPath as fileURLToPath3 } from "url";
4017
3966
 
4018
3967
  // src/lib/coverage.ts
4019
3968
  import { execSync as execSync2 } from "child_process";
4020
- import { existsSync as existsSync14, readFileSync as readFileSync13 } from "fs";
4021
- import { join as join12 } from "path";
3969
+ import { existsSync as existsSync15, readFileSync as readFileSync14 } from "fs";
3970
+ import { join as join13 } from "path";
4022
3971
  function detectCoverageTool(dir) {
4023
3972
  const baseDir = dir ?? process.cwd();
4024
3973
  const stateHint = getStateToolHint(baseDir);
@@ -4041,16 +3990,16 @@ function getStateToolHint(dir) {
4041
3990
  }
4042
3991
  }
4043
3992
  function detectNodeCoverageTool(dir, stateHint) {
4044
- const hasVitestConfig = existsSync14(join12(dir, "vitest.config.ts")) || existsSync14(join12(dir, "vitest.config.js"));
4045
- const pkgPath = join12(dir, "package.json");
3993
+ const hasVitestConfig = existsSync15(join13(dir, "vitest.config.ts")) || existsSync15(join13(dir, "vitest.config.js"));
3994
+ const pkgPath = join13(dir, "package.json");
4046
3995
  let hasVitestCoverageV8 = false;
4047
3996
  let hasVitestCoverageIstanbul = false;
4048
3997
  let hasC8 = false;
4049
3998
  let hasJest = false;
4050
3999
  let pkgScripts = {};
4051
- if (existsSync14(pkgPath)) {
4000
+ if (existsSync15(pkgPath)) {
4052
4001
  try {
4053
- const pkg = JSON.parse(readFileSync13(pkgPath, "utf-8"));
4002
+ const pkg = JSON.parse(readFileSync14(pkgPath, "utf-8"));
4054
4003
  const allDeps = { ...pkg.dependencies ?? {}, ...pkg.devDependencies ?? {} };
4055
4004
  hasVitestCoverageV8 = "@vitest/coverage-v8" in allDeps;
4056
4005
  hasVitestCoverageIstanbul = "@vitest/coverage-istanbul" in allDeps;
@@ -4103,10 +4052,10 @@ function getNodeTestCommand(scripts, runner) {
4103
4052
  return "npm test";
4104
4053
  }
4105
4054
  function detectPythonCoverageTool(dir) {
4106
- const reqPath = join12(dir, "requirements.txt");
4107
- if (existsSync14(reqPath)) {
4055
+ const reqPath = join13(dir, "requirements.txt");
4056
+ if (existsSync15(reqPath)) {
4108
4057
  try {
4109
- const content = readFileSync13(reqPath, "utf-8");
4058
+ const content = readFileSync14(reqPath, "utf-8");
4110
4059
  if (content.includes("pytest-cov") || content.includes("coverage")) {
4111
4060
  return {
4112
4061
  tool: "coverage.py",
@@ -4117,10 +4066,10 @@ function detectPythonCoverageTool(dir) {
4117
4066
  } catch {
4118
4067
  }
4119
4068
  }
4120
- const pyprojectPath = join12(dir, "pyproject.toml");
4121
- if (existsSync14(pyprojectPath)) {
4069
+ const pyprojectPath = join13(dir, "pyproject.toml");
4070
+ if (existsSync15(pyprojectPath)) {
4122
4071
  try {
4123
- const content = readFileSync13(pyprojectPath, "utf-8");
4072
+ const content = readFileSync14(pyprojectPath, "utf-8");
4124
4073
  if (content.includes("pytest-cov") || content.includes("coverage")) {
4125
4074
  return {
4126
4075
  tool: "coverage.py",
@@ -4202,7 +4151,7 @@ function parseVitestCoverage(dir) {
4202
4151
  return 0;
4203
4152
  }
4204
4153
  try {
4205
- const report = JSON.parse(readFileSync13(reportPath, "utf-8"));
4154
+ const report = JSON.parse(readFileSync14(reportPath, "utf-8"));
4206
4155
  return report.total?.statements?.pct ?? 0;
4207
4156
  } catch {
4208
4157
  warn("Failed to parse coverage report");
@@ -4210,13 +4159,13 @@ function parseVitestCoverage(dir) {
4210
4159
  }
4211
4160
  }
4212
4161
  function parsePythonCoverage(dir) {
4213
- const reportPath = join12(dir, "coverage.json");
4214
- if (!existsSync14(reportPath)) {
4162
+ const reportPath = join13(dir, "coverage.json");
4163
+ if (!existsSync15(reportPath)) {
4215
4164
  warn("Coverage report not found at coverage.json");
4216
4165
  return 0;
4217
4166
  }
4218
4167
  try {
4219
- const report = JSON.parse(readFileSync13(reportPath, "utf-8"));
4168
+ const report = JSON.parse(readFileSync14(reportPath, "utf-8"));
4220
4169
  return report.totals?.percent_covered ?? 0;
4221
4170
  } catch {
4222
4171
  warn("Failed to parse coverage report");
@@ -4317,7 +4266,7 @@ function checkPerFileCoverage(floor, dir) {
4317
4266
  }
4318
4267
  let report;
4319
4268
  try {
4320
- report = JSON.parse(readFileSync13(reportPath, "utf-8"));
4269
+ report = JSON.parse(readFileSync14(reportPath, "utf-8"));
4321
4270
  } catch {
4322
4271
  warn("Failed to parse coverage-summary.json");
4323
4272
  return { floor, violations: [], totalFiles: 0 };
@@ -4347,11 +4296,11 @@ function checkPerFileCoverage(floor, dir) {
4347
4296
  }
4348
4297
  function findCoverageSummary(dir) {
4349
4298
  const candidates = [
4350
- join12(dir, "coverage", "coverage-summary.json"),
4351
- join12(dir, "src", "coverage", "coverage-summary.json")
4299
+ join13(dir, "coverage", "coverage-summary.json"),
4300
+ join13(dir, "src", "coverage", "coverage-summary.json")
4352
4301
  ];
4353
4302
  for (const p of candidates) {
4354
- if (existsSync14(p)) return p;
4303
+ if (existsSync15(p)) return p;
4355
4304
  }
4356
4305
  return null;
4357
4306
  }
@@ -4376,16 +4325,16 @@ function printCoverageOutput(result, evaluation) {
4376
4325
  // src/lib/onboard-checks.ts
4377
4326
  function checkHarnessInitialized(dir) {
4378
4327
  const statePath = getStatePath(dir ?? process.cwd());
4379
- return { ok: existsSync15(statePath) };
4328
+ return { ok: existsSync16(statePath) };
4380
4329
  }
4381
4330
  function checkBmadInstalled(dir) {
4382
4331
  return { ok: isBmadInstalled(dir) };
4383
4332
  }
4384
4333
  function checkHooksRegistered(dir) {
4385
- const __filename = fileURLToPath2(import.meta.url);
4386
- const __dirname = dirname3(__filename);
4387
- const hooksPath = join13(__dirname, "..", "..", "hooks", "hooks.json");
4388
- return { ok: existsSync15(hooksPath) };
4334
+ const __filename = fileURLToPath3(import.meta.url);
4335
+ const __dirname2 = dirname4(__filename);
4336
+ const hooksPath = join14(__dirname2, "..", "..", "hooks", "hooks.json");
4337
+ return { ok: existsSync16(hooksPath) };
4389
4338
  }
4390
4339
  function runPreconditions(dir) {
4391
4340
  const harnessCheck = checkHarnessInitialized(dir);
@@ -4425,8 +4374,8 @@ function findVerificationGaps(dir) {
4425
4374
  for (const [key, status] of Object.entries(statuses)) {
4426
4375
  if (status !== "done") continue;
4427
4376
  if (!STORY_KEY_PATTERN2.test(key)) continue;
4428
- const proofPath = join13(root, "verification", `${key}-proof.md`);
4429
- if (!existsSync15(proofPath)) {
4377
+ const proofPath = join14(root, "verification", `${key}-proof.md`);
4378
+ if (!existsSync16(proofPath)) {
4430
4379
  unverified.push(key);
4431
4380
  }
4432
4381
  }
@@ -4632,29 +4581,18 @@ function handleFullStatus(isJson) {
4632
4581
  const composeFile = state.docker?.compose_file ?? "docker-compose.harness.yml";
4633
4582
  const stackDir = getStackDir();
4634
4583
  const isShared = composeFile.startsWith(stackDir);
4635
- if (isShared) {
4636
- console.log(`Docker: shared stack at ~/.codeharness/stack/`);
4637
- const sharedComposeFile = getComposeFilePath();
4638
- const health = getStackHealth(sharedComposeFile, "codeharness-shared");
4639
- for (const svc of health.services) {
4640
- console.log(` ${svc.name}: ${svc.running ? "running" : "stopped"}`);
4641
- }
4642
- if (health.healthy) {
4643
- console.log(
4644
- ` Endpoints: logs=${DEFAULT_ENDPOINTS.logs} metrics=${DEFAULT_ENDPOINTS.metrics} traces=${DEFAULT_ENDPOINTS.traces}`
4645
- );
4646
- }
4647
- } else {
4648
- const health = getStackHealth(composeFile);
4649
- console.log("Docker:");
4650
- for (const svc of health.services) {
4651
- console.log(` ${svc.name}: ${svc.running ? "running" : "stopped"}`);
4652
- }
4653
- if (health.healthy) {
4654
- console.log(
4655
- ` Endpoints: logs=${DEFAULT_ENDPOINTS.logs} metrics=${DEFAULT_ENDPOINTS.metrics} traces=${DEFAULT_ENDPOINTS.traces}`
4656
- );
4657
- }
4584
+ const resolvedComposeFile = isShared ? getComposeFilePath() : composeFile;
4585
+ const projectName = isShared ? "codeharness-shared" : void 0;
4586
+ const header = isShared ? "Docker: shared stack at ~/.codeharness/stack/" : "Docker:";
4587
+ console.log(header);
4588
+ const health = getStackHealth(resolvedComposeFile, projectName);
4589
+ for (const svc of health.services) {
4590
+ console.log(` ${svc.name}: ${svc.running ? "running" : "stopped"}`);
4591
+ }
4592
+ if (health.healthy) {
4593
+ console.log(
4594
+ ` Endpoints: logs=${DEFAULT_ENDPOINTS.logs} metrics=${DEFAULT_ENDPOINTS.metrics} traces=${DEFAULT_ENDPOINTS.traces}`
4595
+ );
4658
4596
  }
4659
4597
  }
4660
4598
  }
@@ -5011,16 +4949,16 @@ function getBeadsData() {
5011
4949
  }
5012
4950
 
5013
4951
  // src/commands/onboard.ts
5014
- import { join as join17 } from "path";
4952
+ import { join as join18 } from "path";
5015
4953
 
5016
4954
  // src/lib/scanner.ts
5017
4955
  import {
5018
- existsSync as existsSync16,
4956
+ existsSync as existsSync17,
5019
4957
  readdirSync as readdirSync3,
5020
- readFileSync as readFileSync14,
4958
+ readFileSync as readFileSync15,
5021
4959
  statSync as statSync2
5022
4960
  } from "fs";
5023
- import { join as join14, relative as relative2 } from "path";
4961
+ import { join as join15, relative as relative2 } from "path";
5024
4962
  var SOURCE_EXTENSIONS2 = /* @__PURE__ */ new Set([".ts", ".js", ".py"]);
5025
4963
  var DEFAULT_MIN_MODULE_SIZE = 3;
5026
4964
  function getExtension2(filename) {
@@ -5045,7 +4983,7 @@ function countSourceFiles(dir) {
5045
4983
  for (const entry of entries) {
5046
4984
  if (isSkippedDir(entry)) continue;
5047
4985
  if (entry.startsWith(".") && current !== dir) continue;
5048
- const fullPath = join14(current, entry);
4986
+ const fullPath = join15(current, entry);
5049
4987
  let stat;
5050
4988
  try {
5051
4989
  stat = statSync2(fullPath);
@@ -5068,7 +5006,7 @@ function countSourceFiles(dir) {
5068
5006
  return count;
5069
5007
  }
5070
5008
  function countModuleFiles(modulePath, rootDir) {
5071
- const fullModulePath = join14(rootDir, modulePath);
5009
+ const fullModulePath = join15(rootDir, modulePath);
5072
5010
  let sourceFiles = 0;
5073
5011
  let testFiles = 0;
5074
5012
  function walk(current) {
@@ -5080,7 +5018,7 @@ function countModuleFiles(modulePath, rootDir) {
5080
5018
  }
5081
5019
  for (const entry of entries) {
5082
5020
  if (isSkippedDir(entry)) continue;
5083
- const fullPath = join14(current, entry);
5021
+ const fullPath = join15(current, entry);
5084
5022
  let stat;
5085
5023
  try {
5086
5024
  stat = statSync2(fullPath);
@@ -5105,8 +5043,8 @@ function countModuleFiles(modulePath, rootDir) {
5105
5043
  return { sourceFiles, testFiles };
5106
5044
  }
5107
5045
  function detectArtifacts(dir) {
5108
- const bmadPath = join14(dir, "_bmad");
5109
- const hasBmad = existsSync16(bmadPath);
5046
+ const bmadPath = join15(dir, "_bmad");
5047
+ const hasBmad = existsSync17(bmadPath);
5110
5048
  return {
5111
5049
  hasBmad,
5112
5050
  bmadPath: hasBmad ? relative2(dir, bmadPath) || "_bmad" : null
@@ -5188,10 +5126,10 @@ function readPerFileCoverage(dir, format) {
5188
5126
  return null;
5189
5127
  }
5190
5128
  function readVitestPerFileCoverage(dir) {
5191
- const reportPath = join14(dir, "coverage", "coverage-summary.json");
5192
- if (!existsSync16(reportPath)) return null;
5129
+ const reportPath = join15(dir, "coverage", "coverage-summary.json");
5130
+ if (!existsSync17(reportPath)) return null;
5193
5131
  try {
5194
- const report = JSON.parse(readFileSync14(reportPath, "utf-8"));
5132
+ const report = JSON.parse(readFileSync15(reportPath, "utf-8"));
5195
5133
  const result = /* @__PURE__ */ new Map();
5196
5134
  for (const [key, value] of Object.entries(report)) {
5197
5135
  if (key === "total") continue;
@@ -5203,10 +5141,10 @@ function readVitestPerFileCoverage(dir) {
5203
5141
  }
5204
5142
  }
5205
5143
  function readPythonPerFileCoverage(dir) {
5206
- const reportPath = join14(dir, "coverage.json");
5207
- if (!existsSync16(reportPath)) return null;
5144
+ const reportPath = join15(dir, "coverage.json");
5145
+ if (!existsSync17(reportPath)) return null;
5208
5146
  try {
5209
- const report = JSON.parse(readFileSync14(reportPath, "utf-8"));
5147
+ const report = JSON.parse(readFileSync15(reportPath, "utf-8"));
5210
5148
  if (!report.files) return null;
5211
5149
  const result = /* @__PURE__ */ new Map();
5212
5150
  for (const [key, value] of Object.entries(report.files)) {
@@ -5222,13 +5160,13 @@ function auditDocumentation(dir) {
5222
5160
  const root = dir ?? process.cwd();
5223
5161
  const documents = [];
5224
5162
  for (const docName of AUDIT_DOCUMENTS) {
5225
- const docPath = join14(root, docName);
5226
- if (!existsSync16(docPath)) {
5163
+ const docPath = join15(root, docName);
5164
+ if (!existsSync17(docPath)) {
5227
5165
  documents.push({ name: docName, grade: "missing", path: null });
5228
5166
  continue;
5229
5167
  }
5230
- const srcDir = join14(root, "src");
5231
- const codeDir = existsSync16(srcDir) ? srcDir : root;
5168
+ const srcDir = join15(root, "src");
5169
+ const codeDir = existsSync17(srcDir) ? srcDir : root;
5232
5170
  const stale = isDocStale(docPath, codeDir);
5233
5171
  documents.push({
5234
5172
  name: docName,
@@ -5236,8 +5174,8 @@ function auditDocumentation(dir) {
5236
5174
  path: docName
5237
5175
  });
5238
5176
  }
5239
- const docsDir = join14(root, "docs");
5240
- if (existsSync16(docsDir)) {
5177
+ const docsDir = join15(root, "docs");
5178
+ if (existsSync17(docsDir)) {
5241
5179
  try {
5242
5180
  const stat = statSync2(docsDir);
5243
5181
  if (stat.isDirectory()) {
@@ -5249,10 +5187,10 @@ function auditDocumentation(dir) {
5249
5187
  } else {
5250
5188
  documents.push({ name: "docs/", grade: "missing", path: null });
5251
5189
  }
5252
- const indexPath = join14(root, "docs", "index.md");
5253
- if (existsSync16(indexPath)) {
5254
- const srcDir = join14(root, "src");
5255
- const indexCodeDir = existsSync16(srcDir) ? srcDir : root;
5190
+ const indexPath = join15(root, "docs", "index.md");
5191
+ if (existsSync17(indexPath)) {
5192
+ const srcDir = join15(root, "src");
5193
+ const indexCodeDir = existsSync17(srcDir) ? srcDir : root;
5256
5194
  const indexStale = isDocStale(indexPath, indexCodeDir);
5257
5195
  documents.push({
5258
5196
  name: "docs/index.md",
@@ -5269,8 +5207,8 @@ function auditDocumentation(dir) {
5269
5207
 
5270
5208
  // src/lib/epic-generator.ts
5271
5209
  import { createInterface } from "readline";
5272
- import { existsSync as existsSync17, mkdirSync as mkdirSync6, writeFileSync as writeFileSync9 } from "fs";
5273
- import { dirname as dirname4, join as join15 } from "path";
5210
+ import { existsSync as existsSync18, mkdirSync as mkdirSync6, writeFileSync as writeFileSync9 } from "fs";
5211
+ import { dirname as dirname5, join as join16 } from "path";
5274
5212
  var PRIORITY_BY_TYPE = {
5275
5213
  observability: 1,
5276
5214
  coverage: 2,
@@ -5308,8 +5246,8 @@ function generateOnboardingEpic(scan, coverage, audit, rootDir) {
5308
5246
  storyNum++;
5309
5247
  }
5310
5248
  for (const mod of scan.modules) {
5311
- const agentsPath = join15(root, mod.path, "AGENTS.md");
5312
- if (!existsSync17(agentsPath)) {
5249
+ const agentsPath = join16(root, mod.path, "AGENTS.md");
5250
+ if (!existsSync18(agentsPath)) {
5313
5251
  stories.push({
5314
5252
  key: `0.${storyNum}`,
5315
5253
  title: `Create ${mod.path}/AGENTS.md`,
@@ -5375,7 +5313,7 @@ function generateOnboardingEpic(scan, coverage, audit, rootDir) {
5375
5313
  };
5376
5314
  }
5377
5315
  function writeOnboardingEpic(epic, outputPath) {
5378
- mkdirSync6(dirname4(outputPath), { recursive: true });
5316
+ mkdirSync6(dirname5(outputPath), { recursive: true });
5379
5317
  const lines = [];
5380
5318
  lines.push(`# ${epic.title}`);
5381
5319
  lines.push("");
@@ -5507,29 +5445,29 @@ function getGapIdFromTitle(title) {
5507
5445
  }
5508
5446
 
5509
5447
  // src/lib/scan-cache.ts
5510
- import { existsSync as existsSync18, mkdirSync as mkdirSync7, readFileSync as readFileSync15, writeFileSync as writeFileSync10 } from "fs";
5511
- import { join as join16 } from "path";
5448
+ import { existsSync as existsSync19, mkdirSync as mkdirSync7, readFileSync as readFileSync16, writeFileSync as writeFileSync10 } from "fs";
5449
+ import { join as join17 } from "path";
5512
5450
  var CACHE_DIR = ".harness";
5513
5451
  var CACHE_FILE = "last-onboard-scan.json";
5514
5452
  var DEFAULT_MAX_AGE_MS = 864e5;
5515
5453
  function saveScanCache(entry, dir) {
5516
5454
  try {
5517
5455
  const root = dir ?? process.cwd();
5518
- const cacheDir = join16(root, CACHE_DIR);
5456
+ const cacheDir = join17(root, CACHE_DIR);
5519
5457
  mkdirSync7(cacheDir, { recursive: true });
5520
- const cachePath = join16(cacheDir, CACHE_FILE);
5458
+ const cachePath = join17(cacheDir, CACHE_FILE);
5521
5459
  writeFileSync10(cachePath, JSON.stringify(entry, null, 2), "utf-8");
5522
5460
  } catch {
5523
5461
  }
5524
5462
  }
5525
5463
  function loadScanCache(dir) {
5526
5464
  const root = dir ?? process.cwd();
5527
- const cachePath = join16(root, CACHE_DIR, CACHE_FILE);
5528
- if (!existsSync18(cachePath)) {
5465
+ const cachePath = join17(root, CACHE_DIR, CACHE_FILE);
5466
+ if (!existsSync19(cachePath)) {
5529
5467
  return null;
5530
5468
  }
5531
5469
  try {
5532
- const raw = readFileSync15(cachePath, "utf-8");
5470
+ const raw = readFileSync16(cachePath, "utf-8");
5533
5471
  return JSON.parse(raw);
5534
5472
  } catch {
5535
5473
  return null;
@@ -5702,7 +5640,7 @@ function registerOnboardCommand(program) {
5702
5640
  }
5703
5641
  coverage = lastCoverageResult ?? runCoverageAnalysis(scan);
5704
5642
  audit = lastAuditResult ?? runAudit();
5705
- const epicPath = join17(process.cwd(), "ralph", "onboarding-epic.md");
5643
+ const epicPath = join18(process.cwd(), "ralph", "onboarding-epic.md");
5706
5644
  const epic = generateOnboardingEpic(scan, coverage, audit);
5707
5645
  mergeExtendedGaps(epic);
5708
5646
  if (!isFull) {
@@ -5775,7 +5713,7 @@ function registerOnboardCommand(program) {
5775
5713
  coverage,
5776
5714
  audit
5777
5715
  });
5778
- const epicPath = join17(process.cwd(), "ralph", "onboarding-epic.md");
5716
+ const epicPath = join18(process.cwd(), "ralph", "onboarding-epic.md");
5779
5717
  const epic = generateOnboardingEpic(scan, coverage, audit);
5780
5718
  mergeExtendedGaps(epic);
5781
5719
  if (!isFull) {
@@ -5883,8 +5821,8 @@ function printEpicOutput(epic) {
5883
5821
  }
5884
5822
 
5885
5823
  // src/commands/teardown.ts
5886
- import { existsSync as existsSync19, unlinkSync as unlinkSync2, readFileSync as readFileSync16, writeFileSync as writeFileSync11, rmSync } from "fs";
5887
- import { join as join18 } from "path";
5824
+ import { existsSync as existsSync20, unlinkSync as unlinkSync2, readFileSync as readFileSync17, writeFileSync as writeFileSync11, rmSync } from "fs";
5825
+ import { join as join19 } from "path";
5888
5826
  function buildDefaultResult() {
5889
5827
  return {
5890
5828
  status: "ok",
@@ -5987,16 +5925,16 @@ function registerTeardownCommand(program) {
5987
5925
  info("Docker stack: not running, skipping");
5988
5926
  }
5989
5927
  }
5990
- const composeFilePath = join18(projectDir, composeFile);
5991
- if (existsSync19(composeFilePath)) {
5928
+ const composeFilePath = join19(projectDir, composeFile);
5929
+ if (existsSync20(composeFilePath)) {
5992
5930
  unlinkSync2(composeFilePath);
5993
5931
  result.removed.push(composeFile);
5994
5932
  if (!isJson) {
5995
5933
  ok(`Removed: ${composeFile}`);
5996
5934
  }
5997
5935
  }
5998
- const otelConfigPath = join18(projectDir, "otel-collector-config.yaml");
5999
- if (existsSync19(otelConfigPath)) {
5936
+ const otelConfigPath = join19(projectDir, "otel-collector-config.yaml");
5937
+ if (existsSync20(otelConfigPath)) {
6000
5938
  unlinkSync2(otelConfigPath);
6001
5939
  result.removed.push("otel-collector-config.yaml");
6002
5940
  if (!isJson) {
@@ -6006,8 +5944,8 @@ function registerTeardownCommand(program) {
6006
5944
  }
6007
5945
  let patchesRemoved = 0;
6008
5946
  for (const [patchName, relativePath] of Object.entries(PATCH_TARGETS)) {
6009
- const filePath = join18(projectDir, "_bmad", relativePath);
6010
- if (!existsSync19(filePath)) {
5947
+ const filePath = join19(projectDir, "_bmad", relativePath);
5948
+ if (!existsSync20(filePath)) {
6011
5949
  continue;
6012
5950
  }
6013
5951
  try {
@@ -6027,10 +5965,10 @@ function registerTeardownCommand(program) {
6027
5965
  }
6028
5966
  }
6029
5967
  if (state.otlp?.enabled && state.stack === "nodejs") {
6030
- const pkgPath = join18(projectDir, "package.json");
6031
- if (existsSync19(pkgPath)) {
5968
+ const pkgPath = join19(projectDir, "package.json");
5969
+ if (existsSync20(pkgPath)) {
6032
5970
  try {
6033
- const raw = readFileSync16(pkgPath, "utf-8");
5971
+ const raw = readFileSync17(pkgPath, "utf-8");
6034
5972
  const pkg = JSON.parse(raw);
6035
5973
  const scripts = pkg["scripts"];
6036
5974
  if (scripts) {
@@ -6070,8 +6008,8 @@ function registerTeardownCommand(program) {
6070
6008
  }
6071
6009
  }
6072
6010
  }
6073
- const harnessDir = join18(projectDir, ".harness");
6074
- if (existsSync19(harnessDir)) {
6011
+ const harnessDir = join19(projectDir, ".harness");
6012
+ if (existsSync20(harnessDir)) {
6075
6013
  rmSync(harnessDir, { recursive: true, force: true });
6076
6014
  result.removed.push(".harness/");
6077
6015
  if (!isJson) {
@@ -6079,7 +6017,7 @@ function registerTeardownCommand(program) {
6079
6017
  }
6080
6018
  }
6081
6019
  const statePath = getStatePath(projectDir);
6082
- if (existsSync19(statePath)) {
6020
+ if (existsSync20(statePath)) {
6083
6021
  unlinkSync2(statePath);
6084
6022
  result.removed.push(".claude/codeharness.local.md");
6085
6023
  if (!isJson) {
@@ -6823,8 +6761,8 @@ function registerQueryCommand(program) {
6823
6761
  }
6824
6762
 
6825
6763
  // src/commands/retro-import.ts
6826
- import { existsSync as existsSync20, readFileSync as readFileSync17 } from "fs";
6827
- import { join as join19 } from "path";
6764
+ import { existsSync as existsSync21, readFileSync as readFileSync18 } from "fs";
6765
+ import { join as join20 } from "path";
6828
6766
 
6829
6767
  // src/lib/retro-parser.ts
6830
6768
  var KNOWN_TOOLS = ["showboat", "ralph", "beads", "bmad"];
@@ -6993,15 +6931,15 @@ function registerRetroImportCommand(program) {
6993
6931
  return;
6994
6932
  }
6995
6933
  const retroFile = `epic-${epicNum}-retrospective.md`;
6996
- const retroPath = join19(root, STORY_DIR2, retroFile);
6997
- if (!existsSync20(retroPath)) {
6934
+ const retroPath = join20(root, STORY_DIR2, retroFile);
6935
+ if (!existsSync21(retroPath)) {
6998
6936
  fail(`Retro file not found: ${retroFile}`, { json: isJson });
6999
6937
  process.exitCode = 1;
7000
6938
  return;
7001
6939
  }
7002
6940
  let content;
7003
6941
  try {
7004
- content = readFileSync17(retroPath, "utf-8");
6942
+ content = readFileSync18(retroPath, "utf-8");
7005
6943
  } catch (err) {
7006
6944
  const message = err instanceof Error ? err.message : String(err);
7007
6945
  fail(`Failed to read retro file: ${message}`, { json: isJson });
@@ -7269,8 +7207,8 @@ function registerGithubImportCommand(program) {
7269
7207
 
7270
7208
  // src/lib/verify-env.ts
7271
7209
  import { execFileSync as execFileSync7 } from "child_process";
7272
- import { existsSync as existsSync21, mkdirSync as mkdirSync8, readdirSync as readdirSync4, readFileSync as readFileSync18, cpSync, rmSync as rmSync2, statSync as statSync3 } from "fs";
7273
- import { join as join20, basename as basename3 } from "path";
7210
+ import { existsSync as existsSync22, mkdirSync as mkdirSync8, readdirSync as readdirSync4, readFileSync as readFileSync19, cpSync, rmSync as rmSync2, statSync as statSync3 } from "fs";
7211
+ import { join as join21, basename as basename3 } from "path";
7274
7212
  import { createHash } from "crypto";
7275
7213
  var IMAGE_TAG = "codeharness-verify";
7276
7214
  var STORY_DIR3 = "_bmad-output/implementation-artifacts";
@@ -7283,14 +7221,14 @@ function isValidStoryKey(storyKey) {
7283
7221
  return /^[a-zA-Z0-9_-]+$/.test(storyKey);
7284
7222
  }
7285
7223
  function computeDistHash(projectDir) {
7286
- const distDir = join20(projectDir, "dist");
7287
- if (!existsSync21(distDir)) {
7224
+ const distDir = join21(projectDir, "dist");
7225
+ if (!existsSync22(distDir)) {
7288
7226
  return null;
7289
7227
  }
7290
7228
  const hash = createHash("sha256");
7291
7229
  const files = collectFiles(distDir).sort();
7292
7230
  for (const file of files) {
7293
- const content = readFileSync18(file);
7231
+ const content = readFileSync19(file);
7294
7232
  hash.update(file.slice(distDir.length));
7295
7233
  hash.update(content);
7296
7234
  }
@@ -7300,7 +7238,7 @@ function collectFiles(dir) {
7300
7238
  const results = [];
7301
7239
  const entries = readdirSync4(dir, { withFileTypes: true });
7302
7240
  for (const entry of entries) {
7303
- const fullPath = join20(dir, entry.name);
7241
+ const fullPath = join21(dir, entry.name);
7304
7242
  if (entry.isDirectory()) {
7305
7243
  results.push(...collectFiles(fullPath));
7306
7244
  } else {
@@ -7372,13 +7310,13 @@ function buildNodeImage(projectDir) {
7372
7310
  throw new Error("npm pack produced no output \u2014 cannot determine tarball filename.");
7373
7311
  }
7374
7312
  const tarballName = basename3(lastLine);
7375
- const tarballPath = join20("/tmp", tarballName);
7376
- const buildContext = join20("/tmp", `codeharness-verify-build-${Date.now()}`);
7313
+ const tarballPath = join21("/tmp", tarballName);
7314
+ const buildContext = join21("/tmp", `codeharness-verify-build-${Date.now()}`);
7377
7315
  mkdirSync8(buildContext, { recursive: true });
7378
7316
  try {
7379
- cpSync(tarballPath, join20(buildContext, tarballName));
7317
+ cpSync(tarballPath, join21(buildContext, tarballName));
7380
7318
  const dockerfileSrc = resolveDockerfileTemplate(projectDir);
7381
- cpSync(dockerfileSrc, join20(buildContext, "Dockerfile"));
7319
+ cpSync(dockerfileSrc, join21(buildContext, "Dockerfile"));
7382
7320
  execFileSync7("docker", [
7383
7321
  "build",
7384
7322
  "-t",
@@ -7397,7 +7335,7 @@ function buildNodeImage(projectDir) {
7397
7335
  }
7398
7336
  }
7399
7337
  function buildPythonImage(projectDir) {
7400
- const distDir = join20(projectDir, "dist");
7338
+ const distDir = join21(projectDir, "dist");
7401
7339
  const distFiles = readdirSync4(distDir).filter(
7402
7340
  (f) => f.endsWith(".tar.gz") || f.endsWith(".whl")
7403
7341
  );
@@ -7405,12 +7343,12 @@ function buildPythonImage(projectDir) {
7405
7343
  throw new Error("No distribution files found in dist/. Run your build command first (e.g., python -m build).");
7406
7344
  }
7407
7345
  const distFile = distFiles.filter((f) => f.endsWith(".tar.gz"))[0] ?? distFiles[0];
7408
- const buildContext = join20("/tmp", `codeharness-verify-build-${Date.now()}`);
7346
+ const buildContext = join21("/tmp", `codeharness-verify-build-${Date.now()}`);
7409
7347
  mkdirSync8(buildContext, { recursive: true });
7410
7348
  try {
7411
- cpSync(join20(distDir, distFile), join20(buildContext, distFile));
7349
+ cpSync(join21(distDir, distFile), join21(buildContext, distFile));
7412
7350
  const dockerfileSrc = resolveDockerfileTemplate(projectDir);
7413
- cpSync(dockerfileSrc, join20(buildContext, "Dockerfile"));
7351
+ cpSync(dockerfileSrc, join21(buildContext, "Dockerfile"));
7414
7352
  execFileSync7("docker", [
7415
7353
  "build",
7416
7354
  "-t",
@@ -7432,25 +7370,25 @@ function prepareVerifyWorkspace(storyKey, projectDir) {
7432
7370
  if (!isValidStoryKey(storyKey)) {
7433
7371
  throw new Error(`Invalid story key: ${storyKey}. Keys must contain only alphanumeric characters, hyphens, and underscores.`);
7434
7372
  }
7435
- const storyFile = join20(root, STORY_DIR3, `${storyKey}.md`);
7436
- if (!existsSync21(storyFile)) {
7373
+ const storyFile = join21(root, STORY_DIR3, `${storyKey}.md`);
7374
+ if (!existsSync22(storyFile)) {
7437
7375
  throw new Error(`Story file not found: ${storyFile}`);
7438
7376
  }
7439
7377
  const workspace = `${TEMP_PREFIX}${storyKey}`;
7440
- if (existsSync21(workspace)) {
7378
+ if (existsSync22(workspace)) {
7441
7379
  rmSync2(workspace, { recursive: true, force: true });
7442
7380
  }
7443
7381
  mkdirSync8(workspace, { recursive: true });
7444
- cpSync(storyFile, join20(workspace, "story.md"));
7445
- const readmePath = join20(root, "README.md");
7446
- if (existsSync21(readmePath)) {
7447
- cpSync(readmePath, join20(workspace, "README.md"));
7382
+ cpSync(storyFile, join21(workspace, "story.md"));
7383
+ const readmePath = join21(root, "README.md");
7384
+ if (existsSync22(readmePath)) {
7385
+ cpSync(readmePath, join21(workspace, "README.md"));
7448
7386
  }
7449
- const docsDir = join20(root, "docs");
7450
- if (existsSync21(docsDir) && statSync3(docsDir).isDirectory()) {
7451
- cpSync(docsDir, join20(workspace, "docs"), { recursive: true });
7387
+ const docsDir = join21(root, "docs");
7388
+ if (existsSync22(docsDir) && statSync3(docsDir).isDirectory()) {
7389
+ cpSync(docsDir, join21(workspace, "docs"), { recursive: true });
7452
7390
  }
7453
- mkdirSync8(join20(workspace, "verification"), { recursive: true });
7391
+ mkdirSync8(join21(workspace, "verification"), { recursive: true });
7454
7392
  return workspace;
7455
7393
  }
7456
7394
  function checkVerifyEnv() {
@@ -7499,7 +7437,7 @@ function cleanupVerifyEnv(storyKey) {
7499
7437
  }
7500
7438
  const workspace = `${TEMP_PREFIX}${storyKey}`;
7501
7439
  const containerName = `codeharness-verify-${storyKey}`;
7502
- if (existsSync21(workspace)) {
7440
+ if (existsSync22(workspace)) {
7503
7441
  rmSync2(workspace, { recursive: true, force: true });
7504
7442
  }
7505
7443
  try {
@@ -7518,11 +7456,11 @@ function cleanupVerifyEnv(storyKey) {
7518
7456
  }
7519
7457
  }
7520
7458
  function resolveDockerfileTemplate(projectDir) {
7521
- const local = join20(projectDir, "templates", "Dockerfile.verify");
7522
- if (existsSync21(local)) return local;
7459
+ const local = join21(projectDir, "templates", "Dockerfile.verify");
7460
+ if (existsSync22(local)) return local;
7523
7461
  const pkgDir = new URL("../../", import.meta.url).pathname;
7524
- const pkg = join20(pkgDir, "templates", "Dockerfile.verify");
7525
- if (existsSync21(pkg)) return pkg;
7462
+ const pkg = join21(pkgDir, "templates", "Dockerfile.verify");
7463
+ if (existsSync22(pkg)) return pkg;
7526
7464
  throw new Error("Dockerfile.verify not found. Ensure templates/Dockerfile.verify exists in the project or installed package.");
7527
7465
  }
7528
7466
  function dockerImageExists(tag) {
@@ -7668,26 +7606,26 @@ function registerVerifyEnvCommand(program) {
7668
7606
  }
7669
7607
 
7670
7608
  // src/commands/retry.ts
7671
- import { join as join22 } from "path";
7609
+ import { join as join23 } from "path";
7672
7610
 
7673
7611
  // src/lib/retry-state.ts
7674
- import { existsSync as existsSync22, readFileSync as readFileSync19, writeFileSync as writeFileSync12 } from "fs";
7675
- import { join as join21 } from "path";
7612
+ import { existsSync as existsSync23, readFileSync as readFileSync20, writeFileSync as writeFileSync12 } from "fs";
7613
+ import { join as join22 } from "path";
7676
7614
  var RETRIES_FILE = ".story_retries";
7677
7615
  var FLAGGED_FILE = ".flagged_stories";
7678
7616
  var LINE_PATTERN = /^([^=]+)=(\d+)$/;
7679
7617
  function retriesPath(dir) {
7680
- return join21(dir, RETRIES_FILE);
7618
+ return join22(dir, RETRIES_FILE);
7681
7619
  }
7682
7620
  function flaggedPath(dir) {
7683
- return join21(dir, FLAGGED_FILE);
7621
+ return join22(dir, FLAGGED_FILE);
7684
7622
  }
7685
7623
  function readRetries(dir) {
7686
7624
  const filePath = retriesPath(dir);
7687
- if (!existsSync22(filePath)) {
7625
+ if (!existsSync23(filePath)) {
7688
7626
  return /* @__PURE__ */ new Map();
7689
7627
  }
7690
- const raw = readFileSync19(filePath, "utf-8");
7628
+ const raw = readFileSync20(filePath, "utf-8");
7691
7629
  const result = /* @__PURE__ */ new Map();
7692
7630
  for (const line of raw.split("\n")) {
7693
7631
  const trimmed = line.trim();
@@ -7724,10 +7662,10 @@ function resetRetry(dir, storyKey) {
7724
7662
  }
7725
7663
  function readFlaggedStories(dir) {
7726
7664
  const filePath = flaggedPath(dir);
7727
- if (!existsSync22(filePath)) {
7665
+ if (!existsSync23(filePath)) {
7728
7666
  return [];
7729
7667
  }
7730
- const raw = readFileSync19(filePath, "utf-8");
7668
+ const raw = readFileSync20(filePath, "utf-8");
7731
7669
  return raw.split("\n").map((l) => l.trim()).filter((l) => l !== "");
7732
7670
  }
7733
7671
  function writeFlaggedStories(dir, stories) {
@@ -7752,7 +7690,7 @@ function registerRetryCommand(program) {
7752
7690
  program.command("retry").description("Manage retry state for stories").option("--reset", "Clear retry counters and flagged stories").option("--story <key>", "Target a specific story key (used with --reset or --status)").option("--status", "Show retry status for all stories").action((_options, cmd) => {
7753
7691
  const opts = cmd.optsWithGlobals();
7754
7692
  const isJson = opts.json === true;
7755
- const dir = join22(process.cwd(), RALPH_SUBDIR);
7693
+ const dir = join23(process.cwd(), RALPH_SUBDIR);
7756
7694
  if (opts.story && !isValidStoryKey3(opts.story)) {
7757
7695
  if (isJson) {
7758
7696
  jsonOutput({ status: "fail", message: `Invalid story key: ${opts.story}` });
@@ -7826,7 +7764,7 @@ function handleStatus(dir, isJson, filterStory) {
7826
7764
  }
7827
7765
 
7828
7766
  // src/index.ts
7829
- var VERSION = true ? "0.17.7" : "0.0.0-dev";
7767
+ var VERSION = true ? "0.18.1" : "0.0.0-dev";
7830
7768
  function createProgram() {
7831
7769
  const program = new Command();
7832
7770
  program.name("codeharness").description("Makes autonomous coding agents produce software that actually works").version(VERSION).option("--json", "Output in machine-readable JSON format");