codeharness 0.7.3 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/dist/index.js +816 -117
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -822,6 +822,18 @@ function appendGapId(existingDescription, gapId) {
822
822
  return `${existingDescription}
823
823
  ${gapId}`;
824
824
  }
825
+ function createOrFindIssue(title, gapId, opts) {
826
+ const issues = listIssues();
827
+ const existing = findExistingByGapId(gapId, issues);
828
+ if (existing) {
829
+ return { issue: existing, created: false };
830
+ }
831
+ const issue = createIssue(title, {
832
+ ...opts,
833
+ description: appendGapId(opts?.description, gapId)
834
+ });
835
+ return { issue, created: true };
836
+ }
825
837
  function configureHookCoexistence(dir) {
826
838
  const detection = detectBeadsHooks(dir);
827
839
  if (!detection.hasHooks) {
@@ -907,6 +919,14 @@ function storyVerificationPatch() {
907
919
  - [ ] All acceptance criteria verified with real-world evidence
908
920
  - [ ] Test coverage meets target (100%)
909
921
 
922
+ ### Verification Tags
923
+
924
+ For each AC, append a verification tag to indicate how it can be verified:
925
+ - \`<!-- verification: cli-verifiable -->\` \u2014 AC can be verified by running CLI commands in a subprocess
926
+ - \`<!-- verification: integration-required -->\` \u2014 AC requires integration testing, multi-system interaction, or manual verification
927
+
928
+ ACs referencing workflows, sprint planning, user sessions, or external system interactions should be tagged as \`integration-required\`. If no tag is present, a heuristic classifier will attempt to determine verifiability at runtime.
929
+
910
930
  ## Documentation Requirements
911
931
 
912
932
  - [ ] Relevant AGENTS.md files updated (list modules touched)
@@ -971,9 +991,15 @@ function retroEnforcementPatch() {
971
991
  function sprintBeadsPatch() {
972
992
  return `## Codeharness Backlog Integration
973
993
 
994
+ ### Pre-Triage Import Verification
995
+ - [ ] Confirm \`codeharness retro-import\` was run for all completed retrospectives
996
+ - [ ] Confirm \`codeharness github-import\` was run to pull labeled GitHub issues
997
+ - [ ] Verify all sources are reflected in beads before starting triage
998
+
974
999
  ### Beads Issue Status
975
1000
  - [ ] Run \`bd ready\` to display issues ready for development
976
1001
  - [ ] Review beads issue counts by status (open, in-progress, done)
1002
+ - [ ] Verify issues from all sources are visible: retro (\`[gap:retro:...]\`), GitHub (\`[source:github:...]\`), and manual
977
1003
  - [ ] Verify no blocked issues without documented reason
978
1004
 
979
1005
  ### Sprint Readiness
@@ -981,12 +1007,46 @@ function sprintBeadsPatch() {
981
1007
  - [ ] Dependencies between stories are reflected in beads deps
982
1008
  - [ ] Capacity aligns with estimated story complexity`;
983
1009
  }
1010
+ function sprintPlanningRetroPatch() {
1011
+ return `## Retrospective Action Items Review
1012
+
1013
+ ### Unresolved Action Items from Previous Retrospectives
1014
+
1015
+ Before starting sprint planning, review all completed retrospectives for unresolved action items:
1016
+
1017
+ 1. **Scan for retrospective files:** Look for all \`epic-N-retrospective.md\` files in \`_bmad-output/implementation-artifacts/\`
1018
+ 2. **Import retro findings to beads:** For each retrospective not yet imported, run \`codeharness retro-import --epic N\` to classify findings and create beads issues with \`[gap:retro:epic-N-item-M]\` gap-ids
1019
+ 3. **Import GitHub issues to beads:** Run \`codeharness github-import\` to pull labeled issues into beads with \`[source:github:owner/repo#N]\` gap-ids
1020
+ 4. **Display combined backlog:** Run \`bd ready\` to present the unified backlog containing retro findings, GitHub issues, and manually created issues
1021
+ 5. **Identify unresolved items:** Filter for action items that are NOT marked as completed/done
1022
+ 6. **Surface during planning:** Present unresolved items to the team before selecting stories for the sprint
1023
+
1024
+ ### Source-Aware Backlog Presentation
1025
+
1026
+ When presenting the backlog during triage, issues should be identifiable by source:
1027
+
1028
+ - **Retro findings** have gap-ids matching \`[gap:retro:...]\` \u2014 originated from retrospective action items
1029
+ - **GitHub issues** have gap-ids matching \`[source:github:...]\` \u2014 imported from GitHub via label query
1030
+ - **Manual issues** have no gap-id prefix \u2014 created directly in beads
1031
+
1032
+ ### Integration with Sprint Planning
1033
+
1034
+ - [ ] All \`epic-N-retrospective.md\` files scanned for action items
1035
+ - [ ] \`codeharness retro-import --epic N\` run for each unimported retrospective
1036
+ - [ ] \`codeharness github-import\` run to pull labeled GitHub issues
1037
+ - [ ] \`bd ready\` run to display combined backlog from all sources
1038
+ - [ ] Unresolved action items listed and reviewed
1039
+ - [ ] Relevant action items incorporated into sprint goals or new stories
1040
+ - [ ] Recurring issues from multiple retros flagged for systemic fixes
1041
+ - [ ] All sources (retro, GitHub, manual) triaged uniformly \u2014 no source left unreviewed`;
1042
+ }
984
1043
  var PATCH_TEMPLATES = {
985
1044
  "story-verification": storyVerificationPatch,
986
1045
  "dev-enforcement": devEnforcementPatch,
987
1046
  "review-enforcement": reviewEnforcementPatch,
988
1047
  "retro-enforcement": retroEnforcementPatch,
989
- "sprint-beads": sprintBeadsPatch
1048
+ "sprint-beads": sprintBeadsPatch,
1049
+ "sprint-retro": sprintPlanningRetroPatch
990
1050
  };
991
1051
 
992
1052
  // src/lib/bmad.ts
@@ -1003,7 +1063,8 @@ var PATCH_TARGETS = {
1003
1063
  "dev-enforcement": "bmm/workflows/4-implementation/dev-story/checklist.md",
1004
1064
  "review-enforcement": "bmm/workflows/4-implementation/code-review/checklist.md",
1005
1065
  "retro-enforcement": "bmm/workflows/4-implementation/retrospective/instructions.md",
1006
- "sprint-beads": "bmm/workflows/4-implementation/sprint-planning/checklist.md"
1066
+ "sprint-beads": "bmm/workflows/4-implementation/sprint-planning/checklist.md",
1067
+ "sprint-retro": "bmm/workflows/4-implementation/sprint-planning/instructions.md"
1007
1068
  };
1008
1069
  function isBmadInstalled(dir) {
1009
1070
  const bmadDir = join5(dir ?? process.cwd(), "_bmad");
@@ -1064,8 +1125,7 @@ function installBmad(dir) {
1064
1125
  return {
1065
1126
  status: "installed",
1066
1127
  version,
1067
- patches_applied: [],
1068
- bmalph_detected: false
1128
+ patches_applied: []
1069
1129
  };
1070
1130
  }
1071
1131
  function applyAllPatches(dir) {
@@ -1288,7 +1348,7 @@ function importStoriesToBeads(stories, opts, beadsFns) {
1288
1348
  }
1289
1349
 
1290
1350
  // src/commands/init.ts
1291
- var HARNESS_VERSION = true ? "0.7.3" : "0.0.0-dev";
1351
+ var HARNESS_VERSION = true ? "0.9.0" : "0.0.0-dev";
1292
1352
  function getStackLabel(stack) {
1293
1353
  if (stack === "nodejs") return "Node.js (package.json)";
1294
1354
  if (stack === "python") return "Python";
@@ -2521,6 +2581,29 @@ var DB_KEYWORDS = [
2521
2581
  "sql",
2522
2582
  "table"
2523
2583
  ];
2584
+ var INTEGRATION_KEYWORDS = [
2585
+ "sprint planning",
2586
+ "workflow",
2587
+ "run /command",
2588
+ "user session",
2589
+ "multi-step",
2590
+ "external system",
2591
+ "real infrastructure",
2592
+ "integration test",
2593
+ "manual verification"
2594
+ ];
2595
+ function classifyVerifiability(description) {
2596
+ const lower = description.toLowerCase();
2597
+ for (const kw of INTEGRATION_KEYWORDS) {
2598
+ if (lower.includes(kw)) return "integration-required";
2599
+ }
2600
+ return "cli-verifiable";
2601
+ }
2602
+ var VERIFICATION_TAG_PATTERN = /<!--\s*verification:\s*(cli-verifiable|integration-required)\s*-->/;
2603
+ function parseVerificationTag(text) {
2604
+ const match = VERIFICATION_TAG_PATTERN.exec(text);
2605
+ return match ? match[1] : null;
2606
+ }
2524
2607
  function classifyAC(description) {
2525
2608
  const lower = description.toLowerCase();
2526
2609
  for (const kw of UI_KEYWORDS) {
@@ -2570,10 +2653,13 @@ function parseStoryACs(storyFilePath) {
2570
2653
  if (currentId !== null && currentDesc.length > 0) {
2571
2654
  const description = currentDesc.join(" ").trim();
2572
2655
  if (description) {
2656
+ const tag = parseVerificationTag(description);
2657
+ const verifiability = tag ?? classifyVerifiability(description);
2573
2658
  acs.push({
2574
2659
  id: currentId,
2575
2660
  description,
2576
- type: classifyAC(description)
2661
+ type: classifyAC(description),
2662
+ verifiability
2577
2663
  });
2578
2664
  } else {
2579
2665
  warn(`Skipping malformed AC #${currentId}: empty description`);
@@ -2712,6 +2798,62 @@ function getNewestSourceMtime(dir) {
2712
2798
  walk(dir);
2713
2799
  return newest;
2714
2800
  }
2801
+ function getSourceFilesInModule(modulePath) {
2802
+ const files = [];
2803
+ function walk(current) {
2804
+ let entries;
2805
+ try {
2806
+ entries = readdirSync2(current);
2807
+ } catch {
2808
+ return;
2809
+ }
2810
+ const dirName = current.split("/").pop() ?? "";
2811
+ if (dirName === "node_modules" || dirName === ".git" || dirName === "__tests__" || dirName === "dist" || dirName === "coverage" || dirName.startsWith(".") && current !== modulePath) return;
2812
+ for (const entry of entries) {
2813
+ const fullPath = join9(current, entry);
2814
+ let stat;
2815
+ try {
2816
+ stat = statSync(fullPath);
2817
+ } catch {
2818
+ continue;
2819
+ }
2820
+ if (stat.isDirectory()) {
2821
+ walk(fullPath);
2822
+ } else if (stat.isFile()) {
2823
+ const ext = getExtension(entry);
2824
+ if (SOURCE_EXTENSIONS.has(ext) && !isTestFile(entry)) {
2825
+ files.push(entry);
2826
+ }
2827
+ }
2828
+ }
2829
+ }
2830
+ walk(modulePath);
2831
+ return files;
2832
+ }
2833
+ function getMentionedFilesInAgentsMd(agentsPath) {
2834
+ if (!existsSync11(agentsPath)) return [];
2835
+ const content = readFileSync9(agentsPath, "utf-8");
2836
+ const mentioned = /* @__PURE__ */ new Set();
2837
+ const filenamePattern = /[\w./-]*[\w-]+\.(?:ts|js|py)\b/g;
2838
+ let match;
2839
+ while ((match = filenamePattern.exec(content)) !== null) {
2840
+ const fullMatch = match[0];
2841
+ const basename3 = fullMatch.split("/").pop();
2842
+ if (!isTestFile(basename3)) {
2843
+ mentioned.add(basename3);
2844
+ }
2845
+ }
2846
+ return Array.from(mentioned);
2847
+ }
2848
+ function checkAgentsMdCompleteness(agentsPath, modulePath) {
2849
+ const sourceFiles = getSourceFilesInModule(modulePath);
2850
+ const mentionedFiles = new Set(getMentionedFilesInAgentsMd(agentsPath));
2851
+ const missing = sourceFiles.filter((f) => !mentionedFiles.has(f));
2852
+ return {
2853
+ complete: missing.length === 0,
2854
+ missing
2855
+ };
2856
+ }
2715
2857
  function checkAgentsMdForModule(modulePath, dir) {
2716
2858
  const root = dir ?? process.cwd();
2717
2859
  const fullModulePath = join9(root, modulePath);
@@ -2730,13 +2872,15 @@ function checkAgentsMdForModule(modulePath, dir) {
2730
2872
  }
2731
2873
  const docMtime = statSync(agentsPath).mtime;
2732
2874
  const codeMtime = getNewestSourceMtime(fullModulePath);
2733
- if (codeMtime !== null && codeMtime.getTime() > docMtime.getTime()) {
2875
+ const { complete, missing } = checkAgentsMdCompleteness(agentsPath, fullModulePath);
2876
+ if (!complete) {
2877
+ const missingList = missing.join(", ");
2734
2878
  return {
2735
2879
  path: relative(root, agentsPath),
2736
2880
  grade: "stale",
2737
2881
  lastModified: docMtime,
2738
2882
  codeLastModified: codeMtime,
2739
- reason: `AGENTS.md stale for module: ${modulePath}`
2883
+ reason: `AGENTS.md stale for module: ${modulePath} \u2014 missing: ${missingList}`
2740
2884
  };
2741
2885
  }
2742
2886
  return {
@@ -2766,22 +2910,30 @@ function scanDocHealth(dir) {
2766
2910
  if (existsSync11(rootAgentsPath)) {
2767
2911
  if (modules.length > 0) {
2768
2912
  const docMtime = statSync(rootAgentsPath).mtime;
2769
- let newestCode = null;
2913
+ let allMissing = [];
2770
2914
  let staleModule = "";
2915
+ let newestCode = null;
2771
2916
  for (const mod of modules) {
2772
- const modMtime = getNewestSourceMtime(join9(root, mod));
2917
+ const fullModPath = join9(root, mod);
2918
+ const modAgentsPath = join9(fullModPath, "AGENTS.md");
2919
+ if (existsSync11(modAgentsPath)) continue;
2920
+ const { missing } = checkAgentsMdCompleteness(rootAgentsPath, fullModPath);
2921
+ if (missing.length > 0 && staleModule === "") {
2922
+ staleModule = mod;
2923
+ allMissing = missing;
2924
+ }
2925
+ const modMtime = getNewestSourceMtime(fullModPath);
2773
2926
  if (modMtime !== null && (newestCode === null || modMtime.getTime() > newestCode.getTime())) {
2774
2927
  newestCode = modMtime;
2775
- staleModule = mod;
2776
2928
  }
2777
2929
  }
2778
- if (newestCode !== null && newestCode.getTime() > docMtime.getTime()) {
2930
+ if (allMissing.length > 0) {
2779
2931
  documents.push({
2780
2932
  path: "AGENTS.md",
2781
2933
  grade: "stale",
2782
2934
  lastModified: docMtime,
2783
2935
  codeLastModified: newestCode,
2784
- reason: `AGENTS.md stale for module: ${staleModule}`
2936
+ reason: `AGENTS.md stale for module: ${staleModule} \u2014 missing: ${allMissing.join(", ")}`
2785
2937
  });
2786
2938
  } else {
2787
2939
  documents.push({
@@ -3137,10 +3289,44 @@ function runShowboatVerify(proofPath) {
3137
3289
  return { passed: false, output: stdout || stderr || message };
3138
3290
  }
3139
3291
  }
3140
- function proofHasContent(proofPath) {
3141
- if (!existsSync12(proofPath)) return false;
3292
+ function validateProofQuality(proofPath) {
3293
+ if (!existsSync12(proofPath)) {
3294
+ return { verified: 0, pending: 0, escalated: 0, total: 0, passed: false };
3295
+ }
3142
3296
  const content = readFileSync10(proofPath, "utf-8");
3143
- return content.includes("<!-- /showboat exec -->") || content.includes("<!-- showboat image:");
3297
+ const acHeaderPattern = /^## AC \d+:/gm;
3298
+ const matches = [...content.matchAll(acHeaderPattern)];
3299
+ if (matches.length === 0) {
3300
+ return { verified: 0, pending: 0, escalated: 0, total: 0, passed: false };
3301
+ }
3302
+ let verified = 0;
3303
+ let pending = 0;
3304
+ let escalated = 0;
3305
+ for (let i = 0; i < matches.length; i++) {
3306
+ const start = matches[i].index;
3307
+ const end = i + 1 < matches.length ? matches[i + 1].index : content.length;
3308
+ const section = content.slice(start, end);
3309
+ if (section.includes("[ESCALATE]")) {
3310
+ escalated++;
3311
+ continue;
3312
+ }
3313
+ const hasEvidence = section.includes("<!-- /showboat exec -->") || section.includes("<!-- showboat image:") || /```(?:bash|shell)\n[\s\S]*?```\n+```output\n/m.test(section);
3314
+ if (hasEvidence) {
3315
+ verified++;
3316
+ } else {
3317
+ pending++;
3318
+ }
3319
+ }
3320
+ const total = verified + pending + escalated;
3321
+ return {
3322
+ verified,
3323
+ pending,
3324
+ escalated,
3325
+ total,
3326
+ // Proof passes when no pending ACs remain and at least one is verified.
3327
+ // Escalated ACs are allowed — they are explicitly acknowledged as unverifiable.
3328
+ passed: pending === 0 && verified > 0
3329
+ };
3144
3330
  }
3145
3331
  function updateVerificationState(storyId, result, dir) {
3146
3332
  const { state, body } = readStateWithBody(dir);
@@ -3182,123 +3368,189 @@ function isValidStoryId(storyId) {
3182
3368
  return /^[a-zA-Z0-9_-]+$/.test(storyId);
3183
3369
  }
3184
3370
  function registerVerifyCommand(program) {
3185
- program.command("verify").description("Run verification pipeline on completed work").requiredOption("--story <id>", "Story ID to verify").action((opts, cmd) => {
3371
+ program.command("verify").description("Run verification pipeline on completed work").option("--story <id>", "Story ID to verify").option("--retro", "Verify retrospective completion for an epic").option("--epic <n>", "Epic number (required with --retro)").action((opts, cmd) => {
3186
3372
  const globalOpts = cmd.optsWithGlobals();
3187
3373
  const isJson = globalOpts.json === true;
3188
- const storyId = opts.story;
3189
3374
  const root = process.cwd();
3190
- if (!isValidStoryId(storyId)) {
3191
- fail(`Invalid story ID: ${storyId}. Story IDs must contain only alphanumeric characters, hyphens, and underscores.`, { json: isJson });
3192
- process.exitCode = 1;
3375
+ if (opts.retro) {
3376
+ verifyRetro(opts, isJson, root);
3193
3377
  return;
3194
3378
  }
3195
- const storyFilePath = join11(root, STORY_DIR, `${storyId}.md`);
3196
- if (!existsSync13(storyFilePath)) {
3197
- fail(`Story file not found: ${storyFilePath}`, { json: isJson });
3379
+ if (!opts.story) {
3380
+ fail("--story is required when --retro is not set", { json: isJson });
3198
3381
  process.exitCode = 1;
3199
3382
  return;
3200
3383
  }
3201
- let preconditions;
3202
- try {
3203
- preconditions = checkPreconditions(root, storyId);
3204
- } catch (err) {
3205
- const message = err instanceof Error ? err.message : String(err);
3206
- fail(`Precondition check failed: ${message}`, { json: isJson });
3207
- process.exitCode = 1;
3208
- return;
3384
+ verifyStory(opts.story, isJson, root);
3385
+ });
3386
+ }
3387
+ function verifyRetro(opts, isJson, root) {
3388
+ if (!opts.epic) {
3389
+ fail("--epic is required with --retro", { json: isJson });
3390
+ process.exitCode = 1;
3391
+ return;
3392
+ }
3393
+ const epicNum = parseInt(opts.epic, 10);
3394
+ if (isNaN(epicNum) || epicNum < 1) {
3395
+ fail(`Invalid epic number: ${opts.epic}`, { json: isJson });
3396
+ process.exitCode = 1;
3397
+ return;
3398
+ }
3399
+ const retroFile = `epic-${epicNum}-retrospective.md`;
3400
+ const retroPath = join11(root, STORY_DIR, retroFile);
3401
+ if (!existsSync13(retroPath)) {
3402
+ if (isJson) {
3403
+ jsonOutput({ status: "fail", epic: epicNum, retroFile, message: `${retroFile} not found` });
3404
+ } else {
3405
+ fail(`${retroFile} not found`);
3209
3406
  }
3210
- if (!preconditions.passed) {
3211
- if (isJson) {
3212
- jsonOutput({
3213
- status: "fail",
3214
- message: "Preconditions not met",
3215
- failures: preconditions.failures
3216
- });
3217
- } else {
3218
- fail("Preconditions not met:");
3219
- for (const f of preconditions.failures) {
3220
- info(` - ${f}`);
3221
- }
3407
+ process.exitCode = 1;
3408
+ return;
3409
+ }
3410
+ const retroKey = `epic-${epicNum}-retrospective`;
3411
+ try {
3412
+ updateSprintStatus(retroKey, "done", root);
3413
+ } catch (err) {
3414
+ const message = err instanceof Error ? err.message : String(err);
3415
+ warn(`Failed to update sprint status: ${message}`);
3416
+ }
3417
+ if (isJson) {
3418
+ jsonOutput({ status: "ok", epic: epicNum, retroFile: join11(STORY_DIR, retroFile) });
3419
+ } else {
3420
+ ok(`Epic ${epicNum} retrospective: marked done`);
3421
+ }
3422
+ }
3423
+ function verifyStory(storyId, isJson, root) {
3424
+ if (!isValidStoryId(storyId)) {
3425
+ fail(`Invalid story ID: ${storyId}. Story IDs must contain only alphanumeric characters, hyphens, and underscores.`, { json: isJson });
3426
+ process.exitCode = 1;
3427
+ return;
3428
+ }
3429
+ const storyFilePath = join11(root, STORY_DIR, `${storyId}.md`);
3430
+ if (!existsSync13(storyFilePath)) {
3431
+ fail(`Story file not found: ${storyFilePath}`, { json: isJson });
3432
+ process.exitCode = 1;
3433
+ return;
3434
+ }
3435
+ let preconditions;
3436
+ try {
3437
+ preconditions = checkPreconditions(root, storyId);
3438
+ } catch (err) {
3439
+ const message = err instanceof Error ? err.message : String(err);
3440
+ fail(`Precondition check failed: ${message}`, { json: isJson });
3441
+ process.exitCode = 1;
3442
+ return;
3443
+ }
3444
+ if (!preconditions.passed) {
3445
+ if (isJson) {
3446
+ jsonOutput({
3447
+ status: "fail",
3448
+ message: "Preconditions not met",
3449
+ failures: preconditions.failures
3450
+ });
3451
+ } else {
3452
+ fail("Preconditions not met:");
3453
+ for (const f of preconditions.failures) {
3454
+ info(` - ${f}`);
3222
3455
  }
3223
- process.exitCode = 1;
3224
- return;
3225
3456
  }
3226
- let acs;
3227
- try {
3228
- acs = parseStoryACs(storyFilePath);
3229
- } catch (err) {
3230
- const message = err instanceof Error ? err.message : String(err);
3231
- fail(`Failed to parse story file: ${message}`, { json: isJson });
3457
+ process.exitCode = 1;
3458
+ return;
3459
+ }
3460
+ let acs;
3461
+ try {
3462
+ acs = parseStoryACs(storyFilePath);
3463
+ } catch (err) {
3464
+ const message = err instanceof Error ? err.message : String(err);
3465
+ fail(`Failed to parse story file: ${message}`, { json: isJson });
3466
+ process.exitCode = 1;
3467
+ return;
3468
+ }
3469
+ const storyTitle = extractStoryTitle(storyFilePath);
3470
+ const expectedProofPath = join11(root, "verification", `${storyId}-proof.md`);
3471
+ const proofPath = existsSync13(expectedProofPath) ? expectedProofPath : createProofDocument(storyId, storyTitle, acs, root);
3472
+ const proofQuality = validateProofQuality(proofPath);
3473
+ if (!proofQuality.passed) {
3474
+ if (isJson) {
3475
+ jsonOutput({
3476
+ status: "fail",
3477
+ message: `Proof quality check failed: ${proofQuality.verified}/${proofQuality.total} ACs verified`,
3478
+ proofQuality: { verified: proofQuality.verified, pending: proofQuality.pending, escalated: proofQuality.escalated, total: proofQuality.total }
3479
+ });
3480
+ } else {
3481
+ fail(`Proof quality check failed: ${proofQuality.verified}/${proofQuality.total} ACs verified`);
3482
+ }
3483
+ process.exitCode = 1;
3484
+ return;
3485
+ }
3486
+ if (proofQuality.escalated > 0) {
3487
+ warn(`Story ${storyId} has ${proofQuality.escalated} ACs requiring integration verification`);
3488
+ info("Run these ACs manually or in a dedicated verification session");
3489
+ }
3490
+ let showboatStatus = "skipped";
3491
+ const showboatResult = runShowboatVerify(proofPath);
3492
+ if (showboatResult.output === "showboat not available") {
3493
+ showboatStatus = "skipped";
3494
+ warn("Showboat not installed \u2014 skipping re-verification");
3495
+ } else {
3496
+ showboatStatus = showboatResult.passed ? "pass" : "fail";
3497
+ if (!showboatResult.passed) {
3498
+ fail(`Showboat verify failed: ${showboatResult.output}`, { json: isJson });
3232
3499
  process.exitCode = 1;
3233
3500
  return;
3234
3501
  }
3235
- const storyTitle = extractStoryTitle(storyFilePath);
3236
- const proofPath = createProofDocument(storyId, storyTitle, acs, root);
3237
- let showboatStatus = "skipped";
3238
- if (proofHasContent(proofPath)) {
3239
- const showboatResult = runShowboatVerify(proofPath);
3240
- if (showboatResult.output === "showboat not available") {
3241
- showboatStatus = "skipped";
3242
- warn("Showboat not installed \u2014 skipping re-verification");
3243
- } else {
3244
- showboatStatus = showboatResult.passed ? "pass" : "fail";
3245
- if (!showboatResult.passed) {
3246
- fail(`Showboat verify failed: ${showboatResult.output}`, { json: isJson });
3247
- process.exitCode = 1;
3248
- return;
3249
- }
3250
- }
3251
- }
3252
- const acsVerified = showboatStatus === "pass";
3253
- const verifiedCount = acsVerified ? acs.length : 0;
3254
- const result = {
3255
- storyId,
3256
- success: true,
3257
- totalACs: acs.length,
3258
- verifiedCount,
3259
- failedCount: acs.length - verifiedCount,
3260
- proofPath: `verification/${storyId}-proof.md`,
3261
- showboatVerifyStatus: showboatStatus,
3262
- perAC: acs.map((ac) => ({
3263
- id: ac.id,
3264
- description: ac.description,
3265
- verified: acsVerified,
3266
- evidencePaths: []
3267
- }))
3268
- };
3269
- try {
3270
- updateVerificationState(storyId, result, root);
3271
- } catch (err) {
3272
- const message = err instanceof Error ? err.message : String(err);
3273
- warn(`Failed to update state: ${message}`);
3274
- }
3275
- try {
3276
- closeBeadsIssue(storyId, root);
3277
- } catch (err) {
3278
- const message = err instanceof Error ? err.message : String(err);
3279
- warn(`Failed to close beads issue: ${message}`);
3280
- }
3281
- try {
3282
- const completedPath = completeExecPlan(storyId, root);
3283
- if (completedPath) {
3284
- if (!isJson) {
3285
- ok(`Exec-plan moved to completed: ${completedPath}`);
3286
- }
3287
- } else {
3288
- if (!isJson) {
3289
- warn(`No exec-plan found for story: ${storyId}`);
3290
- }
3502
+ }
3503
+ const result = {
3504
+ storyId,
3505
+ success: true,
3506
+ totalACs: proofQuality.total,
3507
+ verifiedCount: proofQuality.verified,
3508
+ failedCount: proofQuality.pending,
3509
+ escalatedCount: proofQuality.escalated,
3510
+ proofPath: `verification/${storyId}-proof.md`,
3511
+ showboatVerifyStatus: showboatStatus,
3512
+ perAC: acs.map((ac) => ({
3513
+ id: ac.id,
3514
+ description: ac.description,
3515
+ verified: true,
3516
+ evidencePaths: []
3517
+ }))
3518
+ };
3519
+ try {
3520
+ updateVerificationState(storyId, result, root);
3521
+ } catch (err) {
3522
+ const message = err instanceof Error ? err.message : String(err);
3523
+ warn(`Failed to update state: ${message}`);
3524
+ }
3525
+ try {
3526
+ closeBeadsIssue(storyId, root);
3527
+ } catch (err) {
3528
+ const message = err instanceof Error ? err.message : String(err);
3529
+ warn(`Failed to close beads issue: ${message}`);
3530
+ }
3531
+ try {
3532
+ const completedPath = completeExecPlan(storyId, root);
3533
+ if (completedPath) {
3534
+ if (!isJson) {
3535
+ ok(`Exec-plan moved to completed: ${completedPath}`);
3291
3536
  }
3292
- } catch (err) {
3293
- const message = err instanceof Error ? err.message : String(err);
3294
- warn(`Failed to complete exec-plan: ${message}`);
3295
- }
3296
- if (isJson) {
3297
- jsonOutput(result);
3298
3537
  } else {
3299
- ok(`Story ${storyId}: verified \u2014 proof at verification/${storyId}-proof.md`);
3538
+ if (!isJson) {
3539
+ warn(`No exec-plan found for story: ${storyId}`);
3540
+ }
3300
3541
  }
3301
- });
3542
+ } catch (err) {
3543
+ const message = err instanceof Error ? err.message : String(err);
3544
+ warn(`Failed to complete exec-plan: ${message}`);
3545
+ }
3546
+ if (isJson) {
3547
+ jsonOutput({
3548
+ ...result,
3549
+ proofQuality: { verified: proofQuality.verified, pending: proofQuality.pending, escalated: proofQuality.escalated, total: proofQuality.total }
3550
+ });
3551
+ } else {
3552
+ ok(`Story ${storyId}: verified \u2014 proof at verification/${storyId}-proof.md`);
3553
+ }
3302
3554
  }
3303
3555
  function extractStoryTitle(filePath) {
3304
3556
  try {
@@ -6085,8 +6337,453 @@ function registerQueryCommand(program) {
6085
6337
  });
6086
6338
  }
6087
6339
 
6340
+ // src/commands/retro-import.ts
6341
+ import { existsSync as existsSync20, readFileSync as readFileSync16 } from "fs";
6342
+ import { join as join19 } from "path";
6343
+
6344
// src/lib/retro-parser.ts
var KNOWN_TOOLS = ["showboat", "ralph", "beads", "bmad"];

/**
 * Parse a retrospective markdown document and extract rows from any
 * "Action Items" table of the shape `| # | Action | Status | Notes |`.
 * Only rows whose first cell looks like an item id (one letter followed
 * by digits, e.g. "A1") are kept. A non-blank, non-table line ends the
 * table; blank lines inside it are tolerated.
 *
 * @param {string} content - raw markdown text
 * @returns {Array<{number: string, description: string, status: string, notes: string}>}
 */
function parseRetroActionItems(content) {
  const items = [];
  let scanning = false;
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (!scanning) {
      // Look for the table header row before collecting anything.
      if (/^\|\s*#\s*\|\s*Action\s*\|\s*Status\s*\|\s*Notes\s*\|/i.test(line)) {
        scanning = true;
      }
      continue;
    }
    // Markdown separator row (|---|---|---|---|) — skip it.
    if (/^\|[\s\-|]+\|$/.test(line)) {
      continue;
    }
    if (line.startsWith("|")) {
      const cells = line.split("|").slice(1, -1).map((cell) => cell.trim());
      if (cells.length >= 4 && /^[A-Za-z]\d+$/.test(cells[0])) {
        const [number, description, status, notes] = cells;
        items.push({ number, description, status, notes });
      }
      continue;
    }
    // Any other non-blank line terminates the current table.
    if (line !== "") {
      scanning = false;
    }
  }
  return items;
}
6377
/**
 * Classify a retro action item from its description text:
 * mentions of "harness"/"codeharness" win, then the first known tool
 * name found, otherwise it is a project-level finding.
 *
 * @param {{description: string}} item
 * @returns {{type: "harness"} | {type: "tool", name: string} | {type: "project"}}
 */
function classifyFinding(item) {
  const haystack = item.description.toLowerCase();
  if (haystack.includes("harness") || haystack.includes("codeharness")) {
    return { type: "harness" };
  }
  const matchedTool = KNOWN_TOOLS.find((tool) => haystack.includes(tool));
  if (matchedTool) {
    return { type: "tool", name: matchedTool };
  }
  return { type: "project" };
}
6389
/**
 * Derive a beads priority from a retro item: 1 (high) when the status
 * says it regressed or the notes flag it urgent/critical, else 2.
 *
 * @param {{status: string, notes: string}} item
 * @returns {1 | 2}
 */
function derivePriority(item) {
  const status = item.status.toLowerCase();
  const notes = item.notes.toLowerCase();
  const isUrgent =
    status.includes("regressed") ||
    notes.includes("urgent") ||
    notes.includes("critical");
  return isUrgent ? 1 : 2;
}
6397
+
6398
+ // src/lib/github.ts
6399
+ import { execFileSync as execFileSync6 } from "child_process";
6400
/**
 * Error wrapper for failed `gh` CLI invocations. Carries the command
 * line that was attempted and the underlying failure message.
 */
var GitHubError = class GitHubError extends Error {
  /**
   * @param {string} command - human-readable command that failed
   * @param {string} originalMessage - message from the underlying error
   */
  constructor(command, originalMessage) {
    super(`GitHub CLI failed: ${originalMessage}. Command: ${command}`);
    this.name = "GitHubError";
    this.command = command;
    this.originalMessage = originalMessage;
  }
};
6408
/**
 * Probe for the GitHub CLI.
 *
 * Fix: the previous implementation shelled out to `which gh`, which does
 * not exist on Windows (the equivalent is `where`). Running `gh --version`
 * directly is portable and also confirms the binary actually executes,
 * not merely that it is on PATH.
 *
 * @returns {boolean} true when `gh` can be executed
 */
function isGhAvailable() {
  try {
    execFileSync6("gh", ["--version"], { stdio: "pipe", timeout: 5e3 });
    return true;
  } catch {
    // Missing binary, spawn failure, or timeout all mean "not available".
    return false;
  }
}
6416
/**
 * Create a GitHub issue via `gh issue create`.
 *
 * Fix: unlike `gh issue list`, `gh issue create` does NOT support a
 * `--json` flag — passing it makes every invocation fail with
 * "unknown flag". On success the CLI prints the new issue's URL on
 * stdout, so we parse that and derive the issue number from the URL's
 * trailing path segment, preserving the `{ number, url }` return shape.
 *
 * @param {string} repo - "owner/repo"
 * @param {string} title - issue title
 * @param {string} body - issue body (markdown)
 * @param {string[]} labels - labels to attach
 * @returns {{number: number, url: string}}
 * @throws {GitHubError} when the CLI fails or its output has no issue URL
 */
function ghIssueCreate(repo, title, body, labels) {
  const args = ["issue", "create", "--repo", repo, "--title", title, "--body", body];
  for (const label of labels) {
    args.push("--label", label);
  }
  const cmdStr = `gh ${args.join(" ")}`;
  try {
    const output = execFileSync6("gh", args, {
      stdio: "pipe",
      timeout: 3e4
    });
    // gh prints the created issue URL as the last stdout line.
    const url = output.toString().trim().split("\n").pop() ?? "";
    const match = url.match(/\/issues\/(\d+)$/);
    if (!match) {
      throw new Error(`no issue URL in gh output: ${url}`);
    }
    return { number: Number.parseInt(match[1], 10), url };
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    throw new GitHubError(cmdStr, message);
  }
}
6435
/**
 * Search issues in `repo` (all states) via `gh issue list --search` and
 * return them as parsed JSON records.
 *
 * @param {string} repo - "owner/repo"
 * @param {string} query - gh search expression
 * @returns {Array<{number, title, body, url, labels}>} empty on blank output
 * @throws {GitHubError} when the CLI invocation or JSON parsing fails
 */
function ghIssueSearch(repo, query) {
  const args = [
    "issue",
    "list",
    "--repo",
    repo,
    "--search",
    query,
    "--state",
    "all",
    "--json",
    "number,title,body,url,labels"
  ];
  const cmdStr = `gh ${args.join(" ")}`;
  try {
    const raw = execFileSync6("gh", args, {
      stdio: "pipe",
      timeout: 3e4
    }).toString().trim();
    // gh emits nothing for zero matches; treat that as an empty list.
    return raw === "" ? [] : JSON.parse(raw);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    throw new GitHubError(cmdStr, message);
  }
}
6451
/**
 * Best-effort duplicate lookup: search `repo` for an issue whose body
 * embeds the given gap-id marker. Search failures are swallowed and
 * reported as "no duplicate found".
 *
 * @returns {object | undefined} the matching issue record, if any
 */
function findExistingGhIssue(repo, gapId) {
  try {
    for (const candidate of ghIssueSearch(repo, gapId)) {
      if (candidate.body?.includes(gapId)) {
        return candidate;
      }
    }
  } catch {
    // Dedupe is advisory only — fall through to "not found".
  }
  return void 0;
}
6459
/**
 * Derive "owner/repo" from the git `origin` remote of the current
 * working directory.
 *
 * @returns {string | undefined} undefined when the remote is missing or unparseable
 */
function getRepoFromRemote() {
  try {
    const remoteUrl = execFileSync6("git", ["remote", "get-url", "origin"], {
      stdio: "pipe",
      timeout: 5e3
    })
      .toString()
      .trim();
    return parseRepoFromUrl(remoteUrl);
  } catch {
    // No git repo, no origin remote, or git not installed.
    return void 0;
  }
}
6471
/**
 * Extract "owner/repo" from an SSH (git@host:owner/repo[.git]) or
 * HTTP(S) (https://host/owner/repo[.git]) git remote URL.
 *
 * @param {string} url - remote URL as reported by git
 * @returns {string | undefined} undefined for unrecognized URL shapes
 */
function parseRepoFromUrl(url) {
  const patterns = [
    /git@[^:]+:([^/]+\/[^/]+?)(?:\.git)?$/,
    /https?:\/\/[^/]+\/([^/]+\/[^/]+?)(?:\.git)?$/
  ];
  for (const pattern of patterns) {
    const match = url.match(pattern);
    if (match) {
      return match[1];
    }
  }
  return void 0;
}
6478
/**
 * Best-effort creation of each label in `repo` via `gh label create`.
 * Failures are deliberately ignored — the common case is that the label
 * already exists.
 *
 * @param {string} repo - "owner/repo"
 * @param {string[]} labels - label names to ensure
 */
function ensureLabels(repo, labels) {
  labels.forEach((label) => {
    try {
      execFileSync6("gh", ["label", "create", label, "--repo", repo], {
        stdio: "pipe",
        timeout: 1e4
      });
    } catch {
      // Label likely exists already (or gh failed) — continue regardless.
    }
  });
}
6489
+
6490
// src/commands/retro-import.ts
var STORY_DIR2 = "_bmad-output/implementation-artifacts";
var MAX_TITLE_LENGTH = 120;

/**
 * Render a classification object as a stable string label:
 * { type: "tool", name } becomes "tool:<name>"; any other type is
 * returned verbatim ("harness" / "project").
 *
 * @param {{type: string, name?: string}} c
 * @returns {string}
 */
function classificationToString(c) {
  return c.type === "tool" ? `tool:${c.name}` : c.type;
}
6499
/**
 * Register the `retro-import` command: parses the action-items table of
 * `epic-<n>-retrospective.md` and creates one beads issue per item
 * (deduped via a stable gap id), then optionally mirrors the items to
 * GitHub through createGitHubIssues().
 */
function registerRetroImportCommand(program) {
  program.command("retro-import").description("Import retrospective action items as beads issues").requiredOption("--epic <n>", "Epic number to import action items from").action((opts, cmd) => {
    const globalOpts = cmd.optsWithGlobals();
    const isJson = globalOpts.json === true;
    const root = process.cwd();
    // Validate --epic is a positive integer before touching the filesystem.
    const epicNum = parseInt(opts.epic, 10);
    if (isNaN(epicNum) || epicNum < 1) {
      fail(`Invalid epic number: ${opts.epic}`, { json: isJson });
      process.exitCode = 1;
      return;
    }
    const retroFile = `epic-${epicNum}-retrospective.md`;
    const retroPath = join19(root, STORY_DIR2, retroFile);
    if (!existsSync20(retroPath)) {
      fail(`Retro file not found: ${retroFile}`, { json: isJson });
      process.exitCode = 1;
      return;
    }
    let content;
    try {
      content = readFileSync16(retroPath, "utf-8");
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      fail(`Failed to read retro file: ${message}`, { json: isJson });
      process.exitCode = 1;
      return;
    }
    const items = parseRetroActionItems(content);
    // An empty table is success, not failure — report and exit 0.
    if (items.length === 0) {
      if (isJson) {
        jsonOutput({ imported: 0, skipped: 0, issues: [] });
      } else {
        info("No action items found in retro file");
      }
      return;
    }
    let imported = 0;
    let skipped = 0;
    const issues = [];
    for (const item of items) {
      const classification = classifyFinding(item);
      const priority = derivePriority(item);
      // Stable id per (epic, item) so re-runs find the existing issue.
      const gapId = buildGapId("retro", `epic-${epicNum}-item-${item.number}`);
      const title = item.description.length > MAX_TITLE_LENGTH ? item.description.slice(0, MAX_TITLE_LENGTH - 3) + "..." : item.description;
      const retroContext = `Retro action item ${item.number} from Epic ${epicNum}.
Status: ${item.status}
Notes: ${item.notes}
Classification: ${classificationToString(classification)}`;
      try {
        // createOrFindIssue dedupes on gapId; `created` tells us which path ran.
        const result = createOrFindIssue(title, gapId, {
          type: "task",
          priority,
          description: retroContext
        });
        const issueRecord = {
          number: item.number,
          title,
          gapId,
          classification: classificationToString(classification),
          created: result.created,
          status: item.status,
          notes: item.notes
        };
        issues.push(issueRecord);
        if (result.created) {
          imported++;
          if (!isJson) {
            ok(`Imported: ${title}`);
          }
        } else {
          skipped++;
          if (!isJson) {
            info(`Skipping existing: ${title}`);
          }
        }
      } catch (err) {
        // NOTE(review): failures here are reported but do not set
        // process.exitCode or an errors counter, unlike github-import
        // below — confirm whether that asymmetry is intended.
        const message = err instanceof Error ? err.message : String(err);
        fail(`Failed to import ${item.number}: ${message}`, { json: isJson });
      }
    }
    // Mirror to GitHub (may return undefined when skipped/not configured).
    const githubResult = createGitHubIssues(issues, epicNum, isJson);
    if (isJson) {
      jsonOutput({
        imported,
        skipped,
        issues,
        github: githubResult
      });
    }
  });
}
6590
/**
 * Choose the configured issue target for a classification label.
 * "harness" findings prefer the explicit codeharness repo, then any
 * non-"auto" target; everything else prefers the "auto" target.
 * Falls back to the first target; undefined when none are configured.
 *
 * @param {string} classification - e.g. "harness", "project", "tool:beads"
 * @param {Array<{repo: string, labels: string[]}>} targets
 * @returns {{repo: string, labels: string[]} | undefined}
 */
function resolveTargetRepo(classification, targets) {
  if (targets.length === 0) {
    return void 0;
  }
  const preferences =
    classification === "harness"
      ? [(t) => t.repo === "iVintik/codeharness", (t) => t.repo !== "auto"]
      : [(t) => t.repo === "auto"];
  for (const preferred of preferences) {
    const hit = targets.find(preferred);
    if (hit) {
      return hit;
    }
  }
  return targets[0];
}
6603
/**
 * Build the markdown body for a mirrored GitHub issue. The gap id is
 * embedded as a trailing HTML comment so later runs can detect the
 * duplicate via findExistingGhIssue().
 *
 * @param {{number, classification, status, notes, title, gapId}} item
 * @param {number} epicNum - source epic number
 * @param {string} projectName - "owner/repo" of the originating project
 * @returns {string}
 */
function buildGitHubIssueBody(item, epicNum, projectName) {
  const parts = [
    `## Retro Action Item ${item.number} \u2014 Epic ${epicNum}`,
    "",
    `**Source project:** ${projectName}`,
    `**Classification:** ${item.classification}`,
    `**Original status:** ${item.status}`,
    `**Notes:** ${item.notes}`,
    "",
    item.title,
    "",
    `<!-- gap-id: ${item.gapId} -->`
  ];
  return parts.join("\n");
}
6615
/**
 * Mirror imported retro items to GitHub issues according to the
 * `retro_issue_targets` configuration in the state file. Returns a
 * {created, skipped, errors} summary, or undefined when mirroring is
 * skipped entirely (no state, no targets, or gh unavailable).
 */
function createGitHubIssues(issues, epicNum, isJson) {
  let targets;
  try {
    const state = readState();
    targets = state.retro_issue_targets;
  } catch (err) {
    // Missing state file is expected in unconfigured projects — skip quietly.
    if (err instanceof StateFileNotFoundError) {
      if (!isJson) {
        info("No state file found \u2014 skipping GitHub issues");
      }
      return void 0;
    }
    // Any other read/parse failure also just disables mirroring.
    if (!isJson) {
      info("Could not read state file \u2014 skipping GitHub issues");
    }
    return void 0;
  }
  if (!targets || targets.length === 0) {
    if (!isJson) {
      info("No retro_issue_targets configured \u2014 skipping GitHub issues");
    }
    return void 0;
  }
  if (!isGhAvailable()) {
    if (!isJson) {
      warn("gh CLI not available \u2014 skipping GitHub issue creation");
    }
    return void 0;
  }
  // Resolve the "auto" target once; it doubles as the source-project name.
  const resolvedAutoRepo = getRepoFromRemote();
  const result = { created: 0, skipped: 0, errors: 0 };
  const projectName = resolvedAutoRepo ?? "unknown";
  for (const item of issues) {
    const target = resolveTargetRepo(item.classification, targets);
    if (!target) continue;
    const repo = target.repo === "auto" ? resolvedAutoRepo : target.repo;
    if (!repo) {
      // "auto" target but no detectable git remote — count as an error.
      if (!isJson) {
        warn(`Cannot resolve repo for ${item.number} \u2014 git remote not detected`);
      }
      result.errors++;
      continue;
    }
    try {
      // Dedupe on the gap id embedded in previously created issue bodies.
      const existing = findExistingGhIssue(repo, item.gapId);
      if (existing) {
        if (!isJson) {
          info(`GitHub issue exists: ${repo}#${existing.number}`);
        }
        result.skipped++;
        continue;
      }
      ensureLabels(repo, target.labels);
      const body = buildGitHubIssueBody(item, epicNum, projectName);
      const created = ghIssueCreate(repo, item.title, body, target.labels);
      if (!isJson) {
        ok(`GitHub issue created: ${repo}#${created.number}`);
      }
      result.created++;
    } catch (err) {
      // Per-item failures are counted but do not abort the loop.
      const message = err instanceof Error ? err.message : String(err);
      if (!isJson) {
        fail(`GitHub issue failed for ${item.number}: ${message}`);
      }
      result.errors++;
    }
  }
  return result;
}
6684
+
6685
// src/commands/github-import.ts
var MAX_TITLE_LENGTH2 = 120;

/**
 * Map GitHub issue labels to a beads issue type:
 * "bug" wins, then "enhancement" → "story", default "task".
 *
 * @param {Array<{name: string}> | undefined} labels
 * @returns {"bug" | "story" | "task"}
 */
function mapLabelsToType(labels) {
  const names = (labels ?? []).map((label) => label.name);
  if (names.includes("bug")) {
    return "bug";
  }
  return names.includes("enhancement") ? "story" : "task";
}
6694
/**
 * Map GitHub issue labels to a beads priority:
 * "priority:high" → 1, "priority:low" → 3, default 2.
 *
 * @param {Array<{name: string}> | undefined} labels
 * @returns {1 | 2 | 3}
 */
function mapLabelsToPriority(labels) {
  const names = (labels ?? []).map((label) => label.name);
  if (names.includes("priority:high")) {
    return 1;
  }
  return names.includes("priority:low") ? 3 : 2;
}
6701
/**
 * Register the `github-import` command: pulls GitHub issues carrying a
 * given label (default "sprint-candidate") into beads, deduped via a
 * gap id derived from repo + issue number. Sets a non-zero exit code
 * when any individual import fails.
 */
function registerGithubImportCommand(program) {
  program.command("github-import").description("Import GitHub issues labeled for sprint planning into beads").option("--repo <owner/repo>", "GitHub repository (auto-detected from git remote if omitted)").option("--label <label>", "GitHub label to filter issues by", "sprint-candidate").action((opts, cmd) => {
    const globalOpts = cmd.optsWithGlobals();
    const isJson = globalOpts.json === true;
    // Hard requirement: the gh CLI must be installed.
    if (!isGhAvailable()) {
      fail("gh CLI not found. Install: https://cli.github.com/", { json: isJson });
      process.exitCode = 1;
      return;
    }
    // Explicit --repo wins; otherwise fall back to the origin remote.
    let repo = opts.repo;
    if (!repo) {
      repo = getRepoFromRemote();
    }
    if (!repo) {
      fail("Cannot detect repo. Use --repo owner/repo", { json: isJson });
      process.exitCode = 1;
      return;
    }
    const label = opts.label;
    let ghIssues;
    try {
      ghIssues = ghIssueSearch(repo, `label:${label}`);
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      fail(`Failed to search GitHub issues: ${message}`, { json: isJson });
      process.exitCode = 1;
      return;
    }
    let imported = 0;
    let skipped = 0;
    let errors = 0;
    const issues = [];
    for (const ghIssue of ghIssues) {
      // Stable id per (repo, issue number) so re-runs skip existing imports.
      const gapId = buildGapId("source", `github:${repo}#${ghIssue.number}`);
      const type = mapLabelsToType(ghIssue.labels);
      const priority = mapLabelsToPriority(ghIssue.labels);
      const title = ghIssue.title.length > MAX_TITLE_LENGTH2 ? ghIssue.title.slice(0, MAX_TITLE_LENGTH2 - 3) + "..." : ghIssue.title;
      try {
        const result = createOrFindIssue(title, gapId, {
          type,
          priority,
          description: ghIssue.body ?? ""
        });
        const issueRecord = {
          number: ghIssue.number,
          title,
          gapId,
          type,
          created: result.created
        };
        issues.push(issueRecord);
        if (result.created) {
          imported++;
          if (!isJson) {
            ok(`Imported: ${repo}#${ghIssue.number} \u2014 ${title}`);
          }
        } else {
          skipped++;
          if (!isJson) {
            info(`Skipping existing: ${repo}#${ghIssue.number} \u2014 ${title}`);
          }
        }
      } catch (err) {
        // Per-issue failures are counted; the loop continues.
        errors++;
        const message = err instanceof Error ? err.message : String(err);
        fail(`Failed to import ${repo}#${ghIssue.number}: ${message}`, { json: isJson });
      }
    }
    if (errors > 0) {
      process.exitCode = 1;
    }
    if (isJson) {
      jsonOutput({
        imported,
        skipped,
        errors,
        issues
      });
    } else if (ghIssues.length > 0) {
      info(`Summary: ${imported} imported, ${skipped} skipped, ${errors} errors`);
    }
  });
}
6784
+
6088
6785
  // src/index.ts
6089
- var VERSION = true ? "0.7.3" : "0.0.0-dev";
6786
+ var VERSION = true ? "0.9.0" : "0.0.0-dev";
6090
6787
  function createProgram() {
6091
6788
  const program = new Command();
6092
6789
  program.name("codeharness").description("Makes autonomous coding agents produce software that actually works").version(VERSION).option("--json", "Output in machine-readable JSON format");
@@ -6103,6 +6800,8 @@ function createProgram() {
6103
6800
  registerDocHealthCommand(program);
6104
6801
  registerStackCommand(program);
6105
6802
  registerQueryCommand(program);
6803
+ registerRetroImportCommand(program);
6804
+ registerGithubImportCommand(program);
6106
6805
  return program;
6107
6806
  }
6108
6807
  if (!process.env["VITEST"]) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "codeharness",
3
- "version": "0.7.3",
3
+ "version": "0.9.0",
4
4
  "type": "module",
5
5
  "description": "CLI for codeharness — makes autonomous coding agents produce software that actually works",
6
6
  "bin": {