substrate-ai 0.2.3 → 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -2,10 +2,10 @@
2
2
  import { createLogger, deepMask } from "../logger-C6n1g8uP.js";
3
3
  import { AdapterRegistry, createEventBus } from "../event-bus-J-bw-pkp.js";
4
4
  import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema, SUPPORTED_CONFIG_FORMAT_VERSIONS, SubstrateConfigSchema, defaultConfigMigrator } from "../version-manager-impl-BpVx2DkY.js";
5
- import { DatabaseWrapper, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, createContextCompiler, createDispatcher, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getSubstrateDefaultSettings, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-DlOWhkIF.js";
5
+ import { DatabaseWrapper, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, createContextCompiler, createDispatcher, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getSubstrateDefaultSettings, parseDbTimestampAsUtc, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-CDYE1PT3.js";
6
6
  import { ConfigError, ConfigIncompatibleFormatError } from "../errors-BPqtzQ4U.js";
7
- import { addTokenUsage, createDecision, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, updatePipelineRun } from "../decisions-BBLMsN_c.js";
8
- import { compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../metrics-BSg8VIHd.js";
7
+ import { addTokenUsage, createDecision, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-DNYByk0U.js";
8
+ import { aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../metrics-BSg8VIHd.js";
9
9
  import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-BtI5eNoN.js";
10
10
  import { registerUpgradeCommand } from "../upgrade-rV26kdh3.js";
11
11
  import { createRequire } from "module";
@@ -17,8 +17,9 @@ import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync, readdirSync, re
17
17
  import yaml from "js-yaml";
18
18
  import { createRequire as createRequire$1 } from "node:module";
19
19
  import * as path$1 from "node:path";
20
+ import { isAbsolute, join as join$1 } from "node:path";
20
21
  import BetterSqlite3 from "better-sqlite3";
21
- import { existsSync as existsSync$1 } from "node:fs";
22
+ import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
22
23
  import { createInterface } from "node:readline";
23
24
  import { homedir } from "os";
24
25
  import { access as access$1 } from "node:fs/promises";
@@ -328,7 +329,7 @@ const DEFAULT_CONFIG = {
328
329
 
329
330
  //#endregion
330
331
  //#region src/cli/commands/init.ts
331
- const logger$16 = createLogger("init");
332
+ const logger$17 = createLogger("init");
332
333
  const __dirname = dirname(new URL(import.meta.url).pathname);
333
334
  const INIT_EXIT_SUCCESS = 0;
334
335
  const INIT_EXIT_ERROR = 1;
@@ -349,7 +350,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
349
350
  const version = resolveBmadMethodVersion();
350
351
  if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
351
352
  process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
352
- logger$16.info({
353
+ logger$17.info({
353
354
  version,
354
355
  dest: bmadDest
355
356
  }, "Scaffolding BMAD framework");
@@ -359,7 +360,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
359
360
  const destDir = join(bmadDest, dir);
360
361
  mkdirSync(destDir, { recursive: true });
361
362
  cpSync(srcDir, destDir, { recursive: true });
362
- logger$16.info({
363
+ logger$17.info({
363
364
  dir,
364
365
  dest: destDir
365
366
  }, "Scaffolded BMAD framework directory");
@@ -378,7 +379,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
378
379
  "document_output_language: English"
379
380
  ].join("\n") + "\n";
380
381
  await writeFile(configFile, configStub, "utf8");
381
- logger$16.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
382
+ logger$17.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
382
383
  }
383
384
  }
384
385
  const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
@@ -393,7 +394,7 @@ async function scaffoldClaudeMd(projectRoot) {
393
394
  try {
394
395
  sectionContent = await readFile(templatePath, "utf8");
395
396
  } catch {
396
- logger$16.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
397
+ logger$17.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
397
398
  return;
398
399
  }
399
400
  if (!sectionContent.endsWith("\n")) sectionContent += "\n";
@@ -411,7 +412,7 @@ async function scaffoldClaudeMd(projectRoot) {
411
412
  newContent = existingContent + separator + sectionContent;
412
413
  }
413
414
  await writeFile(claudeMdPath, newContent, "utf8");
414
- logger$16.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
415
+ logger$17.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
415
416
  }
416
417
  async function scaffoldStatuslineScript(projectRoot) {
417
418
  const pkgRoot = findPackageRoot(__dirname);
@@ -422,7 +423,7 @@ async function scaffoldStatuslineScript(projectRoot) {
422
423
  try {
423
424
  content = await readFile(templatePath, "utf8");
424
425
  } catch {
425
- logger$16.warn({ templatePath }, "statusline.sh template not found; skipping");
426
+ logger$17.warn({ templatePath }, "statusline.sh template not found; skipping");
426
427
  return;
427
428
  }
428
429
  const claudeDir = join(projectRoot, ".claude");
@@ -430,7 +431,7 @@ async function scaffoldStatuslineScript(projectRoot) {
430
431
  mkdirSync(claudeDir, { recursive: true });
431
432
  await writeFile(statuslinePath, content, "utf8");
432
433
  chmodSync(statuslinePath, 493);
433
- logger$16.info({ statuslinePath }, "Wrote .claude/statusline.sh");
434
+ logger$17.info({ statuslinePath }, "Wrote .claude/statusline.sh");
434
435
  }
435
436
  async function scaffoldClaudeSettings(projectRoot) {
436
437
  const claudeDir = join(projectRoot, ".claude");
@@ -446,7 +447,7 @@ async function scaffoldClaudeSettings(projectRoot) {
446
447
  if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
447
448
  mkdirSync(claudeDir, { recursive: true });
448
449
  await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
449
- logger$16.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
450
+ logger$17.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
450
451
  }
451
452
  function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
452
453
  try {
@@ -516,7 +517,7 @@ async function compileBmadAgents(bmadDir) {
516
517
  writeFileSync(mdPath, result.xml, "utf-8");
517
518
  compiled++;
518
519
  } catch (compileErr) {
519
- logger$16.debug({
520
+ logger$17.debug({
520
521
  err: compileErr,
521
522
  file
522
523
  }, "Failed to compile agent YAML");
@@ -537,9 +538,9 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
537
538
  const _require = createRequire$1(join(__dirname, "synthetic.js"));
538
539
  try {
539
540
  const compiledCount = await compileBmadAgents(bmadDir);
540
- if (compiledCount > 0) logger$16.info({ compiledCount }, "Compiled agent YAML files to MD");
541
+ if (compiledCount > 0) logger$17.info({ compiledCount }, "Compiled agent YAML files to MD");
541
542
  } catch (compileErr) {
542
- logger$16.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
543
+ logger$17.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
543
544
  }
544
545
  const { AgentCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "agent-command-generator.js"));
545
546
  const { WorkflowCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "workflow-command-generator.js"));
@@ -551,7 +552,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
551
552
  const manifestGen = new ManifestGenerator();
552
553
  await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
553
554
  } catch (manifestErr) {
554
- logger$16.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
555
+ logger$17.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
555
556
  }
556
557
  const commandsDir = join(projectRoot, ".claude", "commands");
557
558
  mkdirSync(commandsDir, { recursive: true });
@@ -567,7 +568,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
567
568
  const taskToolCount = await taskToolGen.writeDashArtifacts(commandsDir, taskToolArtifacts);
568
569
  const total = agentCount + workflowCount + taskToolCount;
569
570
  if (outputFormat !== "json") process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
570
- logger$16.info({
571
+ logger$17.info({
571
572
  agentCount,
572
573
  workflowCount,
573
574
  taskToolCount,
@@ -577,7 +578,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
577
578
  } catch (err) {
578
579
  const msg = err instanceof Error ? err.message : String(err);
579
580
  if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
580
- logger$16.warn({ err }, "scaffoldClaudeCommands failed; init continues");
581
+ logger$17.warn({ err }, "scaffoldClaudeCommands failed; init continues");
581
582
  }
582
583
  }
583
584
  const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
@@ -651,7 +652,7 @@ async function runInitAction(options) {
651
652
  discoveryReport = await registry.discoverAndRegister();
652
653
  } catch (err) {
653
654
  const message = err instanceof Error ? err.message : String(err);
654
- logger$16.error({ err }, "Adapter discovery failed");
655
+ logger$17.error({ err }, "Adapter discovery failed");
655
656
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
656
657
  else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
657
658
  return INIT_EXIT_ERROR;
@@ -700,12 +701,12 @@ async function runInitAction(options) {
700
701
  return INIT_EXIT_ERROR;
701
702
  }
702
703
  if (force && existsSync(localManifest)) {
703
- logger$16.info({ pack: packName }, "Replacing existing pack with bundled version");
704
+ logger$17.info({ pack: packName }, "Replacing existing pack with bundled version");
704
705
  process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
705
706
  }
706
707
  mkdirSync(dirname(packPath), { recursive: true });
707
708
  cpSync(bundledPackPath, packPath, { recursive: true });
708
- logger$16.info({
709
+ logger$17.info({
709
710
  pack: packName,
710
711
  dest: packPath
711
712
  }, "Scaffolded methodology pack");
@@ -758,7 +759,7 @@ async function runInitAction(options) {
758
759
  const msg = err instanceof Error ? err.message : String(err);
759
760
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
760
761
  else process.stderr.write(`Error: ${msg}\n`);
761
- logger$16.error({ err }, "init failed");
762
+ logger$17.error({ err }, "init failed");
762
763
  return INIT_EXIT_ERROR;
763
764
  }
764
765
  }
@@ -804,7 +805,7 @@ function formatUnsupportedVersionError(formatType, version, supported) {
804
805
 
805
806
  //#endregion
806
807
  //#region src/modules/config/config-system-impl.ts
807
- const logger$15 = createLogger("config");
808
+ const logger$16 = createLogger("config");
808
809
  function deepMerge(base, override) {
809
810
  const result = { ...base };
810
811
  for (const [key, val] of Object.entries(override)) if (val !== null && val !== void 0 && typeof val === "object" && !Array.isArray(val) && typeof result[key] === "object" && result[key] !== null && !Array.isArray(result[key])) result[key] = deepMerge(result[key], val);
@@ -849,7 +850,7 @@ function readEnvOverrides() {
849
850
  }
850
851
  const parsed = PartialSubstrateConfigSchema.safeParse(overrides);
851
852
  if (!parsed.success) {
852
- logger$15.warn({ errors: parsed.error.issues }, "Invalid environment variable overrides ignored");
853
+ logger$16.warn({ errors: parsed.error.issues }, "Invalid environment variable overrides ignored");
853
854
  return {};
854
855
  }
855
856
  return parsed.data;
@@ -913,7 +914,7 @@ var ConfigSystemImpl = class {
913
914
  throw new ConfigError(`Configuration validation failed:\n${issues}`, { issues: result.error.issues });
914
915
  }
915
916
  this._config = result.data;
916
- logger$15.debug("Configuration loaded successfully");
917
+ logger$16.debug("Configuration loaded successfully");
917
918
  }
918
919
  getConfig() {
919
920
  if (this._config === null) throw new ConfigError("Configuration has not been loaded. Call load() before getConfig().", {});
@@ -976,7 +977,7 @@ var ConfigSystemImpl = class {
976
977
  if (version !== void 0 && typeof version === "string" && !isVersionSupported(version, SUPPORTED_CONFIG_FORMAT_VERSIONS)) if (defaultConfigMigrator.canMigrate(version, CURRENT_CONFIG_FORMAT_VERSION)) {
977
978
  const migrationOutput = defaultConfigMigrator.migrate(rawObj, version, CURRENT_CONFIG_FORMAT_VERSION, filePath);
978
979
  if (migrationOutput.result.success) {
979
- logger$15.info({
980
+ logger$16.info({
980
981
  from: version,
981
982
  to: CURRENT_CONFIG_FORMAT_VERSION,
982
983
  backup: migrationOutput.result.backupPath
@@ -1019,7 +1020,7 @@ function createConfigSystem(options = {}) {
1019
1020
 
1020
1021
  //#endregion
1021
1022
  //#region src/cli/commands/config.ts
1022
- const logger$14 = createLogger("config-cmd");
1023
+ const logger$15 = createLogger("config-cmd");
1023
1024
  const CONFIG_EXIT_SUCCESS = 0;
1024
1025
  const CONFIG_EXIT_ERROR = 1;
1025
1026
  const CONFIG_EXIT_INVALID = 2;
@@ -1045,7 +1046,7 @@ async function runConfigShow(opts = {}) {
1045
1046
  return CONFIG_EXIT_INVALID;
1046
1047
  }
1047
1048
  const message = err instanceof Error ? err.message : String(err);
1048
- logger$14.error({ err }, "Failed to load configuration");
1049
+ logger$15.error({ err }, "Failed to load configuration");
1049
1050
  process.stderr.write(` Error loading configuration: ${message}\n`);
1050
1051
  return CONFIG_EXIT_ERROR;
1051
1052
  }
@@ -1119,7 +1120,7 @@ async function runConfigExport(opts = {}) {
1119
1120
  return CONFIG_EXIT_INVALID;
1120
1121
  }
1121
1122
  const message = err instanceof Error ? err.message : String(err);
1122
- logger$14.error({ err }, "Failed to load configuration");
1123
+ logger$15.error({ err }, "Failed to load configuration");
1123
1124
  process.stderr.write(`Error loading configuration: ${message}\n`);
1124
1125
  return CONFIG_EXIT_ERROR;
1125
1126
  }
@@ -1273,7 +1274,7 @@ function registerConfigCommand(program, _version) {
1273
1274
 
1274
1275
  //#endregion
1275
1276
  //#region src/cli/commands/resume.ts
1276
- const logger$13 = createLogger("resume-cmd");
1277
+ const logger$14 = createLogger("resume-cmd");
1277
1278
  async function runResumeAction(options) {
1278
1279
  const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName } = options;
1279
1280
  if (stopAfter !== void 0 && !VALID_PHASES.includes(stopAfter)) {
@@ -1355,7 +1356,7 @@ async function runResumeAction(options) {
1355
1356
  const msg = err instanceof Error ? err.message : String(err);
1356
1357
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
1357
1358
  else process.stderr.write(`Error: ${msg}\n`);
1358
- logger$13.error({ err }, "auto resume failed");
1359
+ logger$14.error({ err }, "auto resume failed");
1359
1360
  return 1;
1360
1361
  } finally {
1361
1362
  try {
@@ -1506,7 +1507,7 @@ async function runFullPipelineFromPhase(options) {
1506
1507
  });
1507
1508
  }
1508
1509
  } catch (err) {
1509
- logger$13.warn({ err }, "Failed to record token usage");
1510
+ logger$14.warn({ err }, "Failed to record token usage");
1510
1511
  }
1511
1512
  });
1512
1513
  const storyDecisions = db.prepare(`SELECT description FROM requirements WHERE pipeline_run_id = ? AND source = 'solutioning-phase'`).all(runId);
@@ -1565,7 +1566,7 @@ async function runFullPipelineFromPhase(options) {
1565
1566
  const msg = err instanceof Error ? err.message : String(err);
1566
1567
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
1567
1568
  else process.stderr.write(`Error: ${msg}\n`);
1568
- logger$13.error({ err }, "pipeline from phase failed");
1569
+ logger$14.error({ err }, "pipeline from phase failed");
1569
1570
  return 1;
1570
1571
  } finally {
1571
1572
  try {
@@ -1590,7 +1591,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
1590
1591
 
1591
1592
  //#endregion
1592
1593
  //#region src/cli/commands/status.ts
1593
- const logger$12 = createLogger("status-cmd");
1594
+ const logger$13 = createLogger("status-cmd");
1594
1595
  async function runStatusAction(options) {
1595
1596
  const { outputFormat, runId, projectRoot } = options;
1596
1597
  const dbRoot = await resolveMainRepoRoot(projectRoot);
@@ -1667,7 +1668,7 @@ async function runStatusAction(options) {
1667
1668
  const msg = err instanceof Error ? err.message : String(err);
1668
1669
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
1669
1670
  else process.stderr.write(`Error: ${msg}\n`);
1670
- logger$12.error({ err }, "status action failed");
1671
+ logger$13.error({ err }, "status action failed");
1671
1672
  return 1;
1672
1673
  } finally {
1673
1674
  try {
@@ -2091,7 +2092,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
2091
2092
 
2092
2093
  //#endregion
2093
2094
  //#region src/cli/commands/amend.ts
2094
- const logger$11 = createLogger("amend-cmd");
2095
+ const logger$12 = createLogger("amend-cmd");
2095
2096
  /**
2096
2097
  * Detect and apply supersessions after a phase completes in an amendment run.
2097
2098
  *
@@ -2122,7 +2123,7 @@ function runPostPhaseSupersessionDetection(db, amendmentRunId, currentPhase, han
2122
2123
  });
2123
2124
  } catch (err) {
2124
2125
  const msg = err instanceof Error ? err.message : String(err);
2125
- logger$11.warn({
2126
+ logger$12.warn({
2126
2127
  err,
2127
2128
  originalId: parentMatch.id,
2128
2129
  supersedingId: newDec.id
@@ -2257,7 +2258,7 @@ async function runAmendAction(options) {
2257
2258
  for (let i = startIdx; i < phaseOrder.length; i++) {
2258
2259
  const currentPhase = phaseOrder[i];
2259
2260
  const amendmentContext = handler.loadContextForPhase(currentPhase);
2260
- logger$11.info({
2261
+ logger$12.info({
2261
2262
  phase: currentPhase,
2262
2263
  amendmentContextLen: amendmentContext.length
2263
2264
  }, "Amendment context loaded for phase");
@@ -2377,7 +2378,7 @@ async function runAmendAction(options) {
2377
2378
  } catch (err) {
2378
2379
  const msg = err instanceof Error ? err.message : String(err);
2379
2380
  process.stderr.write(`Error: ${msg}\n`);
2380
- logger$11.error({ err }, "amend failed");
2381
+ logger$12.error({ err }, "amend failed");
2381
2382
  return 1;
2382
2383
  } finally {
2383
2384
  try {
@@ -2402,21 +2403,47 @@ function registerAmendCommand(program, _version = "0.0.0", projectRoot = process
2402
2403
 
2403
2404
  //#endregion
2404
2405
  //#region src/cli/commands/health.ts
2405
- const logger$10 = createLogger("health-cmd");
2406
- function inspectProcessTree() {
2406
+ const logger$11 = createLogger("health-cmd");
2407
+ /** Default stall threshold in seconds — also used by supervisor default */
2408
+ const DEFAULT_STALL_THRESHOLD_SECONDS = 600;
2409
+ /**
2410
+ * Determine whether a ps output line represents the substrate pipeline orchestrator.
2411
+ * Handles invocation via:
2412
+ * - `substrate run` (globally installed)
2413
+ * - `substrate-ai run`
2414
+ * - `node dist/cli/index.js run` (npm run substrate:dev)
2415
+ * - `npx substrate run`
2416
+ * - any node process whose command contains `run` with `--events` or `--stories`
2417
+ */
2418
+ function isOrchestratorProcessLine(line) {
2419
+ if (line.includes("grep")) return false;
2420
+ if (line.includes("substrate run")) return true;
2421
+ if (line.includes("substrate-ai run")) return true;
2422
+ if (line.includes("index.js run")) return true;
2423
+ if (line.includes("node") && /\srun(\s|$)/.test(line) && (line.includes("--events") || line.includes("--stories"))) return true;
2424
+ return false;
2425
+ }
2426
+ function inspectProcessTree(execFileSyncOverride) {
2407
2427
  const result = {
2408
2428
  orchestrator_pid: null,
2409
2429
  child_pids: [],
2410
2430
  zombies: []
2411
2431
  };
2412
2432
  try {
2413
- const { execFileSync } = __require("node:child_process");
2414
- const psOutput = execFileSync("ps", ["-eo", "pid,ppid,stat,command"], {
2433
+ let psOutput;
2434
+ if (execFileSyncOverride !== void 0) psOutput = execFileSyncOverride("ps", ["-eo", "pid,ppid,stat,command"], {
2415
2435
  encoding: "utf-8",
2416
2436
  timeout: 5e3
2417
2437
  });
2438
+ else {
2439
+ const { execFileSync } = __require("node:child_process");
2440
+ psOutput = execFileSync("ps", ["-eo", "pid,ppid,stat,command"], {
2441
+ encoding: "utf-8",
2442
+ timeout: 5e3
2443
+ });
2444
+ }
2418
2445
  const lines = psOutput.split("\n");
2419
- for (const line of lines) if (line.includes("substrate run") && !line.includes("grep")) {
2446
+ for (const line of lines) if (isOrchestratorProcessLine(line)) {
2420
2447
  const match = line.trim().match(/^(\d+)/);
2421
2448
  if (match) {
2422
2449
  result.orchestrator_pid = parseInt(match[1], 10);
@@ -2477,7 +2504,7 @@ async function getAutoHealthData(options) {
2477
2504
  if (runId !== void 0) run = getPipelineRunById(db, runId);
2478
2505
  else run = getLatestRun(db);
2479
2506
  if (run === void 0) return NO_PIPELINE;
2480
- const updatedAt = new Date(run.updated_at);
2507
+ const updatedAt = parseDbTimestampAsUtc(run.updated_at);
2481
2508
  const stalenessSeconds = Math.round((Date.now() - updatedAt.getTime()) / 1e3);
2482
2509
  let storyDetails = {};
2483
2510
  let active = 0;
@@ -2499,8 +2526,9 @@ async function getAutoHealthData(options) {
2499
2526
  } catch {}
2500
2527
  const processInfo = inspectProcessTree();
2501
2528
  let verdict = "NO_PIPELINE_RUNNING";
2502
- if (run.status === "running") if (processInfo.zombies.length > 0) verdict = "STALLED";
2503
- else if (stalenessSeconds > 600) verdict = "STALLED";
2529
+ if (run.status === "running") if (processInfo.orchestrator_pid === null && active === 0 && completed > 0) verdict = "NO_PIPELINE_RUNNING";
2530
+ else if (processInfo.zombies.length > 0) verdict = "STALLED";
2531
+ else if (stalenessSeconds > DEFAULT_STALL_THRESHOLD_SECONDS) verdict = "STALLED";
2504
2532
  else if (processInfo.orchestrator_pid !== null && processInfo.child_pids.length === 0 && active > 0) verdict = "STALLED";
2505
2533
  else verdict = "HEALTHY";
2506
2534
  else if (run.status === "completed" || run.status === "failed" || run.status === "stopped") verdict = "NO_PIPELINE_RUNNING";
@@ -2526,124 +2554,31 @@ async function getAutoHealthData(options) {
2526
2554
  }
2527
2555
  }
2528
2556
  async function runHealthAction(options) {
2529
- const { outputFormat, runId, projectRoot } = options;
2530
- const dbRoot = await resolveMainRepoRoot(projectRoot);
2531
- const dbPath = join(dbRoot, ".substrate", "substrate.db");
2532
- if (!existsSync(dbPath)) {
2533
- const output = {
2534
- verdict: "NO_PIPELINE_RUNNING",
2535
- run_id: null,
2536
- status: null,
2537
- current_phase: null,
2538
- staleness_seconds: 0,
2539
- last_activity: "",
2540
- process: {
2541
- orchestrator_pid: null,
2542
- child_pids: [],
2543
- zombies: []
2544
- },
2545
- stories: {
2546
- active: 0,
2547
- completed: 0,
2548
- escalated: 0,
2549
- details: {}
2550
- }
2551
- };
2552
- if (outputFormat === "json") process.stdout.write(formatOutput(output, "json", true) + "\n");
2553
- else process.stdout.write("NO_PIPELINE_RUNNING — no substrate database found\n");
2554
- return 0;
2555
- }
2556
- const dbWrapper = new DatabaseWrapper(dbPath);
2557
+ const { outputFormat } = options;
2557
2558
  try {
2558
- dbWrapper.open();
2559
- const db = dbWrapper.db;
2560
- let run;
2561
- if (runId !== void 0) run = getPipelineRunById(db, runId);
2562
- else run = getLatestRun(db);
2563
- if (run === void 0) {
2564
- const output$1 = {
2565
- verdict: "NO_PIPELINE_RUNNING",
2566
- run_id: null,
2567
- status: null,
2568
- current_phase: null,
2569
- staleness_seconds: 0,
2570
- last_activity: "",
2571
- process: {
2572
- orchestrator_pid: null,
2573
- child_pids: [],
2574
- zombies: []
2575
- },
2576
- stories: {
2577
- active: 0,
2578
- completed: 0,
2579
- escalated: 0,
2580
- details: {}
2581
- }
2582
- };
2583
- if (outputFormat === "json") process.stdout.write(formatOutput(output$1, "json", true) + "\n");
2584
- else process.stdout.write("NO_PIPELINE_RUNNING — no pipeline runs found\n");
2585
- return 0;
2586
- }
2587
- const updatedAt = new Date(run.updated_at);
2588
- const stalenessSeconds = Math.round((Date.now() - updatedAt.getTime()) / 1e3);
2589
- let storyDetails = {};
2590
- let active = 0;
2591
- let completed = 0;
2592
- let escalated = 0;
2593
- try {
2594
- if (run.token_usage_json) {
2595
- const state = JSON.parse(run.token_usage_json);
2596
- if (state.stories) for (const [key, s] of Object.entries(state.stories)) {
2597
- storyDetails[key] = {
2598
- phase: s.phase,
2599
- review_cycles: s.reviewCycles
2600
- };
2601
- if (s.phase === "COMPLETE") completed++;
2602
- else if (s.phase === "ESCALATED") escalated++;
2603
- else if (s.phase !== "PENDING") active++;
2604
- }
2605
- }
2606
- } catch {}
2607
- const processInfo = inspectProcessTree();
2608
- let verdict = "NO_PIPELINE_RUNNING";
2609
- if (run.status === "running") if (processInfo.zombies.length > 0) verdict = "STALLED";
2610
- else if (stalenessSeconds > 600) verdict = "STALLED";
2611
- else if (processInfo.orchestrator_pid !== null && processInfo.child_pids.length === 0 && active > 0) verdict = "STALLED";
2612
- else verdict = "HEALTHY";
2613
- else if (run.status === "completed" || run.status === "failed" || run.status === "stopped") verdict = "NO_PIPELINE_RUNNING";
2614
- const output = {
2615
- verdict,
2616
- run_id: run.id,
2617
- status: run.status,
2618
- current_phase: run.current_phase,
2619
- staleness_seconds: stalenessSeconds,
2620
- last_activity: run.updated_at,
2621
- process: processInfo,
2622
- stories: {
2623
- active,
2624
- completed,
2625
- escalated,
2626
- details: storyDetails
2627
- }
2628
- };
2629
- if (outputFormat === "json") process.stdout.write(formatOutput(output, "json", true) + "\n");
2559
+ const health = await getAutoHealthData(options);
2560
+ if (outputFormat === "json") process.stdout.write(formatOutput(health, "json", true) + "\n");
2630
2561
  else {
2631
- const verdictLabel = verdict === "HEALTHY" ? "HEALTHY" : verdict === "STALLED" ? "STALLED" : "NO PIPELINE RUNNING";
2562
+ const verdictLabel = health.verdict === "HEALTHY" ? "HEALTHY" : health.verdict === "STALLED" ? "STALLED" : "NO PIPELINE RUNNING";
2632
2563
  process.stdout.write(`\nPipeline Health: ${verdictLabel}\n`);
2633
- process.stdout.write(` Run: ${run.id}\n`);
2634
- process.stdout.write(` Status: ${run.status}\n`);
2635
- process.stdout.write(` Phase: ${run.current_phase ?? "N/A"}\n`);
2636
- process.stdout.write(` Last Active: ${run.updated_at} (${stalenessSeconds}s ago)\n`);
2637
- if (processInfo.orchestrator_pid !== null) {
2638
- process.stdout.write(` Orchestrator: PID ${processInfo.orchestrator_pid}\n`);
2639
- process.stdout.write(` Children: ${processInfo.child_pids.length} active`);
2640
- if (processInfo.zombies.length > 0) process.stdout.write(` (${processInfo.zombies.length} ZOMBIE)`);
2641
- process.stdout.write("\n");
2642
- } else process.stdout.write(" Orchestrator: not running\n");
2643
- if (Object.keys(storyDetails).length > 0) {
2644
- process.stdout.write("\n Stories:\n");
2645
- for (const [key, s] of Object.entries(storyDetails)) process.stdout.write(` ${key}: ${s.phase} (${s.review_cycles} review cycles)\n`);
2646
- process.stdout.write(`\n Summary: ${active} active, ${completed} completed, ${escalated} escalated\n`);
2564
+ if (health.run_id !== null) {
2565
+ process.stdout.write(` Run: ${health.run_id}\n`);
2566
+ process.stdout.write(` Status: ${health.status}\n`);
2567
+ process.stdout.write(` Phase: ${health.current_phase ?? "N/A"}\n`);
2568
+ process.stdout.write(` Last Active: ${health.last_activity} (${health.staleness_seconds}s ago)\n`);
2569
+ const processInfo = health.process;
2570
+ if (processInfo.orchestrator_pid !== null) {
2571
+ process.stdout.write(` Orchestrator: PID ${processInfo.orchestrator_pid}\n`);
2572
+ process.stdout.write(` Children: ${processInfo.child_pids.length} active`);
2573
+ if (processInfo.zombies.length > 0) process.stdout.write(` (${processInfo.zombies.length} ZOMBIE)`);
2574
+ process.stdout.write("\n");
2575
+ } else process.stdout.write(" Orchestrator: not running\n");
2576
+ const storyDetails = health.stories.details;
2577
+ if (Object.keys(storyDetails).length > 0) {
2578
+ process.stdout.write("\n Stories:\n");
2579
+ for (const [key, s] of Object.entries(storyDetails)) process.stdout.write(` ${key}: ${s.phase} (${s.review_cycles} review cycles)\n`);
2580
+ process.stdout.write(`\n Summary: ${health.stories.active} active, ${health.stories.completed} completed, ${health.stories.escalated} escalated\n`);
2581
+ }
2647
2582
  }
2648
2583
  }
2649
2584
  return 0;
@@ -2651,12 +2586,8 @@ async function runHealthAction(options) {
2651
2586
  const msg = err instanceof Error ? err.message : String(err);
2652
2587
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
2653
2588
  else process.stderr.write(`Error: ${msg}\n`);
2654
- logger$10.error({ err }, "health action failed");
2589
+ logger$11.error({ err }, "health action failed");
2655
2590
  return 1;
2656
- } finally {
2657
- try {
2658
- dbWrapper.close();
2659
- } catch {}
2660
2591
  }
2661
2592
  }
2662
2593
  function registerHealthCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
@@ -2699,6 +2630,36 @@ function defaultSupervisorDeps() {
2699
2630
  }
2700
2631
  };
2701
2632
  })(),
2633
+ getTokenSnapshot: (runId, projectRoot) => {
2634
+ try {
2635
+ const dbPath = join(projectRoot, ".substrate", "substrate.db");
2636
+ if (!existsSync(dbPath)) return {
2637
+ input: 0,
2638
+ output: 0,
2639
+ cost_usd: 0
2640
+ };
2641
+ const dbWrapper = new DatabaseWrapper(dbPath);
2642
+ try {
2643
+ dbWrapper.open();
2644
+ const agg = aggregateTokenUsageForRun(dbWrapper.db, runId);
2645
+ return {
2646
+ input: agg.input,
2647
+ output: agg.output,
2648
+ cost_usd: agg.cost
2649
+ };
2650
+ } finally {
2651
+ try {
2652
+ dbWrapper.close();
2653
+ } catch {}
2654
+ }
2655
+ } catch {
2656
+ return {
2657
+ input: 0,
2658
+ output: 0,
2659
+ cost_usd: 0
2660
+ };
2661
+ }
2662
+ },
2702
2663
  runAnalysis: async (runId, projectRoot) => {
2703
2664
  const dbPath = join(projectRoot, ".substrate", "substrate.db");
2704
2665
  if (!existsSync(dbPath)) return;
@@ -2740,7 +2701,7 @@ function defaultSupervisorDeps() {
2740
2701
  */
2741
2702
  async function runSupervisorAction(options, deps = {}) {
2742
2703
  const { pollInterval, stallThreshold, maxRestarts, outputFormat, projectRoot, runId, pack, experiment, maxExperiments } = options;
2743
- const { getHealth, killPid, resumePipeline, sleep, incrementRestarts, runAnalysis } = {
2704
+ const { getHealth, killPid, resumePipeline, sleep, incrementRestarts, runAnalysis, getTokenSnapshot } = {
2744
2705
  ...defaultSupervisorDeps(),
2745
2706
  ...deps
2746
2707
  };
@@ -2764,6 +2725,36 @@ async function runSupervisorAction(options, deps = {}) {
2764
2725
  projectRoot
2765
2726
  });
2766
2727
  const ts = new Date().toISOString();
2728
+ if (outputFormat === "json") {
2729
+ const tokenSnapshot = health.run_id !== null ? getTokenSnapshot(health.run_id, projectRoot) : {
2730
+ input: 0,
2731
+ output: 0,
2732
+ cost_usd: 0
2733
+ };
2734
+ const proc = health.process ?? {
2735
+ orchestrator_pid: null,
2736
+ child_pids: [],
2737
+ zombies: []
2738
+ };
2739
+ emitEvent({
2740
+ type: "supervisor:poll",
2741
+ run_id: health.run_id,
2742
+ verdict: health.verdict,
2743
+ staleness_seconds: health.staleness_seconds,
2744
+ stories: {
2745
+ active: health.stories.active,
2746
+ completed: health.stories.completed,
2747
+ escalated: health.stories.escalated
2748
+ },
2749
+ story_details: health.stories.details,
2750
+ tokens: tokenSnapshot,
2751
+ process: {
2752
+ orchestrator_pid: proc.orchestrator_pid,
2753
+ child_count: proc.child_pids.length,
2754
+ zombie_count: proc.zombies.length
2755
+ }
2756
+ });
2757
+ }
2767
2758
  log(`[${ts}] Health: ${health.verdict} | staleness=${health.staleness_seconds}s | stories: active=${health.stories.active} completed=${health.stories.completed} escalated=${health.stories.escalated}`);
2768
2759
  if (health.verdict === "NO_PIPELINE_RUNNING") {
2769
2760
  const elapsedSeconds = Math.round((Date.now() - startTime) / 1e3);
@@ -2832,7 +2823,7 @@ async function runSupervisorAction(options, deps = {}) {
2832
2823
  );
2833
2824
  const { getLatestRun: getLatest } = await import(
2834
2825
  /* @vite-ignore */
2835
- "../decisions-WIsicZiG.js"
2826
+ "../decisions-DKXc-jnv.js"
2836
2827
  );
2837
2828
  const dbPath = join(projectRoot, ".substrate", "substrate.db");
2838
2829
  const expDbWrapper = new DatabaseWrapper(dbPath);
@@ -2842,7 +2833,7 @@ async function runSupervisorAction(options, deps = {}) {
2842
2833
  const expDb = expDbWrapper.db;
2843
2834
  const { runRunAction: runPipeline } = await import(
2844
2835
  /* @vite-ignore */
2845
- "../run-CRmhkcwN.js"
2836
+ "../run-XkrV99HV.js"
2846
2837
  );
2847
2838
  const runStoryFn = async (opts) => {
2848
2839
  const exitCode = await runPipeline({
@@ -2999,7 +2990,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
2999
2990
 
3000
2991
  //#endregion
3001
2992
  //#region src/cli/commands/metrics.ts
3002
- const logger$9 = createLogger("metrics-cmd");
2993
+ const logger$10 = createLogger("metrics-cmd");
3003
2994
  async function runMetricsAction(options) {
3004
2995
  const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis } = options;
3005
2996
  if (analysis !== void 0) {
@@ -3105,7 +3096,7 @@ async function runMetricsAction(options) {
3105
3096
  const msg = err instanceof Error ? err.message : String(err);
3106
3097
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
3107
3098
  else process.stderr.write(`Error: ${msg}\n`);
3108
- logger$9.error({ err }, "metrics action failed");
3099
+ logger$10.error({ err }, "metrics action failed");
3109
3100
  return 1;
3110
3101
  } finally {
3111
3102
  try {
@@ -3359,7 +3350,7 @@ function getPlanningCostTotal(db, sessionId) {
3359
3350
  function getLatestSessionId(_db) {
3360
3351
  return null;
3361
3352
  }
3362
- const logger$8 = createLogger("cost-cmd");
3353
+ const logger$9 = createLogger("cost-cmd");
3363
3354
  const COST_EXIT_SUCCESS = 0;
3364
3355
  const COST_EXIT_ERROR = 1;
3365
3356
  /**
@@ -3605,7 +3596,7 @@ async function runCostAction(options) {
3605
3596
  } catch (err) {
3606
3597
  const message = err instanceof Error ? err.message : String(err);
3607
3598
  process.stderr.write(`Error: ${message}\n`);
3608
- logger$8.error({ err }, "runCostAction failed");
3599
+ logger$9.error({ err }, "runCostAction failed");
3609
3600
  return COST_EXIT_ERROR;
3610
3601
  } finally {
3611
3602
  if (wrapper !== null) try {
@@ -3707,7 +3698,7 @@ function applyMonitorSchema(db) {
3707
3698
 
3708
3699
  //#endregion
3709
3700
  //#region src/persistence/monitor-database.ts
3710
- const logger$7 = createLogger("persistence:monitor-db");
3701
+ const logger$8 = createLogger("persistence:monitor-db");
3711
3702
  var MonitorDatabaseImpl = class {
3712
3703
  _db = null;
3713
3704
  _path;
@@ -3718,10 +3709,10 @@ var MonitorDatabaseImpl = class {
3718
3709
  this._open();
3719
3710
  }
3720
3711
  _open() {
3721
- logger$7.info({ path: this._path }, "Opening monitor database");
3712
+ logger$8.info({ path: this._path }, "Opening monitor database");
3722
3713
  this._db = new BetterSqlite3(this._path);
3723
3714
  const walResult = this._db.pragma("journal_mode = WAL");
3724
- if (walResult?.[0]?.journal_mode !== "wal") logger$7.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
3715
+ if (walResult?.[0]?.journal_mode !== "wal") logger$8.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
3725
3716
  this._db.pragma("synchronous = NORMAL");
3726
3717
  this._db.pragma("busy_timeout = 5000");
3727
3718
  this._db.pragma("foreign_keys = ON");
@@ -3756,7 +3747,7 @@ var MonitorDatabaseImpl = class {
3756
3747
  total_retries = total_retries + @retries,
3757
3748
  last_updated = @lastUpdated
3758
3749
  `);
3759
- logger$7.info({ path: this._path }, "Monitor database ready");
3750
+ logger$8.info({ path: this._path }, "Monitor database ready");
3760
3751
  }
3761
3752
  _assertOpen() {
3762
3753
  if (this._db === null) throw new Error("MonitorDatabase: connection is closed");
@@ -3905,7 +3896,7 @@ var MonitorDatabaseImpl = class {
3905
3896
  const db = this._assertOpen();
3906
3897
  const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1e3).toISOString();
3907
3898
  const result = db.prepare("DELETE FROM task_metrics WHERE recorded_at < @cutoff").run({ cutoff });
3908
- logger$7.info({
3899
+ logger$8.info({
3909
3900
  cutoff,
3910
3901
  deleted: result.changes
3911
3902
  }, "Pruned old task_metrics rows");
@@ -3944,13 +3935,13 @@ var MonitorDatabaseImpl = class {
3944
3935
  db.exec("ROLLBACK");
3945
3936
  throw err;
3946
3937
  }
3947
- logger$7.info("Rebuilt performance_aggregates from task_metrics");
3938
+ logger$8.info("Rebuilt performance_aggregates from task_metrics");
3948
3939
  }
3949
3940
  resetAllData() {
3950
3941
  const db = this._assertOpen();
3951
3942
  db.exec("DELETE FROM task_metrics");
3952
3943
  db.exec("DELETE FROM performance_aggregates");
3953
- logger$7.info({ path: this._path }, "Monitor data reset — all rows deleted");
3944
+ logger$8.info({ path: this._path }, "Monitor data reset — all rows deleted");
3954
3945
  }
3955
3946
  getTaskMetricsDateRange() {
3956
3947
  const db = this._assertOpen();
@@ -3967,7 +3958,7 @@ var MonitorDatabaseImpl = class {
3967
3958
  if (this._db === null) return;
3968
3959
  this._db.close();
3969
3960
  this._db = null;
3970
- logger$7.info({ path: this._path }, "Monitor database closed");
3961
+ logger$8.info({ path: this._path }, "Monitor database closed");
3971
3962
  }
3972
3963
  /**
3973
3964
  * Access the raw underlying database for testing purposes only.
@@ -3980,7 +3971,7 @@ var MonitorDatabaseImpl = class {
3980
3971
 
3981
3972
  //#endregion
3982
3973
  //#region src/modules/monitor/recommendation-engine.ts
3983
- const logger$6 = createLogger("monitor:recommendations");
3974
+ const logger$7 = createLogger("monitor:recommendations");
3984
3975
  var RecommendationEngine = class {
3985
3976
  _monitorDb;
3986
3977
  _filters;
@@ -4013,7 +4004,7 @@ var RecommendationEngine = class {
4013
4004
  const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
4014
4005
  const aggregates = this._monitorDb.getAggregates({ sinceDate });
4015
4006
  if (aggregates.length === 0) {
4016
- logger$6.debug("No performance aggregates found — no recommendations to generate");
4007
+ logger$7.debug("No performance aggregates found — no recommendations to generate");
4017
4008
  return [];
4018
4009
  }
4019
4010
  const byTaskType = new Map();
@@ -4078,7 +4069,7 @@ var RecommendationEngine = class {
4078
4069
  if (confDiff !== 0) return confDiff;
4079
4070
  return b.improvement_percentage - a.improvement_percentage;
4080
4071
  });
4081
- logger$6.debug({ count: recommendations.length }, "Generated routing recommendations");
4072
+ logger$7.debug({ count: recommendations.length }, "Generated routing recommendations");
4082
4073
  return recommendations;
4083
4074
  }
4084
4075
  /**
@@ -4244,7 +4235,7 @@ function generateMonitorReport(monitorDb, options = {}) {
4244
4235
 
4245
4236
  //#endregion
4246
4237
  //#region src/cli/commands/monitor.ts
4247
- const logger$5 = createLogger("monitor-cmd");
4238
+ const logger$6 = createLogger("monitor-cmd");
4248
4239
  const MONITOR_EXIT_SUCCESS = 0;
4249
4240
  const MONITOR_EXIT_ERROR = 1;
4250
4241
  /**
@@ -4447,7 +4438,7 @@ async function runMonitorReportAction(options) {
4447
4438
  } catch (err) {
4448
4439
  const message = err instanceof Error ? err.message : String(err);
4449
4440
  process.stderr.write(`Error: ${message}\n`);
4450
- logger$5.error({ err }, "runMonitorReportAction failed");
4441
+ logger$6.error({ err }, "runMonitorReportAction failed");
4451
4442
  return MONITOR_EXIT_ERROR;
4452
4443
  } finally {
4453
4444
  if (monitorDb !== null) try {
@@ -4509,7 +4500,7 @@ async function runMonitorStatusAction(options) {
4509
4500
  } catch (err) {
4510
4501
  const message = err instanceof Error ? err.message : String(err);
4511
4502
  process.stderr.write(`Error: ${message}\n`);
4512
- logger$5.error({ err }, "runMonitorStatusAction failed");
4503
+ logger$6.error({ err }, "runMonitorStatusAction failed");
4513
4504
  return MONITOR_EXIT_ERROR;
4514
4505
  } finally {
4515
4506
  if (monitorDb !== null) try {
@@ -4544,7 +4535,7 @@ async function runMonitorResetAction(options) {
4544
4535
  } catch (err) {
4545
4536
  const message = err instanceof Error ? err.message : String(err);
4546
4537
  process.stderr.write(`Error: ${message}\n`);
4547
- logger$5.error({ err }, "runMonitorResetAction failed");
4538
+ logger$6.error({ err }, "runMonitorResetAction failed");
4548
4539
  return MONITOR_EXIT_ERROR;
4549
4540
  } finally {
4550
4541
  if (monitorDb !== null) try {
@@ -4592,7 +4583,7 @@ async function runMonitorRecommendationsAction(options) {
4592
4583
  } catch (err) {
4593
4584
  const message = err instanceof Error ? err.message : String(err);
4594
4585
  process.stderr.write(`Error: ${message}\n`);
4595
- logger$5.error({ err }, "runMonitorRecommendationsAction failed");
4586
+ logger$6.error({ err }, "runMonitorRecommendationsAction failed");
4596
4587
  return MONITOR_EXIT_ERROR;
4597
4588
  } finally {
4598
4589
  if (monitorDb !== null) try {
@@ -4670,7 +4661,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
4670
4661
 
4671
4662
  //#endregion
4672
4663
  //#region src/modules/git-worktree/git-worktree-manager-impl.ts
4673
- const logger$4 = createLogger("git-worktree");
4664
+ const logger$5 = createLogger("git-worktree");
4674
4665
  const BRANCH_PREFIX = "substrate/task-";
4675
4666
  const DEFAULT_WORKTREE_BASE = ".substrate-worktrees";
4676
4667
  var GitWorktreeManagerImpl = class {
@@ -4689,7 +4680,7 @@ var GitWorktreeManagerImpl = class {
4689
4680
  this._db = db;
4690
4681
  this._onTaskReady = ({ taskId }) => {
4691
4682
  this._handleTaskReady(taskId).catch((err) => {
4692
- logger$4.error({
4683
+ logger$5.error({
4693
4684
  taskId,
4694
4685
  err
4695
4686
  }, "Unhandled error in _handleTaskReady");
@@ -4703,40 +4694,40 @@ var GitWorktreeManagerImpl = class {
4703
4694
  };
4704
4695
  }
4705
4696
  async initialize() {
4706
- logger$4.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
4697
+ logger$5.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
4707
4698
  await this.verifyGitVersion();
4708
4699
  const cleaned = await this.cleanupAllWorktrees();
4709
- if (cleaned > 0) logger$4.info({ cleaned }, "Recovered orphaned worktrees on startup");
4700
+ if (cleaned > 0) logger$5.info({ cleaned }, "Recovered orphaned worktrees on startup");
4710
4701
  this._eventBus.on("task:ready", this._onTaskReady);
4711
4702
  this._eventBus.on("task:complete", this._onTaskComplete);
4712
4703
  this._eventBus.on("task:failed", this._onTaskFailed);
4713
- logger$4.info("GitWorktreeManager initialized");
4704
+ logger$5.info("GitWorktreeManager initialized");
4714
4705
  }
4715
4706
  async shutdown() {
4716
- logger$4.info("GitWorktreeManager.shutdown()");
4707
+ logger$5.info("GitWorktreeManager.shutdown()");
4717
4708
  this._eventBus.off("task:ready", this._onTaskReady);
4718
4709
  this._eventBus.off("task:complete", this._onTaskComplete);
4719
4710
  this._eventBus.off("task:failed", this._onTaskFailed);
4720
4711
  await this.cleanupAllWorktrees();
4721
- logger$4.info("GitWorktreeManager shutdown complete");
4712
+ logger$5.info("GitWorktreeManager shutdown complete");
4722
4713
  }
4723
4714
  async _handleTaskReady(taskId) {
4724
- logger$4.debug({ taskId }, "task:ready — creating worktree");
4715
+ logger$5.debug({ taskId }, "task:ready — creating worktree");
4725
4716
  try {
4726
4717
  await this.createWorktree(taskId);
4727
4718
  } catch (err) {
4728
- logger$4.error({
4719
+ logger$5.error({
4729
4720
  taskId,
4730
4721
  err
4731
4722
  }, "Failed to create worktree for task");
4732
4723
  }
4733
4724
  }
4734
4725
  async _handleTaskDone(taskId) {
4735
- logger$4.debug({ taskId }, "task done — cleaning up worktree");
4726
+ logger$5.debug({ taskId }, "task done — cleaning up worktree");
4736
4727
  try {
4737
4728
  await this.cleanupWorktree(taskId);
4738
4729
  } catch (err) {
4739
- logger$4.warn({
4730
+ logger$5.warn({
4740
4731
  taskId,
4741
4732
  err
4742
4733
  }, "Failed to cleanup worktree for task");
@@ -4746,7 +4737,7 @@ var GitWorktreeManagerImpl = class {
4746
4737
  if (!taskId || taskId.trim().length === 0) throw new Error("createWorktree: taskId must be a non-empty string");
4747
4738
  const branchName = BRANCH_PREFIX + taskId;
4748
4739
  const worktreePath = this.getWorktreePath(taskId);
4749
- logger$4.debug({
4740
+ logger$5.debug({
4750
4741
  taskId,
4751
4742
  branchName,
4752
4743
  worktreePath,
@@ -4766,7 +4757,7 @@ var GitWorktreeManagerImpl = class {
4766
4757
  worktreePath,
4767
4758
  createdAt
4768
4759
  };
4769
- logger$4.info({
4760
+ logger$5.info({
4770
4761
  taskId,
4771
4762
  branchName,
4772
4763
  worktreePath
@@ -4776,7 +4767,7 @@ var GitWorktreeManagerImpl = class {
4776
4767
  async cleanupWorktree(taskId) {
4777
4768
  const branchName = BRANCH_PREFIX + taskId;
4778
4769
  const worktreePath = this.getWorktreePath(taskId);
4779
- logger$4.debug({
4770
+ logger$5.debug({
4780
4771
  taskId,
4781
4772
  branchName,
4782
4773
  worktreePath
@@ -4786,7 +4777,7 @@ var GitWorktreeManagerImpl = class {
4786
4777
  await access$1(worktreePath);
4787
4778
  worktreeExists = true;
4788
4779
  } catch {
4789
- logger$4.debug({
4780
+ logger$5.debug({
4790
4781
  taskId,
4791
4782
  worktreePath
4792
4783
  }, "cleanupWorktree: worktree does not exist, skipping removal");
@@ -4794,7 +4785,7 @@ var GitWorktreeManagerImpl = class {
4794
4785
  if (worktreeExists) try {
4795
4786
  await removeWorktree(worktreePath, this._projectRoot);
4796
4787
  } catch (err) {
4797
- logger$4.warn({
4788
+ logger$5.warn({
4798
4789
  taskId,
4799
4790
  worktreePath,
4800
4791
  err
@@ -4803,7 +4794,7 @@ var GitWorktreeManagerImpl = class {
4803
4794
  try {
4804
4795
  await removeBranch(branchName, this._projectRoot);
4805
4796
  } catch (err) {
4806
- logger$4.warn({
4797
+ logger$5.warn({
4807
4798
  taskId,
4808
4799
  branchName,
4809
4800
  err
@@ -4813,13 +4804,13 @@ var GitWorktreeManagerImpl = class {
4813
4804
  taskId,
4814
4805
  branchName
4815
4806
  });
4816
- logger$4.info({
4807
+ logger$5.info({
4817
4808
  taskId,
4818
4809
  branchName
4819
4810
  }, "Worktree cleaned up");
4820
4811
  }
4821
4812
  async cleanupAllWorktrees() {
4822
- logger$4.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
4813
+ logger$5.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
4823
4814
  const orphanedPaths = await getOrphanedWorktrees(this._projectRoot, this._baseDirectory);
4824
4815
  let cleaned = 0;
4825
4816
  for (const worktreePath of orphanedPaths) {
@@ -4828,12 +4819,12 @@ var GitWorktreeManagerImpl = class {
4828
4819
  try {
4829
4820
  await removeWorktree(worktreePath, this._projectRoot);
4830
4821
  worktreeRemoved = true;
4831
- logger$4.debug({
4822
+ logger$5.debug({
4832
4823
  taskId,
4833
4824
  worktreePath
4834
4825
  }, "cleanupAllWorktrees: removed orphaned worktree");
4835
4826
  } catch (err) {
4836
- logger$4.warn({
4827
+ logger$5.warn({
4837
4828
  taskId,
4838
4829
  worktreePath,
4839
4830
  err
@@ -4843,12 +4834,12 @@ var GitWorktreeManagerImpl = class {
4843
4834
  let branchRemoved = false;
4844
4835
  try {
4845
4836
  branchRemoved = await removeBranch(branchName, this._projectRoot);
4846
- if (branchRemoved) logger$4.debug({
4837
+ if (branchRemoved) logger$5.debug({
4847
4838
  taskId,
4848
4839
  branchName
4849
4840
  }, "cleanupAllWorktrees: removed orphaned branch");
4850
4841
  } catch (err) {
4851
- logger$4.warn({
4842
+ logger$5.warn({
4852
4843
  taskId,
4853
4844
  branchName,
4854
4845
  err
@@ -4856,14 +4847,14 @@ var GitWorktreeManagerImpl = class {
4856
4847
  }
4857
4848
  if (worktreeRemoved) cleaned++;
4858
4849
  }
4859
- if (cleaned > 0) logger$4.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
4850
+ if (cleaned > 0) logger$5.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
4860
4851
  return cleaned;
4861
4852
  }
4862
4853
  async detectConflicts(taskId, targetBranch = "main") {
4863
4854
  if (!taskId || taskId.trim().length === 0) throw new Error("detectConflicts: taskId must be a non-empty string");
4864
4855
  const branchName = BRANCH_PREFIX + taskId;
4865
4856
  const worktreePath = this.getWorktreePath(taskId);
4866
- logger$4.debug({
4857
+ logger$5.debug({
4867
4858
  taskId,
4868
4859
  branchName,
4869
4860
  targetBranch
@@ -4891,7 +4882,7 @@ var GitWorktreeManagerImpl = class {
4891
4882
  branch: branchName,
4892
4883
  conflictingFiles: report.conflictingFiles
4893
4884
  });
4894
- logger$4.info({
4885
+ logger$5.info({
4895
4886
  taskId,
4896
4887
  hasConflicts: report.hasConflicts,
4897
4888
  conflictCount: conflictingFiles.length
@@ -4901,14 +4892,14 @@ var GitWorktreeManagerImpl = class {
4901
4892
  async mergeWorktree(taskId, targetBranch = "main") {
4902
4893
  if (!taskId || taskId.trim().length === 0) throw new Error("mergeWorktree: taskId must be a non-empty string");
4903
4894
  const branchName = BRANCH_PREFIX + taskId;
4904
- logger$4.debug({
4895
+ logger$5.debug({
4905
4896
  taskId,
4906
4897
  branchName,
4907
4898
  targetBranch
4908
4899
  }, "mergeWorktree");
4909
4900
  const conflictReport = await this.detectConflicts(taskId, targetBranch);
4910
4901
  if (conflictReport.hasConflicts) {
4911
- logger$4.info({
4902
+ logger$5.info({
4912
4903
  taskId,
4913
4904
  conflictCount: conflictReport.conflictingFiles.length
4914
4905
  }, "Merge skipped due to conflicts");
@@ -4930,7 +4921,7 @@ var GitWorktreeManagerImpl = class {
4930
4921
  success: true,
4931
4922
  mergedFiles
4932
4923
  };
4933
- logger$4.info({
4924
+ logger$5.info({
4934
4925
  taskId,
4935
4926
  branchName,
4936
4927
  mergedFileCount: mergedFiles.length
@@ -4938,7 +4929,7 @@ var GitWorktreeManagerImpl = class {
4938
4929
  return result;
4939
4930
  }
4940
4931
  async listWorktrees() {
4941
- logger$4.debug({
4932
+ logger$5.debug({
4942
4933
  projectRoot: this._projectRoot,
4943
4934
  baseDirectory: this._baseDirectory
4944
4935
  }, "listWorktrees");
@@ -4962,7 +4953,7 @@ var GitWorktreeManagerImpl = class {
4962
4953
  createdAt
4963
4954
  });
4964
4955
  }
4965
- logger$4.debug({ count: results.length }, "listWorktrees: found worktrees");
4956
+ logger$5.debug({ count: results.length }, "listWorktrees: found worktrees");
4966
4957
  return results;
4967
4958
  }
4968
4959
  getWorktreePath(taskId) {
@@ -4982,7 +4973,7 @@ function createGitWorktreeManager(options) {
4982
4973
 
4983
4974
  //#endregion
4984
4975
  //#region src/cli/commands/merge.ts
4985
- const logger$3 = createLogger("merge-cmd");
4976
+ const logger$4 = createLogger("merge-cmd");
4986
4977
  const MERGE_EXIT_SUCCESS = 0;
4987
4978
  const MERGE_EXIT_CONFLICT = 1;
4988
4979
  const MERGE_EXIT_ERROR = 2;
@@ -5020,7 +5011,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
5020
5011
  projectRoot
5021
5012
  });
5022
5013
  try {
5023
- logger$3.info({
5014
+ logger$4.info({
5024
5015
  taskId,
5025
5016
  targetBranch
5026
5017
  }, "Running conflict detection...");
@@ -5042,7 +5033,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
5042
5033
  } catch (err) {
5043
5034
  const message = err instanceof Error ? err.message : String(err);
5044
5035
  console.error(`Error merging task "${taskId}": ${message}`);
5045
- logger$3.error({
5036
+ logger$4.error({
5046
5037
  taskId,
5047
5038
  err
5048
5039
  }, "merge --task failed");
@@ -5096,7 +5087,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
5096
5087
  error: message
5097
5088
  });
5098
5089
  console.log(` Error for task "${taskId}": ${message}`);
5099
- logger$3.error({
5090
+ logger$4.error({
5100
5091
  taskId,
5101
5092
  err
5102
5093
  }, "merge --all: task failed");
@@ -5149,7 +5140,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {
5149
5140
 
5150
5141
  //#endregion
5151
5142
  //#region src/cli/commands/worktrees.ts
5152
- const logger$2 = createLogger("worktrees-cmd");
5143
+ const logger$3 = createLogger("worktrees-cmd");
5153
5144
  const WORKTREES_EXIT_SUCCESS = 0;
5154
5145
  const WORKTREES_EXIT_ERROR = 1;
5155
5146
  /** Valid task statuses for filtering */
@@ -5276,7 +5267,7 @@ async function listWorktreesAction(options) {
5276
5267
  try {
5277
5268
  worktreeInfos = await manager.listWorktrees();
5278
5269
  } catch (err) {
5279
- logger$2.error({ err }, "Failed to list worktrees");
5270
+ logger$3.error({ err }, "Failed to list worktrees");
5280
5271
  const message = err instanceof Error ? err.message : String(err);
5281
5272
  process.stderr.write(`Error listing worktrees: ${message}\n`);
5282
5273
  return WORKTREES_EXIT_ERROR;
@@ -5303,7 +5294,7 @@ async function listWorktreesAction(options) {
5303
5294
  } catch (err) {
5304
5295
  const message = err instanceof Error ? err.message : String(err);
5305
5296
  process.stderr.write(`Error: ${message}\n`);
5306
- logger$2.error({ err }, "listWorktreesAction failed");
5297
+ logger$3.error({ err }, "listWorktreesAction failed");
5307
5298
  return WORKTREES_EXIT_ERROR;
5308
5299
  }
5309
5300
  }
@@ -5344,7 +5335,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc
5344
5335
 
5345
5336
  //#endregion
5346
5337
  //#region src/cli/commands/brainstorm.ts
5347
- const logger$1 = createLogger("brainstorm-cmd");
5338
+ const logger$2 = createLogger("brainstorm-cmd");
5348
5339
  /**
5349
5340
  * Detect whether the project has existing planning artifacts that indicate
5350
5341
  * this is an amendment session (vs. a brand-new project brainstorm).
@@ -5390,13 +5381,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
5390
5381
  try {
5391
5382
  brief = await readFile(briefPath, "utf-8");
5392
5383
  } catch {
5393
- logger$1.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
5384
+ logger$2.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
5394
5385
  process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
5395
5386
  }
5396
5387
  try {
5397
5388
  prd = await readFile(prdPath, "utf-8");
5398
5389
  } catch {
5399
- logger$1.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
5390
+ logger$2.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
5400
5391
  process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
5401
5392
  }
5402
5393
  return {
@@ -5605,7 +5596,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
5605
5596
  }
5606
5597
  ];
5607
5598
  const defaultDispatch = async (prompt, personaName) => {
5608
- logger$1.debug({
5599
+ logger$2.debug({
5609
5600
  personaName,
5610
5601
  promptLength: prompt.length
5611
5602
  }, "Dispatching to persona (stub mode)");
@@ -5622,7 +5613,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
5622
5613
  };
5623
5614
  } catch (err) {
5624
5615
  const msg = err instanceof Error ? err.message : String(err);
5625
- logger$1.error({
5616
+ logger$2.error({
5626
5617
  err,
5627
5618
  personaName: persona.name
5628
5619
  }, "Persona dispatch failed");
@@ -5774,7 +5765,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
5774
5765
  }
5775
5766
  });
5776
5767
  rl.on("error", (err) => {
5777
- logger$1.error({ err }, "readline error");
5768
+ logger$2.error({ err }, "readline error");
5778
5769
  if (!sessionEnded) endSession(false);
5779
5770
  });
5780
5771
  });
@@ -5813,6 +5804,590 @@ function registerBrainstormCommand(program, _version = "0.0.0", projectRoot = pr
5813
5804
  });
5814
5805
  }
5815
5806
 
5807
//#endregion
//#region src/modules/export/renderers.ts
/**
 * Fields from analysis/product-brief decisions to render, in display order.
 * renderProductBrief() emits one `## ` section per key found in the decision
 * map; keys absent from the map are skipped entirely.
 */
const PRODUCT_BRIEF_FIELDS = [
  "problem_statement",
  "target_users",
  "core_features",
  "success_metrics",
  "constraints",
  "technology_constraints"
];
5818
/**
 * Known acronyms that should appear fully uppercased when they occur as a
 * standalone word inside a label (e.g. 'fr_coverage' → 'FR Coverage',
 * 'api_style' → 'API Style').
 */
const UPPERCASE_ACRONYMS = new Set([
  "fr",
  "nfr",
  "ux",
  "api",
  "db",
  "id",
  "url"
]);
/**
 * Convert a snake_case key into a Title Case display heading.
 * Words listed in UPPERCASE_ACRONYMS (fr, nfr, ux, api, db, id, url) are
 * emitted fully uppercased; every other word is capitalized.
 *
 * @param key - Raw snake_case field/category key
 * @returns Human-readable heading text
 */
function fieldLabel(key) {
  const spaced = key.replace(/_/g, " ");
  return spaced.replace(/\b\w+/g, (word) => {
    const lower = word.toLowerCase();
    if (UPPERCASE_ACRONYMS.has(lower)) {
      return lower.toUpperCase();
    }
    return lower[0].toUpperCase() + lower.slice(1);
  });
}
5842
/**
 * Attempt to parse a JSON string.
 *
 * @param value - Candidate JSON text
 * @returns The parsed value on success; the original string when parsing throws
 */
function safeParseJson(value) {
  let result;
  try {
    result = JSON.parse(value);
  } catch {
    // Not valid JSON — treat the raw string as the value itself.
    result = value;
  }
  return result;
}
5852
/**
 * Render a decision value to a markdown-friendly string.
 * - Arrays → one `- item` bullet per element
 * - Objects → one `- **Label**: value` line per entry
 * - Primitives → plain string
 *
 * @param rawValue - Raw (possibly JSON-encoded) decision value
 * @returns Markdown text for the value
 */
function renderValue(rawValue) {
  const parsed = safeParseJson(rawValue);
  if (Array.isArray(parsed)) {
    const bullets = [];
    for (const item of parsed) bullets.push(`- ${String(item)}`);
    return bullets.join("\n");
  }
  if (parsed !== null && typeof parsed === "object") {
    const lines = [];
    for (const [k, v] of Object.entries(parsed)) {
      lines.push(`- **${fieldLabel(k)}**: ${String(v)}`);
    }
    return lines.join("\n");
  }
  return String(parsed);
}
5864
/**
 * Render analysis-phase decisions as a `product-brief.md` file.
 *
 * `product-brief` category decisions supply the named sections. When no
 * explicit `technology_constraints` value exists, any `technology-constraints`
 * category decisions (stored separately in the decision store) are folded
 * into that section instead.
 *
 * @param decisions - All decisions from the analysis phase (any category)
 * @returns Formatted markdown content for product-brief.md, or '' if no data
 */
function renderProductBrief(decisions) {
  const briefDecisions = [];
  const techConstraintDecisions = [];
  for (const decision of decisions) {
    if (decision.category === "product-brief") briefDecisions.push(decision);
    else if (decision.category === "technology-constraints") techConstraintDecisions.push(decision);
  }
  if (briefDecisions.length === 0 && techConstraintDecisions.length === 0) return "";
  const briefMap = {};
  for (const decision of briefDecisions) briefMap[decision.key] = decision.value;
  // Only synthesize the technology_constraints section when the brief itself
  // did not already provide one.
  if (techConstraintDecisions.length > 0 && briefMap["technology_constraints"] === undefined) {
    const bullets = [];
    for (const decision of techConstraintDecisions) {
      const parsed = safeParseJson(decision.value);
      if (Array.isArray(parsed)) {
        for (const item of parsed) bullets.push(String(item));
      } else {
        bullets.push(String(parsed));
      }
    }
    // Stored re-encoded so renderValue() can bullet it like any other field.
    briefMap["technology_constraints"] = JSON.stringify(bullets);
  }
  const parts = ["# Product Brief", ""];
  for (const field of PRODUCT_BRIEF_FIELDS) {
    const rawValue = briefMap[field];
    if (rawValue === undefined) continue;
    parts.push(`## ${fieldLabel(field)}`, "", renderValue(rawValue), "");
  }
  return parts.join("\n");
}
5897
/**
 * Render planning-phase decisions (and requirements table) as a `prd.md` file.
 *
 * Sections rendered (when data is present):
 * - Project Classification (classification decisions)
 * - Functional Requirements (functional-requirements decisions)
 * - Non-Functional Requirements (non-functional-requirements decisions)
 * - Domain Model (domain-model decisions)
 * - User Stories (user-stories decisions)
 * - Tech Stack (tech-stack decisions)
 * - Out of Scope (out-of-scope decisions)
 *
 * When no FR/NFR decisions exist but requirements-table rows were supplied,
 * a fallback "Requirements (from Requirements Table)" section is emitted.
 *
 * @param decisions - All decisions from the planning phase
 * @param requirements - Requirements records from the requirements table (optional)
 * @returns Formatted markdown content for prd.md
 */
function renderPrd(decisions, requirements = []) {
  if (decisions.length === 0) return "";
  const parts = ["# Product Requirements Document", ""];
  // Classification: arrays become bullets, scalars a single bold line.
  const classificationDecisions = decisions.filter((d) => d.category === "classification");
  if (classificationDecisions.length > 0) {
    parts.push("## Project Classification");
    parts.push("");
    for (const d of classificationDecisions) {
      const parsed = safeParseJson(d.value);
      if (Array.isArray(parsed)) {
        parts.push(`**${fieldLabel(d.key)}**:`);
        for (const item of parsed) parts.push(`- ${String(item)}`);
      } else parts.push(`**${fieldLabel(d.key)}**: ${String(parsed)}`);
    }
    parts.push("");
  }
  // Functional requirements: structured objects get id/priority/AC rendering;
  // anything else falls back to the generic key/value bullet.
  const frDecisions = decisions.filter((d) => d.category === "functional-requirements");
  if (frDecisions.length > 0) {
    parts.push("## Functional Requirements");
    parts.push("");
    for (const d of frDecisions) {
      const parsed = safeParseJson(d.value);
      if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
        const fr = parsed;
        const id = fr.id ?? d.key;
        const priority = fr.priority ? ` [${fr.priority.toUpperCase()}]` : "";
        parts.push(`- **${id}**${priority}: ${fr.description ?? d.value}`);
        if (fr.acceptance_criteria && fr.acceptance_criteria.length > 0) for (const ac of fr.acceptance_criteria) parts.push(` - ${ac}`);
      } else parts.push(`- **${d.key}**: ${renderValue(d.value)}`);
    }
    parts.push("");
  }
  // Non-functional requirements: same shape as FRs but tagged with category.
  const nfrDecisions = decisions.filter((d) => d.category === "non-functional-requirements");
  if (nfrDecisions.length > 0) {
    parts.push("## Non-Functional Requirements");
    parts.push("");
    for (const d of nfrDecisions) {
      const parsed = safeParseJson(d.value);
      if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
        const nfr = parsed;
        const id = nfr.id ?? d.key;
        const cat = nfr.category ? ` [${nfr.category.toUpperCase()}]` : "";
        parts.push(`- **${id}**${cat}: ${nfr.description ?? d.value}`);
      } else parts.push(`- **${d.key}**: ${renderValue(d.value)}`);
    }
    parts.push("");
  }
  const domainDecisions = decisions.filter((d) => d.category === "domain-model");
  if (domainDecisions.length > 0) {
    parts.push("## Domain Model");
    parts.push("");
    for (const d of domainDecisions) parts.push(renderValue(d.value));
    parts.push("");
  }
  // User stories: titled stories become `###` subsections; untitled or
  // unstructured values are rendered generically.
  const userStoryDecisions = decisions.filter((d) => d.category === "user-stories");
  if (userStoryDecisions.length > 0) {
    parts.push("## User Stories");
    parts.push("");
    for (const d of userStoryDecisions) {
      const parsed = safeParseJson(d.value);
      if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
        const us = parsed;
        if (us.title) {
          parts.push(`### ${us.title}`);
          parts.push("");
          if (us.description) {
            parts.push(us.description);
            parts.push("");
          }
        } else {
          parts.push(renderValue(d.value));
          parts.push("");
        }
      } else {
        parts.push(renderValue(d.value));
        parts.push("");
      }
    }
  }
  // Tech stack: the aggregate 'tech_stack' object is expanded per entry;
  // other keys in the category are rendered as single bullets.
  const techStackDecisions = decisions.filter((d) => d.category === "tech-stack");
  if (techStackDecisions.length > 0) {
    parts.push("## Tech Stack");
    parts.push("");
    for (const d of techStackDecisions) if (d.key === "tech_stack") {
      const parsed = safeParseJson(d.value);
      if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) for (const [k, v] of Object.entries(parsed)) parts.push(`- **${fieldLabel(k)}**: ${String(v)}`);
      else parts.push(`- **${fieldLabel(d.key)}**: ${d.value}`);
    } else parts.push(`- **${fieldLabel(d.key)}**: ${d.value}`);
    parts.push("");
  }
  const outOfScopeDecisions = decisions.filter((d) => d.category === "out-of-scope");
  if (outOfScopeDecisions.length > 0) {
    parts.push("## Out of Scope");
    parts.push("");
    for (const d of outOfScopeDecisions) parts.push(renderValue(d.value));
    parts.push("");
  }
  // Fallback: render requirements-table rows only when no FR/NFR decisions
  // produced sections above (avoids duplicating the same requirements twice).
  const functionalReqs = requirements.filter((r) => r.type === "functional");
  const nonFunctionalReqs = requirements.filter((r) => r.type === "non_functional");
  if ((functionalReqs.length > 0 || nonFunctionalReqs.length > 0) && frDecisions.length === 0 && nfrDecisions.length === 0) {
    parts.push("## Requirements (from Requirements Table)");
    parts.push("");
    if (functionalReqs.length > 0) {
      parts.push("### Functional Requirements");
      parts.push("");
      for (const r of functionalReqs) {
        const priority = r.priority ? ` [${r.priority.toUpperCase()}]` : "";
        parts.push(`- ${r.source ?? ""}${priority}: ${r.description}`);
      }
      parts.push("");
    }
    if (nonFunctionalReqs.length > 0) {
      parts.push("### Non-Functional Requirements");
      parts.push("");
      for (const r of nonFunctionalReqs) {
        const priority = r.priority ? ` [${r.priority.toUpperCase()}]` : "";
        parts.push(`- ${priority}: ${r.description}`);
      }
      parts.push("");
    }
  }
  return parts.join("\n");
}
6036
/**
 * Render solutioning-phase architecture decisions as an `architecture.md` file.
 *
 * Emits a single `## Architecture Decisions` section. Each decision becomes
 * `**key**: value`; object values expand to ` - *Label*: value` sub-bullets,
 * array values to ` - item` sub-bullets, and scalar values append the
 * rationale in italics when one is recorded. The heading pattern matches the
 * regex used by `seedMethodologyContext()` so the exported file can be
 * round-tripped back into the decision store.
 *
 * @param decisions - All decisions from the solutioning phase (any category)
 * @returns Formatted markdown content for architecture.md, or '' if no data
 */
function renderArchitecture(decisions) {
  const archDecisions = decisions.filter((d) => d.category === "architecture");
  if (archDecisions.length === 0) return "";
  const parts = [
    "# Architecture",
    "",
    "## Architecture Decisions",
    ""
  ];
  for (const d of archDecisions) {
    const value = safeParseJson(d.value);
    if (Array.isArray(value)) {
      parts.push(`**${d.key}**:`);
      parts.push(value.map((item) => ` - ${String(item)}`).join("\n"));
    } else if (typeof value === "object" && value !== null) {
      parts.push(`**${d.key}**:`);
      const entryLines = Object.entries(value).map(([k, v]) => ` - *${fieldLabel(k)}*: ${String(v)}`);
      parts.push(entryLines.join("\n"));
    } else {
      const rendered = String(value);
      // Scalars carry the rationale inline when present.
      parts.push(d.rationale ? `**${d.key}**: ${rendered} *(${d.rationale})*` : `**${d.key}**: ${rendered}`);
    }
  }
  parts.push("");
  return parts.join("\n");
}
6073
/**
 * Render solutioning-phase epics and stories decisions as an `epics.md` file.
 *
 * Output format:
 * ```
 * ## Epic 1: Title
 * Description
 *
 * ### Story 1-1: Title
 * **Priority**: must
 * **Description**: ...
 * **Acceptance Criteria**:
 * - AC1
 * - AC2
 * ```
 *
 * The `## Epic N:` heading pattern is parsed by `parseEpicShards()` in
 * `seed-methodology-context.ts`, satisfying the round-trip contract (AC5).
 *
 * Stories are associated with their parent epic by the numeric prefix of the
 * story key (e.g., story key `2-3` → epic 2).
 *
 * @param decisions - All decisions from the solutioning phase (any category)
 * @returns Formatted markdown content for epics.md, or '' if no data
 */
function renderEpics(decisions) {
  const epicDecisions = decisions.filter((d) => d.category === "epics");
  const storyDecisions = decisions.filter((d) => d.category === "stories");
  if (epicDecisions.length === 0 && storyDecisions.length === 0) return "";
  // epicMap: epic number → { num, title, description }. Keys not matching
  // 'epic-N' are silently skipped.
  const epicMap = new Map();
  for (const d of epicDecisions) {
    const match = /^epic-(\d+)$/i.exec(d.key);
    if (match === null) continue;
    const epicNum = parseInt(match[1], 10);
    const parsed = safeParseJson(d.value);
    if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
      const p = parsed;
      epicMap.set(epicNum, {
        num: epicNum,
        title: p.title ?? `Epic ${epicNum}`,
        description: p.description ?? ""
      });
    } else epicMap.set(epicNum, {
      num: epicNum,
      title: String(parsed),
      description: ""
    });
  }
  // storyMap: epic number → stories. Story keys must start with 'E-S' digits
  // (e.g. '2-3'); unparseable keys are skipped.
  const storyMap = new Map();
  for (const d of storyDecisions) {
    const parsed = safeParseJson(d.value);
    let story;
    if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
      const p = parsed;
      // Structured story: prefer the embedded key over the decision key.
      const storyKey = p.key ?? d.key;
      const keyMatch = /^(\d+)-(\d+)/.exec(storyKey);
      if (keyMatch === null) continue;
      const epicNum = parseInt(keyMatch[1], 10);
      const storyNum = parseInt(keyMatch[2], 10);
      story = {
        key: storyKey,
        epicNum,
        storyNum,
        title: p.title ?? `Story ${storyKey}`,
        description: p.description ?? "",
        ac: p.acceptance_criteria ?? p.ac ?? [],
        priority: p.priority ?? "must"
      };
    } else {
      // Unstructured story: the whole value becomes the description.
      const storyKey = d.key;
      const keyMatch = /^(\d+)-(\d+)/.exec(storyKey);
      if (keyMatch === null) continue;
      const epicNum = parseInt(keyMatch[1], 10);
      const storyNum = parseInt(keyMatch[2], 10);
      story = {
        key: storyKey,
        epicNum,
        storyNum,
        title: `Story ${storyKey}`,
        description: String(parsed),
        ac: [],
        priority: "must"
      };
    }
    if (!storyMap.has(story.epicNum)) storyMap.set(story.epicNum, []);
    storyMap.get(story.epicNum).push(story);
  }
  // Stories render in numeric order within each epic.
  for (const stories of storyMap.values()) stories.sort((a, b) => a.storyNum - b.storyNum);
  // Include epics referenced only by stories (no epic-N decision of their own).
  const allEpicNums = new Set([...epicMap.keys(), ...storyMap.keys()]);
  const sortedEpicNums = [...allEpicNums].sort((a, b) => a - b);
  const parts = ["# Epics and Stories", ""];
  for (const epicNum of sortedEpicNums) {
    const epic = epicMap.get(epicNum);
    const epicTitle = epic?.title ?? `Epic ${epicNum}`;
    const epicDescription = epic?.description ?? "";
    parts.push(`## Epic ${epicNum}: ${epicTitle}`);
    parts.push("");
    if (epicDescription) {
      parts.push(epicDescription);
      parts.push("");
    }
    const stories = storyMap.get(epicNum) ?? [];
    for (const story of stories) {
      parts.push(`### Story ${story.key}: ${story.title}`);
      parts.push("");
      parts.push(`**Priority**: ${story.priority}`);
      if (story.description) parts.push(`**Description**: ${story.description}`);
      if (story.ac.length > 0) {
        parts.push("**Acceptance Criteria**:");
        for (const ac of story.ac) parts.push(`- ${ac}`);
      }
      parts.push("");
    }
  }
  return parts.join("\n");
}
6189
/**
 * Render solutioning-phase readiness-findings decisions as a `readiness-report.md`.
 *
 * Summarizes finding counts by severity, emits an overall PASS/FAIL verdict
 * (FAIL when any 'blocker' or 'major' finding exists), then groups findings
 * under per-category headings in a fixed category order.
 *
 * @param decisions - All decisions from the solutioning phase (any category)
 * @returns Formatted markdown content for readiness-report.md, or '' if no data
 */
function renderReadinessReport(decisions) {
  const findingDecisions = decisions.filter((d) => d.category === "readiness-findings");
  if (findingDecisions.length === 0) return "";
  // Normalize each decision into a finding record with defaulted fields.
  const findings = findingDecisions.map((d) => {
    const parsed = safeParseJson(d.value);
    const isRecord = typeof parsed === "object" && parsed !== null && !Array.isArray(parsed);
    if (!isRecord) {
      return {
        category: "general",
        severity: "minor",
        description: String(parsed),
        affected_items: []
      };
    }
    return {
      category: parsed.category ?? "general",
      severity: parsed.severity ?? "minor",
      description: parsed.description ?? String(parsed),
      affected_items: parsed.affected_items ?? []
    };
  });
  const blockerCount = findings.filter((f) => f.severity === "blocker").length;
  const majorCount = findings.filter((f) => f.severity === "major").length;
  const minorCount = findings.filter((f) => f.severity === "minor").length;
  // Any blocker or major finding fails the readiness check.
  const verdict = blockerCount > 0 || majorCount > 0 ? "FAIL" : "PASS";
  const parts = [
    "# Readiness Report",
    "",
    `**Overall Verdict**: ${verdict}`,
    "",
    `**Total Findings**: ${findings.length}`,
    `**Blockers**: ${blockerCount}`,
    `**Major**: ${majorCount}`,
    `**Minor**: ${minorCount}`,
    ""
  ];
  const byCategory = new Map();
  for (const finding of findings) {
    const bucket = byCategory.get(finding.category);
    if (bucket === undefined) byCategory.set(finding.category, [finding]);
    else bucket.push(finding);
  }
  // Known categories render in this fixed order; unknown ones go last.
  const categoryOrder = [
    "fr_coverage",
    "architecture_compliance",
    "story_quality",
    "ux_alignment",
    "dependency_validity",
    "general"
  ];
  const rank = (name) => {
    const idx = categoryOrder.indexOf(name);
    return idx === -1 ? 999 : idx;
  };
  const sortedCategories = [...byCategory.keys()].sort((a, b) => rank(a) - rank(b));
  for (const category of sortedCategories) {
    parts.push(`## ${fieldLabel(category)}`);
    parts.push("");
    for (const finding of byCategory.get(category)) {
      parts.push(`- [${finding.severity.toUpperCase()}] ${finding.description}`);
      if (finding.affected_items.length > 0) parts.push(` - *Affected*: ${finding.affected_items.join(", ")}`);
    }
    parts.push("");
  }
  return parts.join("\n");
}
6261

//#endregion
//#region src/cli/commands/export.ts
// Module-scoped structured logger for the export command.
const logger$1 = createLogger("export-cmd");
6265
/**
 * Execute the export action: read the decisions of one pipeline run from the
 * SQLite decision store and write markdown artifacts (product-brief.md,
 * prd.md, architecture.md, epics.md, readiness-report.md) to the output dir.
 * Returns an exit code (0 = success, 1 = error).
 */
async function runExportAction(options) {
  const { runId, outputDir, projectRoot, outputFormat } = options;
  let dbWrapper;
  try {
    // The decision store lives under the resolved main repo root, which may
    // differ from projectRoot (e.g. when running inside a worktree).
    const dbRoot = await resolveMainRepoRoot(projectRoot);
    const dbPath = join$1(dbRoot, ".substrate", "substrate.db");
    if (!existsSync$1(dbPath)) {
      const errorMsg = `Decision store not initialized. Run 'substrate init' first.`;
      // JSON errors go to stdout (machine-readable); human errors to stderr.
      if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: errorMsg }) + "\n");
      else process.stderr.write(`Error: ${errorMsg}\n`);
      return 1;
    }
    dbWrapper = new DatabaseWrapper(dbPath);
    dbWrapper.open();
    const db = dbWrapper.db;
    // Resolve the run: an explicit, non-empty --run-id wins; otherwise fall
    // back to the most recent run.
    let run;
    if (runId !== void 0 && runId !== "") run = db.prepare("SELECT * FROM pipeline_runs WHERE id = ?").get(runId);
    else run = getLatestRun(db);
    if (run === void 0) {
      const errorMsg = runId !== void 0 ? `Pipeline run '${runId}' not found.` : "No pipeline runs found. Run `substrate run` first.";
      if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: errorMsg }) + "\n");
      else process.stderr.write(`Error: ${errorMsg}\n`);
      return 1;
    }
    const activeRunId = run.id;
    // A relative --output-dir is resolved against the project root.
    const resolvedOutputDir = isAbsolute(outputDir) ? outputDir : join$1(projectRoot, outputDir);
    if (!existsSync$1(resolvedOutputDir)) mkdirSync$1(resolvedOutputDir, { recursive: true });
    const filesWritten = [];
    const phasesExported = [];
    // Analysis phase → product-brief.md (skipped when the renderer yields '').
    const analysisDecisions = getDecisionsByPhaseForRun(db, activeRunId, "analysis");
    if (analysisDecisions.length > 0) {
      const content = renderProductBrief(analysisDecisions);
      if (content !== "") {
        const filePath = join$1(resolvedOutputDir, "product-brief.md");
        writeFileSync$1(filePath, content, "utf-8");
        filesWritten.push(filePath);
        phasesExported.push("analysis");
        if (outputFormat === "human") process.stdout.write(` Written: ${filePath}\n`);
      }
    }
    // Planning phase → prd.md, including this run's requirements-table rows.
    const planningDecisions = getDecisionsByPhaseForRun(db, activeRunId, "planning");
    if (planningDecisions.length > 0) {
      const requirements = listRequirements(db).filter((r) => r.pipeline_run_id === activeRunId);
      const content = renderPrd(planningDecisions, requirements);
      if (content !== "") {
        const filePath = join$1(resolvedOutputDir, "prd.md");
        writeFileSync$1(filePath, content, "utf-8");
        filesWritten.push(filePath);
        if (!phasesExported.includes("planning")) phasesExported.push("planning");
        if (outputFormat === "human") process.stdout.write(` Written: ${filePath}\n`);
      }
    }
    // Solutioning phase → up to three artifacts from the same decision set.
    const solutioningDecisions = getDecisionsByPhaseForRun(db, activeRunId, "solutioning");
    if (solutioningDecisions.length > 0) {
      const archContent = renderArchitecture(solutioningDecisions);
      if (archContent !== "") {
        const filePath = join$1(resolvedOutputDir, "architecture.md");
        writeFileSync$1(filePath, archContent, "utf-8");
        filesWritten.push(filePath);
        if (!phasesExported.includes("solutioning")) phasesExported.push("solutioning");
        if (outputFormat === "human") process.stdout.write(` Written: ${filePath}\n`);
      }
      const epicsContent = renderEpics(solutioningDecisions);
      if (epicsContent !== "") {
        const filePath = join$1(resolvedOutputDir, "epics.md");
        writeFileSync$1(filePath, epicsContent, "utf-8");
        filesWritten.push(filePath);
        if (!phasesExported.includes("solutioning")) phasesExported.push("solutioning");
        if (outputFormat === "human") process.stdout.write(` Written: ${filePath}\n`);
      }
      const readinessContent = renderReadinessReport(solutioningDecisions);
      if (readinessContent !== "") {
        const filePath = join$1(resolvedOutputDir, "readiness-report.md");
        writeFileSync$1(filePath, readinessContent, "utf-8");
        filesWritten.push(filePath);
        if (!phasesExported.includes("solutioning")) phasesExported.push("solutioning");
        if (outputFormat === "human") process.stdout.write(` Written: ${filePath}\n`);
      }
    }
    // Final summary: structured JSON, or a human-readable recap that also
    // lists phases that produced no data.
    if (outputFormat === "json") {
      const result = {
        files_written: filesWritten,
        run_id: activeRunId,
        phases_exported: phasesExported
      };
      process.stdout.write(JSON.stringify(result) + "\n");
    } else {
      if (filesWritten.length === 0) process.stdout.write(`No data found for run ${activeRunId}. The pipeline may not have completed any phases.\n`);
      else process.stdout.write(`\nExported ${filesWritten.length} file(s) from run ${activeRunId}.\n`);
      const skippedPhases = [];
      if (!phasesExported.includes("analysis")) skippedPhases.push("analysis");
      if (!phasesExported.includes("planning")) skippedPhases.push("planning");
      if (!phasesExported.includes("solutioning")) skippedPhases.push("solutioning");
      if (skippedPhases.length > 0) process.stdout.write(`Phases with no data (skipped): ${skippedPhases.join(", ")}\n`);
    }
    return 0;
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: msg }) + "\n");
    else process.stderr.write(`Error: ${msg}\n`);
    logger$1.error({ err }, "export action failed");
    return 1;
  } finally {
    // Best-effort close; a close failure must not mask the real exit code.
    if (dbWrapper !== void 0) try {
      dbWrapper.close();
    } catch {}
  }
}
6377
/**
 * Register the `export` subcommand on the CLI program.
 *
 * Unknown --output-format values produce a warning and fall back to 'human'.
 * The resolved exit code from runExportAction() is stored on process.exitCode.
 *
 * @param program - Commander program to attach the command to
 * @param _version - CLI version (unused; kept for registrar signature parity)
 * @param projectRoot - Default project root (defaults to process.cwd())
 */
function registerExportCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
  const command = program.command("export");
  command.description("Export decision store contents as human-readable markdown files");
  command.option("--run-id <id>", "Pipeline run ID to export (defaults to latest run)");
  command.option("--output-dir <path>", "Directory to write exported files to", "_bmad-output/planning-artifacts/");
  command.option("--project-root <path>", "Project root directory", projectRoot);
  command.option("--output-format <format>", "Output format: human (default) or json", "human");
  command.action(async (opts) => {
    const isKnownFormat = opts.outputFormat === "json" || opts.outputFormat === "human";
    if (!isKnownFormat) process.stderr.write(`Warning: unknown --output-format '${opts.outputFormat}', defaulting to 'human'\n`);
    const outputFormat = opts.outputFormat === "json" ? "json" : "human";
    process.exitCode = await runExportAction({
      runId: opts.runId,
      outputDir: opts.outputDir,
      projectRoot: opts.projectRoot,
      outputFormat
    });
  });
}
6390
+
5816
6391
  //#endregion
5817
6392
  //#region src/cli/index.ts
5818
6393
  process.setMaxListeners(20);
@@ -5863,6 +6438,7 @@ async function createProgram() {
5863
6438
  registerMergeCommand(program);
5864
6439
  registerWorktreesCommand(program, version);
5865
6440
  registerBrainstormCommand(program, version);
6441
+ registerExportCommand(program, version);
5866
6442
  registerUpgradeCommand(program);
5867
6443
  return program;
5868
6444
  }