substrate-ai 0.2.2 → 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -2,9 +2,9 @@
2
2
  import { createLogger, deepMask } from "../logger-C6n1g8uP.js";
3
3
  import { AdapterRegistry, createEventBus } from "../event-bus-J-bw-pkp.js";
4
4
  import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema, SUPPORTED_CONFIG_FORMAT_VERSIONS, SubstrateConfigSchema, defaultConfigMigrator } from "../version-manager-impl-BpVx2DkY.js";
5
- import { DatabaseWrapper, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, createContextCompiler, createDispatcher, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getSubstrateDefaultSettings, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-DlOWhkIF.js";
5
+ import { DatabaseWrapper, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, createContextCompiler, createDispatcher, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getSubstrateDefaultSettings, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-D3ZscMlL.js";
6
6
  import { ConfigError, ConfigIncompatibleFormatError } from "../errors-BPqtzQ4U.js";
7
- import { addTokenUsage, createDecision, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, updatePipelineRun } from "../decisions-BBLMsN_c.js";
7
+ import { addTokenUsage, createDecision, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-DNYByk0U.js";
8
8
  import { compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../metrics-BSg8VIHd.js";
9
9
  import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-BtI5eNoN.js";
10
10
  import { registerUpgradeCommand } from "../upgrade-rV26kdh3.js";
@@ -17,8 +17,9 @@ import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync, readdirSync, re
17
17
  import yaml from "js-yaml";
18
18
  import { createRequire as createRequire$1 } from "node:module";
19
19
  import * as path$1 from "node:path";
20
+ import { isAbsolute, join as join$1 } from "node:path";
20
21
  import BetterSqlite3 from "better-sqlite3";
21
- import { existsSync as existsSync$1 } from "node:fs";
22
+ import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
22
23
  import { createInterface } from "node:readline";
23
24
  import { homedir } from "os";
24
25
  import { access as access$1 } from "node:fs/promises";
@@ -328,7 +329,7 @@ const DEFAULT_CONFIG = {
328
329
 
329
330
  //#endregion
330
331
  //#region src/cli/commands/init.ts
331
- const logger$16 = createLogger("init");
332
+ const logger$17 = createLogger("init");
332
333
  const __dirname = dirname(new URL(import.meta.url).pathname);
333
334
  const INIT_EXIT_SUCCESS = 0;
334
335
  const INIT_EXIT_ERROR = 1;
@@ -349,7 +350,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
349
350
  const version = resolveBmadMethodVersion();
350
351
  if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
351
352
  process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
352
- logger$16.info({
353
+ logger$17.info({
353
354
  version,
354
355
  dest: bmadDest
355
356
  }, "Scaffolding BMAD framework");
@@ -359,7 +360,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
359
360
  const destDir = join(bmadDest, dir);
360
361
  mkdirSync(destDir, { recursive: true });
361
362
  cpSync(srcDir, destDir, { recursive: true });
362
- logger$16.info({
363
+ logger$17.info({
363
364
  dir,
364
365
  dest: destDir
365
366
  }, "Scaffolded BMAD framework directory");
@@ -378,7 +379,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
378
379
  "document_output_language: English"
379
380
  ].join("\n") + "\n";
380
381
  await writeFile(configFile, configStub, "utf8");
381
- logger$16.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
382
+ logger$17.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
382
383
  }
383
384
  }
384
385
  const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
@@ -393,7 +394,7 @@ async function scaffoldClaudeMd(projectRoot) {
393
394
  try {
394
395
  sectionContent = await readFile(templatePath, "utf8");
395
396
  } catch {
396
- logger$16.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
397
+ logger$17.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
397
398
  return;
398
399
  }
399
400
  if (!sectionContent.endsWith("\n")) sectionContent += "\n";
@@ -411,7 +412,7 @@ async function scaffoldClaudeMd(projectRoot) {
411
412
  newContent = existingContent + separator + sectionContent;
412
413
  }
413
414
  await writeFile(claudeMdPath, newContent, "utf8");
414
- logger$16.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
415
+ logger$17.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
415
416
  }
416
417
  async function scaffoldStatuslineScript(projectRoot) {
417
418
  const pkgRoot = findPackageRoot(__dirname);
@@ -422,7 +423,7 @@ async function scaffoldStatuslineScript(projectRoot) {
422
423
  try {
423
424
  content = await readFile(templatePath, "utf8");
424
425
  } catch {
425
- logger$16.warn({ templatePath }, "statusline.sh template not found; skipping");
426
+ logger$17.warn({ templatePath }, "statusline.sh template not found; skipping");
426
427
  return;
427
428
  }
428
429
  const claudeDir = join(projectRoot, ".claude");
@@ -430,7 +431,7 @@ async function scaffoldStatuslineScript(projectRoot) {
430
431
  mkdirSync(claudeDir, { recursive: true });
431
432
  await writeFile(statuslinePath, content, "utf8");
432
433
  chmodSync(statuslinePath, 493);
433
- logger$16.info({ statuslinePath }, "Wrote .claude/statusline.sh");
434
+ logger$17.info({ statuslinePath }, "Wrote .claude/statusline.sh");
434
435
  }
435
436
  async function scaffoldClaudeSettings(projectRoot) {
436
437
  const claudeDir = join(projectRoot, ".claude");
@@ -446,7 +447,7 @@ async function scaffoldClaudeSettings(projectRoot) {
446
447
  if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
447
448
  mkdirSync(claudeDir, { recursive: true });
448
449
  await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
449
- logger$16.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
450
+ logger$17.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
450
451
  }
451
452
  function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
452
453
  try {
@@ -516,7 +517,7 @@ async function compileBmadAgents(bmadDir) {
516
517
  writeFileSync(mdPath, result.xml, "utf-8");
517
518
  compiled++;
518
519
  } catch (compileErr) {
519
- logger$16.debug({
520
+ logger$17.debug({
520
521
  err: compileErr,
521
522
  file
522
523
  }, "Failed to compile agent YAML");
@@ -537,9 +538,9 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
537
538
  const _require = createRequire$1(join(__dirname, "synthetic.js"));
538
539
  try {
539
540
  const compiledCount = await compileBmadAgents(bmadDir);
540
- if (compiledCount > 0) logger$16.info({ compiledCount }, "Compiled agent YAML files to MD");
541
+ if (compiledCount > 0) logger$17.info({ compiledCount }, "Compiled agent YAML files to MD");
541
542
  } catch (compileErr) {
542
- logger$16.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
543
+ logger$17.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
543
544
  }
544
545
  const { AgentCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "agent-command-generator.js"));
545
546
  const { WorkflowCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "workflow-command-generator.js"));
@@ -551,7 +552,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
551
552
  const manifestGen = new ManifestGenerator();
552
553
  await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
553
554
  } catch (manifestErr) {
554
- logger$16.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
555
+ logger$17.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
555
556
  }
556
557
  const commandsDir = join(projectRoot, ".claude", "commands");
557
558
  mkdirSync(commandsDir, { recursive: true });
@@ -567,7 +568,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
567
568
  const taskToolCount = await taskToolGen.writeDashArtifacts(commandsDir, taskToolArtifacts);
568
569
  const total = agentCount + workflowCount + taskToolCount;
569
570
  if (outputFormat !== "json") process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
570
- logger$16.info({
571
+ logger$17.info({
571
572
  agentCount,
572
573
  workflowCount,
573
574
  taskToolCount,
@@ -577,7 +578,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
577
578
  } catch (err) {
578
579
  const msg = err instanceof Error ? err.message : String(err);
579
580
  if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
580
- logger$16.warn({ err }, "scaffoldClaudeCommands failed; init continues");
581
+ logger$17.warn({ err }, "scaffoldClaudeCommands failed; init continues");
581
582
  }
582
583
  }
583
584
  const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
@@ -651,7 +652,7 @@ async function runInitAction(options) {
651
652
  discoveryReport = await registry.discoverAndRegister();
652
653
  } catch (err) {
653
654
  const message = err instanceof Error ? err.message : String(err);
654
- logger$16.error({ err }, "Adapter discovery failed");
655
+ logger$17.error({ err }, "Adapter discovery failed");
655
656
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
656
657
  else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
657
658
  return INIT_EXIT_ERROR;
@@ -700,12 +701,12 @@ async function runInitAction(options) {
700
701
  return INIT_EXIT_ERROR;
701
702
  }
702
703
  if (force && existsSync(localManifest)) {
703
- logger$16.info({ pack: packName }, "Replacing existing pack with bundled version");
704
+ logger$17.info({ pack: packName }, "Replacing existing pack with bundled version");
704
705
  process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
705
706
  }
706
707
  mkdirSync(dirname(packPath), { recursive: true });
707
708
  cpSync(bundledPackPath, packPath, { recursive: true });
708
- logger$16.info({
709
+ logger$17.info({
709
710
  pack: packName,
710
711
  dest: packPath
711
712
  }, "Scaffolded methodology pack");
@@ -740,20 +741,25 @@ async function runInitAction(options) {
740
741
  }, "json", true) + "\n");
741
742
  else {
742
743
  process.stdout.write(`\n Substrate initialized successfully!\n\n`);
743
- process.stdout.write(` Created:\n`);
744
- process.stdout.write(` ${configPath}\n`);
745
- process.stdout.write(` ${routingPolicyPath}\n`);
746
- process.stdout.write(` ${dbPath}\n`);
747
- process.stdout.write(`\n ${successMsg}\n`);
748
- const prefix = process.env["npm_command"] === "exec" ? "npx " : "";
749
- process.stdout.write(`\n Next steps:\n 1. Run \`${prefix}substrate adapters check\` to verify your setup\n 2. Run \`${prefix}substrate config show\` to review your configuration\n 3. Run \`${prefix}substrate run --from analysis --concept "your idea"\` to start the pipeline\n`);
744
+ const healthRows = buildAdapterHealthRows(discoveryReport.results);
745
+ if (healthRows.length > 0) {
746
+ process.stdout.write(` Agents:\n`);
747
+ const table = formatAdapterHealthTable(healthRows);
748
+ for (const line of table.split("\n")) process.stdout.write(` ${line}\n`);
749
+ process.stdout.write("\n");
750
+ }
751
+ process.stdout.write(` Scaffolded:\n`);
752
+ process.stdout.write(` CLAUDE.md pipeline instructions for Claude Code\n`);
753
+ process.stdout.write(` .claude/commands/ /substrate-run, /substrate-supervisor, /substrate-metrics\n`);
754
+ process.stdout.write(` .substrate/ config, database, routing policy\n`);
755
+ process.stdout.write("\n Next steps:\n 1. Start a Claude Code session in this project\n 2. Tell Claude: \"Run the substrate pipeline\"\n 3. Or use the /substrate-run slash command for a guided run\n");
750
756
  }
751
757
  return INIT_EXIT_SUCCESS;
752
758
  } catch (err) {
753
759
  const msg = err instanceof Error ? err.message : String(err);
754
760
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
755
761
  else process.stderr.write(`Error: ${msg}\n`);
756
- logger$16.error({ err }, "init failed");
762
+ logger$17.error({ err }, "init failed");
757
763
  return INIT_EXIT_ERROR;
758
764
  }
759
765
  }
@@ -799,7 +805,7 @@ function formatUnsupportedVersionError(formatType, version, supported) {
799
805
 
800
806
  //#endregion
801
807
  //#region src/modules/config/config-system-impl.ts
802
- const logger$15 = createLogger("config");
808
+ const logger$16 = createLogger("config");
803
809
  function deepMerge(base, override) {
804
810
  const result = { ...base };
805
811
  for (const [key, val] of Object.entries(override)) if (val !== null && val !== void 0 && typeof val === "object" && !Array.isArray(val) && typeof result[key] === "object" && result[key] !== null && !Array.isArray(result[key])) result[key] = deepMerge(result[key], val);
@@ -844,7 +850,7 @@ function readEnvOverrides() {
844
850
  }
845
851
  const parsed = PartialSubstrateConfigSchema.safeParse(overrides);
846
852
  if (!parsed.success) {
847
- logger$15.warn({ errors: parsed.error.issues }, "Invalid environment variable overrides ignored");
853
+ logger$16.warn({ errors: parsed.error.issues }, "Invalid environment variable overrides ignored");
848
854
  return {};
849
855
  }
850
856
  return parsed.data;
@@ -908,7 +914,7 @@ var ConfigSystemImpl = class {
908
914
  throw new ConfigError(`Configuration validation failed:\n${issues}`, { issues: result.error.issues });
909
915
  }
910
916
  this._config = result.data;
911
- logger$15.debug("Configuration loaded successfully");
917
+ logger$16.debug("Configuration loaded successfully");
912
918
  }
913
919
  getConfig() {
914
920
  if (this._config === null) throw new ConfigError("Configuration has not been loaded. Call load() before getConfig().", {});
@@ -971,7 +977,7 @@ var ConfigSystemImpl = class {
971
977
  if (version !== void 0 && typeof version === "string" && !isVersionSupported(version, SUPPORTED_CONFIG_FORMAT_VERSIONS)) if (defaultConfigMigrator.canMigrate(version, CURRENT_CONFIG_FORMAT_VERSION)) {
972
978
  const migrationOutput = defaultConfigMigrator.migrate(rawObj, version, CURRENT_CONFIG_FORMAT_VERSION, filePath);
973
979
  if (migrationOutput.result.success) {
974
- logger$15.info({
980
+ logger$16.info({
975
981
  from: version,
976
982
  to: CURRENT_CONFIG_FORMAT_VERSION,
977
983
  backup: migrationOutput.result.backupPath
@@ -1014,7 +1020,7 @@ function createConfigSystem(options = {}) {
1014
1020
 
1015
1021
  //#endregion
1016
1022
  //#region src/cli/commands/config.ts
1017
- const logger$14 = createLogger("config-cmd");
1023
+ const logger$15 = createLogger("config-cmd");
1018
1024
  const CONFIG_EXIT_SUCCESS = 0;
1019
1025
  const CONFIG_EXIT_ERROR = 1;
1020
1026
  const CONFIG_EXIT_INVALID = 2;
@@ -1040,7 +1046,7 @@ async function runConfigShow(opts = {}) {
1040
1046
  return CONFIG_EXIT_INVALID;
1041
1047
  }
1042
1048
  const message = err instanceof Error ? err.message : String(err);
1043
- logger$14.error({ err }, "Failed to load configuration");
1049
+ logger$15.error({ err }, "Failed to load configuration");
1044
1050
  process.stderr.write(` Error loading configuration: ${message}\n`);
1045
1051
  return CONFIG_EXIT_ERROR;
1046
1052
  }
@@ -1114,7 +1120,7 @@ async function runConfigExport(opts = {}) {
1114
1120
  return CONFIG_EXIT_INVALID;
1115
1121
  }
1116
1122
  const message = err instanceof Error ? err.message : String(err);
1117
- logger$14.error({ err }, "Failed to load configuration");
1123
+ logger$15.error({ err }, "Failed to load configuration");
1118
1124
  process.stderr.write(`Error loading configuration: ${message}\n`);
1119
1125
  return CONFIG_EXIT_ERROR;
1120
1126
  }
@@ -1268,7 +1274,7 @@ function registerConfigCommand(program, _version) {
1268
1274
 
1269
1275
  //#endregion
1270
1276
  //#region src/cli/commands/resume.ts
1271
- const logger$13 = createLogger("resume-cmd");
1277
+ const logger$14 = createLogger("resume-cmd");
1272
1278
  async function runResumeAction(options) {
1273
1279
  const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName } = options;
1274
1280
  if (stopAfter !== void 0 && !VALID_PHASES.includes(stopAfter)) {
@@ -1350,7 +1356,7 @@ async function runResumeAction(options) {
1350
1356
  const msg = err instanceof Error ? err.message : String(err);
1351
1357
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
1352
1358
  else process.stderr.write(`Error: ${msg}\n`);
1353
- logger$13.error({ err }, "auto resume failed");
1359
+ logger$14.error({ err }, "auto resume failed");
1354
1360
  return 1;
1355
1361
  } finally {
1356
1362
  try {
@@ -1501,7 +1507,7 @@ async function runFullPipelineFromPhase(options) {
1501
1507
  });
1502
1508
  }
1503
1509
  } catch (err) {
1504
- logger$13.warn({ err }, "Failed to record token usage");
1510
+ logger$14.warn({ err }, "Failed to record token usage");
1505
1511
  }
1506
1512
  });
1507
1513
  const storyDecisions = db.prepare(`SELECT description FROM requirements WHERE pipeline_run_id = ? AND source = 'solutioning-phase'`).all(runId);
@@ -1560,7 +1566,7 @@ async function runFullPipelineFromPhase(options) {
1560
1566
  const msg = err instanceof Error ? err.message : String(err);
1561
1567
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
1562
1568
  else process.stderr.write(`Error: ${msg}\n`);
1563
- logger$13.error({ err }, "pipeline from phase failed");
1569
+ logger$14.error({ err }, "pipeline from phase failed");
1564
1570
  return 1;
1565
1571
  } finally {
1566
1572
  try {
@@ -1585,7 +1591,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
1585
1591
 
1586
1592
  //#endregion
1587
1593
  //#region src/cli/commands/status.ts
1588
- const logger$12 = createLogger("status-cmd");
1594
+ const logger$13 = createLogger("status-cmd");
1589
1595
  async function runStatusAction(options) {
1590
1596
  const { outputFormat, runId, projectRoot } = options;
1591
1597
  const dbRoot = await resolveMainRepoRoot(projectRoot);
@@ -1662,7 +1668,7 @@ async function runStatusAction(options) {
1662
1668
  const msg = err instanceof Error ? err.message : String(err);
1663
1669
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
1664
1670
  else process.stderr.write(`Error: ${msg}\n`);
1665
- logger$12.error({ err }, "status action failed");
1671
+ logger$13.error({ err }, "status action failed");
1666
1672
  return 1;
1667
1673
  } finally {
1668
1674
  try {
@@ -2086,7 +2092,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
2086
2092
 
2087
2093
  //#endregion
2088
2094
  //#region src/cli/commands/amend.ts
2089
- const logger$11 = createLogger("amend-cmd");
2095
+ const logger$12 = createLogger("amend-cmd");
2090
2096
  /**
2091
2097
  * Detect and apply supersessions after a phase completes in an amendment run.
2092
2098
  *
@@ -2117,7 +2123,7 @@ function runPostPhaseSupersessionDetection(db, amendmentRunId, currentPhase, han
2117
2123
  });
2118
2124
  } catch (err) {
2119
2125
  const msg = err instanceof Error ? err.message : String(err);
2120
- logger$11.warn({
2126
+ logger$12.warn({
2121
2127
  err,
2122
2128
  originalId: parentMatch.id,
2123
2129
  supersedingId: newDec.id
@@ -2252,7 +2258,7 @@ async function runAmendAction(options) {
2252
2258
  for (let i = startIdx; i < phaseOrder.length; i++) {
2253
2259
  const currentPhase = phaseOrder[i];
2254
2260
  const amendmentContext = handler.loadContextForPhase(currentPhase);
2255
- logger$11.info({
2261
+ logger$12.info({
2256
2262
  phase: currentPhase,
2257
2263
  amendmentContextLen: amendmentContext.length
2258
2264
  }, "Amendment context loaded for phase");
@@ -2372,7 +2378,7 @@ async function runAmendAction(options) {
2372
2378
  } catch (err) {
2373
2379
  const msg = err instanceof Error ? err.message : String(err);
2374
2380
  process.stderr.write(`Error: ${msg}\n`);
2375
- logger$11.error({ err }, "amend failed");
2381
+ logger$12.error({ err }, "amend failed");
2376
2382
  return 1;
2377
2383
  } finally {
2378
2384
  try {
@@ -2397,7 +2403,7 @@ function registerAmendCommand(program, _version = "0.0.0", projectRoot = process
2397
2403
 
2398
2404
  //#endregion
2399
2405
  //#region src/cli/commands/health.ts
2400
- const logger$10 = createLogger("health-cmd");
2406
+ const logger$11 = createLogger("health-cmd");
2401
2407
  function inspectProcessTree() {
2402
2408
  const result = {
2403
2409
  orchestrator_pid: null,
@@ -2646,7 +2652,7 @@ async function runHealthAction(options) {
2646
2652
  const msg = err instanceof Error ? err.message : String(err);
2647
2653
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
2648
2654
  else process.stderr.write(`Error: ${msg}\n`);
2649
- logger$10.error({ err }, "health action failed");
2655
+ logger$11.error({ err }, "health action failed");
2650
2656
  return 1;
2651
2657
  } finally {
2652
2658
  try {
@@ -2827,7 +2833,7 @@ async function runSupervisorAction(options, deps = {}) {
2827
2833
  );
2828
2834
  const { getLatestRun: getLatest } = await import(
2829
2835
  /* @vite-ignore */
2830
- "../decisions-WIsicZiG.js"
2836
+ "../decisions-DKXc-jnv.js"
2831
2837
  );
2832
2838
  const dbPath = join(projectRoot, ".substrate", "substrate.db");
2833
2839
  const expDbWrapper = new DatabaseWrapper(dbPath);
@@ -2837,7 +2843,7 @@ async function runSupervisorAction(options, deps = {}) {
2837
2843
  const expDb = expDbWrapper.db;
2838
2844
  const { runRunAction: runPipeline } = await import(
2839
2845
  /* @vite-ignore */
2840
- "../run-CRmhkcwN.js"
2846
+ "../run-Bwyy5-RY.js"
2841
2847
  );
2842
2848
  const runStoryFn = async (opts) => {
2843
2849
  const exitCode = await runPipeline({
@@ -2994,7 +3000,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
2994
3000
 
2995
3001
  //#endregion
2996
3002
  //#region src/cli/commands/metrics.ts
2997
- const logger$9 = createLogger("metrics-cmd");
3003
+ const logger$10 = createLogger("metrics-cmd");
2998
3004
  async function runMetricsAction(options) {
2999
3005
  const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis } = options;
3000
3006
  if (analysis !== void 0) {
@@ -3100,7 +3106,7 @@ async function runMetricsAction(options) {
3100
3106
  const msg = err instanceof Error ? err.message : String(err);
3101
3107
  if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
3102
3108
  else process.stderr.write(`Error: ${msg}\n`);
3103
- logger$9.error({ err }, "metrics action failed");
3109
+ logger$10.error({ err }, "metrics action failed");
3104
3110
  return 1;
3105
3111
  } finally {
3106
3112
  try {
@@ -3354,7 +3360,7 @@ function getPlanningCostTotal(db, sessionId) {
3354
3360
  function getLatestSessionId(_db) {
3355
3361
  return null;
3356
3362
  }
3357
- const logger$8 = createLogger("cost-cmd");
3363
+ const logger$9 = createLogger("cost-cmd");
3358
3364
  const COST_EXIT_SUCCESS = 0;
3359
3365
  const COST_EXIT_ERROR = 1;
3360
3366
  /**
@@ -3600,7 +3606,7 @@ async function runCostAction(options) {
3600
3606
  } catch (err) {
3601
3607
  const message = err instanceof Error ? err.message : String(err);
3602
3608
  process.stderr.write(`Error: ${message}\n`);
3603
- logger$8.error({ err }, "runCostAction failed");
3609
+ logger$9.error({ err }, "runCostAction failed");
3604
3610
  return COST_EXIT_ERROR;
3605
3611
  } finally {
3606
3612
  if (wrapper !== null) try {
@@ -3702,7 +3708,7 @@ function applyMonitorSchema(db) {
3702
3708
 
3703
3709
  //#endregion
3704
3710
  //#region src/persistence/monitor-database.ts
3705
- const logger$7 = createLogger("persistence:monitor-db");
3711
+ const logger$8 = createLogger("persistence:monitor-db");
3706
3712
  var MonitorDatabaseImpl = class {
3707
3713
  _db = null;
3708
3714
  _path;
@@ -3713,10 +3719,10 @@ var MonitorDatabaseImpl = class {
3713
3719
  this._open();
3714
3720
  }
3715
3721
  _open() {
3716
- logger$7.info({ path: this._path }, "Opening monitor database");
3722
+ logger$8.info({ path: this._path }, "Opening monitor database");
3717
3723
  this._db = new BetterSqlite3(this._path);
3718
3724
  const walResult = this._db.pragma("journal_mode = WAL");
3719
- if (walResult?.[0]?.journal_mode !== "wal") logger$7.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
3725
+ if (walResult?.[0]?.journal_mode !== "wal") logger$8.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
3720
3726
  this._db.pragma("synchronous = NORMAL");
3721
3727
  this._db.pragma("busy_timeout = 5000");
3722
3728
  this._db.pragma("foreign_keys = ON");
@@ -3751,7 +3757,7 @@ var MonitorDatabaseImpl = class {
3751
3757
  total_retries = total_retries + @retries,
3752
3758
  last_updated = @lastUpdated
3753
3759
  `);
3754
- logger$7.info({ path: this._path }, "Monitor database ready");
3760
+ logger$8.info({ path: this._path }, "Monitor database ready");
3755
3761
  }
3756
3762
  _assertOpen() {
3757
3763
  if (this._db === null) throw new Error("MonitorDatabase: connection is closed");
@@ -3900,7 +3906,7 @@ var MonitorDatabaseImpl = class {
3900
3906
  const db = this._assertOpen();
3901
3907
  const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1e3).toISOString();
3902
3908
  const result = db.prepare("DELETE FROM task_metrics WHERE recorded_at < @cutoff").run({ cutoff });
3903
- logger$7.info({
3909
+ logger$8.info({
3904
3910
  cutoff,
3905
3911
  deleted: result.changes
3906
3912
  }, "Pruned old task_metrics rows");
@@ -3939,13 +3945,13 @@ var MonitorDatabaseImpl = class {
3939
3945
  db.exec("ROLLBACK");
3940
3946
  throw err;
3941
3947
  }
3942
- logger$7.info("Rebuilt performance_aggregates from task_metrics");
3948
+ logger$8.info("Rebuilt performance_aggregates from task_metrics");
3943
3949
  }
3944
3950
  resetAllData() {
3945
3951
  const db = this._assertOpen();
3946
3952
  db.exec("DELETE FROM task_metrics");
3947
3953
  db.exec("DELETE FROM performance_aggregates");
3948
- logger$7.info({ path: this._path }, "Monitor data reset — all rows deleted");
3954
+ logger$8.info({ path: this._path }, "Monitor data reset — all rows deleted");
3949
3955
  }
3950
3956
  getTaskMetricsDateRange() {
3951
3957
  const db = this._assertOpen();
@@ -3962,7 +3968,7 @@ var MonitorDatabaseImpl = class {
3962
3968
  if (this._db === null) return;
3963
3969
  this._db.close();
3964
3970
  this._db = null;
3965
- logger$7.info({ path: this._path }, "Monitor database closed");
3971
+ logger$8.info({ path: this._path }, "Monitor database closed");
3966
3972
  }
3967
3973
  /**
3968
3974
  * Access the raw underlying database for testing purposes only.
@@ -3975,7 +3981,7 @@ var MonitorDatabaseImpl = class {
3975
3981
 
3976
3982
  //#endregion
3977
3983
  //#region src/modules/monitor/recommendation-engine.ts
3978
- const logger$6 = createLogger("monitor:recommendations");
3984
+ const logger$7 = createLogger("monitor:recommendations");
3979
3985
  var RecommendationEngine = class {
3980
3986
  _monitorDb;
3981
3987
  _filters;
@@ -4008,7 +4014,7 @@ var RecommendationEngine = class {
4008
4014
  const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
4009
4015
  const aggregates = this._monitorDb.getAggregates({ sinceDate });
4010
4016
  if (aggregates.length === 0) {
4011
- logger$6.debug("No performance aggregates found — no recommendations to generate");
4017
+ logger$7.debug("No performance aggregates found — no recommendations to generate");
4012
4018
  return [];
4013
4019
  }
4014
4020
  const byTaskType = new Map();
@@ -4073,7 +4079,7 @@ var RecommendationEngine = class {
4073
4079
  if (confDiff !== 0) return confDiff;
4074
4080
  return b.improvement_percentage - a.improvement_percentage;
4075
4081
  });
4076
- logger$6.debug({ count: recommendations.length }, "Generated routing recommendations");
4082
+ logger$7.debug({ count: recommendations.length }, "Generated routing recommendations");
4077
4083
  return recommendations;
4078
4084
  }
4079
4085
  /**
@@ -4239,7 +4245,7 @@ function generateMonitorReport(monitorDb, options = {}) {
4239
4245
 
4240
4246
  //#endregion
4241
4247
  //#region src/cli/commands/monitor.ts
4242
- const logger$5 = createLogger("monitor-cmd");
4248
+ const logger$6 = createLogger("monitor-cmd");
4243
4249
  const MONITOR_EXIT_SUCCESS = 0;
4244
4250
  const MONITOR_EXIT_ERROR = 1;
4245
4251
  /**
@@ -4442,7 +4448,7 @@ async function runMonitorReportAction(options) {
4442
4448
  } catch (err) {
4443
4449
  const message = err instanceof Error ? err.message : String(err);
4444
4450
  process.stderr.write(`Error: ${message}\n`);
4445
- logger$5.error({ err }, "runMonitorReportAction failed");
4451
+ logger$6.error({ err }, "runMonitorReportAction failed");
4446
4452
  return MONITOR_EXIT_ERROR;
4447
4453
  } finally {
4448
4454
  if (monitorDb !== null) try {
@@ -4504,7 +4510,7 @@ async function runMonitorStatusAction(options) {
4504
4510
  } catch (err) {
4505
4511
  const message = err instanceof Error ? err.message : String(err);
4506
4512
  process.stderr.write(`Error: ${message}\n`);
4507
- logger$5.error({ err }, "runMonitorStatusAction failed");
4513
+ logger$6.error({ err }, "runMonitorStatusAction failed");
4508
4514
  return MONITOR_EXIT_ERROR;
4509
4515
  } finally {
4510
4516
  if (monitorDb !== null) try {
@@ -4539,7 +4545,7 @@ async function runMonitorResetAction(options) {
4539
4545
  } catch (err) {
4540
4546
  const message = err instanceof Error ? err.message : String(err);
4541
4547
  process.stderr.write(`Error: ${message}\n`);
4542
- logger$5.error({ err }, "runMonitorResetAction failed");
4548
+ logger$6.error({ err }, "runMonitorResetAction failed");
4543
4549
  return MONITOR_EXIT_ERROR;
4544
4550
  } finally {
4545
4551
  if (monitorDb !== null) try {
@@ -4587,7 +4593,7 @@ async function runMonitorRecommendationsAction(options) {
4587
4593
  } catch (err) {
4588
4594
  const message = err instanceof Error ? err.message : String(err);
4589
4595
  process.stderr.write(`Error: ${message}\n`);
4590
- logger$5.error({ err }, "runMonitorRecommendationsAction failed");
4596
+ logger$6.error({ err }, "runMonitorRecommendationsAction failed");
4591
4597
  return MONITOR_EXIT_ERROR;
4592
4598
  } finally {
4593
4599
  if (monitorDb !== null) try {
@@ -4665,7 +4671,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
4665
4671
 
4666
4672
  //#endregion
4667
4673
  //#region src/modules/git-worktree/git-worktree-manager-impl.ts
4668
- const logger$4 = createLogger("git-worktree");
4674
+ const logger$5 = createLogger("git-worktree");
4669
4675
  const BRANCH_PREFIX = "substrate/task-";
4670
4676
  const DEFAULT_WORKTREE_BASE = ".substrate-worktrees";
4671
4677
  var GitWorktreeManagerImpl = class {
@@ -4684,7 +4690,7 @@ var GitWorktreeManagerImpl = class {
4684
4690
  this._db = db;
4685
4691
  this._onTaskReady = ({ taskId }) => {
4686
4692
  this._handleTaskReady(taskId).catch((err) => {
4687
- logger$4.error({
4693
+ logger$5.error({
4688
4694
  taskId,
4689
4695
  err
4690
4696
  }, "Unhandled error in _handleTaskReady");
@@ -4698,40 +4704,40 @@ var GitWorktreeManagerImpl = class {
4698
4704
  };
4699
4705
  }
4700
4706
  async initialize() {
4701
- logger$4.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
4707
+ logger$5.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
4702
4708
  await this.verifyGitVersion();
4703
4709
  const cleaned = await this.cleanupAllWorktrees();
4704
- if (cleaned > 0) logger$4.info({ cleaned }, "Recovered orphaned worktrees on startup");
4710
+ if (cleaned > 0) logger$5.info({ cleaned }, "Recovered orphaned worktrees on startup");
4705
4711
  this._eventBus.on("task:ready", this._onTaskReady);
4706
4712
  this._eventBus.on("task:complete", this._onTaskComplete);
4707
4713
  this._eventBus.on("task:failed", this._onTaskFailed);
4708
- logger$4.info("GitWorktreeManager initialized");
4714
+ logger$5.info("GitWorktreeManager initialized");
4709
4715
  }
4710
4716
  async shutdown() {
4711
- logger$4.info("GitWorktreeManager.shutdown()");
4717
+ logger$5.info("GitWorktreeManager.shutdown()");
4712
4718
  this._eventBus.off("task:ready", this._onTaskReady);
4713
4719
  this._eventBus.off("task:complete", this._onTaskComplete);
4714
4720
  this._eventBus.off("task:failed", this._onTaskFailed);
4715
4721
  await this.cleanupAllWorktrees();
4716
- logger$4.info("GitWorktreeManager shutdown complete");
4722
+ logger$5.info("GitWorktreeManager shutdown complete");
4717
4723
  }
4718
4724
  async _handleTaskReady(taskId) {
4719
- logger$4.debug({ taskId }, "task:ready — creating worktree");
4725
+ logger$5.debug({ taskId }, "task:ready — creating worktree");
4720
4726
  try {
4721
4727
  await this.createWorktree(taskId);
4722
4728
  } catch (err) {
4723
- logger$4.error({
4729
+ logger$5.error({
4724
4730
  taskId,
4725
4731
  err
4726
4732
  }, "Failed to create worktree for task");
4727
4733
  }
4728
4734
  }
4729
4735
  async _handleTaskDone(taskId) {
4730
- logger$4.debug({ taskId }, "task done — cleaning up worktree");
4736
+ logger$5.debug({ taskId }, "task done — cleaning up worktree");
4731
4737
  try {
4732
4738
  await this.cleanupWorktree(taskId);
4733
4739
  } catch (err) {
4734
- logger$4.warn({
4740
+ logger$5.warn({
4735
4741
  taskId,
4736
4742
  err
4737
4743
  }, "Failed to cleanup worktree for task");
@@ -4741,7 +4747,7 @@ var GitWorktreeManagerImpl = class {
4741
4747
  if (!taskId || taskId.trim().length === 0) throw new Error("createWorktree: taskId must be a non-empty string");
4742
4748
  const branchName = BRANCH_PREFIX + taskId;
4743
4749
  const worktreePath = this.getWorktreePath(taskId);
4744
- logger$4.debug({
4750
+ logger$5.debug({
4745
4751
  taskId,
4746
4752
  branchName,
4747
4753
  worktreePath,
@@ -4761,7 +4767,7 @@ var GitWorktreeManagerImpl = class {
4761
4767
  worktreePath,
4762
4768
  createdAt
4763
4769
  };
4764
- logger$4.info({
4770
+ logger$5.info({
4765
4771
  taskId,
4766
4772
  branchName,
4767
4773
  worktreePath
@@ -4771,7 +4777,7 @@ var GitWorktreeManagerImpl = class {
4771
4777
  async cleanupWorktree(taskId) {
4772
4778
  const branchName = BRANCH_PREFIX + taskId;
4773
4779
  const worktreePath = this.getWorktreePath(taskId);
4774
- logger$4.debug({
4780
+ logger$5.debug({
4775
4781
  taskId,
4776
4782
  branchName,
4777
4783
  worktreePath
@@ -4781,7 +4787,7 @@ var GitWorktreeManagerImpl = class {
4781
4787
  await access$1(worktreePath);
4782
4788
  worktreeExists = true;
4783
4789
  } catch {
4784
- logger$4.debug({
4790
+ logger$5.debug({
4785
4791
  taskId,
4786
4792
  worktreePath
4787
4793
  }, "cleanupWorktree: worktree does not exist, skipping removal");
@@ -4789,7 +4795,7 @@ var GitWorktreeManagerImpl = class {
4789
4795
  if (worktreeExists) try {
4790
4796
  await removeWorktree(worktreePath, this._projectRoot);
4791
4797
  } catch (err) {
4792
- logger$4.warn({
4798
+ logger$5.warn({
4793
4799
  taskId,
4794
4800
  worktreePath,
4795
4801
  err
@@ -4798,7 +4804,7 @@ var GitWorktreeManagerImpl = class {
4798
4804
  try {
4799
4805
  await removeBranch(branchName, this._projectRoot);
4800
4806
  } catch (err) {
4801
- logger$4.warn({
4807
+ logger$5.warn({
4802
4808
  taskId,
4803
4809
  branchName,
4804
4810
  err
@@ -4808,13 +4814,13 @@ var GitWorktreeManagerImpl = class {
4808
4814
  taskId,
4809
4815
  branchName
4810
4816
  });
4811
- logger$4.info({
4817
+ logger$5.info({
4812
4818
  taskId,
4813
4819
  branchName
4814
4820
  }, "Worktree cleaned up");
4815
4821
  }
4816
4822
  async cleanupAllWorktrees() {
4817
- logger$4.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
4823
+ logger$5.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
4818
4824
  const orphanedPaths = await getOrphanedWorktrees(this._projectRoot, this._baseDirectory);
4819
4825
  let cleaned = 0;
4820
4826
  for (const worktreePath of orphanedPaths) {
@@ -4823,12 +4829,12 @@ var GitWorktreeManagerImpl = class {
4823
4829
  try {
4824
4830
  await removeWorktree(worktreePath, this._projectRoot);
4825
4831
  worktreeRemoved = true;
4826
- logger$4.debug({
4832
+ logger$5.debug({
4827
4833
  taskId,
4828
4834
  worktreePath
4829
4835
  }, "cleanupAllWorktrees: removed orphaned worktree");
4830
4836
  } catch (err) {
4831
- logger$4.warn({
4837
+ logger$5.warn({
4832
4838
  taskId,
4833
4839
  worktreePath,
4834
4840
  err
@@ -4838,12 +4844,12 @@ var GitWorktreeManagerImpl = class {
4838
4844
  let branchRemoved = false;
4839
4845
  try {
4840
4846
  branchRemoved = await removeBranch(branchName, this._projectRoot);
4841
- if (branchRemoved) logger$4.debug({
4847
+ if (branchRemoved) logger$5.debug({
4842
4848
  taskId,
4843
4849
  branchName
4844
4850
  }, "cleanupAllWorktrees: removed orphaned branch");
4845
4851
  } catch (err) {
4846
- logger$4.warn({
4852
+ logger$5.warn({
4847
4853
  taskId,
4848
4854
  branchName,
4849
4855
  err
@@ -4851,14 +4857,14 @@ var GitWorktreeManagerImpl = class {
4851
4857
  }
4852
4858
  if (worktreeRemoved) cleaned++;
4853
4859
  }
4854
- if (cleaned > 0) logger$4.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
4860
+ if (cleaned > 0) logger$5.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
4855
4861
  return cleaned;
4856
4862
  }
4857
4863
  async detectConflicts(taskId, targetBranch = "main") {
4858
4864
  if (!taskId || taskId.trim().length === 0) throw new Error("detectConflicts: taskId must be a non-empty string");
4859
4865
  const branchName = BRANCH_PREFIX + taskId;
4860
4866
  const worktreePath = this.getWorktreePath(taskId);
4861
- logger$4.debug({
4867
+ logger$5.debug({
4862
4868
  taskId,
4863
4869
  branchName,
4864
4870
  targetBranch
@@ -4886,7 +4892,7 @@ var GitWorktreeManagerImpl = class {
4886
4892
  branch: branchName,
4887
4893
  conflictingFiles: report.conflictingFiles
4888
4894
  });
4889
- logger$4.info({
4895
+ logger$5.info({
4890
4896
  taskId,
4891
4897
  hasConflicts: report.hasConflicts,
4892
4898
  conflictCount: conflictingFiles.length
@@ -4896,14 +4902,14 @@ var GitWorktreeManagerImpl = class {
4896
4902
  async mergeWorktree(taskId, targetBranch = "main") {
4897
4903
  if (!taskId || taskId.trim().length === 0) throw new Error("mergeWorktree: taskId must be a non-empty string");
4898
4904
  const branchName = BRANCH_PREFIX + taskId;
4899
- logger$4.debug({
4905
+ logger$5.debug({
4900
4906
  taskId,
4901
4907
  branchName,
4902
4908
  targetBranch
4903
4909
  }, "mergeWorktree");
4904
4910
  const conflictReport = await this.detectConflicts(taskId, targetBranch);
4905
4911
  if (conflictReport.hasConflicts) {
4906
- logger$4.info({
4912
+ logger$5.info({
4907
4913
  taskId,
4908
4914
  conflictCount: conflictReport.conflictingFiles.length
4909
4915
  }, "Merge skipped due to conflicts");
@@ -4925,7 +4931,7 @@ var GitWorktreeManagerImpl = class {
4925
4931
  success: true,
4926
4932
  mergedFiles
4927
4933
  };
4928
- logger$4.info({
4934
+ logger$5.info({
4929
4935
  taskId,
4930
4936
  branchName,
4931
4937
  mergedFileCount: mergedFiles.length
@@ -4933,7 +4939,7 @@ var GitWorktreeManagerImpl = class {
4933
4939
  return result;
4934
4940
  }
4935
4941
  async listWorktrees() {
4936
- logger$4.debug({
4942
+ logger$5.debug({
4937
4943
  projectRoot: this._projectRoot,
4938
4944
  baseDirectory: this._baseDirectory
4939
4945
  }, "listWorktrees");
@@ -4957,7 +4963,7 @@ var GitWorktreeManagerImpl = class {
4957
4963
  createdAt
4958
4964
  });
4959
4965
  }
4960
- logger$4.debug({ count: results.length }, "listWorktrees: found worktrees");
4966
+ logger$5.debug({ count: results.length }, "listWorktrees: found worktrees");
4961
4967
  return results;
4962
4968
  }
4963
4969
  getWorktreePath(taskId) {
@@ -4977,7 +4983,7 @@ function createGitWorktreeManager(options) {
4977
4983
 
4978
4984
  //#endregion
4979
4985
  //#region src/cli/commands/merge.ts
4980
- const logger$3 = createLogger("merge-cmd");
4986
+ const logger$4 = createLogger("merge-cmd");
4981
4987
  const MERGE_EXIT_SUCCESS = 0;
4982
4988
  const MERGE_EXIT_CONFLICT = 1;
4983
4989
  const MERGE_EXIT_ERROR = 2;
@@ -5015,7 +5021,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
5015
5021
  projectRoot
5016
5022
  });
5017
5023
  try {
5018
- logger$3.info({
5024
+ logger$4.info({
5019
5025
  taskId,
5020
5026
  targetBranch
5021
5027
  }, "Running conflict detection...");
@@ -5037,7 +5043,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
5037
5043
  } catch (err) {
5038
5044
  const message = err instanceof Error ? err.message : String(err);
5039
5045
  console.error(`Error merging task "${taskId}": ${message}`);
5040
- logger$3.error({
5046
+ logger$4.error({
5041
5047
  taskId,
5042
5048
  err
5043
5049
  }, "merge --task failed");
@@ -5091,7 +5097,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
5091
5097
  error: message
5092
5098
  });
5093
5099
  console.log(` Error for task "${taskId}": ${message}`);
5094
- logger$3.error({
5100
+ logger$4.error({
5095
5101
  taskId,
5096
5102
  err
5097
5103
  }, "merge --all: task failed");
@@ -5144,7 +5150,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {
5144
5150
 
5145
5151
  //#endregion
5146
5152
  //#region src/cli/commands/worktrees.ts
5147
- const logger$2 = createLogger("worktrees-cmd");
5153
+ const logger$3 = createLogger("worktrees-cmd");
5148
5154
  const WORKTREES_EXIT_SUCCESS = 0;
5149
5155
  const WORKTREES_EXIT_ERROR = 1;
5150
5156
  /** Valid task statuses for filtering */
@@ -5271,7 +5277,7 @@ async function listWorktreesAction(options) {
5271
5277
  try {
5272
5278
  worktreeInfos = await manager.listWorktrees();
5273
5279
  } catch (err) {
5274
- logger$2.error({ err }, "Failed to list worktrees");
5280
+ logger$3.error({ err }, "Failed to list worktrees");
5275
5281
  const message = err instanceof Error ? err.message : String(err);
5276
5282
  process.stderr.write(`Error listing worktrees: ${message}\n`);
5277
5283
  return WORKTREES_EXIT_ERROR;
@@ -5298,7 +5304,7 @@ async function listWorktreesAction(options) {
5298
5304
  } catch (err) {
5299
5305
  const message = err instanceof Error ? err.message : String(err);
5300
5306
  process.stderr.write(`Error: ${message}\n`);
5301
- logger$2.error({ err }, "listWorktreesAction failed");
5307
+ logger$3.error({ err }, "listWorktreesAction failed");
5302
5308
  return WORKTREES_EXIT_ERROR;
5303
5309
  }
5304
5310
  }
@@ -5339,7 +5345,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc
5339
5345
 
5340
5346
  //#endregion
5341
5347
  //#region src/cli/commands/brainstorm.ts
5342
- const logger$1 = createLogger("brainstorm-cmd");
5348
+ const logger$2 = createLogger("brainstorm-cmd");
5343
5349
  /**
5344
5350
  * Detect whether the project has existing planning artifacts that indicate
5345
5351
  * this is an amendment session (vs. a brand-new project brainstorm).
@@ -5385,13 +5391,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
5385
5391
  try {
5386
5392
  brief = await readFile(briefPath, "utf-8");
5387
5393
  } catch {
5388
- logger$1.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
5394
+ logger$2.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
5389
5395
  process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
5390
5396
  }
5391
5397
  try {
5392
5398
  prd = await readFile(prdPath, "utf-8");
5393
5399
  } catch {
5394
- logger$1.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
5400
+ logger$2.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
5395
5401
  process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
5396
5402
  }
5397
5403
  return {
@@ -5600,7 +5606,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
5600
5606
  }
5601
5607
  ];
5602
5608
  const defaultDispatch = async (prompt, personaName) => {
5603
- logger$1.debug({
5609
+ logger$2.debug({
5604
5610
  personaName,
5605
5611
  promptLength: prompt.length
5606
5612
  }, "Dispatching to persona (stub mode)");
@@ -5617,7 +5623,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
5617
5623
  };
5618
5624
  } catch (err) {
5619
5625
  const msg = err instanceof Error ? err.message : String(err);
5620
- logger$1.error({
5626
+ logger$2.error({
5621
5627
  err,
5622
5628
  personaName: persona.name
5623
5629
  }, "Persona dispatch failed");
@@ -5769,7 +5775,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
5769
5775
  }
5770
5776
  });
5771
5777
  rl.on("error", (err) => {
5772
- logger$1.error({ err }, "readline error");
5778
+ logger$2.error({ err }, "readline error");
5773
5779
  if (!sessionEnded) endSession(false);
5774
5780
  });
5775
5781
  });
@@ -5808,6 +5814,590 @@ function registerBrainstormCommand(program, _version = "0.0.0", projectRoot = pr
5808
5814
  });
5809
5815
  }
5810
5816
 
5817
+ //#endregion
5818
+ //#region src/modules/export/renderers.ts
5819
+ /** Fields from analysis/product-brief decisions to render, in display order */
5820
+ const PRODUCT_BRIEF_FIELDS = [
5821
+ "problem_statement",
5822
+ "target_users",
5823
+ "core_features",
5824
+ "success_metrics",
5825
+ "constraints",
5826
+ "technology_constraints"
5827
+ ];
5828
+ /**
5829
+ * Known acronyms that should appear fully uppercased when they are a standalone
5830
+ * word in a label (e.g. 'fr_coverage' → 'FR Coverage', 'api_style' → 'API Style').
5831
+ */
5832
+ const UPPERCASE_ACRONYMS = new Set([
5833
+ "fr",
5834
+ "nfr",
5835
+ "ux",
5836
+ "api",
5837
+ "db",
5838
+ "id",
5839
+ "url"
5840
+ ]);
5841
+ /**
5842
+ * Convert a snake_case key to Title Case for display headings.
5843
+ * Known acronyms (fr, nfr, ux, api, db, id, url) are rendered fully uppercased.
5844
+ */
5845
+ function fieldLabel(key) {
5846
+ return key.replace(/_/g, " ").replace(/\b\w+/g, (word) => {
5847
+ const lower = word.toLowerCase();
5848
+ if (UPPERCASE_ACRONYMS.has(lower)) return lower.toUpperCase();
5849
+ return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
5850
+ });
5851
+ }
5852
+ /**
5853
+ * Safely parse a JSON string; returns the original string if parsing fails.
5854
+ */
5855
+ function safeParseJson(value) {
5856
+ try {
5857
+ return JSON.parse(value);
5858
+ } catch {
5859
+ return value;
5860
+ }
5861
+ }
5862
+ /**
5863
+ * Render a decision value to a markdown-friendly string.
5864
+ * - Arrays → bulleted list items
5865
+ * - Objects → key: value lines
5866
+ * - Primitives → plain string
5867
+ */
5868
+ function renderValue(rawValue) {
5869
+ const parsed = safeParseJson(rawValue);
5870
+ if (Array.isArray(parsed)) return parsed.map((item) => `- ${String(item)}`).join("\n");
5871
+ if (typeof parsed === "object" && parsed !== null) return Object.entries(parsed).map(([k, v]) => `- **${fieldLabel(k)}**: ${String(v)}`).join("\n");
5872
+ return String(parsed);
5873
+ }
5874
+ /**
5875
+ * Render analysis-phase decisions as a `product-brief.md` file.
5876
+ *
5877
+ * Merges `product-brief` category decisions with `technology-constraints`
5878
+ * category decisions (they are stored separately in the decision store).
5879
+ *
5880
+ * @param decisions - All decisions from the analysis phase (any category)
5881
+ * @returns Formatted markdown content for product-brief.md
5882
+ */
5883
+ function renderProductBrief(decisions) {
5884
+ const briefDecisions = decisions.filter((d) => d.category === "product-brief");
5885
+ const techConstraintDecisions = decisions.filter((d) => d.category === "technology-constraints");
5886
+ const briefMap = Object.fromEntries(briefDecisions.map((d) => [d.key, d.value]));
5887
+ if (techConstraintDecisions.length > 0 && briefMap["technology_constraints"] === void 0) {
5888
+ const tcBullets = techConstraintDecisions.flatMap((d) => {
5889
+ const parsed = safeParseJson(d.value);
5890
+ if (Array.isArray(parsed)) return parsed.map((item) => String(item));
5891
+ return [String(parsed)];
5892
+ });
5893
+ briefMap["technology_constraints"] = JSON.stringify(tcBullets);
5894
+ }
5895
+ if (briefDecisions.length === 0 && techConstraintDecisions.length === 0) return "";
5896
+ const parts = ["# Product Brief", ""];
5897
+ for (const field of PRODUCT_BRIEF_FIELDS) {
5898
+ const rawValue = briefMap[field];
5899
+ if (rawValue === void 0) continue;
5900
+ parts.push(`## ${fieldLabel(field)}`);
5901
+ parts.push("");
5902
+ parts.push(renderValue(rawValue));
5903
+ parts.push("");
5904
+ }
5905
+ return parts.join("\n");
5906
+ }
5907
+ /**
5908
+ * Render planning-phase decisions (and requirements table) as a `prd.md` file.
5909
+ *
5910
+ * Sections rendered (when data is present):
5911
+ * - Project Classification (classification decisions)
5912
+ * - Functional Requirements (functional-requirements decisions)
5913
+ * - Non-Functional Requirements (non-functional-requirements decisions)
5914
+ * - Domain Model (domain-model decisions)
5915
+ * - User Stories (user-stories decisions)
5916
+ * - Tech Stack (tech-stack decisions)
5917
+ * - Out of Scope (out-of-scope decisions)
5918
+ *
5919
+ * @param decisions - All decisions from the planning phase
5920
+ * @param requirements - Requirements records from the requirements table (optional)
5921
+ * @returns Formatted markdown content for prd.md
5922
+ */
5923
+ function renderPrd(decisions, requirements = []) {
5924
+ if (decisions.length === 0) return "";
5925
+ const parts = ["# Product Requirements Document", ""];
5926
+ const classificationDecisions = decisions.filter((d) => d.category === "classification");
5927
+ if (classificationDecisions.length > 0) {
5928
+ parts.push("## Project Classification");
5929
+ parts.push("");
5930
+ for (const d of classificationDecisions) {
5931
+ const parsed = safeParseJson(d.value);
5932
+ if (Array.isArray(parsed)) {
5933
+ parts.push(`**${fieldLabel(d.key)}**:`);
5934
+ for (const item of parsed) parts.push(`- ${String(item)}`);
5935
+ } else parts.push(`**${fieldLabel(d.key)}**: ${String(parsed)}`);
5936
+ }
5937
+ parts.push("");
5938
+ }
5939
+ const frDecisions = decisions.filter((d) => d.category === "functional-requirements");
5940
+ if (frDecisions.length > 0) {
5941
+ parts.push("## Functional Requirements");
5942
+ parts.push("");
5943
+ for (const d of frDecisions) {
5944
+ const parsed = safeParseJson(d.value);
5945
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
5946
+ const fr = parsed;
5947
+ const id = fr.id ?? d.key;
5948
+ const priority = fr.priority ? ` [${fr.priority.toUpperCase()}]` : "";
5949
+ parts.push(`- **${id}**${priority}: ${fr.description ?? d.value}`);
5950
+ if (fr.acceptance_criteria && fr.acceptance_criteria.length > 0) for (const ac of fr.acceptance_criteria) parts.push(` - ${ac}`);
5951
+ } else parts.push(`- **${d.key}**: ${renderValue(d.value)}`);
5952
+ }
5953
+ parts.push("");
5954
+ }
5955
+ const nfrDecisions = decisions.filter((d) => d.category === "non-functional-requirements");
5956
+ if (nfrDecisions.length > 0) {
5957
+ parts.push("## Non-Functional Requirements");
5958
+ parts.push("");
5959
+ for (const d of nfrDecisions) {
5960
+ const parsed = safeParseJson(d.value);
5961
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
5962
+ const nfr = parsed;
5963
+ const id = nfr.id ?? d.key;
5964
+ const cat = nfr.category ? ` [${nfr.category.toUpperCase()}]` : "";
5965
+ parts.push(`- **${id}**${cat}: ${nfr.description ?? d.value}`);
5966
+ } else parts.push(`- **${d.key}**: ${renderValue(d.value)}`);
5967
+ }
5968
+ parts.push("");
5969
+ }
5970
+ const domainDecisions = decisions.filter((d) => d.category === "domain-model");
5971
+ if (domainDecisions.length > 0) {
5972
+ parts.push("## Domain Model");
5973
+ parts.push("");
5974
+ for (const d of domainDecisions) parts.push(renderValue(d.value));
5975
+ parts.push("");
5976
+ }
5977
+ const userStoryDecisions = decisions.filter((d) => d.category === "user-stories");
5978
+ if (userStoryDecisions.length > 0) {
5979
+ parts.push("## User Stories");
5980
+ parts.push("");
5981
+ for (const d of userStoryDecisions) {
5982
+ const parsed = safeParseJson(d.value);
5983
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
5984
+ const us = parsed;
5985
+ if (us.title) {
5986
+ parts.push(`### ${us.title}`);
5987
+ parts.push("");
5988
+ if (us.description) {
5989
+ parts.push(us.description);
5990
+ parts.push("");
5991
+ }
5992
+ } else {
5993
+ parts.push(renderValue(d.value));
5994
+ parts.push("");
5995
+ }
5996
+ } else {
5997
+ parts.push(renderValue(d.value));
5998
+ parts.push("");
5999
+ }
6000
+ }
6001
+ }
6002
+ const techStackDecisions = decisions.filter((d) => d.category === "tech-stack");
6003
+ if (techStackDecisions.length > 0) {
6004
+ parts.push("## Tech Stack");
6005
+ parts.push("");
6006
+ for (const d of techStackDecisions) if (d.key === "tech_stack") {
6007
+ const parsed = safeParseJson(d.value);
6008
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) for (const [k, v] of Object.entries(parsed)) parts.push(`- **${fieldLabel(k)}**: ${String(v)}`);
6009
+ else parts.push(`- **${fieldLabel(d.key)}**: ${d.value}`);
6010
+ } else parts.push(`- **${fieldLabel(d.key)}**: ${d.value}`);
6011
+ parts.push("");
6012
+ }
6013
+ const outOfScopeDecisions = decisions.filter((d) => d.category === "out-of-scope");
6014
+ if (outOfScopeDecisions.length > 0) {
6015
+ parts.push("## Out of Scope");
6016
+ parts.push("");
6017
+ for (const d of outOfScopeDecisions) parts.push(renderValue(d.value));
6018
+ parts.push("");
6019
+ }
6020
+ const functionalReqs = requirements.filter((r) => r.type === "functional");
6021
+ const nonFunctionalReqs = requirements.filter((r) => r.type === "non_functional");
6022
+ if ((functionalReqs.length > 0 || nonFunctionalReqs.length > 0) && frDecisions.length === 0 && nfrDecisions.length === 0) {
6023
+ parts.push("## Requirements (from Requirements Table)");
6024
+ parts.push("");
6025
+ if (functionalReqs.length > 0) {
6026
+ parts.push("### Functional Requirements");
6027
+ parts.push("");
6028
+ for (const r of functionalReqs) {
6029
+ const priority = r.priority ? ` [${r.priority.toUpperCase()}]` : "";
6030
+ parts.push(`- ${r.source ?? ""}${priority}: ${r.description}`);
6031
+ }
6032
+ parts.push("");
6033
+ }
6034
+ if (nonFunctionalReqs.length > 0) {
6035
+ parts.push("### Non-Functional Requirements");
6036
+ parts.push("");
6037
+ for (const r of nonFunctionalReqs) {
6038
+ const priority = r.priority ? ` [${r.priority.toUpperCase()}]` : "";
6039
+ parts.push(`- ${priority}: ${r.description}`);
6040
+ }
6041
+ parts.push("");
6042
+ }
6043
+ }
6044
+ return parts.join("\n");
6045
+ }
6046
+ /**
6047
+ * Render solutioning-phase architecture decisions as an `architecture.md` file.
6048
+ *
6049
+ * Groups all architecture decisions into a single `## Architecture Decisions`
6050
+ * section, formatting each as `**key**: value` with italicised rationale where
6051
+ * present. The heading pattern matches the regex used by `seedMethodologyContext()`
6052
+ * so that the exported file can be round-tripped back into the decision store.
6053
+ *
6054
+ * @param decisions - All decisions from the solutioning phase (any category)
6055
+ * @returns Formatted markdown content for architecture.md, or '' if no data
6056
+ */
6057
+ function renderArchitecture(decisions) {
6058
+ const archDecisions = decisions.filter((d) => d.category === "architecture");
6059
+ if (archDecisions.length === 0) return "";
6060
+ const parts = ["# Architecture", ""];
6061
+ parts.push("## Architecture Decisions");
6062
+ parts.push("");
6063
+ for (const d of archDecisions) {
6064
+ const value = safeParseJson(d.value);
6065
+ let displayValue;
6066
+ if (typeof value === "object" && value !== null && !Array.isArray(value)) {
6067
+ displayValue = Object.entries(value).map(([k, v]) => ` - *${fieldLabel(k)}*: ${String(v)}`).join("\n");
6068
+ parts.push(`**${d.key}**:`);
6069
+ parts.push(displayValue);
6070
+ } else if (Array.isArray(value)) {
6071
+ displayValue = value.map((item) => ` - ${String(item)}`).join("\n");
6072
+ parts.push(`**${d.key}**:`);
6073
+ parts.push(displayValue);
6074
+ } else {
6075
+ displayValue = String(value);
6076
+ if (d.rationale) parts.push(`**${d.key}**: ${displayValue} *(${d.rationale})*`);
6077
+ else parts.push(`**${d.key}**: ${displayValue}`);
6078
+ }
6079
+ }
6080
+ parts.push("");
6081
+ return parts.join("\n");
6082
+ }
6083
+ /**
6084
+ * Render solutioning-phase epics and stories decisions as an `epics.md` file.
6085
+ *
6086
+ * Output format:
6087
+ * ```
6088
+ * ## Epic 1: Title
6089
+ * Description
6090
+ *
6091
+ * ### Story 1-1: Title
6092
+ * **Priority**: must
6093
+ * **Description**: ...
6094
+ * **Acceptance Criteria**:
6095
+ * - AC1
6096
+ * - AC2
6097
+ * ```
6098
+ *
6099
+ * The `## Epic N:` heading pattern is parsed by `parseEpicShards()` in
6100
+ * `seed-methodology-context.ts`, satisfying the round-trip contract (AC5).
6101
+ *
6102
+ * Stories are associated with their parent epic by the numeric prefix of the
6103
+ * story key (e.g., story key `2-3` → epic 2).
6104
+ *
6105
+ * @param decisions - All decisions from the solutioning phase (any category)
6106
+ * @returns Formatted markdown content for epics.md, or '' if no data
6107
+ */
6108
+ function renderEpics(decisions) {
6109
+ const epicDecisions = decisions.filter((d) => d.category === "epics");
6110
+ const storyDecisions = decisions.filter((d) => d.category === "stories");
6111
+ if (epicDecisions.length === 0 && storyDecisions.length === 0) return "";
6112
+ const epicMap = new Map();
6113
+ for (const d of epicDecisions) {
6114
+ const match = /^epic-(\d+)$/i.exec(d.key);
6115
+ if (match === null) continue;
6116
+ const epicNum = parseInt(match[1], 10);
6117
+ const parsed = safeParseJson(d.value);
6118
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
6119
+ const p = parsed;
6120
+ epicMap.set(epicNum, {
6121
+ num: epicNum,
6122
+ title: p.title ?? `Epic ${epicNum}`,
6123
+ description: p.description ?? ""
6124
+ });
6125
+ } else epicMap.set(epicNum, {
6126
+ num: epicNum,
6127
+ title: String(parsed),
6128
+ description: ""
6129
+ });
6130
+ }
6131
+ const storyMap = new Map();
6132
+ for (const d of storyDecisions) {
6133
+ const parsed = safeParseJson(d.value);
6134
+ let story;
6135
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
6136
+ const p = parsed;
6137
+ const storyKey = p.key ?? d.key;
6138
+ const keyMatch = /^(\d+)-(\d+)/.exec(storyKey);
6139
+ if (keyMatch === null) continue;
6140
+ const epicNum = parseInt(keyMatch[1], 10);
6141
+ const storyNum = parseInt(keyMatch[2], 10);
6142
+ story = {
6143
+ key: storyKey,
6144
+ epicNum,
6145
+ storyNum,
6146
+ title: p.title ?? `Story ${storyKey}`,
6147
+ description: p.description ?? "",
6148
+ ac: p.acceptance_criteria ?? p.ac ?? [],
6149
+ priority: p.priority ?? "must"
6150
+ };
6151
+ } else {
6152
+ const storyKey = d.key;
6153
+ const keyMatch = /^(\d+)-(\d+)/.exec(storyKey);
6154
+ if (keyMatch === null) continue;
6155
+ const epicNum = parseInt(keyMatch[1], 10);
6156
+ const storyNum = parseInt(keyMatch[2], 10);
6157
+ story = {
6158
+ key: storyKey,
6159
+ epicNum,
6160
+ storyNum,
6161
+ title: `Story ${storyKey}`,
6162
+ description: String(parsed),
6163
+ ac: [],
6164
+ priority: "must"
6165
+ };
6166
+ }
6167
+ if (!storyMap.has(story.epicNum)) storyMap.set(story.epicNum, []);
6168
+ storyMap.get(story.epicNum).push(story);
6169
+ }
6170
+ for (const stories of storyMap.values()) stories.sort((a, b) => a.storyNum - b.storyNum);
6171
+ const allEpicNums = new Set([...epicMap.keys(), ...storyMap.keys()]);
6172
+ const sortedEpicNums = [...allEpicNums].sort((a, b) => a - b);
6173
+ const parts = ["# Epics and Stories", ""];
6174
+ for (const epicNum of sortedEpicNums) {
6175
+ const epic = epicMap.get(epicNum);
6176
+ const epicTitle = epic?.title ?? `Epic ${epicNum}`;
6177
+ const epicDescription = epic?.description ?? "";
6178
+ parts.push(`## Epic ${epicNum}: ${epicTitle}`);
6179
+ parts.push("");
6180
+ if (epicDescription) {
6181
+ parts.push(epicDescription);
6182
+ parts.push("");
6183
+ }
6184
+ const stories = storyMap.get(epicNum) ?? [];
6185
+ for (const story of stories) {
6186
+ parts.push(`### Story ${story.key}: ${story.title}`);
6187
+ parts.push("");
6188
+ parts.push(`**Priority**: ${story.priority}`);
6189
+ if (story.description) parts.push(`**Description**: ${story.description}`);
6190
+ if (story.ac.length > 0) {
6191
+ parts.push("**Acceptance Criteria**:");
6192
+ for (const ac of story.ac) parts.push(`- ${ac}`);
6193
+ }
6194
+ parts.push("");
6195
+ }
6196
+ }
6197
+ return parts.join("\n");
6198
+ }
6199
/**
 * Render solutioning-phase readiness-findings decisions as a `readiness-report.md`.
 *
 * Groups findings by category, shows severity per finding, and emits an
 * overall pass/fail verdict: FAIL when any blocker- or major-severity
 * finding exists, PASS otherwise.
 *
 * @param decisions - All decisions from the solutioning phase (any category)
 * @returns Formatted markdown content for readiness-report.md, or '' if no data
 */
function renderReadinessReport(decisions) {
	// Only decisions recorded under "readiness-findings" feed this report.
	const findingDecisions = decisions.filter((d) => d.category === "readiness-findings");
	if (findingDecisions.length === 0) return "";
	const findings = [];
	for (const d of findingDecisions) {
		const parsed = safeParseJson(d.value);
		if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
			const p = parsed;
			findings.push({
				category: p.category ?? "general",
				severity: p.severity ?? "minor",
				// BUGFIX: String(parsed) on a plain object always yields
				// "[object Object]"; serialize the finding instead when it
				// carries no description.
				description: p.description ?? JSON.stringify(parsed),
				affected_items: p.affected_items ?? []
			});
		} else findings.push({
			category: "general",
			severity: "minor",
			description: String(parsed),
			affected_items: []
		});
	}
	// Both blockers and majors fail the readiness check.
	const hasCritical = findings.some((f) => f.severity === "blocker" || f.severity === "major");
	const verdict = hasCritical ? "FAIL" : "PASS";
	const parts = ["# Readiness Report", ""];
	parts.push(`**Overall Verdict**: ${verdict}`);
	parts.push("");
	parts.push(`**Total Findings**: ${findings.length}`);
	parts.push(`**Blockers**: ${findings.filter((f) => f.severity === "blocker").length}`);
	parts.push(`**Major**: ${findings.filter((f) => f.severity === "major").length}`);
	parts.push(`**Minor**: ${findings.filter((f) => f.severity === "minor").length}`);
	parts.push("");
	const byCategory = new Map();
	for (const finding of findings) {
		if (!byCategory.has(finding.category)) byCategory.set(finding.category, []);
		byCategory.get(finding.category).push(finding);
	}
	// Well-known categories render first, in this fixed order; categories not
	// listed here sort to the end (index 999).
	const categoryOrder = [
		"fr_coverage",
		"architecture_compliance",
		"story_quality",
		"ux_alignment",
		"dependency_validity",
		"general"
	];
	const sortedCategories = [...byCategory.keys()].sort((a, b) => {
		const ai = categoryOrder.indexOf(a);
		const bi = categoryOrder.indexOf(b);
		return (ai === -1 ? 999 : ai) - (bi === -1 ? 999 : bi);
	});
	for (const category of sortedCategories) {
		const categoryFindings = byCategory.get(category);
		const categoryLabel = fieldLabel(category);
		parts.push(`## ${categoryLabel}`);
		parts.push("");
		for (const finding of categoryFindings) {
			const severityTag = `[${finding.severity.toUpperCase()}]`;
			parts.push(`- ${severityTag} ${finding.description}`);
			if (finding.affected_items.length > 0) parts.push(` - *Affected*: ${finding.affected_items.join(", ")}`);
		}
		parts.push("");
	}
	return parts.join("\n");
}
6271
+
6272
+ //#endregion
6273
//#region src/cli/commands/export.ts
// Namespaced logger for export-command diagnostics.
const logger$1 = createLogger("export-cmd");
/**
 * Execute the export action: render decision-store contents for a pipeline
 * run into human-readable markdown files under the output directory.
 *
 * Per phase (only when the phase has decisions and rendering yields
 * non-empty content):
 *   - analysis    -> product-brief.md
 *   - planning    -> prd.md
 *   - solutioning -> architecture.md, epics.md, readiness-report.md
 *
 * @param options - { runId?, outputDir, projectRoot, outputFormat }
 * @returns Exit code (0 = success, 1 = error); errors are reported, not thrown.
 */
async function runExportAction(options) {
	const { runId, outputDir, projectRoot, outputFormat } = options;
	// Report an error in the selected output format: JSON errors go to stdout
	// (the machine-readable stream), human errors go to stderr.
	const emitError = (msg) => {
		if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: msg }) + "\n");
		else process.stderr.write(`Error: ${msg}\n`);
	};
	let dbWrapper;
	try {
		const dbRoot = await resolveMainRepoRoot(projectRoot);
		const dbPath = join$1(dbRoot, ".substrate", "substrate.db");
		if (!existsSync$1(dbPath)) {
			emitError(`Decision store not initialized. Run 'substrate init' first.`);
			return 1;
		}
		dbWrapper = new DatabaseWrapper(dbPath);
		dbWrapper.open();
		const db = dbWrapper.db;
		// A non-empty --run-id selects that run; otherwise use the latest run.
		const useExplicitRunId = runId !== void 0 && runId !== "";
		let run;
		if (useExplicitRunId) run = db.prepare("SELECT * FROM pipeline_runs WHERE id = ?").get(runId);
		else run = getLatestRun(db);
		if (run === void 0) {
			// BUGFIX: only blame the requested run id when it was actually used
			// for the lookup. Previously runId === "" fell through to
			// getLatestRun() yet still reported "Pipeline run '' not found.".
			emitError(useExplicitRunId ? `Pipeline run '${runId}' not found.` : "No pipeline runs found. Run `substrate run` first.");
			return 1;
		}
		const activeRunId = run.id;
		const resolvedOutputDir = isAbsolute(outputDir) ? outputDir : join$1(projectRoot, outputDir);
		if (!existsSync$1(resolvedOutputDir)) mkdirSync$1(resolvedOutputDir, { recursive: true });
		const filesWritten = [];
		const phasesExported = [];
		// Write one rendered artifact (skipping empty content) and record the
		// file plus its source phase for the final summary.
		const writeArtifact = (fileName, content, phase) => {
			if (content === "") return;
			const filePath = join$1(resolvedOutputDir, fileName);
			writeFileSync$1(filePath, content, "utf-8");
			filesWritten.push(filePath);
			if (!phasesExported.includes(phase)) phasesExported.push(phase);
			if (outputFormat === "human") process.stdout.write(` Written: ${filePath}\n`);
		};
		const analysisDecisions = getDecisionsByPhaseForRun(db, activeRunId, "analysis");
		if (analysisDecisions.length > 0) writeArtifact("product-brief.md", renderProductBrief(analysisDecisions), "analysis");
		const planningDecisions = getDecisionsByPhaseForRun(db, activeRunId, "planning");
		if (planningDecisions.length > 0) {
			// Requirements are stored globally; scope them to the active run.
			const requirements = listRequirements(db).filter((r) => r.pipeline_run_id === activeRunId);
			writeArtifact("prd.md", renderPrd(planningDecisions, requirements), "planning");
		}
		const solutioningDecisions = getDecisionsByPhaseForRun(db, activeRunId, "solutioning");
		if (solutioningDecisions.length > 0) {
			writeArtifact("architecture.md", renderArchitecture(solutioningDecisions), "solutioning");
			writeArtifact("epics.md", renderEpics(solutioningDecisions), "solutioning");
			writeArtifact("readiness-report.md", renderReadinessReport(solutioningDecisions), "solutioning");
		}
		if (outputFormat === "json") {
			const result = {
				files_written: filesWritten,
				run_id: activeRunId,
				phases_exported: phasesExported
			};
			process.stdout.write(JSON.stringify(result) + "\n");
		} else {
			if (filesWritten.length === 0) process.stdout.write(`No data found for run ${activeRunId}. The pipeline may not have completed any phases.\n`);
			else process.stdout.write(`\nExported ${filesWritten.length} file(s) from run ${activeRunId}.\n`);
			const skippedPhases = ["analysis", "planning", "solutioning"].filter((phase) => !phasesExported.includes(phase));
			if (skippedPhases.length > 0) process.stdout.write(`Phases with no data (skipped): ${skippedPhases.join(", ")}\n`);
		}
		return 0;
	} catch (err) {
		const msg = err instanceof Error ? err.message : String(err);
		emitError(msg);
		logger$1.error({ err }, "export action failed");
		return 1;
	} finally {
		// Best-effort close; the exit code has already been decided.
		if (dbWrapper !== void 0) try {
			dbWrapper.close();
		} catch {}
	}
}
6387
/**
 * Register the `export` subcommand on the given Commander program.
 *
 * @param program - Commander program to attach the command to
 * @param _version - CLI version (unused; kept for registrar-signature parity)
 * @param projectRoot - Default value for the --project-root option
 */
function registerExportCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
	// Action handler: normalize the output format, run the export, and
	// propagate its exit code to the process.
	const handleExport = async (opts) => {
		if (opts.outputFormat !== "json" && opts.outputFormat !== "human") process.stderr.write(`Warning: unknown --output-format '${opts.outputFormat}', defaulting to 'human'\n`);
		const outputFormat = opts.outputFormat === "json" ? "json" : "human";
		process.exitCode = await runExportAction({
			runId: opts.runId,
			outputDir: opts.outputDir,
			projectRoot: opts.projectRoot,
			outputFormat
		});
	};
	const cmd = program.command("export");
	cmd.description("Export decision store contents as human-readable markdown files");
	cmd.option("--run-id <id>", "Pipeline run ID to export (defaults to latest run)");
	cmd.option("--output-dir <path>", "Directory to write exported files to", "_bmad-output/planning-artifacts/");
	cmd.option("--project-root <path>", "Project root directory", projectRoot);
	cmd.option("--output-format <format>", "Output format: human (default) or json", "human");
	cmd.action(handleExport);
}
6400
+
5811
6401
  //#endregion
5812
6402
  //#region src/cli/index.ts
5813
6403
  process.setMaxListeners(20);
@@ -5858,6 +6448,7 @@ async function createProgram() {
5858
6448
  registerMergeCommand(program);
5859
6449
  registerWorktreesCommand(program, version);
5860
6450
  registerBrainstormCommand(program, version);
6451
+ registerExportCommand(program, version);
5861
6452
  registerUpgradeCommand(program);
5862
6453
  return program;
5863
6454
  }