substrate-ai 0.2.3 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +693 -107
- package/dist/decisions-DKXc-jnv.js +3 -0
- package/dist/{decisions-BBLMsN_c.js → decisions-DNYByk0U.js} +24 -2
- package/dist/run-Bwyy5-RY.js +7 -0
- package/dist/{run-DlOWhkIF.js → run-D3ZscMlL.js} +2 -2
- package/package.json +1 -1
- package/dist/decisions-WIsicZiG.js +0 -3
- package/dist/run-CRmhkcwN.js +0 -7
package/dist/cli/index.js
CHANGED
|
@@ -2,9 +2,9 @@
|
|
|
2
2
|
import { createLogger, deepMask } from "../logger-C6n1g8uP.js";
|
|
3
3
|
import { AdapterRegistry, createEventBus } from "../event-bus-J-bw-pkp.js";
|
|
4
4
|
import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema, SUPPORTED_CONFIG_FORMAT_VERSIONS, SubstrateConfigSchema, defaultConfigMigrator } from "../version-manager-impl-BpVx2DkY.js";
|
|
5
|
-
import { DatabaseWrapper, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, createContextCompiler, createDispatcher, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getSubstrateDefaultSettings, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-
|
|
5
|
+
import { DatabaseWrapper, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, createContextCompiler, createDispatcher, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getSubstrateDefaultSettings, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-D3ZscMlL.js";
|
|
6
6
|
import { ConfigError, ConfigIncompatibleFormatError } from "../errors-BPqtzQ4U.js";
|
|
7
|
-
import { addTokenUsage, createDecision, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, updatePipelineRun } from "../decisions-
|
|
7
|
+
import { addTokenUsage, createDecision, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-DNYByk0U.js";
|
|
8
8
|
import { compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../metrics-BSg8VIHd.js";
|
|
9
9
|
import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-BtI5eNoN.js";
|
|
10
10
|
import { registerUpgradeCommand } from "../upgrade-rV26kdh3.js";
|
|
@@ -17,8 +17,9 @@ import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync, readdirSync, re
|
|
|
17
17
|
import yaml from "js-yaml";
|
|
18
18
|
import { createRequire as createRequire$1 } from "node:module";
|
|
19
19
|
import * as path$1 from "node:path";
|
|
20
|
+
import { isAbsolute, join as join$1 } from "node:path";
|
|
20
21
|
import BetterSqlite3 from "better-sqlite3";
|
|
21
|
-
import { existsSync as existsSync$1 } from "node:fs";
|
|
22
|
+
import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
|
|
22
23
|
import { createInterface } from "node:readline";
|
|
23
24
|
import { homedir } from "os";
|
|
24
25
|
import { access as access$1 } from "node:fs/promises";
|
|
@@ -328,7 +329,7 @@ const DEFAULT_CONFIG = {
|
|
|
328
329
|
|
|
329
330
|
//#endregion
|
|
330
331
|
//#region src/cli/commands/init.ts
|
|
331
|
-
const logger$
|
|
332
|
+
const logger$17 = createLogger("init");
|
|
332
333
|
const __dirname = dirname(new URL(import.meta.url).pathname);
|
|
333
334
|
const INIT_EXIT_SUCCESS = 0;
|
|
334
335
|
const INIT_EXIT_ERROR = 1;
|
|
@@ -349,7 +350,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
349
350
|
const version = resolveBmadMethodVersion();
|
|
350
351
|
if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
|
|
351
352
|
process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
|
|
352
|
-
logger$
|
|
353
|
+
logger$17.info({
|
|
353
354
|
version,
|
|
354
355
|
dest: bmadDest
|
|
355
356
|
}, "Scaffolding BMAD framework");
|
|
@@ -359,7 +360,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
359
360
|
const destDir = join(bmadDest, dir);
|
|
360
361
|
mkdirSync(destDir, { recursive: true });
|
|
361
362
|
cpSync(srcDir, destDir, { recursive: true });
|
|
362
|
-
logger$
|
|
363
|
+
logger$17.info({
|
|
363
364
|
dir,
|
|
364
365
|
dest: destDir
|
|
365
366
|
}, "Scaffolded BMAD framework directory");
|
|
@@ -378,7 +379,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
378
379
|
"document_output_language: English"
|
|
379
380
|
].join("\n") + "\n";
|
|
380
381
|
await writeFile(configFile, configStub, "utf8");
|
|
381
|
-
logger$
|
|
382
|
+
logger$17.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
|
|
382
383
|
}
|
|
383
384
|
}
|
|
384
385
|
const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
|
|
@@ -393,7 +394,7 @@ async function scaffoldClaudeMd(projectRoot) {
|
|
|
393
394
|
try {
|
|
394
395
|
sectionContent = await readFile(templatePath, "utf8");
|
|
395
396
|
} catch {
|
|
396
|
-
logger$
|
|
397
|
+
logger$17.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
|
|
397
398
|
return;
|
|
398
399
|
}
|
|
399
400
|
if (!sectionContent.endsWith("\n")) sectionContent += "\n";
|
|
@@ -411,7 +412,7 @@ async function scaffoldClaudeMd(projectRoot) {
|
|
|
411
412
|
newContent = existingContent + separator + sectionContent;
|
|
412
413
|
}
|
|
413
414
|
await writeFile(claudeMdPath, newContent, "utf8");
|
|
414
|
-
logger$
|
|
415
|
+
logger$17.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
|
|
415
416
|
}
|
|
416
417
|
async function scaffoldStatuslineScript(projectRoot) {
|
|
417
418
|
const pkgRoot = findPackageRoot(__dirname);
|
|
@@ -422,7 +423,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
422
423
|
try {
|
|
423
424
|
content = await readFile(templatePath, "utf8");
|
|
424
425
|
} catch {
|
|
425
|
-
logger$
|
|
426
|
+
logger$17.warn({ templatePath }, "statusline.sh template not found; skipping");
|
|
426
427
|
return;
|
|
427
428
|
}
|
|
428
429
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -430,7 +431,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
430
431
|
mkdirSync(claudeDir, { recursive: true });
|
|
431
432
|
await writeFile(statuslinePath, content, "utf8");
|
|
432
433
|
chmodSync(statuslinePath, 493);
|
|
433
|
-
logger$
|
|
434
|
+
logger$17.info({ statuslinePath }, "Wrote .claude/statusline.sh");
|
|
434
435
|
}
|
|
435
436
|
async function scaffoldClaudeSettings(projectRoot) {
|
|
436
437
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -446,7 +447,7 @@ async function scaffoldClaudeSettings(projectRoot) {
|
|
|
446
447
|
if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
|
|
447
448
|
mkdirSync(claudeDir, { recursive: true });
|
|
448
449
|
await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
|
|
449
|
-
logger$
|
|
450
|
+
logger$17.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
|
|
450
451
|
}
|
|
451
452
|
function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
|
|
452
453
|
try {
|
|
@@ -516,7 +517,7 @@ async function compileBmadAgents(bmadDir) {
|
|
|
516
517
|
writeFileSync(mdPath, result.xml, "utf-8");
|
|
517
518
|
compiled++;
|
|
518
519
|
} catch (compileErr) {
|
|
519
|
-
logger$
|
|
520
|
+
logger$17.debug({
|
|
520
521
|
err: compileErr,
|
|
521
522
|
file
|
|
522
523
|
}, "Failed to compile agent YAML");
|
|
@@ -537,9 +538,9 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
537
538
|
const _require = createRequire$1(join(__dirname, "synthetic.js"));
|
|
538
539
|
try {
|
|
539
540
|
const compiledCount = await compileBmadAgents(bmadDir);
|
|
540
|
-
if (compiledCount > 0) logger$
|
|
541
|
+
if (compiledCount > 0) logger$17.info({ compiledCount }, "Compiled agent YAML files to MD");
|
|
541
542
|
} catch (compileErr) {
|
|
542
|
-
logger$
|
|
543
|
+
logger$17.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
|
|
543
544
|
}
|
|
544
545
|
const { AgentCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "agent-command-generator.js"));
|
|
545
546
|
const { WorkflowCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "workflow-command-generator.js"));
|
|
@@ -551,7 +552,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
551
552
|
const manifestGen = new ManifestGenerator();
|
|
552
553
|
await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
|
|
553
554
|
} catch (manifestErr) {
|
|
554
|
-
logger$
|
|
555
|
+
logger$17.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
|
|
555
556
|
}
|
|
556
557
|
const commandsDir = join(projectRoot, ".claude", "commands");
|
|
557
558
|
mkdirSync(commandsDir, { recursive: true });
|
|
@@ -567,7 +568,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
567
568
|
const taskToolCount = await taskToolGen.writeDashArtifacts(commandsDir, taskToolArtifacts);
|
|
568
569
|
const total = agentCount + workflowCount + taskToolCount;
|
|
569
570
|
if (outputFormat !== "json") process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
|
|
570
|
-
logger$
|
|
571
|
+
logger$17.info({
|
|
571
572
|
agentCount,
|
|
572
573
|
workflowCount,
|
|
573
574
|
taskToolCount,
|
|
@@ -577,7 +578,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
577
578
|
} catch (err) {
|
|
578
579
|
const msg = err instanceof Error ? err.message : String(err);
|
|
579
580
|
if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
|
|
580
|
-
logger$
|
|
581
|
+
logger$17.warn({ err }, "scaffoldClaudeCommands failed; init continues");
|
|
581
582
|
}
|
|
582
583
|
}
|
|
583
584
|
const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
|
|
@@ -651,7 +652,7 @@ async function runInitAction(options) {
|
|
|
651
652
|
discoveryReport = await registry.discoverAndRegister();
|
|
652
653
|
} catch (err) {
|
|
653
654
|
const message = err instanceof Error ? err.message : String(err);
|
|
654
|
-
logger$
|
|
655
|
+
logger$17.error({ err }, "Adapter discovery failed");
|
|
655
656
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
|
|
656
657
|
else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
|
|
657
658
|
return INIT_EXIT_ERROR;
|
|
@@ -700,12 +701,12 @@ async function runInitAction(options) {
|
|
|
700
701
|
return INIT_EXIT_ERROR;
|
|
701
702
|
}
|
|
702
703
|
if (force && existsSync(localManifest)) {
|
|
703
|
-
logger$
|
|
704
|
+
logger$17.info({ pack: packName }, "Replacing existing pack with bundled version");
|
|
704
705
|
process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
|
|
705
706
|
}
|
|
706
707
|
mkdirSync(dirname(packPath), { recursive: true });
|
|
707
708
|
cpSync(bundledPackPath, packPath, { recursive: true });
|
|
708
|
-
logger$
|
|
709
|
+
logger$17.info({
|
|
709
710
|
pack: packName,
|
|
710
711
|
dest: packPath
|
|
711
712
|
}, "Scaffolded methodology pack");
|
|
@@ -758,7 +759,7 @@ async function runInitAction(options) {
|
|
|
758
759
|
const msg = err instanceof Error ? err.message : String(err);
|
|
759
760
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
760
761
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
761
|
-
logger$
|
|
762
|
+
logger$17.error({ err }, "init failed");
|
|
762
763
|
return INIT_EXIT_ERROR;
|
|
763
764
|
}
|
|
764
765
|
}
|
|
@@ -804,7 +805,7 @@ function formatUnsupportedVersionError(formatType, version, supported) {
|
|
|
804
805
|
|
|
805
806
|
//#endregion
|
|
806
807
|
//#region src/modules/config/config-system-impl.ts
|
|
807
|
-
const logger$
|
|
808
|
+
const logger$16 = createLogger("config");
|
|
808
809
|
function deepMerge(base, override) {
|
|
809
810
|
const result = { ...base };
|
|
810
811
|
for (const [key, val] of Object.entries(override)) if (val !== null && val !== void 0 && typeof val === "object" && !Array.isArray(val) && typeof result[key] === "object" && result[key] !== null && !Array.isArray(result[key])) result[key] = deepMerge(result[key], val);
|
|
@@ -849,7 +850,7 @@ function readEnvOverrides() {
|
|
|
849
850
|
}
|
|
850
851
|
const parsed = PartialSubstrateConfigSchema.safeParse(overrides);
|
|
851
852
|
if (!parsed.success) {
|
|
852
|
-
logger$
|
|
853
|
+
logger$16.warn({ errors: parsed.error.issues }, "Invalid environment variable overrides ignored");
|
|
853
854
|
return {};
|
|
854
855
|
}
|
|
855
856
|
return parsed.data;
|
|
@@ -913,7 +914,7 @@ var ConfigSystemImpl = class {
|
|
|
913
914
|
throw new ConfigError(`Configuration validation failed:\n${issues}`, { issues: result.error.issues });
|
|
914
915
|
}
|
|
915
916
|
this._config = result.data;
|
|
916
|
-
logger$
|
|
917
|
+
logger$16.debug("Configuration loaded successfully");
|
|
917
918
|
}
|
|
918
919
|
getConfig() {
|
|
919
920
|
if (this._config === null) throw new ConfigError("Configuration has not been loaded. Call load() before getConfig().", {});
|
|
@@ -976,7 +977,7 @@ var ConfigSystemImpl = class {
|
|
|
976
977
|
if (version !== void 0 && typeof version === "string" && !isVersionSupported(version, SUPPORTED_CONFIG_FORMAT_VERSIONS)) if (defaultConfigMigrator.canMigrate(version, CURRENT_CONFIG_FORMAT_VERSION)) {
|
|
977
978
|
const migrationOutput = defaultConfigMigrator.migrate(rawObj, version, CURRENT_CONFIG_FORMAT_VERSION, filePath);
|
|
978
979
|
if (migrationOutput.result.success) {
|
|
979
|
-
logger$
|
|
980
|
+
logger$16.info({
|
|
980
981
|
from: version,
|
|
981
982
|
to: CURRENT_CONFIG_FORMAT_VERSION,
|
|
982
983
|
backup: migrationOutput.result.backupPath
|
|
@@ -1019,7 +1020,7 @@ function createConfigSystem(options = {}) {
|
|
|
1019
1020
|
|
|
1020
1021
|
//#endregion
|
|
1021
1022
|
//#region src/cli/commands/config.ts
|
|
1022
|
-
const logger$
|
|
1023
|
+
const logger$15 = createLogger("config-cmd");
|
|
1023
1024
|
const CONFIG_EXIT_SUCCESS = 0;
|
|
1024
1025
|
const CONFIG_EXIT_ERROR = 1;
|
|
1025
1026
|
const CONFIG_EXIT_INVALID = 2;
|
|
@@ -1045,7 +1046,7 @@ async function runConfigShow(opts = {}) {
|
|
|
1045
1046
|
return CONFIG_EXIT_INVALID;
|
|
1046
1047
|
}
|
|
1047
1048
|
const message = err instanceof Error ? err.message : String(err);
|
|
1048
|
-
logger$
|
|
1049
|
+
logger$15.error({ err }, "Failed to load configuration");
|
|
1049
1050
|
process.stderr.write(` Error loading configuration: ${message}\n`);
|
|
1050
1051
|
return CONFIG_EXIT_ERROR;
|
|
1051
1052
|
}
|
|
@@ -1119,7 +1120,7 @@ async function runConfigExport(opts = {}) {
|
|
|
1119
1120
|
return CONFIG_EXIT_INVALID;
|
|
1120
1121
|
}
|
|
1121
1122
|
const message = err instanceof Error ? err.message : String(err);
|
|
1122
|
-
logger$
|
|
1123
|
+
logger$15.error({ err }, "Failed to load configuration");
|
|
1123
1124
|
process.stderr.write(`Error loading configuration: ${message}\n`);
|
|
1124
1125
|
return CONFIG_EXIT_ERROR;
|
|
1125
1126
|
}
|
|
@@ -1273,7 +1274,7 @@ function registerConfigCommand(program, _version) {
|
|
|
1273
1274
|
|
|
1274
1275
|
//#endregion
|
|
1275
1276
|
//#region src/cli/commands/resume.ts
|
|
1276
|
-
const logger$
|
|
1277
|
+
const logger$14 = createLogger("resume-cmd");
|
|
1277
1278
|
async function runResumeAction(options) {
|
|
1278
1279
|
const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName } = options;
|
|
1279
1280
|
if (stopAfter !== void 0 && !VALID_PHASES.includes(stopAfter)) {
|
|
@@ -1355,7 +1356,7 @@ async function runResumeAction(options) {
|
|
|
1355
1356
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1356
1357
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1357
1358
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1358
|
-
logger$
|
|
1359
|
+
logger$14.error({ err }, "auto resume failed");
|
|
1359
1360
|
return 1;
|
|
1360
1361
|
} finally {
|
|
1361
1362
|
try {
|
|
@@ -1506,7 +1507,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1506
1507
|
});
|
|
1507
1508
|
}
|
|
1508
1509
|
} catch (err) {
|
|
1509
|
-
logger$
|
|
1510
|
+
logger$14.warn({ err }, "Failed to record token usage");
|
|
1510
1511
|
}
|
|
1511
1512
|
});
|
|
1512
1513
|
const storyDecisions = db.prepare(`SELECT description FROM requirements WHERE pipeline_run_id = ? AND source = 'solutioning-phase'`).all(runId);
|
|
@@ -1565,7 +1566,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1565
1566
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1566
1567
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1567
1568
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1568
|
-
logger$
|
|
1569
|
+
logger$14.error({ err }, "pipeline from phase failed");
|
|
1569
1570
|
return 1;
|
|
1570
1571
|
} finally {
|
|
1571
1572
|
try {
|
|
@@ -1590,7 +1591,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
|
|
|
1590
1591
|
|
|
1591
1592
|
//#endregion
|
|
1592
1593
|
//#region src/cli/commands/status.ts
|
|
1593
|
-
const logger$
|
|
1594
|
+
const logger$13 = createLogger("status-cmd");
|
|
1594
1595
|
async function runStatusAction(options) {
|
|
1595
1596
|
const { outputFormat, runId, projectRoot } = options;
|
|
1596
1597
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
@@ -1667,7 +1668,7 @@ async function runStatusAction(options) {
|
|
|
1667
1668
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1668
1669
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1669
1670
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1670
|
-
logger$
|
|
1671
|
+
logger$13.error({ err }, "status action failed");
|
|
1671
1672
|
return 1;
|
|
1672
1673
|
} finally {
|
|
1673
1674
|
try {
|
|
@@ -2091,7 +2092,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
|
|
|
2091
2092
|
|
|
2092
2093
|
//#endregion
|
|
2093
2094
|
//#region src/cli/commands/amend.ts
|
|
2094
|
-
const logger$
|
|
2095
|
+
const logger$12 = createLogger("amend-cmd");
|
|
2095
2096
|
/**
|
|
2096
2097
|
* Detect and apply supersessions after a phase completes in an amendment run.
|
|
2097
2098
|
*
|
|
@@ -2122,7 +2123,7 @@ function runPostPhaseSupersessionDetection(db, amendmentRunId, currentPhase, han
|
|
|
2122
2123
|
});
|
|
2123
2124
|
} catch (err) {
|
|
2124
2125
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2125
|
-
logger$
|
|
2126
|
+
logger$12.warn({
|
|
2126
2127
|
err,
|
|
2127
2128
|
originalId: parentMatch.id,
|
|
2128
2129
|
supersedingId: newDec.id
|
|
@@ -2257,7 +2258,7 @@ async function runAmendAction(options) {
|
|
|
2257
2258
|
for (let i = startIdx; i < phaseOrder.length; i++) {
|
|
2258
2259
|
const currentPhase = phaseOrder[i];
|
|
2259
2260
|
const amendmentContext = handler.loadContextForPhase(currentPhase);
|
|
2260
|
-
logger$
|
|
2261
|
+
logger$12.info({
|
|
2261
2262
|
phase: currentPhase,
|
|
2262
2263
|
amendmentContextLen: amendmentContext.length
|
|
2263
2264
|
}, "Amendment context loaded for phase");
|
|
@@ -2377,7 +2378,7 @@ async function runAmendAction(options) {
|
|
|
2377
2378
|
} catch (err) {
|
|
2378
2379
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2379
2380
|
process.stderr.write(`Error: ${msg}\n`);
|
|
2380
|
-
logger$
|
|
2381
|
+
logger$12.error({ err }, "amend failed");
|
|
2381
2382
|
return 1;
|
|
2382
2383
|
} finally {
|
|
2383
2384
|
try {
|
|
@@ -2402,7 +2403,7 @@ function registerAmendCommand(program, _version = "0.0.0", projectRoot = process
|
|
|
2402
2403
|
|
|
2403
2404
|
//#endregion
|
|
2404
2405
|
//#region src/cli/commands/health.ts
|
|
2405
|
-
const logger$
|
|
2406
|
+
const logger$11 = createLogger("health-cmd");
|
|
2406
2407
|
function inspectProcessTree() {
|
|
2407
2408
|
const result = {
|
|
2408
2409
|
orchestrator_pid: null,
|
|
@@ -2651,7 +2652,7 @@ async function runHealthAction(options) {
|
|
|
2651
2652
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2652
2653
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
2653
2654
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
2654
|
-
logger$
|
|
2655
|
+
logger$11.error({ err }, "health action failed");
|
|
2655
2656
|
return 1;
|
|
2656
2657
|
} finally {
|
|
2657
2658
|
try {
|
|
@@ -2832,7 +2833,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
2832
2833
|
);
|
|
2833
2834
|
const { getLatestRun: getLatest } = await import(
|
|
2834
2835
|
/* @vite-ignore */
|
|
2835
|
-
"../decisions-
|
|
2836
|
+
"../decisions-DKXc-jnv.js"
|
|
2836
2837
|
);
|
|
2837
2838
|
const dbPath = join(projectRoot, ".substrate", "substrate.db");
|
|
2838
2839
|
const expDbWrapper = new DatabaseWrapper(dbPath);
|
|
@@ -2842,7 +2843,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
2842
2843
|
const expDb = expDbWrapper.db;
|
|
2843
2844
|
const { runRunAction: runPipeline } = await import(
|
|
2844
2845
|
/* @vite-ignore */
|
|
2845
|
-
"../run-
|
|
2846
|
+
"../run-Bwyy5-RY.js"
|
|
2846
2847
|
);
|
|
2847
2848
|
const runStoryFn = async (opts) => {
|
|
2848
2849
|
const exitCode = await runPipeline({
|
|
@@ -2999,7 +3000,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
2999
3000
|
|
|
3000
3001
|
//#endregion
|
|
3001
3002
|
//#region src/cli/commands/metrics.ts
|
|
3002
|
-
const logger$
|
|
3003
|
+
const logger$10 = createLogger("metrics-cmd");
|
|
3003
3004
|
async function runMetricsAction(options) {
|
|
3004
3005
|
const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis } = options;
|
|
3005
3006
|
if (analysis !== void 0) {
|
|
@@ -3105,7 +3106,7 @@ async function runMetricsAction(options) {
|
|
|
3105
3106
|
const msg = err instanceof Error ? err.message : String(err);
|
|
3106
3107
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
3107
3108
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
3108
|
-
logger$
|
|
3109
|
+
logger$10.error({ err }, "metrics action failed");
|
|
3109
3110
|
return 1;
|
|
3110
3111
|
} finally {
|
|
3111
3112
|
try {
|
|
@@ -3359,7 +3360,7 @@ function getPlanningCostTotal(db, sessionId) {
|
|
|
3359
3360
|
function getLatestSessionId(_db) {
|
|
3360
3361
|
return null;
|
|
3361
3362
|
}
|
|
3362
|
-
const logger$
|
|
3363
|
+
const logger$9 = createLogger("cost-cmd");
|
|
3363
3364
|
const COST_EXIT_SUCCESS = 0;
|
|
3364
3365
|
const COST_EXIT_ERROR = 1;
|
|
3365
3366
|
/**
|
|
@@ -3605,7 +3606,7 @@ async function runCostAction(options) {
|
|
|
3605
3606
|
} catch (err) {
|
|
3606
3607
|
const message = err instanceof Error ? err.message : String(err);
|
|
3607
3608
|
process.stderr.write(`Error: ${message}\n`);
|
|
3608
|
-
logger$
|
|
3609
|
+
logger$9.error({ err }, "runCostAction failed");
|
|
3609
3610
|
return COST_EXIT_ERROR;
|
|
3610
3611
|
} finally {
|
|
3611
3612
|
if (wrapper !== null) try {
|
|
@@ -3707,7 +3708,7 @@ function applyMonitorSchema(db) {
|
|
|
3707
3708
|
|
|
3708
3709
|
//#endregion
|
|
3709
3710
|
//#region src/persistence/monitor-database.ts
|
|
3710
|
-
const logger$
|
|
3711
|
+
const logger$8 = createLogger("persistence:monitor-db");
|
|
3711
3712
|
var MonitorDatabaseImpl = class {
|
|
3712
3713
|
_db = null;
|
|
3713
3714
|
_path;
|
|
@@ -3718,10 +3719,10 @@ var MonitorDatabaseImpl = class {
|
|
|
3718
3719
|
this._open();
|
|
3719
3720
|
}
|
|
3720
3721
|
_open() {
|
|
3721
|
-
logger$
|
|
3722
|
+
logger$8.info({ path: this._path }, "Opening monitor database");
|
|
3722
3723
|
this._db = new BetterSqlite3(this._path);
|
|
3723
3724
|
const walResult = this._db.pragma("journal_mode = WAL");
|
|
3724
|
-
if (walResult?.[0]?.journal_mode !== "wal") logger$
|
|
3725
|
+
if (walResult?.[0]?.journal_mode !== "wal") logger$8.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
|
|
3725
3726
|
this._db.pragma("synchronous = NORMAL");
|
|
3726
3727
|
this._db.pragma("busy_timeout = 5000");
|
|
3727
3728
|
this._db.pragma("foreign_keys = ON");
|
|
@@ -3756,7 +3757,7 @@ var MonitorDatabaseImpl = class {
|
|
|
3756
3757
|
total_retries = total_retries + @retries,
|
|
3757
3758
|
last_updated = @lastUpdated
|
|
3758
3759
|
`);
|
|
3759
|
-
logger$
|
|
3760
|
+
logger$8.info({ path: this._path }, "Monitor database ready");
|
|
3760
3761
|
}
|
|
3761
3762
|
_assertOpen() {
|
|
3762
3763
|
if (this._db === null) throw new Error("MonitorDatabase: connection is closed");
|
|
@@ -3905,7 +3906,7 @@ var MonitorDatabaseImpl = class {
|
|
|
3905
3906
|
const db = this._assertOpen();
|
|
3906
3907
|
const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
3907
3908
|
const result = db.prepare("DELETE FROM task_metrics WHERE recorded_at < @cutoff").run({ cutoff });
|
|
3908
|
-
logger$
|
|
3909
|
+
logger$8.info({
|
|
3909
3910
|
cutoff,
|
|
3910
3911
|
deleted: result.changes
|
|
3911
3912
|
}, "Pruned old task_metrics rows");
|
|
@@ -3944,13 +3945,13 @@ var MonitorDatabaseImpl = class {
|
|
|
3944
3945
|
db.exec("ROLLBACK");
|
|
3945
3946
|
throw err;
|
|
3946
3947
|
}
|
|
3947
|
-
logger$
|
|
3948
|
+
logger$8.info("Rebuilt performance_aggregates from task_metrics");
|
|
3948
3949
|
}
|
|
3949
3950
|
resetAllData() {
|
|
3950
3951
|
const db = this._assertOpen();
|
|
3951
3952
|
db.exec("DELETE FROM task_metrics");
|
|
3952
3953
|
db.exec("DELETE FROM performance_aggregates");
|
|
3953
|
-
logger$
|
|
3954
|
+
logger$8.info({ path: this._path }, "Monitor data reset — all rows deleted");
|
|
3954
3955
|
}
|
|
3955
3956
|
getTaskMetricsDateRange() {
|
|
3956
3957
|
const db = this._assertOpen();
|
|
@@ -3967,7 +3968,7 @@ var MonitorDatabaseImpl = class {
|
|
|
3967
3968
|
if (this._db === null) return;
|
|
3968
3969
|
this._db.close();
|
|
3969
3970
|
this._db = null;
|
|
3970
|
-
logger$
|
|
3971
|
+
logger$8.info({ path: this._path }, "Monitor database closed");
|
|
3971
3972
|
}
|
|
3972
3973
|
/**
|
|
3973
3974
|
* Access the raw underlying database for testing purposes only.
|
|
@@ -3980,7 +3981,7 @@ var MonitorDatabaseImpl = class {
|
|
|
3980
3981
|
|
|
3981
3982
|
//#endregion
|
|
3982
3983
|
//#region src/modules/monitor/recommendation-engine.ts
|
|
3983
|
-
const logger$
|
|
3984
|
+
const logger$7 = createLogger("monitor:recommendations");
|
|
3984
3985
|
var RecommendationEngine = class {
|
|
3985
3986
|
_monitorDb;
|
|
3986
3987
|
_filters;
|
|
@@ -4013,7 +4014,7 @@ var RecommendationEngine = class {
|
|
|
4013
4014
|
const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
4014
4015
|
const aggregates = this._monitorDb.getAggregates({ sinceDate });
|
|
4015
4016
|
if (aggregates.length === 0) {
|
|
4016
|
-
logger$
|
|
4017
|
+
logger$7.debug("No performance aggregates found — no recommendations to generate");
|
|
4017
4018
|
return [];
|
|
4018
4019
|
}
|
|
4019
4020
|
const byTaskType = new Map();
|
|
@@ -4078,7 +4079,7 @@ var RecommendationEngine = class {
|
|
|
4078
4079
|
if (confDiff !== 0) return confDiff;
|
|
4079
4080
|
return b.improvement_percentage - a.improvement_percentage;
|
|
4080
4081
|
});
|
|
4081
|
-
logger$
|
|
4082
|
+
logger$7.debug({ count: recommendations.length }, "Generated routing recommendations");
|
|
4082
4083
|
return recommendations;
|
|
4083
4084
|
}
|
|
4084
4085
|
/**
|
|
@@ -4244,7 +4245,7 @@ function generateMonitorReport(monitorDb, options = {}) {
|
|
|
4244
4245
|
|
|
4245
4246
|
//#endregion
|
|
4246
4247
|
//#region src/cli/commands/monitor.ts
|
|
4247
|
-
const logger$
|
|
4248
|
+
const logger$6 = createLogger("monitor-cmd");
|
|
4248
4249
|
const MONITOR_EXIT_SUCCESS = 0;
|
|
4249
4250
|
const MONITOR_EXIT_ERROR = 1;
|
|
4250
4251
|
/**
|
|
@@ -4447,7 +4448,7 @@ async function runMonitorReportAction(options) {
|
|
|
4447
4448
|
} catch (err) {
|
|
4448
4449
|
const message = err instanceof Error ? err.message : String(err);
|
|
4449
4450
|
process.stderr.write(`Error: ${message}\n`);
|
|
4450
|
-
logger$
|
|
4451
|
+
logger$6.error({ err }, "runMonitorReportAction failed");
|
|
4451
4452
|
return MONITOR_EXIT_ERROR;
|
|
4452
4453
|
} finally {
|
|
4453
4454
|
if (monitorDb !== null) try {
|
|
@@ -4509,7 +4510,7 @@ async function runMonitorStatusAction(options) {
|
|
|
4509
4510
|
} catch (err) {
|
|
4510
4511
|
const message = err instanceof Error ? err.message : String(err);
|
|
4511
4512
|
process.stderr.write(`Error: ${message}\n`);
|
|
4512
|
-
logger$
|
|
4513
|
+
logger$6.error({ err }, "runMonitorStatusAction failed");
|
|
4513
4514
|
return MONITOR_EXIT_ERROR;
|
|
4514
4515
|
} finally {
|
|
4515
4516
|
if (monitorDb !== null) try {
|
|
@@ -4544,7 +4545,7 @@ async function runMonitorResetAction(options) {
|
|
|
4544
4545
|
} catch (err) {
|
|
4545
4546
|
const message = err instanceof Error ? err.message : String(err);
|
|
4546
4547
|
process.stderr.write(`Error: ${message}\n`);
|
|
4547
|
-
logger$
|
|
4548
|
+
logger$6.error({ err }, "runMonitorResetAction failed");
|
|
4548
4549
|
return MONITOR_EXIT_ERROR;
|
|
4549
4550
|
} finally {
|
|
4550
4551
|
if (monitorDb !== null) try {
|
|
@@ -4592,7 +4593,7 @@ async function runMonitorRecommendationsAction(options) {
|
|
|
4592
4593
|
} catch (err) {
|
|
4593
4594
|
const message = err instanceof Error ? err.message : String(err);
|
|
4594
4595
|
process.stderr.write(`Error: ${message}\n`);
|
|
4595
|
-
logger$
|
|
4596
|
+
logger$6.error({ err }, "runMonitorRecommendationsAction failed");
|
|
4596
4597
|
return MONITOR_EXIT_ERROR;
|
|
4597
4598
|
} finally {
|
|
4598
4599
|
if (monitorDb !== null) try {
|
|
@@ -4670,7 +4671,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
|
|
|
4670
4671
|
|
|
4671
4672
|
//#endregion
|
|
4672
4673
|
//#region src/modules/git-worktree/git-worktree-manager-impl.ts
|
|
4673
|
-
const logger$
|
|
4674
|
+
const logger$5 = createLogger("git-worktree");
|
|
4674
4675
|
const BRANCH_PREFIX = "substrate/task-";
|
|
4675
4676
|
const DEFAULT_WORKTREE_BASE = ".substrate-worktrees";
|
|
4676
4677
|
var GitWorktreeManagerImpl = class {
|
|
@@ -4689,7 +4690,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4689
4690
|
this._db = db;
|
|
4690
4691
|
this._onTaskReady = ({ taskId }) => {
|
|
4691
4692
|
this._handleTaskReady(taskId).catch((err) => {
|
|
4692
|
-
logger$
|
|
4693
|
+
logger$5.error({
|
|
4693
4694
|
taskId,
|
|
4694
4695
|
err
|
|
4695
4696
|
}, "Unhandled error in _handleTaskReady");
|
|
@@ -4703,40 +4704,40 @@ var GitWorktreeManagerImpl = class {
|
|
|
4703
4704
|
};
|
|
4704
4705
|
}
|
|
4705
4706
|
async initialize() {
|
|
4706
|
-
logger$
|
|
4707
|
+
logger$5.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
|
|
4707
4708
|
await this.verifyGitVersion();
|
|
4708
4709
|
const cleaned = await this.cleanupAllWorktrees();
|
|
4709
|
-
if (cleaned > 0) logger$
|
|
4710
|
+
if (cleaned > 0) logger$5.info({ cleaned }, "Recovered orphaned worktrees on startup");
|
|
4710
4711
|
this._eventBus.on("task:ready", this._onTaskReady);
|
|
4711
4712
|
this._eventBus.on("task:complete", this._onTaskComplete);
|
|
4712
4713
|
this._eventBus.on("task:failed", this._onTaskFailed);
|
|
4713
|
-
logger$
|
|
4714
|
+
logger$5.info("GitWorktreeManager initialized");
|
|
4714
4715
|
}
|
|
4715
4716
|
async shutdown() {
|
|
4716
|
-
logger$
|
|
4717
|
+
logger$5.info("GitWorktreeManager.shutdown()");
|
|
4717
4718
|
this._eventBus.off("task:ready", this._onTaskReady);
|
|
4718
4719
|
this._eventBus.off("task:complete", this._onTaskComplete);
|
|
4719
4720
|
this._eventBus.off("task:failed", this._onTaskFailed);
|
|
4720
4721
|
await this.cleanupAllWorktrees();
|
|
4721
|
-
logger$
|
|
4722
|
+
logger$5.info("GitWorktreeManager shutdown complete");
|
|
4722
4723
|
}
|
|
4723
4724
|
async _handleTaskReady(taskId) {
|
|
4724
|
-
logger$
|
|
4725
|
+
logger$5.debug({ taskId }, "task:ready — creating worktree");
|
|
4725
4726
|
try {
|
|
4726
4727
|
await this.createWorktree(taskId);
|
|
4727
4728
|
} catch (err) {
|
|
4728
|
-
logger$
|
|
4729
|
+
logger$5.error({
|
|
4729
4730
|
taskId,
|
|
4730
4731
|
err
|
|
4731
4732
|
}, "Failed to create worktree for task");
|
|
4732
4733
|
}
|
|
4733
4734
|
}
|
|
4734
4735
|
async _handleTaskDone(taskId) {
|
|
4735
|
-
logger$
|
|
4736
|
+
logger$5.debug({ taskId }, "task done — cleaning up worktree");
|
|
4736
4737
|
try {
|
|
4737
4738
|
await this.cleanupWorktree(taskId);
|
|
4738
4739
|
} catch (err) {
|
|
4739
|
-
logger$
|
|
4740
|
+
logger$5.warn({
|
|
4740
4741
|
taskId,
|
|
4741
4742
|
err
|
|
4742
4743
|
}, "Failed to cleanup worktree for task");
|
|
@@ -4746,7 +4747,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4746
4747
|
if (!taskId || taskId.trim().length === 0) throw new Error("createWorktree: taskId must be a non-empty string");
|
|
4747
4748
|
const branchName = BRANCH_PREFIX + taskId;
|
|
4748
4749
|
const worktreePath = this.getWorktreePath(taskId);
|
|
4749
|
-
logger$
|
|
4750
|
+
logger$5.debug({
|
|
4750
4751
|
taskId,
|
|
4751
4752
|
branchName,
|
|
4752
4753
|
worktreePath,
|
|
@@ -4766,7 +4767,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4766
4767
|
worktreePath,
|
|
4767
4768
|
createdAt
|
|
4768
4769
|
};
|
|
4769
|
-
logger$
|
|
4770
|
+
logger$5.info({
|
|
4770
4771
|
taskId,
|
|
4771
4772
|
branchName,
|
|
4772
4773
|
worktreePath
|
|
@@ -4776,7 +4777,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4776
4777
|
async cleanupWorktree(taskId) {
|
|
4777
4778
|
const branchName = BRANCH_PREFIX + taskId;
|
|
4778
4779
|
const worktreePath = this.getWorktreePath(taskId);
|
|
4779
|
-
logger$
|
|
4780
|
+
logger$5.debug({
|
|
4780
4781
|
taskId,
|
|
4781
4782
|
branchName,
|
|
4782
4783
|
worktreePath
|
|
@@ -4786,7 +4787,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4786
4787
|
await access$1(worktreePath);
|
|
4787
4788
|
worktreeExists = true;
|
|
4788
4789
|
} catch {
|
|
4789
|
-
logger$
|
|
4790
|
+
logger$5.debug({
|
|
4790
4791
|
taskId,
|
|
4791
4792
|
worktreePath
|
|
4792
4793
|
}, "cleanupWorktree: worktree does not exist, skipping removal");
|
|
@@ -4794,7 +4795,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4794
4795
|
if (worktreeExists) try {
|
|
4795
4796
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
4796
4797
|
} catch (err) {
|
|
4797
|
-
logger$
|
|
4798
|
+
logger$5.warn({
|
|
4798
4799
|
taskId,
|
|
4799
4800
|
worktreePath,
|
|
4800
4801
|
err
|
|
@@ -4803,7 +4804,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4803
4804
|
try {
|
|
4804
4805
|
await removeBranch(branchName, this._projectRoot);
|
|
4805
4806
|
} catch (err) {
|
|
4806
|
-
logger$
|
|
4807
|
+
logger$5.warn({
|
|
4807
4808
|
taskId,
|
|
4808
4809
|
branchName,
|
|
4809
4810
|
err
|
|
@@ -4813,13 +4814,13 @@ var GitWorktreeManagerImpl = class {
|
|
|
4813
4814
|
taskId,
|
|
4814
4815
|
branchName
|
|
4815
4816
|
});
|
|
4816
|
-
logger$
|
|
4817
|
+
logger$5.info({
|
|
4817
4818
|
taskId,
|
|
4818
4819
|
branchName
|
|
4819
4820
|
}, "Worktree cleaned up");
|
|
4820
4821
|
}
|
|
4821
4822
|
async cleanupAllWorktrees() {
|
|
4822
|
-
logger$
|
|
4823
|
+
logger$5.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
|
|
4823
4824
|
const orphanedPaths = await getOrphanedWorktrees(this._projectRoot, this._baseDirectory);
|
|
4824
4825
|
let cleaned = 0;
|
|
4825
4826
|
for (const worktreePath of orphanedPaths) {
|
|
@@ -4828,12 +4829,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
4828
4829
|
try {
|
|
4829
4830
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
4830
4831
|
worktreeRemoved = true;
|
|
4831
|
-
logger$
|
|
4832
|
+
logger$5.debug({
|
|
4832
4833
|
taskId,
|
|
4833
4834
|
worktreePath
|
|
4834
4835
|
}, "cleanupAllWorktrees: removed orphaned worktree");
|
|
4835
4836
|
} catch (err) {
|
|
4836
|
-
logger$
|
|
4837
|
+
logger$5.warn({
|
|
4837
4838
|
taskId,
|
|
4838
4839
|
worktreePath,
|
|
4839
4840
|
err
|
|
@@ -4843,12 +4844,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
4843
4844
|
let branchRemoved = false;
|
|
4844
4845
|
try {
|
|
4845
4846
|
branchRemoved = await removeBranch(branchName, this._projectRoot);
|
|
4846
|
-
if (branchRemoved) logger$
|
|
4847
|
+
if (branchRemoved) logger$5.debug({
|
|
4847
4848
|
taskId,
|
|
4848
4849
|
branchName
|
|
4849
4850
|
}, "cleanupAllWorktrees: removed orphaned branch");
|
|
4850
4851
|
} catch (err) {
|
|
4851
|
-
logger$
|
|
4852
|
+
logger$5.warn({
|
|
4852
4853
|
taskId,
|
|
4853
4854
|
branchName,
|
|
4854
4855
|
err
|
|
@@ -4856,14 +4857,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
4856
4857
|
}
|
|
4857
4858
|
if (worktreeRemoved) cleaned++;
|
|
4858
4859
|
}
|
|
4859
|
-
if (cleaned > 0) logger$
|
|
4860
|
+
if (cleaned > 0) logger$5.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
|
|
4860
4861
|
return cleaned;
|
|
4861
4862
|
}
|
|
4862
4863
|
async detectConflicts(taskId, targetBranch = "main") {
|
|
4863
4864
|
if (!taskId || taskId.trim().length === 0) throw new Error("detectConflicts: taskId must be a non-empty string");
|
|
4864
4865
|
const branchName = BRANCH_PREFIX + taskId;
|
|
4865
4866
|
const worktreePath = this.getWorktreePath(taskId);
|
|
4866
|
-
logger$
|
|
4867
|
+
logger$5.debug({
|
|
4867
4868
|
taskId,
|
|
4868
4869
|
branchName,
|
|
4869
4870
|
targetBranch
|
|
@@ -4891,7 +4892,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4891
4892
|
branch: branchName,
|
|
4892
4893
|
conflictingFiles: report.conflictingFiles
|
|
4893
4894
|
});
|
|
4894
|
-
logger$
|
|
4895
|
+
logger$5.info({
|
|
4895
4896
|
taskId,
|
|
4896
4897
|
hasConflicts: report.hasConflicts,
|
|
4897
4898
|
conflictCount: conflictingFiles.length
|
|
@@ -4901,14 +4902,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
4901
4902
|
async mergeWorktree(taskId, targetBranch = "main") {
|
|
4902
4903
|
if (!taskId || taskId.trim().length === 0) throw new Error("mergeWorktree: taskId must be a non-empty string");
|
|
4903
4904
|
const branchName = BRANCH_PREFIX + taskId;
|
|
4904
|
-
logger$
|
|
4905
|
+
logger$5.debug({
|
|
4905
4906
|
taskId,
|
|
4906
4907
|
branchName,
|
|
4907
4908
|
targetBranch
|
|
4908
4909
|
}, "mergeWorktree");
|
|
4909
4910
|
const conflictReport = await this.detectConflicts(taskId, targetBranch);
|
|
4910
4911
|
if (conflictReport.hasConflicts) {
|
|
4911
|
-
logger$
|
|
4912
|
+
logger$5.info({
|
|
4912
4913
|
taskId,
|
|
4913
4914
|
conflictCount: conflictReport.conflictingFiles.length
|
|
4914
4915
|
}, "Merge skipped due to conflicts");
|
|
@@ -4930,7 +4931,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4930
4931
|
success: true,
|
|
4931
4932
|
mergedFiles
|
|
4932
4933
|
};
|
|
4933
|
-
logger$
|
|
4934
|
+
logger$5.info({
|
|
4934
4935
|
taskId,
|
|
4935
4936
|
branchName,
|
|
4936
4937
|
mergedFileCount: mergedFiles.length
|
|
@@ -4938,7 +4939,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4938
4939
|
return result;
|
|
4939
4940
|
}
|
|
4940
4941
|
async listWorktrees() {
|
|
4941
|
-
logger$
|
|
4942
|
+
logger$5.debug({
|
|
4942
4943
|
projectRoot: this._projectRoot,
|
|
4943
4944
|
baseDirectory: this._baseDirectory
|
|
4944
4945
|
}, "listWorktrees");
|
|
@@ -4962,7 +4963,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
4962
4963
|
createdAt
|
|
4963
4964
|
});
|
|
4964
4965
|
}
|
|
4965
|
-
logger$
|
|
4966
|
+
logger$5.debug({ count: results.length }, "listWorktrees: found worktrees");
|
|
4966
4967
|
return results;
|
|
4967
4968
|
}
|
|
4968
4969
|
getWorktreePath(taskId) {
|
|
@@ -4982,7 +4983,7 @@ function createGitWorktreeManager(options) {
|
|
|
4982
4983
|
|
|
4983
4984
|
//#endregion
|
|
4984
4985
|
//#region src/cli/commands/merge.ts
|
|
4985
|
-
const logger$
|
|
4986
|
+
const logger$4 = createLogger("merge-cmd");
|
|
4986
4987
|
const MERGE_EXIT_SUCCESS = 0;
|
|
4987
4988
|
const MERGE_EXIT_CONFLICT = 1;
|
|
4988
4989
|
const MERGE_EXIT_ERROR = 2;
|
|
@@ -5020,7 +5021,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5020
5021
|
projectRoot
|
|
5021
5022
|
});
|
|
5022
5023
|
try {
|
|
5023
|
-
logger$
|
|
5024
|
+
logger$4.info({
|
|
5024
5025
|
taskId,
|
|
5025
5026
|
targetBranch
|
|
5026
5027
|
}, "Running conflict detection...");
|
|
@@ -5042,7 +5043,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5042
5043
|
} catch (err) {
|
|
5043
5044
|
const message = err instanceof Error ? err.message : String(err);
|
|
5044
5045
|
console.error(`Error merging task "${taskId}": ${message}`);
|
|
5045
|
-
logger$
|
|
5046
|
+
logger$4.error({
|
|
5046
5047
|
taskId,
|
|
5047
5048
|
err
|
|
5048
5049
|
}, "merge --task failed");
|
|
@@ -5096,7 +5097,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
|
|
|
5096
5097
|
error: message
|
|
5097
5098
|
});
|
|
5098
5099
|
console.log(` Error for task "${taskId}": ${message}`);
|
|
5099
|
-
logger$
|
|
5100
|
+
logger$4.error({
|
|
5100
5101
|
taskId,
|
|
5101
5102
|
err
|
|
5102
5103
|
}, "merge --all: task failed");
|
|
@@ -5149,7 +5150,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {
|
|
|
5149
5150
|
|
|
5150
5151
|
//#endregion
|
|
5151
5152
|
//#region src/cli/commands/worktrees.ts
|
|
5152
|
-
const logger$
|
|
5153
|
+
const logger$3 = createLogger("worktrees-cmd");
|
|
5153
5154
|
const WORKTREES_EXIT_SUCCESS = 0;
|
|
5154
5155
|
const WORKTREES_EXIT_ERROR = 1;
|
|
5155
5156
|
/** Valid task statuses for filtering */
|
|
@@ -5276,7 +5277,7 @@ async function listWorktreesAction(options) {
|
|
|
5276
5277
|
try {
|
|
5277
5278
|
worktreeInfos = await manager.listWorktrees();
|
|
5278
5279
|
} catch (err) {
|
|
5279
|
-
logger$
|
|
5280
|
+
logger$3.error({ err }, "Failed to list worktrees");
|
|
5280
5281
|
const message = err instanceof Error ? err.message : String(err);
|
|
5281
5282
|
process.stderr.write(`Error listing worktrees: ${message}\n`);
|
|
5282
5283
|
return WORKTREES_EXIT_ERROR;
|
|
@@ -5303,7 +5304,7 @@ async function listWorktreesAction(options) {
|
|
|
5303
5304
|
} catch (err) {
|
|
5304
5305
|
const message = err instanceof Error ? err.message : String(err);
|
|
5305
5306
|
process.stderr.write(`Error: ${message}\n`);
|
|
5306
|
-
logger$
|
|
5307
|
+
logger$3.error({ err }, "listWorktreesAction failed");
|
|
5307
5308
|
return WORKTREES_EXIT_ERROR;
|
|
5308
5309
|
}
|
|
5309
5310
|
}
|
|
@@ -5344,7 +5345,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc
|
|
|
5344
5345
|
|
|
5345
5346
|
//#endregion
|
|
5346
5347
|
//#region src/cli/commands/brainstorm.ts
|
|
5347
|
-
const logger$
|
|
5348
|
+
const logger$2 = createLogger("brainstorm-cmd");
|
|
5348
5349
|
/**
|
|
5349
5350
|
* Detect whether the project has existing planning artifacts that indicate
|
|
5350
5351
|
* this is an amendment session (vs. a brand-new project brainstorm).
|
|
@@ -5390,13 +5391,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
|
|
|
5390
5391
|
try {
|
|
5391
5392
|
brief = await readFile(briefPath, "utf-8");
|
|
5392
5393
|
} catch {
|
|
5393
|
-
logger$
|
|
5394
|
+
logger$2.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
|
|
5394
5395
|
process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
|
|
5395
5396
|
}
|
|
5396
5397
|
try {
|
|
5397
5398
|
prd = await readFile(prdPath, "utf-8");
|
|
5398
5399
|
} catch {
|
|
5399
|
-
logger$
|
|
5400
|
+
logger$2.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
|
|
5400
5401
|
process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
|
|
5401
5402
|
}
|
|
5402
5403
|
return {
|
|
@@ -5605,7 +5606,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
5605
5606
|
}
|
|
5606
5607
|
];
|
|
5607
5608
|
const defaultDispatch = async (prompt, personaName) => {
|
|
5608
|
-
logger$
|
|
5609
|
+
logger$2.debug({
|
|
5609
5610
|
personaName,
|
|
5610
5611
|
promptLength: prompt.length
|
|
5611
5612
|
}, "Dispatching to persona (stub mode)");
|
|
@@ -5622,7 +5623,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
5622
5623
|
};
|
|
5623
5624
|
} catch (err) {
|
|
5624
5625
|
const msg = err instanceof Error ? err.message : String(err);
|
|
5625
|
-
logger$
|
|
5626
|
+
logger$2.error({
|
|
5626
5627
|
err,
|
|
5627
5628
|
personaName: persona.name
|
|
5628
5629
|
}, "Persona dispatch failed");
|
|
@@ -5774,7 +5775,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
|
|
|
5774
5775
|
}
|
|
5775
5776
|
});
|
|
5776
5777
|
rl.on("error", (err) => {
|
|
5777
|
-
logger$
|
|
5778
|
+
logger$2.error({ err }, "readline error");
|
|
5778
5779
|
if (!sessionEnded) endSession(false);
|
|
5779
5780
|
});
|
|
5780
5781
|
});
|
|
@@ -5813,6 +5814,590 @@ function registerBrainstormCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
5813
5814
|
});
|
|
5814
5815
|
}
|
|
5815
5816
|
|
|
5817
|
+
//#endregion
|
|
5818
|
+
//#region src/modules/export/renderers.ts
|
|
5819
|
+
/** Fields from analysis/product-brief decisions to render, in display order */
|
|
5820
|
+
const PRODUCT_BRIEF_FIELDS = [
|
|
5821
|
+
"problem_statement",
|
|
5822
|
+
"target_users",
|
|
5823
|
+
"core_features",
|
|
5824
|
+
"success_metrics",
|
|
5825
|
+
"constraints",
|
|
5826
|
+
"technology_constraints"
|
|
5827
|
+
];
|
|
5828
|
+
/**
|
|
5829
|
+
* Known acronyms that should appear fully uppercased when they are a standalone
|
|
5830
|
+
* word in a label (e.g. 'fr_coverage' → 'FR Coverage', 'api_style' → 'API Style').
|
|
5831
|
+
*/
|
|
5832
|
+
const UPPERCASE_ACRONYMS = new Set([
|
|
5833
|
+
"fr",
|
|
5834
|
+
"nfr",
|
|
5835
|
+
"ux",
|
|
5836
|
+
"api",
|
|
5837
|
+
"db",
|
|
5838
|
+
"id",
|
|
5839
|
+
"url"
|
|
5840
|
+
]);
|
|
5841
|
+
/**
|
|
5842
|
+
* Convert a snake_case key to Title Case for display headings.
|
|
5843
|
+
* Known acronyms (fr, nfr, ux, api, db, id, url) are rendered fully uppercased.
|
|
5844
|
+
*/
|
|
5845
|
+
function fieldLabel(key) {
|
|
5846
|
+
return key.replace(/_/g, " ").replace(/\b\w+/g, (word) => {
|
|
5847
|
+
const lower = word.toLowerCase();
|
|
5848
|
+
if (UPPERCASE_ACRONYMS.has(lower)) return lower.toUpperCase();
|
|
5849
|
+
return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
|
|
5850
|
+
});
|
|
5851
|
+
}
|
|
5852
|
+
/**
|
|
5853
|
+
* Safely parse a JSON string; returns the original string if parsing fails.
|
|
5854
|
+
*/
|
|
5855
|
+
function safeParseJson(value) {
|
|
5856
|
+
try {
|
|
5857
|
+
return JSON.parse(value);
|
|
5858
|
+
} catch {
|
|
5859
|
+
return value;
|
|
5860
|
+
}
|
|
5861
|
+
}
|
|
5862
|
+
/**
|
|
5863
|
+
* Render a decision value to a markdown-friendly string.
|
|
5864
|
+
* - Arrays → bulleted list items
|
|
5865
|
+
* - Objects → key: value lines
|
|
5866
|
+
* - Primitives → plain string
|
|
5867
|
+
*/
|
|
5868
|
+
function renderValue(rawValue) {
|
|
5869
|
+
const parsed = safeParseJson(rawValue);
|
|
5870
|
+
if (Array.isArray(parsed)) return parsed.map((item) => `- ${String(item)}`).join("\n");
|
|
5871
|
+
if (typeof parsed === "object" && parsed !== null) return Object.entries(parsed).map(([k, v]) => `- **${fieldLabel(k)}**: ${String(v)}`).join("\n");
|
|
5872
|
+
return String(parsed);
|
|
5873
|
+
}
|
|
5874
|
+
/**
|
|
5875
|
+
* Render analysis-phase decisions as a `product-brief.md` file.
|
|
5876
|
+
*
|
|
5877
|
+
* Merges `product-brief` category decisions with `technology-constraints`
|
|
5878
|
+
* category decisions (they are stored separately in the decision store).
|
|
5879
|
+
*
|
|
5880
|
+
* @param decisions - All decisions from the analysis phase (any category)
|
|
5881
|
+
* @returns Formatted markdown content for product-brief.md
|
|
5882
|
+
*/
|
|
5883
|
+
function renderProductBrief(decisions) {
|
|
5884
|
+
const briefDecisions = decisions.filter((d) => d.category === "product-brief");
|
|
5885
|
+
const techConstraintDecisions = decisions.filter((d) => d.category === "technology-constraints");
|
|
5886
|
+
const briefMap = Object.fromEntries(briefDecisions.map((d) => [d.key, d.value]));
|
|
5887
|
+
if (techConstraintDecisions.length > 0 && briefMap["technology_constraints"] === void 0) {
|
|
5888
|
+
const tcBullets = techConstraintDecisions.flatMap((d) => {
|
|
5889
|
+
const parsed = safeParseJson(d.value);
|
|
5890
|
+
if (Array.isArray(parsed)) return parsed.map((item) => String(item));
|
|
5891
|
+
return [String(parsed)];
|
|
5892
|
+
});
|
|
5893
|
+
briefMap["technology_constraints"] = JSON.stringify(tcBullets);
|
|
5894
|
+
}
|
|
5895
|
+
if (briefDecisions.length === 0 && techConstraintDecisions.length === 0) return "";
|
|
5896
|
+
const parts = ["# Product Brief", ""];
|
|
5897
|
+
for (const field of PRODUCT_BRIEF_FIELDS) {
|
|
5898
|
+
const rawValue = briefMap[field];
|
|
5899
|
+
if (rawValue === void 0) continue;
|
|
5900
|
+
parts.push(`## ${fieldLabel(field)}`);
|
|
5901
|
+
parts.push("");
|
|
5902
|
+
parts.push(renderValue(rawValue));
|
|
5903
|
+
parts.push("");
|
|
5904
|
+
}
|
|
5905
|
+
return parts.join("\n");
|
|
5906
|
+
}
|
|
5907
|
+
/**
|
|
5908
|
+
* Render planning-phase decisions (and requirements table) as a `prd.md` file.
|
|
5909
|
+
*
|
|
5910
|
+
* Sections rendered (when data is present):
|
|
5911
|
+
* - Project Classification (classification decisions)
|
|
5912
|
+
* - Functional Requirements (functional-requirements decisions)
|
|
5913
|
+
* - Non-Functional Requirements (non-functional-requirements decisions)
|
|
5914
|
+
* - Domain Model (domain-model decisions)
|
|
5915
|
+
* - User Stories (user-stories decisions)
|
|
5916
|
+
* - Tech Stack (tech-stack decisions)
|
|
5917
|
+
* - Out of Scope (out-of-scope decisions)
|
|
5918
|
+
*
|
|
5919
|
+
* @param decisions - All decisions from the planning phase
|
|
5920
|
+
* @param requirements - Requirements records from the requirements table (optional)
|
|
5921
|
+
* @returns Formatted markdown content for prd.md
|
|
5922
|
+
*/
|
|
5923
|
+
function renderPrd(decisions, requirements = []) {
|
|
5924
|
+
if (decisions.length === 0) return "";
|
|
5925
|
+
const parts = ["# Product Requirements Document", ""];
|
|
5926
|
+
const classificationDecisions = decisions.filter((d) => d.category === "classification");
|
|
5927
|
+
if (classificationDecisions.length > 0) {
|
|
5928
|
+
parts.push("## Project Classification");
|
|
5929
|
+
parts.push("");
|
|
5930
|
+
for (const d of classificationDecisions) {
|
|
5931
|
+
const parsed = safeParseJson(d.value);
|
|
5932
|
+
if (Array.isArray(parsed)) {
|
|
5933
|
+
parts.push(`**${fieldLabel(d.key)}**:`);
|
|
5934
|
+
for (const item of parsed) parts.push(`- ${String(item)}`);
|
|
5935
|
+
} else parts.push(`**${fieldLabel(d.key)}**: ${String(parsed)}`);
|
|
5936
|
+
}
|
|
5937
|
+
parts.push("");
|
|
5938
|
+
}
|
|
5939
|
+
const frDecisions = decisions.filter((d) => d.category === "functional-requirements");
|
|
5940
|
+
if (frDecisions.length > 0) {
|
|
5941
|
+
parts.push("## Functional Requirements");
|
|
5942
|
+
parts.push("");
|
|
5943
|
+
for (const d of frDecisions) {
|
|
5944
|
+
const parsed = safeParseJson(d.value);
|
|
5945
|
+
if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
|
|
5946
|
+
const fr = parsed;
|
|
5947
|
+
const id = fr.id ?? d.key;
|
|
5948
|
+
const priority = fr.priority ? ` [${fr.priority.toUpperCase()}]` : "";
|
|
5949
|
+
parts.push(`- **${id}**${priority}: ${fr.description ?? d.value}`);
|
|
5950
|
+
if (fr.acceptance_criteria && fr.acceptance_criteria.length > 0) for (const ac of fr.acceptance_criteria) parts.push(` - ${ac}`);
|
|
5951
|
+
} else parts.push(`- **${d.key}**: ${renderValue(d.value)}`);
|
|
5952
|
+
}
|
|
5953
|
+
parts.push("");
|
|
5954
|
+
}
|
|
5955
|
+
const nfrDecisions = decisions.filter((d) => d.category === "non-functional-requirements");
|
|
5956
|
+
if (nfrDecisions.length > 0) {
|
|
5957
|
+
parts.push("## Non-Functional Requirements");
|
|
5958
|
+
parts.push("");
|
|
5959
|
+
for (const d of nfrDecisions) {
|
|
5960
|
+
const parsed = safeParseJson(d.value);
|
|
5961
|
+
if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
|
|
5962
|
+
const nfr = parsed;
|
|
5963
|
+
const id = nfr.id ?? d.key;
|
|
5964
|
+
const cat = nfr.category ? ` [${nfr.category.toUpperCase()}]` : "";
|
|
5965
|
+
parts.push(`- **${id}**${cat}: ${nfr.description ?? d.value}`);
|
|
5966
|
+
} else parts.push(`- **${d.key}**: ${renderValue(d.value)}`);
|
|
5967
|
+
}
|
|
5968
|
+
parts.push("");
|
|
5969
|
+
}
|
|
5970
|
+
const domainDecisions = decisions.filter((d) => d.category === "domain-model");
|
|
5971
|
+
if (domainDecisions.length > 0) {
|
|
5972
|
+
parts.push("## Domain Model");
|
|
5973
|
+
parts.push("");
|
|
5974
|
+
for (const d of domainDecisions) parts.push(renderValue(d.value));
|
|
5975
|
+
parts.push("");
|
|
5976
|
+
}
|
|
5977
|
+
const userStoryDecisions = decisions.filter((d) => d.category === "user-stories");
|
|
5978
|
+
if (userStoryDecisions.length > 0) {
|
|
5979
|
+
parts.push("## User Stories");
|
|
5980
|
+
parts.push("");
|
|
5981
|
+
for (const d of userStoryDecisions) {
|
|
5982
|
+
const parsed = safeParseJson(d.value);
|
|
5983
|
+
if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
|
|
5984
|
+
const us = parsed;
|
|
5985
|
+
if (us.title) {
|
|
5986
|
+
parts.push(`### ${us.title}`);
|
|
5987
|
+
parts.push("");
|
|
5988
|
+
if (us.description) {
|
|
5989
|
+
parts.push(us.description);
|
|
5990
|
+
parts.push("");
|
|
5991
|
+
}
|
|
5992
|
+
} else {
|
|
5993
|
+
parts.push(renderValue(d.value));
|
|
5994
|
+
parts.push("");
|
|
5995
|
+
}
|
|
5996
|
+
} else {
|
|
5997
|
+
parts.push(renderValue(d.value));
|
|
5998
|
+
parts.push("");
|
|
5999
|
+
}
|
|
6000
|
+
}
|
|
6001
|
+
}
|
|
6002
|
+
const techStackDecisions = decisions.filter((d) => d.category === "tech-stack");
|
|
6003
|
+
if (techStackDecisions.length > 0) {
|
|
6004
|
+
parts.push("## Tech Stack");
|
|
6005
|
+
parts.push("");
|
|
6006
|
+
for (const d of techStackDecisions) if (d.key === "tech_stack") {
|
|
6007
|
+
const parsed = safeParseJson(d.value);
|
|
6008
|
+
if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) for (const [k, v] of Object.entries(parsed)) parts.push(`- **${fieldLabel(k)}**: ${String(v)}`);
|
|
6009
|
+
else parts.push(`- **${fieldLabel(d.key)}**: ${d.value}`);
|
|
6010
|
+
} else parts.push(`- **${fieldLabel(d.key)}**: ${d.value}`);
|
|
6011
|
+
parts.push("");
|
|
6012
|
+
}
|
|
6013
|
+
const outOfScopeDecisions = decisions.filter((d) => d.category === "out-of-scope");
|
|
6014
|
+
if (outOfScopeDecisions.length > 0) {
|
|
6015
|
+
parts.push("## Out of Scope");
|
|
6016
|
+
parts.push("");
|
|
6017
|
+
for (const d of outOfScopeDecisions) parts.push(renderValue(d.value));
|
|
6018
|
+
parts.push("");
|
|
6019
|
+
}
|
|
6020
|
+
const functionalReqs = requirements.filter((r) => r.type === "functional");
|
|
6021
|
+
const nonFunctionalReqs = requirements.filter((r) => r.type === "non_functional");
|
|
6022
|
+
if ((functionalReqs.length > 0 || nonFunctionalReqs.length > 0) && frDecisions.length === 0 && nfrDecisions.length === 0) {
|
|
6023
|
+
parts.push("## Requirements (from Requirements Table)");
|
|
6024
|
+
parts.push("");
|
|
6025
|
+
if (functionalReqs.length > 0) {
|
|
6026
|
+
parts.push("### Functional Requirements");
|
|
6027
|
+
parts.push("");
|
|
6028
|
+
for (const r of functionalReqs) {
|
|
6029
|
+
const priority = r.priority ? ` [${r.priority.toUpperCase()}]` : "";
|
|
6030
|
+
parts.push(`- ${r.source ?? ""}${priority}: ${r.description}`);
|
|
6031
|
+
}
|
|
6032
|
+
parts.push("");
|
|
6033
|
+
}
|
|
6034
|
+
if (nonFunctionalReqs.length > 0) {
|
|
6035
|
+
parts.push("### Non-Functional Requirements");
|
|
6036
|
+
parts.push("");
|
|
6037
|
+
for (const r of nonFunctionalReqs) {
|
|
6038
|
+
const priority = r.priority ? ` [${r.priority.toUpperCase()}]` : "";
|
|
6039
|
+
parts.push(`- ${priority}: ${r.description}`);
|
|
6040
|
+
}
|
|
6041
|
+
parts.push("");
|
|
6042
|
+
}
|
|
6043
|
+
}
|
|
6044
|
+
return parts.join("\n");
|
|
6045
|
+
}
|
|
6046
|
+
/**
|
|
6047
|
+
* Render solutioning-phase architecture decisions as an `architecture.md` file.
|
|
6048
|
+
*
|
|
6049
|
+
* Groups all architecture decisions into a single `## Architecture Decisions`
|
|
6050
|
+
* section, formatting each as `**key**: value` with italicised rationale where
|
|
6051
|
+
* present. The heading pattern matches the regex used by `seedMethodologyContext()`
|
|
6052
|
+
* so that the exported file can be round-tripped back into the decision store.
|
|
6053
|
+
*
|
|
6054
|
+
* @param decisions - All decisions from the solutioning phase (any category)
|
|
6055
|
+
* @returns Formatted markdown content for architecture.md, or '' if no data
|
|
6056
|
+
*/
|
|
6057
|
+
function renderArchitecture(decisions) {
|
|
6058
|
+
const archDecisions = decisions.filter((d) => d.category === "architecture");
|
|
6059
|
+
if (archDecisions.length === 0) return "";
|
|
6060
|
+
const parts = ["# Architecture", ""];
|
|
6061
|
+
parts.push("## Architecture Decisions");
|
|
6062
|
+
parts.push("");
|
|
6063
|
+
for (const d of archDecisions) {
|
|
6064
|
+
const value = safeParseJson(d.value);
|
|
6065
|
+
let displayValue;
|
|
6066
|
+
if (typeof value === "object" && value !== null && !Array.isArray(value)) {
|
|
6067
|
+
displayValue = Object.entries(value).map(([k, v]) => ` - *${fieldLabel(k)}*: ${String(v)}`).join("\n");
|
|
6068
|
+
parts.push(`**${d.key}**:`);
|
|
6069
|
+
parts.push(displayValue);
|
|
6070
|
+
} else if (Array.isArray(value)) {
|
|
6071
|
+
displayValue = value.map((item) => ` - ${String(item)}`).join("\n");
|
|
6072
|
+
parts.push(`**${d.key}**:`);
|
|
6073
|
+
parts.push(displayValue);
|
|
6074
|
+
} else {
|
|
6075
|
+
displayValue = String(value);
|
|
6076
|
+
if (d.rationale) parts.push(`**${d.key}**: ${displayValue} *(${d.rationale})*`);
|
|
6077
|
+
else parts.push(`**${d.key}**: ${displayValue}`);
|
|
6078
|
+
}
|
|
6079
|
+
}
|
|
6080
|
+
parts.push("");
|
|
6081
|
+
return parts.join("\n");
|
|
6082
|
+
}
|
|
6083
|
+
/**
|
|
6084
|
+
* Render solutioning-phase epics and stories decisions as an `epics.md` file.
|
|
6085
|
+
*
|
|
6086
|
+
* Output format:
|
|
6087
|
+
* ```
|
|
6088
|
+
* ## Epic 1: Title
|
|
6089
|
+
* Description
|
|
6090
|
+
*
|
|
6091
|
+
* ### Story 1-1: Title
|
|
6092
|
+
* **Priority**: must
|
|
6093
|
+
* **Description**: ...
|
|
6094
|
+
* **Acceptance Criteria**:
|
|
6095
|
+
* - AC1
|
|
6096
|
+
* - AC2
|
|
6097
|
+
* ```
|
|
6098
|
+
*
|
|
6099
|
+
* The `## Epic N:` heading pattern is parsed by `parseEpicShards()` in
|
|
6100
|
+
* `seed-methodology-context.ts`, satisfying the round-trip contract (AC5).
|
|
6101
|
+
*
|
|
6102
|
+
* Stories are associated with their parent epic by the numeric prefix of the
|
|
6103
|
+
* story key (e.g., story key `2-3` → epic 2).
|
|
6104
|
+
*
|
|
6105
|
+
* @param decisions - All decisions from the solutioning phase (any category)
|
|
6106
|
+
* @returns Formatted markdown content for epics.md, or '' if no data
|
|
6107
|
+
*/
|
|
6108
|
+
function renderEpics(decisions) {
|
|
6109
|
+
const epicDecisions = decisions.filter((d) => d.category === "epics");
|
|
6110
|
+
const storyDecisions = decisions.filter((d) => d.category === "stories");
|
|
6111
|
+
if (epicDecisions.length === 0 && storyDecisions.length === 0) return "";
|
|
6112
|
+
const epicMap = new Map();
|
|
6113
|
+
for (const d of epicDecisions) {
|
|
6114
|
+
const match = /^epic-(\d+)$/i.exec(d.key);
|
|
6115
|
+
if (match === null) continue;
|
|
6116
|
+
const epicNum = parseInt(match[1], 10);
|
|
6117
|
+
const parsed = safeParseJson(d.value);
|
|
6118
|
+
if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
|
|
6119
|
+
const p = parsed;
|
|
6120
|
+
epicMap.set(epicNum, {
|
|
6121
|
+
num: epicNum,
|
|
6122
|
+
title: p.title ?? `Epic ${epicNum}`,
|
|
6123
|
+
description: p.description ?? ""
|
|
6124
|
+
});
|
|
6125
|
+
} else epicMap.set(epicNum, {
|
|
6126
|
+
num: epicNum,
|
|
6127
|
+
title: String(parsed),
|
|
6128
|
+
description: ""
|
|
6129
|
+
});
|
|
6130
|
+
}
|
|
6131
|
+
const storyMap = new Map();
|
|
6132
|
+
for (const d of storyDecisions) {
|
|
6133
|
+
const parsed = safeParseJson(d.value);
|
|
6134
|
+
let story;
|
|
6135
|
+
if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
|
|
6136
|
+
const p = parsed;
|
|
6137
|
+
const storyKey = p.key ?? d.key;
|
|
6138
|
+
const keyMatch = /^(\d+)-(\d+)/.exec(storyKey);
|
|
6139
|
+
if (keyMatch === null) continue;
|
|
6140
|
+
const epicNum = parseInt(keyMatch[1], 10);
|
|
6141
|
+
const storyNum = parseInt(keyMatch[2], 10);
|
|
6142
|
+
story = {
|
|
6143
|
+
key: storyKey,
|
|
6144
|
+
epicNum,
|
|
6145
|
+
storyNum,
|
|
6146
|
+
title: p.title ?? `Story ${storyKey}`,
|
|
6147
|
+
description: p.description ?? "",
|
|
6148
|
+
ac: p.acceptance_criteria ?? p.ac ?? [],
|
|
6149
|
+
priority: p.priority ?? "must"
|
|
6150
|
+
};
|
|
6151
|
+
} else {
|
|
6152
|
+
const storyKey = d.key;
|
|
6153
|
+
const keyMatch = /^(\d+)-(\d+)/.exec(storyKey);
|
|
6154
|
+
if (keyMatch === null) continue;
|
|
6155
|
+
const epicNum = parseInt(keyMatch[1], 10);
|
|
6156
|
+
const storyNum = parseInt(keyMatch[2], 10);
|
|
6157
|
+
story = {
|
|
6158
|
+
key: storyKey,
|
|
6159
|
+
epicNum,
|
|
6160
|
+
storyNum,
|
|
6161
|
+
title: `Story ${storyKey}`,
|
|
6162
|
+
description: String(parsed),
|
|
6163
|
+
ac: [],
|
|
6164
|
+
priority: "must"
|
|
6165
|
+
};
|
|
6166
|
+
}
|
|
6167
|
+
if (!storyMap.has(story.epicNum)) storyMap.set(story.epicNum, []);
|
|
6168
|
+
storyMap.get(story.epicNum).push(story);
|
|
6169
|
+
}
|
|
6170
|
+
for (const stories of storyMap.values()) stories.sort((a, b) => a.storyNum - b.storyNum);
|
|
6171
|
+
const allEpicNums = new Set([...epicMap.keys(), ...storyMap.keys()]);
|
|
6172
|
+
const sortedEpicNums = [...allEpicNums].sort((a, b) => a - b);
|
|
6173
|
+
const parts = ["# Epics and Stories", ""];
|
|
6174
|
+
for (const epicNum of sortedEpicNums) {
|
|
6175
|
+
const epic = epicMap.get(epicNum);
|
|
6176
|
+
const epicTitle = epic?.title ?? `Epic ${epicNum}`;
|
|
6177
|
+
const epicDescription = epic?.description ?? "";
|
|
6178
|
+
parts.push(`## Epic ${epicNum}: ${epicTitle}`);
|
|
6179
|
+
parts.push("");
|
|
6180
|
+
if (epicDescription) {
|
|
6181
|
+
parts.push(epicDescription);
|
|
6182
|
+
parts.push("");
|
|
6183
|
+
}
|
|
6184
|
+
const stories = storyMap.get(epicNum) ?? [];
|
|
6185
|
+
for (const story of stories) {
|
|
6186
|
+
parts.push(`### Story ${story.key}: ${story.title}`);
|
|
6187
|
+
parts.push("");
|
|
6188
|
+
parts.push(`**Priority**: ${story.priority}`);
|
|
6189
|
+
if (story.description) parts.push(`**Description**: ${story.description}`);
|
|
6190
|
+
if (story.ac.length > 0) {
|
|
6191
|
+
parts.push("**Acceptance Criteria**:");
|
|
6192
|
+
for (const ac of story.ac) parts.push(`- ${ac}`);
|
|
6193
|
+
}
|
|
6194
|
+
parts.push("");
|
|
6195
|
+
}
|
|
6196
|
+
}
|
|
6197
|
+
return parts.join("\n");
|
|
6198
|
+
}
|
|
6199
|
+
/**
 * Render solutioning-phase readiness-findings decisions as a `readiness-report.md`.
 *
 * Groups findings by category, shows severity per finding, and emits an
 * overall pass/fail verdict based on whether any blockers were found.
 *
 * @param decisions - All decisions from the solutioning phase (any category)
 * @returns Formatted markdown content for readiness-report.md, or '' if no data
 */
function renderReadinessReport(decisions) {
	const findingDecisions = decisions.filter((d) => d.category === "readiness-findings");
	if (findingDecisions.length === 0) return "";
	// Normalize every decision into a finding record. Payloads that are not
	// plain objects (e.g. raw strings) become generic "minor" findings.
	const findings = findingDecisions.map((d) => {
		const parsed = safeParseJson(d.value);
		const isRecord = typeof parsed === "object" && parsed !== null && !Array.isArray(parsed);
		if (!isRecord) {
			return {
				category: "general",
				severity: "minor",
				description: String(parsed),
				affected_items: []
			};
		}
		return {
			category: parsed.category ?? "general",
			severity: parsed.severity ?? "minor",
			description: parsed.description ?? String(parsed),
			affected_items: parsed.affected_items ?? []
		};
	});
	const countOf = (sev) => findings.filter((f) => f.severity === sev).length;
	// Any blocker or major finding fails the overall readiness verdict.
	const failed = findings.some((f) => f.severity === "blocker" || f.severity === "major");
	const lines = [
		"# Readiness Report",
		"",
		`**Overall Verdict**: ${failed ? "FAIL" : "PASS"}`,
		"",
		`**Total Findings**: ${findings.length}`,
		`**Blockers**: ${countOf("blocker")}`,
		`**Major**: ${countOf("major")}`,
		`**Minor**: ${countOf("minor")}`,
		""
	];
	// Bucket findings by category, preserving first-seen insertion order.
	const byCategory = new Map();
	for (const finding of findings) {
		const bucket = byCategory.get(finding.category);
		if (bucket === undefined) byCategory.set(finding.category, [finding]);
		else bucket.push(finding);
	}
	// Known categories render in this fixed order; unknown ones sort last.
	const categoryOrder = [
		"fr_coverage",
		"architecture_compliance",
		"story_quality",
		"ux_alignment",
		"dependency_validity",
		"general"
	];
	const rank = (category) => {
		const idx = categoryOrder.indexOf(category);
		return idx === -1 ? 999 : idx;
	};
	const sortedCategories = [...byCategory.keys()].sort((a, b) => rank(a) - rank(b));
	for (const category of sortedCategories) {
		lines.push(`## ${fieldLabel(category)}`, "");
		for (const finding of byCategory.get(category)) {
			lines.push(`- [${finding.severity.toUpperCase()}] ${finding.description}`);
			if (finding.affected_items.length > 0) lines.push(`  - *Affected*: ${finding.affected_items.join(", ")}`);
		}
		lines.push("");
	}
	return lines.join("\n");
}
|
|
6271
|
+
|
|
6272
|
+
//#endregion
//#region src/cli/commands/export.ts
// Module-scoped logger for the export command (the `$1` suffix is presumably
// bundler renaming to avoid a collision with another `logger` in this file).
const logger$1 = createLogger("export-cmd");
|
|
6275
|
+
/**
 * Execute the export action.
 * Returns an exit code (0 = success, 1 = error).
 *
 * Opens the decision-store database under `<repo-root>/.substrate/substrate.db`,
 * resolves the target pipeline run (explicit runId or the latest run), renders
 * each phase's decisions to markdown files in the output directory, and reports
 * the result either as human-readable text or as a single JSON object.
 *
 * @param options - { runId, outputDir, projectRoot, outputFormat }
 * @returns Exit code: 0 on success, 1 on any error (errors are reported, not thrown)
 */
async function runExportAction(options) {
	const { runId, outputDir, projectRoot, outputFormat } = options;
	let dbWrapper;
	try {
		// The DB root is resolved from the project root — presumably to find the
		// main repo when running inside a worktree (TODO confirm against resolveMainRepoRoot).
		const dbRoot = await resolveMainRepoRoot(projectRoot);
		const dbPath = join$1(dbRoot, ".substrate", "substrate.db");
		if (!existsSync$1(dbPath)) {
			const errorMsg = `Decision store not initialized. Run 'substrate init' first.`;
			// In JSON mode all output — including errors — goes to stdout as one
			// parseable object; human mode uses stderr for errors.
			if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: errorMsg }) + "\n");
			else process.stderr.write(`Error: ${errorMsg}\n`);
			return 1;
		}
		dbWrapper = new DatabaseWrapper(dbPath);
		dbWrapper.open();
		const db = dbWrapper.db;
		// Resolve the run to export: an explicit non-empty --run-id wins,
		// otherwise fall back to the most recent pipeline run.
		let run;
		if (runId !== void 0 && runId !== "") run = db.prepare("SELECT * FROM pipeline_runs WHERE id = ?").get(runId);
		else run = getLatestRun(db);
		if (run === void 0) {
			const errorMsg = runId !== void 0 ? `Pipeline run '${runId}' not found.` : "No pipeline runs found. Run `substrate run` first.";
			if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: errorMsg }) + "\n");
			else process.stderr.write(`Error: ${errorMsg}\n`);
			return 1;
		}
		const activeRunId = run.id;
		// Relative output dirs are resolved against the project root; the
		// directory is created on demand.
		const resolvedOutputDir = isAbsolute(outputDir) ? outputDir : join$1(projectRoot, outputDir);
		if (!existsSync$1(resolvedOutputDir)) mkdirSync$1(resolvedOutputDir, { recursive: true });
		const filesWritten = [];
		const phasesExported = [];
		// Analysis phase -> product-brief.md. Renderers return '' when there is
		// nothing to emit, in which case no file is written.
		const analysisDecisions = getDecisionsByPhaseForRun(db, activeRunId, "analysis");
		if (analysisDecisions.length > 0) {
			const content = renderProductBrief(analysisDecisions);
			if (content !== "") {
				const filePath = join$1(resolvedOutputDir, "product-brief.md");
				writeFileSync$1(filePath, content, "utf-8");
				filesWritten.push(filePath);
				phasesExported.push("analysis");
				if (outputFormat === "human") process.stdout.write(`  Written: ${filePath}\n`);
			}
		}
		// Planning phase -> prd.md, enriched with this run's requirement rows.
		const planningDecisions = getDecisionsByPhaseForRun(db, activeRunId, "planning");
		if (planningDecisions.length > 0) {
			const requirements = listRequirements(db).filter((r) => r.pipeline_run_id === activeRunId);
			const content = renderPrd(planningDecisions, requirements);
			if (content !== "") {
				const filePath = join$1(resolvedOutputDir, "prd.md");
				writeFileSync$1(filePath, content, "utf-8");
				filesWritten.push(filePath);
				if (!phasesExported.includes("planning")) phasesExported.push("planning");
				if (outputFormat === "human") process.stdout.write(`  Written: ${filePath}\n`);
			}
		}
		// Solutioning phase fans out to three documents: architecture.md,
		// epics.md, and readiness-report.md. The phase is marked exported if
		// any one of them was written.
		const solutioningDecisions = getDecisionsByPhaseForRun(db, activeRunId, "solutioning");
		if (solutioningDecisions.length > 0) {
			const archContent = renderArchitecture(solutioningDecisions);
			if (archContent !== "") {
				const filePath = join$1(resolvedOutputDir, "architecture.md");
				writeFileSync$1(filePath, archContent, "utf-8");
				filesWritten.push(filePath);
				if (!phasesExported.includes("solutioning")) phasesExported.push("solutioning");
				if (outputFormat === "human") process.stdout.write(`  Written: ${filePath}\n`);
			}
			const epicsContent = renderEpics(solutioningDecisions);
			if (epicsContent !== "") {
				const filePath = join$1(resolvedOutputDir, "epics.md");
				writeFileSync$1(filePath, epicsContent, "utf-8");
				filesWritten.push(filePath);
				if (!phasesExported.includes("solutioning")) phasesExported.push("solutioning");
				if (outputFormat === "human") process.stdout.write(`  Written: ${filePath}\n`);
			}
			const readinessContent = renderReadinessReport(solutioningDecisions);
			if (readinessContent !== "") {
				const filePath = join$1(resolvedOutputDir, "readiness-report.md");
				writeFileSync$1(filePath, readinessContent, "utf-8");
				filesWritten.push(filePath);
				if (!phasesExported.includes("solutioning")) phasesExported.push("solutioning");
				if (outputFormat === "human") process.stdout.write(`  Written: ${filePath}\n`);
			}
		}
		// Final report: one JSON object, or a human summary plus the list of
		// phases that produced no output.
		if (outputFormat === "json") {
			const result = {
				files_written: filesWritten,
				run_id: activeRunId,
				phases_exported: phasesExported
			};
			process.stdout.write(JSON.stringify(result) + "\n");
		} else {
			if (filesWritten.length === 0) process.stdout.write(`No data found for run ${activeRunId}. The pipeline may not have completed any phases.\n`);
			else process.stdout.write(`\nExported ${filesWritten.length} file(s) from run ${activeRunId}.\n`);
			const skippedPhases = [];
			if (!phasesExported.includes("analysis")) skippedPhases.push("analysis");
			if (!phasesExported.includes("planning")) skippedPhases.push("planning");
			if (!phasesExported.includes("solutioning")) skippedPhases.push("solutioning");
			if (skippedPhases.length > 0) process.stdout.write(`Phases with no data (skipped): ${skippedPhases.join(", ")}\n`);
		}
		return 0;
	} catch (err) {
		// Any failure maps to exit code 1; the raw error also goes to the logger.
		const msg = err instanceof Error ? err.message : String(err);
		if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: msg }) + "\n");
		else process.stderr.write(`Error: ${msg}\n`);
		logger$1.error({ err }, "export action failed");
		return 1;
	} finally {
		// Best-effort close: a close failure must not mask the real outcome.
		if (dbWrapper !== void 0) try {
			dbWrapper.close();
		} catch {}
	}
}
|
|
6387
|
+
/**
 * Register the `export` subcommand on the given commander program.
 *
 * @param program - Commander program to attach the command to
 * @param _version - CLI version (accepted for signature parity; unused here)
 * @param projectRoot - Default value for the --project-root option
 */
function registerExportCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
	const cmd = program.command("export");
	cmd.description("Export decision store contents as human-readable markdown files");
	cmd.option("--run-id <id>", "Pipeline run ID to export (defaults to latest run)");
	cmd.option("--output-dir <path>", "Directory to write exported files to", "_bmad-output/planning-artifacts/");
	cmd.option("--project-root <path>", "Project root directory", projectRoot);
	cmd.option("--output-format <format>", "Output format: human (default) or json", "human");
	cmd.action(async (opts) => {
		// Unknown formats are warned about and coerced to 'human'.
		const isKnownFormat = opts.outputFormat === "json" || opts.outputFormat === "human";
		if (!isKnownFormat) process.stderr.write(`Warning: unknown --output-format '${opts.outputFormat}', defaulting to 'human'\n`);
		process.exitCode = await runExportAction({
			runId: opts.runId,
			outputDir: opts.outputDir,
			projectRoot: opts.projectRoot,
			outputFormat: opts.outputFormat === "json" ? "json" : "human"
		});
	});
}
|
|
6400
|
+
|
|
5816
6401
|
//#endregion
|
|
5817
6402
|
//#region src/cli/index.ts
|
|
5818
6403
|
process.setMaxListeners(20);
|
|
@@ -5863,6 +6448,7 @@ async function createProgram() {
|
|
|
5863
6448
|
registerMergeCommand(program);
|
|
5864
6449
|
registerWorktreesCommand(program, version);
|
|
5865
6450
|
registerBrainstormCommand(program, version);
|
|
6451
|
+
registerExportCommand(program, version);
|
|
5866
6452
|
registerUpgradeCommand(program);
|
|
5867
6453
|
return program;
|
|
5868
6454
|
}
|