substrate-ai 0.5.0 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-
|
|
2
|
+
import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, detectCycles, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-CxoTrYdA.js";
|
|
3
3
|
import { createLogger } from "../logger-D2fS2ccL.js";
|
|
4
4
|
import { AdapterRegistry } from "../adapter-registry-BkUvZSKJ.js";
|
|
5
5
|
import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-DtZW1maj.js";
|
|
@@ -19,7 +19,7 @@ import yaml from "js-yaml";
|
|
|
19
19
|
import { createRequire } from "node:module";
|
|
20
20
|
import * as path$1 from "node:path";
|
|
21
21
|
import { isAbsolute, join as join$1 } from "node:path";
|
|
22
|
-
import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
|
|
22
|
+
import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, readFileSync as readFileSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
|
|
23
23
|
import { access as access$1, readFile as readFile$1 } from "node:fs/promises";
|
|
24
24
|
import { createInterface } from "node:readline";
|
|
25
25
|
import { homedir } from "os";
|
|
@@ -574,8 +574,8 @@ async function directoryExists(path$2) {
|
|
|
574
574
|
*/
|
|
575
575
|
async function runInitAction(options) {
|
|
576
576
|
const { pack: packName, projectRoot, outputFormat, force = false, yes: nonInteractive = false } = options;
|
|
577
|
-
const packPath = join(projectRoot, "packs", packName);
|
|
578
577
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
578
|
+
const packPath = join(dbRoot, "packs", packName);
|
|
579
579
|
const substrateDir = join(dbRoot, ".substrate");
|
|
580
580
|
const dbPath = join(substrateDir, "substrate.db");
|
|
581
581
|
const configPath = join(substrateDir, "config.yaml");
|
|
@@ -1004,19 +1004,52 @@ function registerConfigCommand(program, _version) {
|
|
|
1004
1004
|
});
|
|
1005
1005
|
}
|
|
1006
1006
|
|
|
1007
|
+
//#endregion
|
|
1008
|
+
//#region src/modules/work-graph/errors.ts
|
|
1009
|
+
/**
|
|
1010
|
+
* Work-graph error types.
|
|
1011
|
+
*
|
|
1012
|
+
* Story 31-7: Cycle Detection in Work Graph
|
|
1013
|
+
*/
|
|
1014
|
+
/**
|
|
1015
|
+
* Thrown by `EpicIngester.ingest()` when the provided dependency list
|
|
1016
|
+
* contains a cycle. The `cycle` field contains the path of story keys
|
|
1017
|
+
* that form the cycle (first and last element are the same).
|
|
1018
|
+
*/
|
|
1019
|
+
var CyclicDependencyError = class extends Error {
|
|
1020
|
+
constructor(cycle) {
|
|
1021
|
+
super(`Cyclic dependency detected: ${cycle.join(" → ")}`);
|
|
1022
|
+
this.cycle = cycle;
|
|
1023
|
+
this.name = "CyclicDependencyError";
|
|
1024
|
+
}
|
|
1025
|
+
};
|
|
1026
|
+
|
|
1007
1027
|
//#endregion
|
|
1008
1028
|
//#region src/cli/commands/resume.ts
|
|
1009
1029
|
const logger$16 = createLogger("resume-cmd");
|
|
1030
|
+
/**
|
|
1031
|
+
* Map internal orchestrator phase names to pipeline event protocol phase names.
|
|
1032
|
+
*/
|
|
1033
|
+
function mapInternalPhaseToEventPhase(internalPhase) {
|
|
1034
|
+
switch (internalPhase) {
|
|
1035
|
+
case "IN_STORY_CREATION": return "create-story";
|
|
1036
|
+
case "IN_DEV": return "dev-story";
|
|
1037
|
+
case "IN_REVIEW": return "code-review";
|
|
1038
|
+
case "IN_MINOR_FIX":
|
|
1039
|
+
case "IN_MAJOR_FIX": return "fix";
|
|
1040
|
+
default: return null;
|
|
1041
|
+
}
|
|
1042
|
+
}
|
|
1010
1043
|
async function runResumeAction(options) {
|
|
1011
|
-
const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName, registry } = options;
|
|
1044
|
+
const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName, events: eventsFlag, registry } = options;
|
|
1012
1045
|
if (stopAfter !== void 0 && !VALID_PHASES.includes(stopAfter)) {
|
|
1013
1046
|
const errorMsg = `Invalid phase: "${stopAfter}". Valid phases: ${VALID_PHASES.join(", ")}`;
|
|
1014
1047
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, errorMsg) + "\n");
|
|
1015
1048
|
else process.stderr.write(`Error: ${errorMsg}\n`);
|
|
1016
1049
|
return 1;
|
|
1017
1050
|
}
|
|
1018
|
-
const packPath = join(projectRoot, "packs", packName);
|
|
1019
1051
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
1052
|
+
const packPath = join(dbRoot, "packs", packName);
|
|
1020
1053
|
const dbPath = join(dbRoot, ".substrate", "substrate.db");
|
|
1021
1054
|
const doltDir = join(dbRoot, ".substrate", "state", ".dolt");
|
|
1022
1055
|
if (!existsSync(dbPath) && !existsSync(doltDir)) {
|
|
@@ -1086,6 +1119,7 @@ async function runResumeAction(options) {
|
|
|
1086
1119
|
concept,
|
|
1087
1120
|
concurrency,
|
|
1088
1121
|
outputFormat,
|
|
1122
|
+
events: eventsFlag,
|
|
1089
1123
|
existingRunId: runId,
|
|
1090
1124
|
projectRoot,
|
|
1091
1125
|
registry
|
|
@@ -1103,7 +1137,7 @@ async function runResumeAction(options) {
|
|
|
1103
1137
|
}
|
|
1104
1138
|
}
|
|
1105
1139
|
async function runFullPipelineFromPhase(options) {
|
|
1106
|
-
const { packName, packPath, dbDir, dbPath, startPhase, stopAfter, concept, concurrency, outputFormat, existingRunId, projectRoot, registry: injectedRegistry } = options;
|
|
1140
|
+
const { packName, packPath, dbDir, dbPath, startPhase, stopAfter, concept, concurrency, outputFormat, events: eventsFlag, existingRunId, projectRoot, registry: injectedRegistry } = options;
|
|
1107
1141
|
if (!existsSync(dbDir)) mkdirSync(dbDir, { recursive: true });
|
|
1108
1142
|
const adapter = createDatabaseAdapter({
|
|
1109
1143
|
backend: "auto",
|
|
@@ -1143,6 +1177,8 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1143
1177
|
let runId;
|
|
1144
1178
|
if (existingRunId !== void 0) runId = existingRunId;
|
|
1145
1179
|
else runId = await phaseOrchestrator.startRun(concept, startPhase);
|
|
1180
|
+
let ndjsonEmitter;
|
|
1181
|
+
if (eventsFlag === true) ndjsonEmitter = createEventEmitter(process.stdout);
|
|
1146
1182
|
const phaseOrder = [
|
|
1147
1183
|
"analysis",
|
|
1148
1184
|
"planning",
|
|
@@ -1239,12 +1275,87 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1239
1275
|
config: {
|
|
1240
1276
|
maxConcurrency: concurrency,
|
|
1241
1277
|
maxReviewCycles: 2,
|
|
1242
|
-
pipelineRunId: runId
|
|
1278
|
+
pipelineRunId: runId,
|
|
1279
|
+
enableHeartbeat: eventsFlag === true
|
|
1243
1280
|
},
|
|
1244
1281
|
projectRoot,
|
|
1245
1282
|
...ingestionServer !== void 0 ? { ingestionServer } : {},
|
|
1246
1283
|
...telemetryPersistence !== void 0 ? { telemetryPersistence } : {}
|
|
1247
1284
|
});
|
|
1285
|
+
if (ndjsonEmitter !== void 0) {
|
|
1286
|
+
const resolvedKeys = await resolveStoryKeys(adapter, projectRoot, { pipelineRunId: runId });
|
|
1287
|
+
ndjsonEmitter.emit({
|
|
1288
|
+
type: "pipeline:start",
|
|
1289
|
+
ts: new Date().toISOString(),
|
|
1290
|
+
run_id: runId,
|
|
1291
|
+
stories: resolvedKeys,
|
|
1292
|
+
concurrency
|
|
1293
|
+
});
|
|
1294
|
+
eventBus.on("orchestrator:story-phase-start", (payload) => {
|
|
1295
|
+
const phase = mapInternalPhaseToEventPhase(payload.phase);
|
|
1296
|
+
if (phase !== null) ndjsonEmitter.emit({
|
|
1297
|
+
type: "story:phase",
|
|
1298
|
+
ts: new Date().toISOString(),
|
|
1299
|
+
key: payload.storyKey,
|
|
1300
|
+
phase,
|
|
1301
|
+
status: "in_progress"
|
|
1302
|
+
});
|
|
1303
|
+
});
|
|
1304
|
+
eventBus.on("orchestrator:story-phase-complete", (payload) => {
|
|
1305
|
+
const phase = mapInternalPhaseToEventPhase(payload.phase);
|
|
1306
|
+
if (phase !== null) {
|
|
1307
|
+
const result = payload.result;
|
|
1308
|
+
ndjsonEmitter.emit({
|
|
1309
|
+
type: "story:phase",
|
|
1310
|
+
ts: new Date().toISOString(),
|
|
1311
|
+
key: payload.storyKey,
|
|
1312
|
+
phase,
|
|
1313
|
+
status: "complete",
|
|
1314
|
+
...phase === "code-review" && result?.verdict !== void 0 ? { verdict: result.verdict } : {},
|
|
1315
|
+
...phase === "create-story" && result?.story_file !== void 0 ? { file: result.story_file } : {}
|
|
1316
|
+
});
|
|
1317
|
+
}
|
|
1318
|
+
});
|
|
1319
|
+
eventBus.on("orchestrator:story-complete", (payload) => {
|
|
1320
|
+
ndjsonEmitter.emit({
|
|
1321
|
+
type: "story:done",
|
|
1322
|
+
ts: new Date().toISOString(),
|
|
1323
|
+
key: payload.storyKey,
|
|
1324
|
+
result: "success",
|
|
1325
|
+
review_cycles: payload.reviewCycles
|
|
1326
|
+
});
|
|
1327
|
+
});
|
|
1328
|
+
eventBus.on("orchestrator:story-escalated", (payload) => {
|
|
1329
|
+
const rawIssues = Array.isArray(payload.issues) ? payload.issues : [];
|
|
1330
|
+
const issues = rawIssues.map((issue) => {
|
|
1331
|
+
const iss = issue;
|
|
1332
|
+
return {
|
|
1333
|
+
severity: iss.severity ?? "unknown",
|
|
1334
|
+
file: iss.file ?? "",
|
|
1335
|
+
desc: iss.desc ?? iss.description ?? ""
|
|
1336
|
+
};
|
|
1337
|
+
});
|
|
1338
|
+
ndjsonEmitter.emit({
|
|
1339
|
+
type: "story:escalation",
|
|
1340
|
+
ts: new Date().toISOString(),
|
|
1341
|
+
key: payload.storyKey,
|
|
1342
|
+
reason: payload.lastVerdict ?? "escalated",
|
|
1343
|
+
cycles: payload.reviewCycles ?? 0,
|
|
1344
|
+
issues,
|
|
1345
|
+
...payload.diagnosis !== void 0 ? { diagnosis: payload.diagnosis } : {}
|
|
1346
|
+
});
|
|
1347
|
+
});
|
|
1348
|
+
eventBus.on("orchestrator:heartbeat", (payload) => {
|
|
1349
|
+
ndjsonEmitter.emit({
|
|
1350
|
+
type: "pipeline:heartbeat",
|
|
1351
|
+
ts: new Date().toISOString(),
|
|
1352
|
+
run_id: payload.runId,
|
|
1353
|
+
active_dispatches: payload.activeDispatches,
|
|
1354
|
+
completed_dispatches: payload.completedDispatches,
|
|
1355
|
+
queued_dispatches: payload.queuedDispatches
|
|
1356
|
+
});
|
|
1357
|
+
});
|
|
1358
|
+
}
|
|
1248
1359
|
eventBus.on("orchestrator:story-phase-complete", (payload) => {
|
|
1249
1360
|
try {
|
|
1250
1361
|
const result = payload.result;
|
|
@@ -1268,6 +1379,13 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1268
1379
|
const storyKeys = await resolveStoryKeys(adapter, projectRoot, { pipelineRunId: runId });
|
|
1269
1380
|
if (storyKeys.length === 0 && outputFormat === "human") process.stdout.write("[IMPLEMENTATION] No stories found for this run. Check solutioning phase output.\n");
|
|
1270
1381
|
await orchestrator.run(storyKeys);
|
|
1382
|
+
if (ndjsonEmitter !== void 0) ndjsonEmitter.emit({
|
|
1383
|
+
type: "pipeline:complete",
|
|
1384
|
+
ts: new Date().toISOString(),
|
|
1385
|
+
succeeded: storyKeys,
|
|
1386
|
+
failed: [],
|
|
1387
|
+
escalated: []
|
|
1388
|
+
});
|
|
1271
1389
|
if (outputFormat === "human") process.stdout.write("[IMPLEMENTATION] Complete\n");
|
|
1272
1390
|
}
|
|
1273
1391
|
if (stopAfter !== void 0 && currentPhase === stopAfter) {
|
|
@@ -1330,7 +1448,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1330
1448
|
}
|
|
1331
1449
|
}
|
|
1332
1450
|
function registerResumeCommand(program, _version = "0.0.0", projectRoot = process.cwd(), registry) {
|
|
1333
|
-
program.command("resume").description("Resume a previously interrupted pipeline run").option("--run-id <id>", "Pipeline run ID to resume (defaults to latest)").option("--pack <name>", "Methodology pack name", "bmad").option("--stop-after <phase>", "Stop pipeline after this phase completes (overrides saved state)").option("--concurrency <n>", "Maximum parallel conflict groups", (v) => parseInt(v, 10), 3).option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").action(async (opts) => {
|
|
1451
|
+
program.command("resume").description("Resume a previously interrupted pipeline run").option("--run-id <id>", "Pipeline run ID to resume (defaults to latest)").option("--pack <name>", "Methodology pack name", "bmad").option("--stop-after <phase>", "Stop pipeline after this phase completes (overrides saved state)").option("--concurrency <n>", "Maximum parallel conflict groups", (v) => parseInt(v, 10), 3).option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").option("--events", "Emit structured NDJSON events on stdout for programmatic consumption").action(async (opts) => {
|
|
1334
1452
|
const outputFormat = opts.outputFormat === "json" ? "json" : "human";
|
|
1335
1453
|
const exitCode = await runResumeAction({
|
|
1336
1454
|
runId: opts.runId,
|
|
@@ -1339,6 +1457,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
|
|
|
1339
1457
|
projectRoot: opts.projectRoot,
|
|
1340
1458
|
concurrency: opts.concurrency,
|
|
1341
1459
|
pack: opts.pack,
|
|
1460
|
+
events: opts.events,
|
|
1342
1461
|
registry
|
|
1343
1462
|
});
|
|
1344
1463
|
process.exitCode = exitCode;
|
|
@@ -1390,6 +1509,44 @@ async function runStatusAction(options) {
|
|
|
1390
1509
|
});
|
|
1391
1510
|
try {
|
|
1392
1511
|
await initSchema(adapter);
|
|
1512
|
+
let workGraph;
|
|
1513
|
+
try {
|
|
1514
|
+
const wgRepo = new WorkGraphRepository(adapter);
|
|
1515
|
+
const allStories = await adapter.query(`SELECT story_key, title, status FROM wg_stories`);
|
|
1516
|
+
if (allStories.length > 0) {
|
|
1517
|
+
const readyStoriesRaw = await wgRepo.getReadyStories();
|
|
1518
|
+
const blockedStoriesRaw = await wgRepo.getBlockedStories();
|
|
1519
|
+
const readyKeys = new Set(readyStoriesRaw.map((s) => s.story_key));
|
|
1520
|
+
const blockedKeys = new Set(blockedStoriesRaw.map((b) => b.story.story_key));
|
|
1521
|
+
const inProgressCount = allStories.filter((s) => s.status === "in_progress").length;
|
|
1522
|
+
const completeCount = allStories.filter((s) => s.status === "complete").length;
|
|
1523
|
+
const escalatedCount = allStories.filter((s) => s.status === "escalated").length;
|
|
1524
|
+
workGraph = {
|
|
1525
|
+
summary: {
|
|
1526
|
+
ready: readyKeys.size,
|
|
1527
|
+
blocked: blockedKeys.size,
|
|
1528
|
+
inProgress: inProgressCount,
|
|
1529
|
+
complete: completeCount,
|
|
1530
|
+
escalated: escalatedCount
|
|
1531
|
+
},
|
|
1532
|
+
readyStories: readyStoriesRaw.map((s) => ({
|
|
1533
|
+
key: s.story_key,
|
|
1534
|
+
title: s.title ?? s.story_key
|
|
1535
|
+
})),
|
|
1536
|
+
blockedStories: blockedStoriesRaw.map((b) => ({
|
|
1537
|
+
key: b.story.story_key,
|
|
1538
|
+
title: b.story.title ?? b.story.story_key,
|
|
1539
|
+
blockers: b.blockers.map((bl) => ({
|
|
1540
|
+
key: bl.key,
|
|
1541
|
+
title: bl.title,
|
|
1542
|
+
status: bl.status
|
|
1543
|
+
}))
|
|
1544
|
+
}))
|
|
1545
|
+
};
|
|
1546
|
+
}
|
|
1547
|
+
} catch (err) {
|
|
1548
|
+
logger$15.debug({ err }, "Work graph query failed, continuing without work graph data");
|
|
1549
|
+
}
|
|
1393
1550
|
let run;
|
|
1394
1551
|
if (runId !== void 0 && runId !== "") run = await getPipelineRunById(adapter, runId);
|
|
1395
1552
|
else run = await getLatestRun(adapter);
|
|
@@ -1457,7 +1614,8 @@ async function runStatusAction(options) {
|
|
|
1457
1614
|
stories_per_hour: storiesPerHour,
|
|
1458
1615
|
cost_usd: totalCostUsd
|
|
1459
1616
|
},
|
|
1460
|
-
story_states: storeStories
|
|
1617
|
+
story_states: storeStories,
|
|
1618
|
+
workGraph: workGraph ?? null
|
|
1461
1619
|
};
|
|
1462
1620
|
process.stdout.write(formatOutput(enhancedOutput, "json", true) + "\n");
|
|
1463
1621
|
} else {
|
|
@@ -1503,6 +1661,22 @@ async function runStatusAction(options) {
|
|
|
1503
1661
|
process.stdout.write("\nStateStore Story States:\n");
|
|
1504
1662
|
for (const s of storeStories) process.stdout.write(` ${s.storyKey}: ${s.phase} (${s.reviewCycles} review cycles)\n`);
|
|
1505
1663
|
}
|
|
1664
|
+
if (workGraph !== void 0) {
|
|
1665
|
+
const { summary, readyStories, blockedStories } = workGraph;
|
|
1666
|
+
process.stdout.write("\nWork Graph:\n");
|
|
1667
|
+
process.stdout.write(` ${summary.inProgress} in progress, ${summary.ready} ready, ${summary.blocked} blocked, ${summary.complete} complete, ${summary.escalated} escalated\n`);
|
|
1668
|
+
if (readyStories.length > 0) {
|
|
1669
|
+
process.stdout.write("\n Ready to dispatch:\n");
|
|
1670
|
+
for (const s of readyStories) process.stdout.write(` ${s.key}: ${s.title}\n`);
|
|
1671
|
+
}
|
|
1672
|
+
if (blockedStories.length > 0) {
|
|
1673
|
+
process.stdout.write("\n Blocked:\n");
|
|
1674
|
+
for (const b of blockedStories) {
|
|
1675
|
+
process.stdout.write(` ${b.key}: ${b.title}\n`);
|
|
1676
|
+
for (const bl of b.blockers) process.stdout.write(` waiting on ${bl.key} (${bl.status}): ${bl.title}\n`);
|
|
1677
|
+
}
|
|
1678
|
+
}
|
|
1679
|
+
}
|
|
1506
1680
|
process.stdout.write("\n");
|
|
1507
1681
|
process.stdout.write(formatTokenTelemetry(tokenSummary) + "\n");
|
|
1508
1682
|
}
|
|
@@ -2031,7 +2205,7 @@ async function runAmendAction(options) {
|
|
|
2031
2205
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
2032
2206
|
const dbDir = join(dbRoot, ".substrate");
|
|
2033
2207
|
const dbPath = join(dbDir, "substrate.db");
|
|
2034
|
-
const packPath = join(
|
|
2208
|
+
const packPath = join(dbRoot, "packs", packName);
|
|
2035
2209
|
const doltDir = join(dbRoot, ".substrate", "state", ".dolt");
|
|
2036
2210
|
if (!existsSync(dbPath) && !existsSync(doltDir)) {
|
|
2037
2211
|
process.stderr.write(`Error: Decision store not initialized. Run 'substrate init' first.\n`);
|
|
@@ -2749,7 +2923,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
2749
2923
|
await initSchema(expAdapter);
|
|
2750
2924
|
const { runRunAction: runPipeline } = await import(
|
|
2751
2925
|
/* @vite-ignore */
|
|
2752
|
-
"../run-
|
|
2926
|
+
"../run-BSs4Dn0j.js"
|
|
2753
2927
|
);
|
|
2754
2928
|
const runStoryFn = async (opts) => {
|
|
2755
2929
|
const exitCode = await runPipeline({
|
|
@@ -7142,7 +7316,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
7142
7316
|
}
|
|
7143
7317
|
return 0;
|
|
7144
7318
|
}
|
|
7145
|
-
const packPath = join(
|
|
7319
|
+
const packPath = join(dbRoot, "packs", packName);
|
|
7146
7320
|
const packLoader = createPackLoader();
|
|
7147
7321
|
let pack;
|
|
7148
7322
|
try {
|
|
@@ -7701,6 +7875,364 @@ function registerRoutingCommand(program) {
|
|
|
7701
7875
|
});
|
|
7702
7876
|
}
|
|
7703
7877
|
|
|
7878
|
+
//#endregion
|
|
7879
|
+
//#region src/modules/work-graph/schema.ts
|
|
7880
|
+
/**
|
|
7881
|
+
* Work-graph schema DDL constants.
|
|
7882
|
+
*
|
|
7883
|
+
* Story 31-1 placeholder — defines the `stories`, `story_dependencies`, and
|
|
7884
|
+
* `ready_stories` DDL used by the EpicIngester and downstream consumers.
|
|
7885
|
+
*
|
|
7886
|
+
* NOTE: This file is a minimal placeholder created by story 31-2 because story
|
|
7887
|
+
* 31-1 (schema creation) had not yet run. If story 31-1 produces a richer
|
|
7888
|
+
* schema, merge carefully and remove this note.
|
|
7889
|
+
*/
|
|
7890
|
+
const CREATE_STORIES_TABLE = `
|
|
7891
|
+
CREATE TABLE IF NOT EXISTS stories (
|
|
7892
|
+
story_key VARCHAR(50) NOT NULL,
|
|
7893
|
+
epic_num INT NOT NULL,
|
|
7894
|
+
story_num INT NOT NULL,
|
|
7895
|
+
title VARCHAR(500) NOT NULL,
|
|
7896
|
+
priority VARCHAR(10) NOT NULL,
|
|
7897
|
+
size VARCHAR(50) NOT NULL,
|
|
7898
|
+
sprint INT NOT NULL,
|
|
7899
|
+
status VARCHAR(50) NOT NULL DEFAULT 'planned',
|
|
7900
|
+
PRIMARY KEY (story_key)
|
|
7901
|
+
)
|
|
7902
|
+
`.trim();
|
|
7903
|
+
const CREATE_STORY_DEPENDENCIES_TABLE = `
|
|
7904
|
+
CREATE TABLE IF NOT EXISTS story_dependencies (
|
|
7905
|
+
story_key VARCHAR(50) NOT NULL,
|
|
7906
|
+
depends_on VARCHAR(50) NOT NULL,
|
|
7907
|
+
dependency_type VARCHAR(50) NOT NULL DEFAULT 'blocks',
|
|
7908
|
+
source VARCHAR(50) NOT NULL DEFAULT 'explicit',
|
|
7909
|
+
PRIMARY KEY (story_key, depends_on)
|
|
7910
|
+
)
|
|
7911
|
+
`.trim();
|
|
7912
|
+
const CREATE_READY_STORIES_VIEW = `
|
|
7913
|
+
CREATE VIEW IF NOT EXISTS ready_stories AS
|
|
7914
|
+
SELECT s.*
|
|
7915
|
+
FROM stories s
|
|
7916
|
+
WHERE s.status = 'planned'
|
|
7917
|
+
AND NOT EXISTS (
|
|
7918
|
+
SELECT 1 FROM story_dependencies sd
|
|
7919
|
+
JOIN stories blocking ON sd.depends_on = blocking.story_key
|
|
7920
|
+
WHERE sd.story_key = s.story_key
|
|
7921
|
+
AND blocking.status != 'done'
|
|
7922
|
+
)
|
|
7923
|
+
`.trim();
|
|
7924
|
+
|
|
7925
|
+
//#endregion
|
|
7926
|
+
//#region src/modules/work-graph/epic-parser.ts
|
|
7927
|
+
/** Regex for sprint header lines: `**Sprint 1 —` (em dash or hyphen) */
|
|
7928
|
+
const SPRINT_HEADER_RE = /^\*\*Sprint\s+(\d+)\s*[—–-]/i;
|
|
7929
|
+
/**
|
|
7930
|
+
* Regex for story lines: `- 31-2: Epic doc ingestion (P0, Medium)`
|
|
7931
|
+
* Captures: epicNum, storyNum, title, priority, size
|
|
7932
|
+
*/
|
|
7933
|
+
const STORY_LINE_RE = /^-\s+(\d+)-(\d+):\s+(.+?)\s+\((P\d+),\s+([\w-]+)\)\s*$/;
|
|
7934
|
+
/** Regex to find the story map section heading */
|
|
7935
|
+
const STORY_MAP_HEADING_RE = /^#{1,6}\s+.*Story\s+Map/im;
|
|
7936
|
+
/** Regex to find the dependency chain line */
|
|
7937
|
+
const DEPENDENCY_CHAIN_RE = /\*\*Dependency\s+chain\*\*:\s*(.+)/i;
|
|
7938
|
+
/** Regex for "also gates" clauses: `31-3 also gates 31-6, 31-7` */
|
|
7939
|
+
const ALSO_GATES_RE = /^([\d]+-[\d]+)\s+also\s+gates\s+(.+)$/i;
|
|
7940
|
+
var EpicParser = class {
|
|
7941
|
+
/**
|
|
7942
|
+
* Parse story metadata from an epic planning document.
|
|
7943
|
+
*
|
|
7944
|
+
* @param content - Full text of the epic markdown document.
|
|
7945
|
+
* @returns Array of `ParsedStory` objects, one per story line found.
|
|
7946
|
+
* @throws {Error} If the story map section is absent or no stories can be parsed.
|
|
7947
|
+
*/
|
|
7948
|
+
parseStories(content) {
|
|
7949
|
+
const headingMatch = STORY_MAP_HEADING_RE.exec(content);
|
|
7950
|
+
if (!headingMatch) throw new Error("No story map section found in document");
|
|
7951
|
+
const afterHeading = content.slice(headingMatch.index + headingMatch[0].length);
|
|
7952
|
+
const stories = [];
|
|
7953
|
+
let currentSprint = 0;
|
|
7954
|
+
for (const rawLine of afterHeading.split("\n")) {
|
|
7955
|
+
const line = rawLine.trim();
|
|
7956
|
+
const sprintMatch = SPRINT_HEADER_RE.exec(line);
|
|
7957
|
+
if (sprintMatch) {
|
|
7958
|
+
currentSprint = parseInt(sprintMatch[1], 10);
|
|
7959
|
+
continue;
|
|
7960
|
+
}
|
|
7961
|
+
const storyMatch = STORY_LINE_RE.exec(line);
|
|
7962
|
+
if (storyMatch) {
|
|
7963
|
+
const epicNum = parseInt(storyMatch[1], 10);
|
|
7964
|
+
const storyNum = parseInt(storyMatch[2], 10);
|
|
7965
|
+
stories.push({
|
|
7966
|
+
story_key: `${epicNum}-${storyNum}`,
|
|
7967
|
+
epic_num: epicNum,
|
|
7968
|
+
story_num: storyNum,
|
|
7969
|
+
title: storyMatch[3].trim(),
|
|
7970
|
+
priority: storyMatch[4],
|
|
7971
|
+
size: storyMatch[5],
|
|
7972
|
+
sprint: currentSprint
|
|
7973
|
+
});
|
|
7974
|
+
}
|
|
7975
|
+
}
|
|
7976
|
+
if (stories.length === 0) throw new Error("Story map section found but contained no parseable story lines");
|
|
7977
|
+
return stories;
|
|
7978
|
+
}
|
|
7979
|
+
/**
|
|
7980
|
+
* Parse dependency relationships from an epic planning document.
|
|
7981
|
+
*
|
|
7982
|
+
* If the `**Dependency chain**:` line is absent, returns an empty array
|
|
7983
|
+
* (not all epics declare dependencies).
|
|
7984
|
+
*
|
|
7985
|
+
* @param content - Full text of the epic markdown document.
|
|
7986
|
+
* @returns Array of `ParsedDependency` objects.
|
|
7987
|
+
*/
|
|
7988
|
+
parseDependencies(content) {
|
|
7989
|
+
const chainLineMatch = DEPENDENCY_CHAIN_RE.exec(content);
|
|
7990
|
+
if (!chainLineMatch) return [];
|
|
7991
|
+
const chainStr = chainLineMatch[1].trim();
|
|
7992
|
+
const dependencies = [];
|
|
7993
|
+
const clauses = chainStr.split(";").map((c) => c.trim()).filter(Boolean);
|
|
7994
|
+
for (const clause of clauses) {
|
|
7995
|
+
const alsoGatesMatch = ALSO_GATES_RE.exec(clause);
|
|
7996
|
+
if (alsoGatesMatch) {
|
|
7997
|
+
const gater = alsoGatesMatch[1].trim();
|
|
7998
|
+
const gatedList = alsoGatesMatch[2].split(",").map((t) => t.trim()).filter(Boolean);
|
|
7999
|
+
for (const gated of gatedList) dependencies.push({
|
|
8000
|
+
story_key: gated,
|
|
8001
|
+
depends_on: gater,
|
|
8002
|
+
dependency_type: "blocks",
|
|
8003
|
+
source: "explicit"
|
|
8004
|
+
});
|
|
8005
|
+
continue;
|
|
8006
|
+
}
|
|
8007
|
+
const parts = clause.split("→").map((p) => p.trim()).filter(Boolean);
|
|
8008
|
+
for (let i = 0; i < parts.length - 1; i++) {
|
|
8009
|
+
const upstream = parts[i];
|
|
8010
|
+
const downstream = parts[i + 1];
|
|
8011
|
+
dependencies.push({
|
|
8012
|
+
story_key: downstream,
|
|
8013
|
+
depends_on: upstream,
|
|
8014
|
+
dependency_type: "blocks",
|
|
8015
|
+
source: "explicit"
|
|
8016
|
+
});
|
|
8017
|
+
}
|
|
8018
|
+
}
|
|
8019
|
+
return dependencies;
|
|
8020
|
+
}
|
|
8021
|
+
};
|
|
8022
|
+
|
|
8023
|
+
//#endregion
|
|
8024
|
+
//#region src/modules/work-graph/epic-ingester.ts
|
|
8025
|
+
var EpicIngester = class {
|
|
8026
|
+
adapter;
|
|
8027
|
+
constructor(adapter) {
|
|
8028
|
+
this.adapter = adapter;
|
|
8029
|
+
}
|
|
8030
|
+
/**
|
|
8031
|
+
* Upsert stories and sync dependencies into the database.
|
|
8032
|
+
*
|
|
8033
|
+
* Both operations are wrapped in a single transaction: if either fails the
|
|
8034
|
+
* entire batch is rolled back.
|
|
8035
|
+
*
|
|
8036
|
+
* @param stories - Parsed story metadata from `EpicParser.parseStories()`.
|
|
8037
|
+
* @param dependencies - Parsed dependency edges from `EpicParser.parseDependencies()`.
|
|
8038
|
+
* @returns `IngestResult` with counts of affected rows.
|
|
8039
|
+
*/
|
|
8040
|
+
async ingest(stories, dependencies) {
|
|
8041
|
+
const cycle = detectCycles(dependencies);
|
|
8042
|
+
if (cycle !== null) throw new CyclicDependencyError(cycle);
|
|
8043
|
+
return this.adapter.transaction(async (tx) => {
|
|
8044
|
+
let storiesUpserted = 0;
|
|
8045
|
+
for (const story of stories) {
|
|
8046
|
+
const existing = await tx.query("SELECT status FROM stories WHERE story_key = ?", [story.story_key]);
|
|
8047
|
+
if (existing.length > 0) await tx.query("UPDATE stories SET title = ?, priority = ?, size = ?, sprint = ? WHERE story_key = ?", [
|
|
8048
|
+
story.title,
|
|
8049
|
+
story.priority,
|
|
8050
|
+
story.size,
|
|
8051
|
+
story.sprint,
|
|
8052
|
+
story.story_key
|
|
8053
|
+
]);
|
|
8054
|
+
else {
|
|
8055
|
+
await tx.query("INSERT INTO stories (story_key, epic_num, story_num, title, priority, size, sprint, status) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", [
|
|
8056
|
+
story.story_key,
|
|
8057
|
+
story.epic_num,
|
|
8058
|
+
story.story_num,
|
|
8059
|
+
story.title,
|
|
8060
|
+
story.priority,
|
|
8061
|
+
story.size,
|
|
8062
|
+
story.sprint,
|
|
8063
|
+
"planned"
|
|
8064
|
+
]);
|
|
8065
|
+
storiesUpserted++;
|
|
8066
|
+
}
|
|
8067
|
+
}
|
|
8068
|
+
const epicNum = stories.length > 0 ? stories[0].epic_num : null;
|
|
8069
|
+
if (epicNum !== null) await tx.query(`DELETE FROM story_dependencies WHERE source = 'explicit' AND story_key LIKE ?`, [`${epicNum}-%`]);
|
|
8070
|
+
for (const dep of dependencies) await tx.query("INSERT INTO story_dependencies (story_key, depends_on, dependency_type, source) VALUES (?, ?, ?, ?)", [
|
|
8071
|
+
dep.story_key,
|
|
8072
|
+
dep.depends_on,
|
|
8073
|
+
dep.dependency_type,
|
|
8074
|
+
dep.source
|
|
8075
|
+
]);
|
|
8076
|
+
return {
|
|
8077
|
+
storiesUpserted,
|
|
8078
|
+
dependenciesReplaced: dependencies.length
|
|
8079
|
+
};
|
|
8080
|
+
});
|
|
8081
|
+
}
|
|
8082
|
+
};
|
|
8083
|
+
|
|
8084
|
+
//#endregion
|
|
8085
|
+
//#region src/cli/commands/ingest-epic.ts
|
|
8086
|
+
/**
 * Registers the `ingest-epic <epic-doc-path>` CLI command.
 *
 * The command reads an epic planning document from disk, parses story and
 * dependency metadata out of it with EpicParser, ensures the work-graph
 * tables exist, and upserts the parsed data via EpicIngester. All failures
 * are reported on stderr with `process.exitCode = 1` rather than thrown.
 *
 * @param {import('commander').Command} program - commander program to extend
 */
function registerIngestEpicCommand(program) {
  program
    .command("ingest-epic <epic-doc-path>")
    .description("Parse an epic planning doc and upsert story metadata into the work-graph")
    .action(async (epicDocPath) => {
      // Fail fast on a missing file before attempting any parsing.
      if (!existsSync$1(epicDocPath)) {
        process.stderr.write(`Error: File not found: ${epicDocPath}\n`);
        process.exitCode = 1;
        return;
      }
      let content;
      try {
        content = readFileSync$1(epicDocPath, "utf-8");
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(`Error: Cannot read file ${epicDocPath}: ${msg}\n`);
        process.exitCode = 1;
        return;
      }
      const parser = new EpicParser();
      let stories;
      let dependencies;
      try {
        stories = parser.parseStories(content);
        dependencies = parser.parseDependencies(content);
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(`Error: ${msg}\n`);
        process.exitCode = 1;
        return;
      }
      // FIX: the original dereferenced stories[0].epic_num unguarded, so a
      // document that parsed cleanly but contained no stories crashed with a
      // raw TypeError after the ingest had already run. Report it cleanly
      // before any database work instead.
      if (stories.length === 0) {
        process.stderr.write(`Error: No stories found in ${epicDocPath}\n`);
        process.exitCode = 1;
        return;
      }
      const adapter = createDatabaseAdapter({
        backend: "auto",
        basePath: process.cwd()
      });
      try {
        // Ensure the work-graph schema exists before ingesting.
        await adapter.exec(CREATE_STORIES_TABLE);
        await adapter.exec(CREATE_STORY_DEPENDENCIES_TABLE);
        const ingester = new EpicIngester(adapter);
        const result = await ingester.ingest(stories, dependencies);
        // All stories in one doc share the same epic number; safe now that
        // the empty-stories case is handled above.
        const epicNum = stories[0].epic_num;
        process.stdout.write(`Ingested ${result.storiesUpserted} stories and ${result.dependenciesReplaced} dependencies from epic ${epicNum}\n`);
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(`Error: ${msg}\n`);
        process.exitCode = 1;
      } finally {
        // Always release the database handle, even on failure.
        await adapter.close();
      }
    });
}
|
|
8134
|
+
|
|
8135
|
+
//#endregion
|
|
8136
|
+
//#region src/cli/commands/epic-status.ts
|
|
8137
|
+
/**
 * Returns a new array of stories ordered by the numeric suffix of their
 * story key (the segment after the first "-", e.g. "7-12" sorts by 12).
 * Keys without a numeric suffix sort as 0. The input array is not mutated.
 *
 * @param {Array<{story_key: string}>} stories
 * @returns {Array<{story_key: string}>} sorted copy
 */
function sortByStoryKey(stories) {
  // Extract the story number; missing segments fall back to "0".
  const storyNumber = (story) => Number.parseInt(story.story_key.split("-")[1] ?? "0", 10);
  const ordered = stories.slice();
  ordered.sort((left, right) => storyNumber(left) - storyNumber(right));
  return ordered;
}
|
|
8144
|
+
// Total printed width of a status badge including its surrounding brackets,
// i.e. "[" + label + "]".
const BADGE_WIDTH = 12;
// Display labels for story statuses. The trailing spaces presumably pad each
// label toward BADGE_WIDTH - 2 characters so badges align in column output —
// confirm exact padding against the original source; a diff rendering can
// collapse runs of spaces. Looked up by status in getBadge below.
const STATUS_LABELS = {
  complete: "complete ",
  in_progress: "in_progress",
  ready: "ready ",
  planned: "planned ",
  escalated: "escalated ",
  blocked: "blocked "
};
|
|
8153
|
+
/**
 * Formats a bracketed, fixed-width status badge for a story line.
 * A blocked story always shows the "blocked" badge regardless of its
 * stored status; unknown statuses are padded to the badge width.
 *
 * @param {string} status - the story's stored status
 * @param {boolean} isBlocked - whether the story is currently blocked
 * @returns {string} badge text such as "[complete  ]"
 */
function getBadge(status, isBlocked) {
  if (isBlocked) {
    // Blocked overrides the stored status in the human-readable view.
    const blockedLabel = STATUS_LABELS["blocked"] ?? "blocked ";
    return `[${blockedLabel}]`;
  }
  // Fall back to padding the raw status when it has no canned label.
  const text = STATUS_LABELS[status] ?? status.padEnd(BADGE_WIDTH - 2);
  return `[${text}]`;
}
|
|
8158
|
+
/**
 * Renders a status view of one epic from the work graph, either as
 * human-readable lines or as pretty-printed JSON on stdout.
 *
 * Ensures the work-graph tables exist, loads the epic's stories, overlays
 * blocked/ready information from the repository, and prints per-story lines
 * plus a summary. Writes to stderr and sets `process.exitCode = 1` when the
 * epic has no stories.
 *
 * @param {number|string} epicNum - epic identifier to report on
 * @param {{outputFormat: "human"|"json"}} opts - output format selection
 */
async function runEpicStatusAction(epicNum, opts) {
  const adapter = createDatabaseAdapter({
    backend: "auto",
    basePath: process.cwd()
  });
  try {
    // Create tables if missing so a fresh workspace gets a clear
    // "not populated" message instead of a SQL error.
    await adapter.exec(CREATE_STORIES_TABLE);
    await adapter.exec(CREATE_STORY_DEPENDENCIES_TABLE);
    const repo = new WorkGraphRepository(adapter);
    const fetched = await repo.listStories({ epic: epicNum });
    if (fetched.length === 0) {
      process.stderr.write(`No stories found for epic ${epicNum} (work graph not populated — run \`substrate ingest-epic\` first)\n`);
      process.exitCode = 1;
      return;
    }
    const stories = sortByStoryKey(fetched);
    // Blocked entries for this epic, keyed by story key.
    const blockedByKey = new Map();
    for (const entry of await repo.getBlockedStories()) {
      if (entry.story.epic === epicNum) blockedByKey.set(entry.story.story_key, entry);
    }
    // Keys of this epic's stories that the repository considers ready.
    const readyKeys = new Set();
    for (const candidate of await repo.getReadyStories()) {
      if (candidate.epic === epicNum) readyKeys.add(candidate.story_key);
    }
    const countWithStatus = (status) => stories.filter((s) => s.status === status).length;
    // NOTE(review): `ready` assumes the blocked stories are a subset of the
    // set returned by getReadyStories — confirm; otherwise this undercounts.
    const summary = {
      total: stories.length,
      complete: countWithStatus("complete"),
      inProgress: countWithStatus("in_progress"),
      escalated: countWithStatus("escalated"),
      blocked: blockedByKey.size,
      ready: readyKeys.size - blockedByKey.size,
      planned: stories.filter((s) => (s.status === "planned" || s.status === "ready") && !blockedByKey.has(s.story_key) && !readyKeys.has(s.story_key)).length
    };
    if (opts.outputFormat === "json") {
      const storyEntries = stories.map((s) => {
        const blockedInfo = blockedByKey.get(s.story_key);
        const entry = {
          key: s.story_key,
          title: s.title ?? null,
          // Blocked state overrides the stored status in the report.
          status: blockedInfo ? "blocked" : s.status
        };
        if (blockedInfo) {
          entry.blockers = blockedInfo.blockers.map((b) => ({
            key: b.key,
            title: b.title,
            status: b.status
          }));
        }
        return entry;
      });
      const output = {
        epic: epicNum,
        stories: storyEntries,
        summary
      };
      process.stdout.write(JSON.stringify(output, null, 2) + "\n");
    } else {
      process.stdout.write(`Epic ${epicNum} — ${stories.length} stories\n\n`);
      for (const story of stories) {
        const blockedInfo = blockedByKey.get(story.story_key);
        const badge = getBadge(story.status, blockedByKey.has(story.story_key));
        const keyPadded = story.story_key.padEnd(6);
        const displayTitle = story.title ?? story.story_key;
        let line = ` ${badge} ${keyPadded} ${displayTitle}`;
        if (blockedInfo) {
          const blockerList = blockedInfo.blockers.map((b) => `${b.key} (${b.status})`).join(", ");
          line += ` [waiting on: ${blockerList}]`;
        }
        process.stdout.write(line + "\n");
      }
      process.stdout.write("\n");
      process.stdout.write(`Epic ${epicNum}: ${summary.complete} complete · ${summary.inProgress} in_progress · ${summary.ready} ready · ${summary.blocked} blocked · ${summary.planned} planned · ${summary.escalated} escalated\n`);
    }
  } finally {
    // Always release the database handle.
    await adapter.close();
  }
}
|
|
8229
|
+
/**
 * Registers the `epic-status <epic>` CLI command, which delegates to
 * runEpicStatusAction with a normalized output format ("json" only when
 * explicitly requested; anything else falls back to "human").
 *
 * @param {import('commander').Command} program - commander program to extend
 */
function registerEpicStatusCommand(program) {
  program
    .command("epic-status <epic>")
    .description("Show a generated status view of an epic from the Dolt work graph")
    .option("--output-format <format>", "Output format: human (default) or json", "human")
    .action(async (epic, options) => {
      // Coerce any unrecognized value back to the default "human" format.
      let format;
      if (options.outputFormat === "json") {
        format = "json";
      } else {
        format = "human";
      }
      await runEpicStatusAction(epic, { outputFormat: format });
    });
}
|
|
8235
|
+
|
|
7704
8236
|
//#endregion
|
|
7705
8237
|
//#region src/cli/index.ts
|
|
7706
8238
|
process.setMaxListeners(20);
|
|
@@ -7761,6 +8293,8 @@ async function createProgram() {
|
|
|
7761
8293
|
registerWorktreesCommand(program, version);
|
|
7762
8294
|
registerBrainstormCommand(program, version);
|
|
7763
8295
|
registerExportCommand(program, version);
|
|
8296
|
+
registerIngestEpicCommand(program);
|
|
8297
|
+
registerEpicStatusCommand(program);
|
|
7764
8298
|
registerUpgradeCommand(program);
|
|
7765
8299
|
return program;
|
|
7766
8300
|
}
|