substrate-ai 0.5.1 → 0.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -1,12 +1,12 @@
1
1
  #!/usr/bin/env node
2
- import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-BD0Ugp7F.js";
2
+ import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, detectCycles, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-D7a-qzk9.js";
3
3
  import { createLogger } from "../logger-D2fS2ccL.js";
4
4
  import { AdapterRegistry } from "../adapter-registry-BkUvZSKJ.js";
5
5
  import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-DtZW1maj.js";
6
6
  import { ConfigError, createEventBus } from "../helpers-BihqWgVe.js";
7
7
  import { RoutingRecommender } from "../routing-BUE9pIxW.js";
8
8
  import { addTokenUsage, createDecision, createPipelineRun, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-C6MF2Cax.js";
9
- import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-CidppHy-.js";
9
+ import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-BRpT8MYF.js";
10
10
  import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-C-fdrHF_.js";
11
11
  import "../version-manager-impl-DTlmGvHb.js";
12
12
  import { registerUpgradeCommand } from "../upgrade-C8_VcI8B.js";
@@ -1004,6 +1004,26 @@ function registerConfigCommand(program, _version) {
1004
1004
  });
1005
1005
  }
1006
1006
 
1007
+ //#endregion
1008
+ //#region src/modules/work-graph/errors.ts
1009
+ /**
1010
+ * Work-graph error types.
1011
+ *
1012
+ * Story 31-7: Cycle Detection in Work Graph
1013
+ */
1014
/**
 * Raised when a story dependency list contains a cycle.
 *
 * Thrown by `EpicIngester.ingest()` before any rows are written. The
 * `cycle` property holds the offending path of story keys, with the
 * first and last entries identical (e.g. `['A', 'B', 'A']`).
 */
var CyclicDependencyError = class extends Error {
	name = "CyclicDependencyError";
	constructor(cycle) {
		super(`Cyclic dependency detected: ${cycle.join(" → ")}`);
		this.cycle = cycle;
	}
};
1026
+
1007
1027
  //#endregion
1008
1028
  //#region src/cli/commands/resume.ts
1009
1029
  const logger$16 = createLogger("resume-cmd");
@@ -1017,7 +1037,6 @@ function mapInternalPhaseToEventPhase(internalPhase) {
1017
1037
  case "IN_REVIEW": return "code-review";
1018
1038
  case "IN_MINOR_FIX":
1019
1039
  case "IN_MAJOR_FIX": return "fix";
1020
- case "IN_TEST_PLANNING": return "test-planning";
1021
1040
  default: return null;
1022
1041
  }
1023
1042
  }
@@ -1490,6 +1509,44 @@ async function runStatusAction(options) {
1490
1509
  });
1491
1510
  try {
1492
1511
  await initSchema(adapter);
1512
+ let workGraph;
1513
+ try {
1514
+ const wgRepo = new WorkGraphRepository(adapter);
1515
+ const allStories = await adapter.query(`SELECT story_key, title, status FROM wg_stories`);
1516
+ if (allStories.length > 0) {
1517
+ const readyStoriesRaw = await wgRepo.getReadyStories();
1518
+ const blockedStoriesRaw = await wgRepo.getBlockedStories();
1519
+ const readyKeys = new Set(readyStoriesRaw.map((s) => s.story_key));
1520
+ const blockedKeys = new Set(blockedStoriesRaw.map((b) => b.story.story_key));
1521
+ const inProgressCount = allStories.filter((s) => s.status === "in_progress").length;
1522
+ const completeCount = allStories.filter((s) => s.status === "complete").length;
1523
+ const escalatedCount = allStories.filter((s) => s.status === "escalated").length;
1524
+ workGraph = {
1525
+ summary: {
1526
+ ready: readyKeys.size,
1527
+ blocked: blockedKeys.size,
1528
+ inProgress: inProgressCount,
1529
+ complete: completeCount,
1530
+ escalated: escalatedCount
1531
+ },
1532
+ readyStories: readyStoriesRaw.map((s) => ({
1533
+ key: s.story_key,
1534
+ title: s.title ?? s.story_key
1535
+ })),
1536
+ blockedStories: blockedStoriesRaw.map((b) => ({
1537
+ key: b.story.story_key,
1538
+ title: b.story.title ?? b.story.story_key,
1539
+ blockers: b.blockers.map((bl) => ({
1540
+ key: bl.key,
1541
+ title: bl.title,
1542
+ status: bl.status
1543
+ }))
1544
+ }))
1545
+ };
1546
+ }
1547
+ } catch (err) {
1548
+ logger$15.debug({ err }, "Work graph query failed, continuing without work graph data");
1549
+ }
1493
1550
  let run;
1494
1551
  if (runId !== void 0 && runId !== "") run = await getPipelineRunById(adapter, runId);
1495
1552
  else run = await getLatestRun(adapter);
@@ -1557,7 +1614,8 @@ async function runStatusAction(options) {
1557
1614
  stories_per_hour: storiesPerHour,
1558
1615
  cost_usd: totalCostUsd
1559
1616
  },
1560
- story_states: storeStories
1617
+ story_states: storeStories,
1618
+ workGraph: workGraph ?? null
1561
1619
  };
1562
1620
  process.stdout.write(formatOutput(enhancedOutput, "json", true) + "\n");
1563
1621
  } else {
@@ -1603,6 +1661,22 @@ async function runStatusAction(options) {
1603
1661
  process.stdout.write("\nStateStore Story States:\n");
1604
1662
  for (const s of storeStories) process.stdout.write(` ${s.storyKey}: ${s.phase} (${s.reviewCycles} review cycles)\n`);
1605
1663
  }
1664
+ if (workGraph !== void 0) {
1665
+ const { summary, readyStories, blockedStories } = workGraph;
1666
+ process.stdout.write("\nWork Graph:\n");
1667
+ process.stdout.write(` ${summary.inProgress} in progress, ${summary.ready} ready, ${summary.blocked} blocked, ${summary.complete} complete, ${summary.escalated} escalated\n`);
1668
+ if (readyStories.length > 0) {
1669
+ process.stdout.write("\n Ready to dispatch:\n");
1670
+ for (const s of readyStories) process.stdout.write(` ${s.key}: ${s.title}\n`);
1671
+ }
1672
+ if (blockedStories.length > 0) {
1673
+ process.stdout.write("\n Blocked:\n");
1674
+ for (const b of blockedStories) {
1675
+ process.stdout.write(` ${b.key}: ${b.title}\n`);
1676
+ for (const bl of b.blockers) process.stdout.write(` waiting on ${bl.key} (${bl.status}): ${bl.title}\n`);
1677
+ }
1678
+ }
1679
+ }
1606
1680
  process.stdout.write("\n");
1607
1681
  process.stdout.write(formatTokenTelemetry(tokenSummary) + "\n");
1608
1682
  }
@@ -2835,7 +2909,7 @@ async function runSupervisorAction(options, deps = {}) {
2835
2909
  try {
2836
2910
  const { createExperimenter } = await import(
2837
2911
  /* @vite-ignore */
2838
- "../experimenter-CoR0k66d.js"
2912
+ "../experimenter-CjfzjmwY.js"
2839
2913
  );
2840
2914
  const { getLatestRun: getLatest } = await import(
2841
2915
  /* @vite-ignore */
@@ -2849,7 +2923,7 @@ async function runSupervisorAction(options, deps = {}) {
2849
2923
  await initSchema(expAdapter);
2850
2924
  const { runRunAction: runPipeline } = await import(
2851
2925
  /* @vite-ignore */
2852
- "../run-B-TUWMCv.js"
2926
+ "../run-DE9y1W6N.js"
2853
2927
  );
2854
2928
  const runStoryFn = async (opts) => {
2855
2929
  const exitCode = await runPipeline({
@@ -7806,23 +7880,19 @@ function registerRoutingCommand(program) {
7806
7880
  /**
7807
7881
  * Work-graph schema DDL constants.
7808
7882
  *
7809
- * Story 31-1 placeholder defines the `stories`, `story_dependencies`, and
7810
- * `ready_stories` DDL used by the EpicIngester and downstream consumers.
7811
- *
7812
- * NOTE: This file is a minimal placeholder created by story 31-2 because story
7813
- * 31-1 (schema creation) had not yet run. If story 31-1 produces a richer
7814
- * schema, merge carefully and remove this note.
7883
+ * Aligned with the authoritative schema in src/modules/state/schema.sql.
7884
+ * Table names use `wg_stories` and `story_dependencies`.
7815
7885
  */
7816
7886
  const CREATE_STORIES_TABLE = `
7817
- CREATE TABLE IF NOT EXISTS stories (
7818
- story_key VARCHAR(50) NOT NULL,
7819
- epic_num INT NOT NULL,
7820
- story_num INT NOT NULL,
7821
- title VARCHAR(500) NOT NULL,
7822
- priority VARCHAR(10) NOT NULL,
7823
- size VARCHAR(50) NOT NULL,
7824
- sprint INT NOT NULL,
7825
- status VARCHAR(50) NOT NULL DEFAULT 'planned',
7887
+ CREATE TABLE IF NOT EXISTS wg_stories (
7888
+ story_key VARCHAR(20) NOT NULL,
7889
+ epic VARCHAR(20) NOT NULL,
7890
+ title VARCHAR(255),
7891
+ status VARCHAR(30) NOT NULL DEFAULT 'planned',
7892
+ spec_path VARCHAR(500),
7893
+ created_at DATETIME,
7894
+ updated_at DATETIME,
7895
+ completed_at DATETIME,
7826
7896
  PRIMARY KEY (story_key)
7827
7897
  )
7828
7898
  `.trim();
@@ -7832,31 +7902,33 @@ CREATE TABLE IF NOT EXISTS story_dependencies (
7832
7902
  depends_on VARCHAR(50) NOT NULL,
7833
7903
  dependency_type VARCHAR(50) NOT NULL DEFAULT 'blocks',
7834
7904
  source VARCHAR(50) NOT NULL DEFAULT 'explicit',
7905
+ created_at DATETIME,
7835
7906
  PRIMARY KEY (story_key, depends_on)
7836
7907
  )
7837
7908
  `.trim();
7838
7909
  const CREATE_READY_STORIES_VIEW = `
7839
7910
  CREATE VIEW IF NOT EXISTS ready_stories AS
7840
7911
  SELECT s.*
7841
- FROM stories s
7842
- WHERE s.status = 'planned'
7912
+ FROM wg_stories s
7913
+ WHERE s.status IN ('planned', 'ready')
7843
7914
  AND NOT EXISTS (
7844
7915
  SELECT 1 FROM story_dependencies sd
7845
- JOIN stories blocking ON sd.depends_on = blocking.story_key
7916
+ JOIN wg_stories blocking ON sd.depends_on = blocking.story_key
7846
7917
  WHERE sd.story_key = s.story_key
7847
- AND blocking.status != 'done'
7918
+ AND sd.dependency_type = 'blocks'
7919
+ AND blocking.status <> 'complete'
7848
7920
  )
7849
7921
  `.trim();
7850
7922
 
7851
7923
  //#endregion
7852
7924
  //#region src/modules/work-graph/epic-parser.ts
7853
- /** Regex for sprint header lines: `**Sprint 1 —` (em dash or hyphen) */
7854
- const SPRINT_HEADER_RE = /^\*\*Sprint\s+(\d+)\s*[—–-]/i;
7925
+ /** Regex for sprint header lines: `**Sprint 1 —` or `Sprint 1 —` (with or without bold markers) */
7926
+ const SPRINT_HEADER_RE = /^(?:\*\*)?Sprint\s+(\d+)\s*[—–-]/i;
7855
7927
  /**
7856
7928
  * Regex for story lines: `- 31-2: Epic doc ingestion (P0, Medium)`
7857
7929
  * Captures: epicNum, storyNum, title, priority, size
7858
7930
  */
7859
- const STORY_LINE_RE = /^-\s+(\d+)-(\d+):\s+(.+?)\s+\((P\d+),\s+([\w-]+)\)\s*$/;
7931
+ const STORY_LINE_RE = /^(?:-\s+)?(\d+)-(\d+):\s+(.+?)\s+\((P\d+),\s+([\w-]+)\)\s*$/;
7860
7932
  /** Regex to find the story map section heading */
7861
7933
  const STORY_MAP_HEADING_RE = /^#{1,6}\s+.*Story\s+Map/im;
7862
7934
  /** Regex to find the dependency chain line */
@@ -7879,6 +7951,7 @@ var EpicParser = class {
7879
7951
  let currentSprint = 0;
7880
7952
  for (const rawLine of afterHeading.split("\n")) {
7881
7953
  const line = rawLine.trim();
7954
+ if (line.startsWith("```")) continue;
7882
7955
  const sprintMatch = SPRINT_HEADER_RE.exec(line);
7883
7956
  if (sprintMatch) {
7884
7957
  currentSprint = parseInt(sprintMatch[1], 10);
@@ -7964,27 +8037,26 @@ var EpicIngester = class {
7964
8037
  * @returns `IngestResult` with counts of affected rows.
7965
8038
  */
7966
8039
  async ingest(stories, dependencies) {
8040
+ const cycle = detectCycles(dependencies);
8041
+ if (cycle !== null) throw new CyclicDependencyError(cycle);
7967
8042
  return this.adapter.transaction(async (tx) => {
7968
8043
  let storiesUpserted = 0;
7969
8044
  for (const story of stories) {
7970
- const existing = await tx.query("SELECT status FROM stories WHERE story_key = ?", [story.story_key]);
7971
- if (existing.length > 0) await tx.query("UPDATE stories SET title = ?, priority = ?, size = ?, sprint = ? WHERE story_key = ?", [
8045
+ const existing = await tx.query("SELECT status FROM wg_stories WHERE story_key = ?", [story.story_key]);
8046
+ if (existing.length > 0) await tx.query("UPDATE wg_stories SET title = ?, updated_at = ? WHERE story_key = ?", [
7972
8047
  story.title,
7973
- story.priority,
7974
- story.size,
7975
- story.sprint,
8048
+ new Date().toISOString(),
7976
8049
  story.story_key
7977
8050
  ]);
7978
8051
  else {
7979
- await tx.query("INSERT INTO stories (story_key, epic_num, story_num, title, priority, size, sprint, status) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", [
8052
+ const now = new Date().toISOString();
8053
+ await tx.query("INSERT INTO wg_stories (story_key, epic, title, status, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", [
7980
8054
  story.story_key,
7981
- story.epic_num,
7982
- story.story_num,
8055
+ String(story.epic_num),
7983
8056
  story.title,
7984
- story.priority,
7985
- story.size,
7986
- story.sprint,
7987
- "planned"
8057
+ "planned",
8058
+ now,
8059
+ now
7988
8060
  ]);
7989
8061
  storiesUpserted++;
7990
8062
  }
@@ -8056,6 +8128,107 @@ function registerIngestEpicCommand(program) {
8056
8128
  });
8057
8129
  }
8058
8130
 
8131
+ //#endregion
8132
+ //#region src/cli/commands/epic-status.ts
8133
/**
 * Return a copy of `stories` ordered by numeric story number — the digits
 * after the first "-" in `story_key` (e.g. "31-2" → 2). Keys with no
 * story-number segment sort as 0. The input array is not mutated.
 */
function sortByStoryKey(stories) {
	const storyNumber = (story) => Number.parseInt(story.story_key.split("-")[1] ?? "0", 10);
	return [...stories].sort((a, b) => storyNumber(a) - storyNumber(b));
}
8140
// Total badge width budget (label plus the two surrounding brackets).
const BADGE_WIDTH = 12;
// Display labels per work-graph status, space-padded so badges line up
// with the longest label ("in_progress", 11 chars).
// NOTE(review): the exact trailing-space padding of these literals may
// have been normalized in transit — verify the widths align in output.
const STATUS_LABELS = {
	complete: "complete ",
	in_progress: "in_progress",
	ready: "ready ",
	planned: "planned ",
	escalated: "escalated ",
	blocked: "blocked "
};
/**
 * Render the bracketed status badge shown in the epic-status listing.
 *
 * @param status - Stored work-graph status (e.g. "complete", "planned").
 * @param isBlocked - When true the story is presented as blocked,
 *   overriding its stored status.
 * @returns A string like "[in_progress]".
 */
function getBadge(status, isBlocked) {
	// A blocked story always shows the "blocked" label.
	if (isBlocked) return `[${STATUS_LABELS["blocked"] ?? "blocked "}]`;
	// Unknown statuses fall back to the raw value padded to badge width.
	const label = STATUS_LABELS[status] ?? status.padEnd(BADGE_WIDTH - 2);
	return `[${label}]`;
}
8154
/**
 * Print a status view of one epic from the Dolt work graph.
 *
 * Queries `wg_stories` / `story_dependencies` via WorkGraphRepository and
 * writes either a human-readable listing or a JSON document (per
 * `opts.outputFormat`) to stdout. If the epic has no stories, writes a
 * hint to stderr and sets `process.exitCode = 1`.
 *
 * @param epicNum - Epic identifier used to filter stories (matches the
 *   `epic` column).
 * @param opts - `{ outputFormat: "human" | "json" }`.
 */
async function runEpicStatusAction(epicNum, opts) {
	const adapter = createDatabaseAdapter({
		backend: "auto",
		basePath: process.cwd()
	});
	try {
		// Ensure the work-graph tables exist (idempotent CREATE IF NOT EXISTS).
		await adapter.exec(CREATE_STORIES_TABLE);
		await adapter.exec(CREATE_STORY_DEPENDENCIES_TABLE);
		const repo = new WorkGraphRepository(adapter);
		const rawStories = await repo.listStories({ epic: epicNum });
		if (rawStories.length === 0) {
			process.stderr.write(`No stories found for epic ${epicNum} (work graph not populated — run \`substrate ingest-epic\` first)\n`);
			process.exitCode = 1;
			return;
		}
		const stories = sortByStoryKey(rawStories);
		// Blocked/ready are computed graph-wide, then filtered to this epic.
		const allBlocked = await repo.getBlockedStories();
		const epicBlockedMap = new Map(allBlocked.filter((b) => b.story.epic === epicNum).map((b) => [b.story.story_key, b]));
		const allReady = await repo.getReadyStories();
		const epicReadySet = new Set(allReady.filter((s) => s.epic === epicNum).map((s) => s.story_key));
		const summary = {
			total: stories.length,
			complete: stories.filter((s) => s.status === "complete").length,
			inProgress: stories.filter((s) => s.status === "in_progress").length,
			escalated: stories.filter((s) => s.status === "escalated").length,
			blocked: epicBlockedMap.size,
			// NOTE(review): if getReadyStories() already excludes blocked
			// stories, ready and blocked sets are disjoint and this
			// subtraction under-counts (can go negative) — confirm against
			// WorkGraphRepository semantics.
			ready: epicReadySet.size - epicBlockedMap.size,
			// "planned" here means planned/ready stories that appear in
			// neither the blocked map nor the ready set.
			planned: stories.filter((s) => (s.status === "planned" || s.status === "ready") && !epicBlockedMap.has(s.story_key) && !epicReadySet.has(s.story_key)).length
		};
		if (opts.outputFormat === "json") {
			const output = {
				epic: epicNum,
				stories: stories.map((s) => {
					const blockedInfo = epicBlockedMap.get(s.story_key);
					// A blocked story reports status "blocked" regardless of
					// its stored status; blockers are attached when present.
					const entry = {
						key: s.story_key,
						title: s.title ?? null,
						status: blockedInfo ? "blocked" : s.status
					};
					if (blockedInfo) entry.blockers = blockedInfo.blockers.map((b) => ({
						key: b.key,
						title: b.title,
						status: b.status
					}));
					return entry;
				}),
				summary
			};
			process.stdout.write(JSON.stringify(output, null, 2) + "\n");
		} else {
			process.stdout.write(`Epic ${epicNum} — ${stories.length} stories\n\n`);
			for (const story of stories) {
				const isBlocked = epicBlockedMap.has(story.story_key);
				const badge = getBadge(story.status, isBlocked);
				// Pad keys like "31-4" to a fixed column width.
				const keyPadded = story.story_key.padEnd(6);
				const displayTitle = story.title ?? story.story_key;
				let line = ` ${badge} ${keyPadded} ${displayTitle}`;
				if (isBlocked) {
					const blockedInfo = epicBlockedMap.get(story.story_key);
					const blockerList = blockedInfo.blockers.map((b) => `${b.key} (${b.status})`).join(", ");
					line += ` [waiting on: ${blockerList}]`;
				}
				process.stdout.write(line + "\n");
			}
			process.stdout.write("\n");
			process.stdout.write(`Epic ${epicNum}: ${summary.complete} complete · ${summary.inProgress} in_progress · ${summary.ready} ready · ${summary.blocked} blocked · ${summary.planned} planned · ${summary.escalated} escalated\n`);
		}
	} finally {
		// Always release the database handle, even on error paths.
		await adapter.close();
	}
}
8225
/**
 * Register the `epic-status <epic>` CLI command on the given Commander
 * program. Prints a work-graph status view for one epic; supports
 * `--output-format human|json` (defaults to "human").
 */
function registerEpicStatusCommand(program) {
	const command = program.command("epic-status <epic>");
	command.description("Show a generated status view of an epic from the Dolt work graph");
	command.option("--output-format <format>", "Output format: human (default) or json", "human");
	command.action(async (epic, options) => {
		// Anything other than an explicit "json" falls back to "human".
		let fmt;
		if (options.outputFormat === "json") fmt = "json";
		else fmt = "human";
		await runEpicStatusAction(epic, { outputFormat: fmt });
	});
}
8231
+
8059
8232
  //#endregion
8060
8233
  //#region src/cli/index.ts
8061
8234
  process.setMaxListeners(20);
@@ -8117,6 +8290,7 @@ async function createProgram() {
8117
8290
  registerBrainstormCommand(program, version);
8118
8291
  registerExportCommand(program, version);
8119
8292
  registerIngestEpicCommand(program);
8293
+ registerEpicStatusCommand(program);
8120
8294
  registerUpgradeCommand(program);
8121
8295
  return program;
8122
8296
  }
@@ -1,6 +1,6 @@
1
1
  import "./logger-D2fS2ccL.js";
2
2
  import { createDecision } from "./decisions-C6MF2Cax.js";
3
- import { EXPERIMENT_RESULT, getRunMetrics, getStoryMetricsForRun } from "./operational-CidppHy-.js";
3
+ import { EXPERIMENT_RESULT, getRunMetrics, getStoryMetricsForRun } from "./operational-BRpT8MYF.js";
4
4
  import { spawnGit } from "./git-utils-C-fdrHF_.js";
5
5
  import { spawn } from "node:child_process";
6
6
  import { join } from "node:path";
@@ -500,4 +500,4 @@ function createExperimenter(config, deps) {
500
500
 
501
501
  //#endregion
502
502
  export { createExperimenter };
503
- //# sourceMappingURL=experimenter-CoR0k66d.js.map
503
+ //# sourceMappingURL=experimenter-CjfzjmwY.js.map
@@ -186,7 +186,7 @@ async function aggregateTokenUsageForStory(adapter, runId, storyKey) {
186
186
  FROM token_usage
187
187
  WHERE pipeline_run_id = ?
188
188
  AND metadata IS NOT NULL
189
- AND json_extract(metadata, '$.storyKey') = ?`, [runId, storyKey]);
189
+ AND metadata LIKE ?`, [runId, `%"storyKey":"${storyKey}"%`]);
190
190
  return rows[0] ?? {
191
191
  input: 0,
192
192
  output: 0,
@@ -371,4 +371,4 @@ const ADVISORY_NOTES = "advisory-notes";
371
371
 
372
372
  //#endregion
373
373
  export { ADVISORY_NOTES, ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, STORY_OUTCOME, TEST_EXPANSION_FINDING, TEST_PLAN, aggregateTokenUsageForRun, aggregateTokenUsageForStory, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline, writeRunMetrics, writeStoryMetrics };
374
- //# sourceMappingURL=operational-CidppHy-.js.map
374
+ //# sourceMappingURL=operational-BRpT8MYF.js.map
@@ -3,7 +3,7 @@ import { CURRENT_CONFIG_FORMAT_VERSION, PartialSubstrateConfigSchema, SUPPORTED_
3
3
  import { ConfigError, ConfigIncompatibleFormatError, createEventBus, createTuiApp, isTuiCapable, printNonTtyWarning, sleep } from "./helpers-BihqWgVe.js";
4
4
  import { RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, loadModelRoutingConfig } from "./routing-BUE9pIxW.js";
5
5
  import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./decisions-C6MF2Cax.js";
6
- import { ADVISORY_NOTES, ESCALATION_DIAGNOSIS, OPERATIONAL_FINDING, STORY_METRICS, STORY_OUTCOME, TEST_EXPANSION_FINDING, TEST_PLAN, aggregateTokenUsageForRun, aggregateTokenUsageForStory, getStoryMetricsForRun, writeRunMetrics, writeStoryMetrics } from "./operational-CidppHy-.js";
6
+ import { ADVISORY_NOTES, ESCALATION_DIAGNOSIS, OPERATIONAL_FINDING, STORY_METRICS, STORY_OUTCOME, TEST_EXPANSION_FINDING, TEST_PLAN, aggregateTokenUsageForRun, aggregateTokenUsageForStory, getStoryMetricsForRun, writeRunMetrics, writeStoryMetrics } from "./operational-BRpT8MYF.js";
7
7
  import { createRequire } from "module";
8
8
  import { dirname, join, resolve } from "path";
9
9
  import { access, mkdir, readFile, readdir, stat, writeFile } from "fs/promises";
@@ -215,6 +215,24 @@ var DoltDatabaseAdapter = class {
215
215
  async close() {
216
216
  await this._client.close();
217
217
  }
218
+ /**
219
+ * Query story keys from the `ready_stories` SQL view.
220
+ *
221
+ * Returns story keys whose status is `planned` or `ready` and whose
222
+ * hard dependencies are all `complete` in the work graph.
223
+ *
224
+ * On any SQL error (e.g., view not yet created by story 31-1 schema,
225
+ * or empty stories table), returns `[]` so the caller falls through to
226
+ * the legacy discovery chain.
227
+ */
228
+ async queryReadyStories() {
229
+ try {
230
+ const rows = await this._client.query("SELECT `key` FROM ready_stories ORDER BY `key` ASC", void 0);
231
+ return rows.map((r) => r.key);
232
+ } catch {
233
+ return [];
234
+ }
235
+ }
218
236
  };
219
237
 
220
238
  //#endregion
@@ -242,6 +260,13 @@ var InMemoryDatabaseAdapter = class {
242
260
  async close() {
243
261
  this._tables.clear();
244
262
  }
263
+ /**
264
+ * Work graph not supported in InMemoryDatabaseAdapter.
265
+ * Returns `[]` to signal the caller to use the legacy discovery path.
266
+ */
267
+ async queryReadyStories() {
268
+ return [];
269
+ }
245
270
  _execute(sql, params) {
246
271
  const resolved = this._substituteParams(sql, params);
247
272
  const upper = resolved.trimStart().toUpperCase();
@@ -281,8 +306,8 @@ var InMemoryDatabaseAdapter = class {
281
306
  if (m) this._tables.delete(m[1]);
282
307
  return [];
283
308
  }
284
- _insert(sql) {
285
- const m = /INSERT\s+INTO\s+(\w+)\s*\(([^)]+)\)\s*VALUES\s*\((.+)\)\s*$/is.exec(sql);
309
+ _insert(sql, _ignoreConflicts = false) {
310
+ const m = /INSERT\s+(?:IGNORE\s+)?INTO\s+(\w+)\s*\(([^)]+)\)\s*VALUES\s*\((.+)\)\s*$/is.exec(sql);
286
311
  if (!m) return [];
287
312
  const tableName = m[1];
288
313
  const cols = m[2].split(",").map((c) => c.trim());
@@ -7198,6 +7223,259 @@ function createDispatcher(options) {
7198
7223
  return new DispatcherImpl(options.eventBus, options.adapterRegistry, config);
7199
7224
  }
7200
7225
 
7226
+ //#endregion
7227
+ //#region src/modules/work-graph/cycle-detector.ts
7228
+ /**
7229
+ * detectCycles — DFS-based cycle detection for story dependency graphs.
7230
+ *
7231
+ * Story 31-7: Cycle Detection in Work Graph
7232
+ *
7233
+ * Pure function; no database or I/O dependencies.
7234
+ */
7235
+ /**
7236
+ * Detect cycles in a directed dependency graph represented as an edge list.
7237
+ *
7238
+ * Each edge `{ story_key, depends_on }` means story_key depends on depends_on
7239
+ * (i.e. story_key → depends_on is the directed edge we traverse).
7240
+ *
7241
+ * Uses iterative DFS with an explicit stack to avoid call-stack overflows on
7242
+ * large graphs, but also supports a nested recursive helper for cycle path
7243
+ * reconstruction.
7244
+ *
7245
+ * @param edges - List of dependency edges to check.
7246
+ * @returns `null` if the graph is acyclic (safe to persist), or a `string[]`
7247
+ * containing the cycle path with the first and last element being the same
7248
+ * story key (e.g. `['A', 'B', 'A']`).
7249
+ */
7250
+ function detectCycles(edges) {
7251
+ const adj = new Map();
7252
+ for (const { story_key, depends_on } of edges) {
7253
+ if (!adj.has(story_key)) adj.set(story_key, []);
7254
+ adj.get(story_key).push(depends_on);
7255
+ }
7256
+ const visited = new Set();
7257
+ const visiting = new Set();
7258
+ const path$2 = [];
7259
+ function dfs(node) {
7260
+ if (visiting.has(node)) {
7261
+ const cycleStart = path$2.indexOf(node);
7262
+ return [...path$2.slice(cycleStart), node];
7263
+ }
7264
+ if (visited.has(node)) return null;
7265
+ visiting.add(node);
7266
+ path$2.push(node);
7267
+ for (const neighbor of adj.get(node) ?? []) {
7268
+ const cycle = dfs(neighbor);
7269
+ if (cycle !== null) return cycle;
7270
+ }
7271
+ path$2.pop();
7272
+ visiting.delete(node);
7273
+ visited.add(node);
7274
+ return null;
7275
+ }
7276
+ const allNodes = new Set([...edges.map((e) => e.story_key), ...edges.map((e) => e.depends_on)]);
7277
+ for (const node of allNodes) if (!visited.has(node)) {
7278
+ const cycle = dfs(node);
7279
+ if (cycle !== null) return cycle;
7280
+ }
7281
+ return null;
7282
+ }
7283
+
7284
+ //#endregion
7285
+ //#region src/modules/state/work-graph-repository.ts
7286
+ var WorkGraphRepository = class {
7287
+ constructor(db) {
7288
+ this.db = db;
7289
+ }
7290
+ /**
7291
+ * Insert or replace a work-graph story node.
7292
+ * Uses DELETE + INSERT so it works on InMemoryDatabaseAdapter (which does
7293
+ * not support ON DUPLICATE KEY UPDATE).
7294
+ */
7295
+ async upsertStory(story) {
7296
+ await this.db.query(`DELETE FROM wg_stories WHERE story_key = ?`, [story.story_key]);
7297
+ await this.db.query(`INSERT INTO wg_stories (story_key, epic, title, status, spec_path, created_at, updated_at, completed_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, [
7298
+ story.story_key,
7299
+ story.epic,
7300
+ story.title ?? null,
7301
+ story.status,
7302
+ story.spec_path ?? null,
7303
+ story.created_at ?? null,
7304
+ story.updated_at ?? null,
7305
+ story.completed_at ?? null
7306
+ ]);
7307
+ }
7308
+ /**
7309
+ * Insert a dependency edge. Idempotent — if a row with the same
7310
+ * (story_key, depends_on) already exists it is silently skipped.
7311
+ */
7312
+ async addDependency(dep) {
7313
+ const existing = await this.db.query(`SELECT story_key FROM story_dependencies WHERE story_key = ? AND depends_on = ?`, [dep.story_key, dep.depends_on]);
7314
+ if (existing.length > 0) return;
7315
+ await this.db.query(`INSERT INTO story_dependencies (story_key, depends_on, dependency_type, source, created_at) VALUES (?, ?, ?, ?, ?)`, [
7316
+ dep.story_key,
7317
+ dep.depends_on,
7318
+ dep.dependency_type,
7319
+ dep.source,
7320
+ dep.created_at ?? null
7321
+ ]);
7322
+ }
7323
+ /**
7324
+ * Persist contract-based dependency edges to `story_dependencies` as
7325
+ * best-effort, idempotent writes.
7326
+ *
7327
+ * - edges where `reason` does NOT start with `'dual export:'` are persisted
7328
+ * as `dependency_type = 'blocks'` (hard prerequisites).
7329
+ * - edges where `reason` starts with `'dual export:'` are persisted as
7330
+ * `dependency_type = 'informs'` (serialization hints, not hard gates).
7331
+ *
7332
+ * Idempotency is delegated to `addDependency()`, which skips the INSERT if
7333
+ * a row with the same `(story_key, depends_on)` already exists.
7334
+ *
7335
+ * @param edges - Readonly list of contract dependency edges to persist.
7336
+ */
7337
+ async addContractDependencies(edges) {
7338
+ if (edges.length === 0) return;
7339
+ for (const edge of edges) {
7340
+ const dependency_type = edge.reason?.startsWith("dual export:") ? "informs" : "blocks";
7341
+ await this.addDependency({
7342
+ story_key: edge.to,
7343
+ depends_on: edge.from,
7344
+ dependency_type,
7345
+ source: "contract",
7346
+ created_at: new Date().toISOString()
7347
+ });
7348
+ }
7349
+ }
7350
+ /**
7351
+ * Return all work-graph stories, optionally filtered by epic and/or status.
7352
+ */
7353
+ async listStories(filter$1) {
7354
+ if (!filter$1 || !filter$1.epic && !filter$1.status) return this.db.query(`SELECT * FROM wg_stories`);
7355
+ const conditions = [];
7356
+ const params = [];
7357
+ if (filter$1.epic) {
7358
+ conditions.push(`epic = ?`);
7359
+ params.push(filter$1.epic);
7360
+ }
7361
+ if (filter$1.status) {
7362
+ conditions.push(`status = ?`);
7363
+ params.push(filter$1.status);
7364
+ }
7365
+ const where = conditions.join(" AND ");
7366
+ return this.db.query(`SELECT * FROM wg_stories WHERE ${where}`, params);
7367
+ }
7368
+ /**
7369
+ * Update the `status` (and optionally `completed_at`) of an existing
7370
+ * work-graph story.
7371
+ *
7372
+ * This is a read-modify-write operation: SELECT existing row → build
7373
+ * updated WgStory → upsertStory(). If no row exists for `storyKey` the
7374
+ * call is a no-op (AC4).
7375
+ *
7376
+ * @param storyKey - Story identifier, e.g. "31-4"
7377
+ * @param status - Target WgStoryStatus value
7378
+ * @param opts - Optional `completedAt` ISO string for terminal phases
7379
+ */
7380
+ async updateStoryStatus(storyKey, status, opts) {
7381
+ const rows = await this.db.query(`SELECT * FROM wg_stories WHERE story_key = ?`, [storyKey]);
7382
+ if (rows.length === 0) return;
7383
+ const existing = rows[0];
7384
+ const now = new Date().toISOString();
7385
+ const isTerminal = status === "complete" || status === "escalated";
7386
+ const updated = {
7387
+ ...existing,
7388
+ status,
7389
+ updated_at: now,
7390
+ completed_at: isTerminal ? opts?.completedAt ?? now : existing.completed_at
7391
+ };
7392
+ await this.upsertStory(updated);
7393
+ }
7394
+ /**
7395
+ * Return stories that are eligible for dispatch.
7396
+ *
7397
+ * A story is ready when:
7398
+ * 1. Its status is 'planned' or 'ready', AND
7399
+ * 2. It has no 'blocks' dependency whose blocking story is not 'complete'.
7400
+ *
7401
+ * Soft ('informs') dependencies never block dispatch.
7402
+ *
7403
+ * This is implemented programmatically rather than via the `ready_stories`
7404
+ * VIEW so that the InMemoryDatabaseAdapter can handle it without VIEW support.
7405
+ */
7406
+ async getReadyStories() {
7407
+ const allStories = await this.db.query(`SELECT * FROM wg_stories`);
7408
+ const candidates = allStories.filter((s) => s.status === "planned" || s.status === "ready");
7409
+ if (candidates.length === 0) return [];
7410
+ const deps = await this.db.query(`SELECT story_key, depends_on FROM story_dependencies WHERE dependency_type = 'blocks'`);
7411
+ if (deps.length === 0) return candidates;
7412
+ const blockerStatus = new Map(allStories.map((s) => [s.story_key, s.status]));
7413
+ const depsMap = new Map();
7414
+ for (const d of deps) {
7415
+ if (!depsMap.has(d.story_key)) depsMap.set(d.story_key, []);
7416
+ depsMap.get(d.story_key).push(d.depends_on);
7417
+ }
7418
+ return candidates.filter((s) => {
7419
+ const blocking = depsMap.get(s.story_key) ?? [];
7420
+ return blocking.every((dep) => blockerStatus.get(dep) === "complete");
7421
+ });
7422
+ }
7423
+ /**
7424
+ * Return stories that are planned/ready but cannot be dispatched because
7425
+ * at least one hard-blocking ('blocks') dependency is not yet complete.
7426
+ *
7427
+ * For each blocked story, the returned object includes the full WgStory
7428
+ * record plus the list of unsatisfied blockers (key, title, status).
7429
+ *
7430
+ * Soft ('informs') dependencies are ignored here, matching getReadyStories().
7431
+ */
7432
+ /**
7433
+ * Query the database for all 'blocks' dependency rows and run DFS cycle
7434
+ * detection over them.
7435
+ *
7436
+ * Returns an empty array if no cycle is found (consistent with other
7437
+ * repository methods that return empty arrays rather than null).
7438
+ *
7439
+ * Only 'blocks' deps are checked — soft 'informs' deps cannot cause
7440
+ * dispatch deadlocks (AC5).
7441
+ */
7442
+ async detectCycles() {
7443
+ const rows = await this.db.query(`SELECT story_key, depends_on FROM story_dependencies WHERE dependency_type = 'blocks'`);
7444
+ const cycle = detectCycles(rows);
7445
+ return cycle ?? [];
7446
+ }
7447
+ async getBlockedStories() {
7448
+ const allStories = await this.db.query(`SELECT * FROM wg_stories`);
7449
+ const candidates = allStories.filter((s) => s.status === "planned" || s.status === "ready");
7450
+ if (candidates.length === 0) return [];
7451
+ const deps = await this.db.query(`SELECT story_key, depends_on FROM story_dependencies WHERE dependency_type = 'blocks'`);
7452
+ if (deps.length === 0) return [];
7453
+ const statusMap = new Map(allStories.map((s) => [s.story_key, s]));
7454
+ const depsMap = new Map();
7455
+ for (const d of deps) {
7456
+ if (!depsMap.has(d.story_key)) depsMap.set(d.story_key, []);
7457
+ depsMap.get(d.story_key).push(d.depends_on);
7458
+ }
7459
+ const result = [];
7460
+ for (const story of candidates) {
7461
+ const blockerKeys = depsMap.get(story.story_key) ?? [];
7462
+ const unsatisfied = blockerKeys.filter((key) => statusMap.get(key)?.status !== "complete").map((key) => {
7463
+ const s = statusMap.get(key);
7464
+ return {
7465
+ key,
7466
+ title: s?.title ?? key,
7467
+ status: s?.status ?? "unknown"
7468
+ };
7469
+ });
7470
+ if (unsatisfied.length > 0) result.push({
7471
+ story,
7472
+ blockers: unsatisfied
7473
+ });
7474
+ }
7475
+ return result;
7476
+ }
7477
+ };
7478
+
7201
7479
  //#endregion
7202
7480
  //#region src/modules/state/file-store.ts
7203
7481
  /**
@@ -9186,6 +9464,42 @@ function countFilesInLayout(content) {
9186
9464
  return count;
9187
9465
  }
9188
9466
 
9467
+ //#endregion
9468
+ //#region src/modules/work-graph/spec-migrator.ts
9469
+ /**
9470
+ * spec-migrator — utilities for migrating story spec files away from the
9471
+ * deprecated `Status:` frontmatter field.
9472
+ *
9473
+ * Story 31-8: Deprecate Status Field in Story Spec Frontmatter
9474
+ *
9475
+ * Story status is now exclusively managed in the Dolt work graph
9476
+ * (`wg_stories.status`). These pure functions strip the deprecated field from
9477
+ * spec content before it is injected into agent prompts.
9478
+ */
9479
/**
 * Strip the deprecated `Status:` line from story spec content.
 * The blank line directly after the Status line (if any) is removed too.
 * Content without a Status line is returned untouched.
 *
 * Anchored at line start (`^` with the multiline flag), so headings such as
 * `## Status Notes` or prose like `The status is good` are left alone.
 */
function stripDeprecatedStatusField(content) {
	// Matches the Status line, its newline, and one optional trailing blank line.
	const statusLine = /^Status:[^\n]*\n?\n?/m;
	return content.replace(statusLine, "");
}
9490
/**
 * Detect whether a story spec contains the deprecated `Status:` field.
 *
 * @param content - Raw story spec text.
 * @returns The status value string (e.g. `'ready-for-dev'`) if found, or
 *   `null` if absent.
 *
 * The regex is anchored at line start so incidental uses of the word "Status"
 * (e.g. in section headings) are not matched. Only horizontal whitespace is
 * allowed after the colon (`[ \t]*` instead of `\s*`, which can cross a
 * newline), so a bare `Status:` line no longer captures the contents of the
 * following line as its value.
 */
function detectDeprecatedStatusField(content) {
	const match$1 = /^Status:[ \t]*(.+)$/m.exec(content);
	return match$1 !== null ? match$1[1].trim() : null;
}
9502
+
9189
9503
  //#endregion
9190
9504
  //#region src/modules/compiled-workflows/dev-story.ts
9191
9505
  const logger$15 = createLogger("compiled-workflows:dev-story");
@@ -9294,6 +9608,14 @@ async function runDevStory(deps, params) {
9294
9608
  }, "Story file is empty");
9295
9609
  return makeFailureResult("story_file_empty");
9296
9610
  }
9611
+ const staleStatus = detectDeprecatedStatusField(storyContent);
9612
+ if (staleStatus !== null) {
9613
+ logger$15.warn({
9614
+ storyFilePath,
9615
+ staleStatus
9616
+ }, "Story spec contains deprecated Status field — stripped before dispatch (status is managed by Dolt work graph)");
9617
+ storyContent = stripDeprecatedStatusField(storyContent);
9618
+ }
9297
9619
  const complexity = computeStoryComplexity(storyContent);
9298
9620
  const resolvedMaxTurns = resolveDevStoryMaxTurns(complexity.complexityScore);
9299
9621
  logComplexityResult(storyKey, complexity, resolvedMaxTurns);
@@ -14451,6 +14773,22 @@ function buildTargetedFilesContent(issueList) {
14451
14773
  return lines.join("\n");
14452
14774
  }
14453
14775
  /**
14776
+ * Map a StoryPhase to the corresponding WgStoryStatus for wg_stories writes.
14777
+ * Returns null for PENDING (no write needed).
14778
+ */
14779
function wgStatusForPhase(phase) {
	// Lookup table mirroring the StoryPhase → WgStoryStatus mapping. PENDING
	// maps to null (no wg_stories write); unknown phases yield undefined,
	// matching the original switch's fall-through behavior.
	const mapping = new Map([
		["PENDING", null],
		["IN_STORY_CREATION", "in_progress"],
		["IN_TEST_PLANNING", "in_progress"],
		["IN_DEV", "in_progress"],
		["IN_REVIEW", "in_progress"],
		["NEEDS_FIXES", "in_progress"],
		["COMPLETE", "complete"],
		["ESCALATED", "escalated"]
	]);
	return mapping.get(phase);
}
14791
+ /**
14454
14792
  * Factory function that creates an ImplementationOrchestrator instance.
14455
14793
  *
14456
14794
  * @param deps - Injected dependencies (db, pack, contextCompiler, dispatcher,
@@ -14460,6 +14798,8 @@ function buildTargetedFilesContent(issueList) {
14460
14798
  function createImplementationOrchestrator(deps) {
14461
14799
  const { db, pack, contextCompiler, dispatcher, eventBus, config, projectRoot, tokenCeilings, stateStore, telemetryPersistence, ingestionServer, repoMapInjector, maxRepoMapTokens } = deps;
14462
14800
  const logger$26 = createLogger("implementation-orchestrator");
14801
+ const wgRepo = new WorkGraphRepository(db);
14802
+ const _wgInProgressWritten = new Set();
14463
14803
  let _state = "IDLE";
14464
14804
  let _startedAt;
14465
14805
  let _completedAt;
@@ -14730,6 +15070,21 @@ function createImplementationOrchestrator(deps) {
14730
15070
  err,
14731
15071
  storyKey
14732
15072
  }, "rollbackStory failed — branch may persist"));
15073
+ if (updates.phase !== void 0) {
15074
+ const targetStatus = wgStatusForPhase(updates.phase);
15075
+ if (targetStatus !== null) if (targetStatus === "in_progress" && _wgInProgressWritten.has(storyKey)) {} else {
15076
+ const fullUpdated = {
15077
+ ...existing,
15078
+ ...updates
15079
+ };
15080
+ const opts = targetStatus === "complete" || targetStatus === "escalated" ? { completedAt: fullUpdated.completedAt } : void 0;
15081
+ wgRepo.updateStoryStatus(storyKey, targetStatus, opts).catch((err) => logger$26.warn({
15082
+ err,
15083
+ storyKey
15084
+ }, "wg_stories status update failed (best-effort)"));
15085
+ if (targetStatus === "in_progress") _wgInProgressWritten.add(storyKey);
15086
+ }
15087
+ }
14733
15088
  }
14734
15089
  }
14735
15090
  /**
@@ -16210,6 +16565,7 @@ function createImplementationOrchestrator(deps) {
16210
16565
  contractEdges,
16211
16566
  edgeCount: contractEdges.length
16212
16567
  }, "Contract dependency edges detected — applying contract-aware dispatch ordering");
16568
+ wgRepo.addContractDependencies(contractEdges).catch((err) => logger$26.warn({ err }, "contract dep persistence failed (best-effort)"));
16213
16569
  logger$26.info({
16214
16570
  storyCount: storyKeys.length,
16215
16571
  groupCount: batches.reduce((sum, b) => sum + b.length, 0),
@@ -16370,9 +16726,10 @@ function createImplementationOrchestrator(deps) {
16370
16726
  //#endregion
16371
16727
  //#region src/modules/implementation-orchestrator/story-discovery.ts
16372
16728
  /**
16373
- * Unified story key resolution with a 4-level fallback chain.
16729
+ * Unified story key resolution with a 5-level fallback chain.
16374
16730
  *
16375
16731
  * 1. Explicit keys (from --stories flag) — returned as-is
16732
+ * 1.5. ready_stories SQL view — when work graph is populated (story 31-3)
16376
16733
  * 2. Decisions table (category='stories', phase='solutioning')
16377
16734
  * 3. Epic shard decisions (category='epic-shard') — parsed with parseStoryKeysFromEpics
16378
16735
  * 4. epics.md file on disk (via discoverPendingStoryKeys)
@@ -16384,6 +16741,19 @@ function createImplementationOrchestrator(deps) {
16384
16741
  async function resolveStoryKeys(db, projectRoot, opts) {
16385
16742
  if (opts?.explicit !== void 0 && opts.explicit.length > 0) return opts.explicit;
16386
16743
  let keys = [];
16744
+ const readyKeys = await db.queryReadyStories();
16745
+ if (readyKeys.length > 0) {
16746
+ let filteredKeys = readyKeys;
16747
+ if (opts?.epicNumber !== void 0) {
16748
+ const prefix = `${opts.epicNumber}-`;
16749
+ filteredKeys = filteredKeys.filter((k) => k.startsWith(prefix));
16750
+ }
16751
+ if (opts?.filterCompleted === true && filteredKeys.length > 0) {
16752
+ const completedKeys = await getCompletedStoryKeys(db);
16753
+ filteredKeys = filteredKeys.filter((k) => !completedKeys.has(k));
16754
+ }
16755
+ return sortStoryKeys([...new Set(filteredKeys)]);
16756
+ }
16387
16757
  try {
16388
16758
  const sql = opts?.pipelineRunId !== void 0 ? `SELECT key FROM decisions WHERE phase = 'solutioning' AND category = 'stories' AND pipeline_run_id = ? ORDER BY created_at ASC` : `SELECT key FROM decisions WHERE phase = 'solutioning' AND category = 'stories' ORDER BY created_at ASC`;
16389
16759
  const params = opts?.pipelineRunId !== void 0 ? [opts.pipelineRunId] : [];
@@ -21750,5 +22120,5 @@ function registerRunCommand(program, _version = "0.0.0", projectRoot = process.c
21750
22120
  }
21751
22121
 
21752
22122
  //#endregion
21753
- export { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
21754
- //# sourceMappingURL=run-BD0Ugp7F.js.map
22123
+ export { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, detectCycles, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
22124
+ //# sourceMappingURL=run-D7a-qzk9.js.map
@@ -1,9 +1,9 @@
1
- import { registerRunCommand, runRunAction } from "./run-BD0Ugp7F.js";
1
+ import { registerRunCommand, runRunAction } from "./run-D7a-qzk9.js";
2
2
  import "./logger-D2fS2ccL.js";
3
3
  import "./config-migrator-DtZW1maj.js";
4
4
  import "./helpers-BihqWgVe.js";
5
5
  import "./routing-BUE9pIxW.js";
6
6
  import "./decisions-C6MF2Cax.js";
7
- import "./operational-CidppHy-.js";
7
+ import "./operational-BRpT8MYF.js";
8
8
 
9
9
  export { runRunAction };
package/dist/schema.sql CHANGED
@@ -258,11 +258,10 @@ CREATE INDEX IF NOT EXISTS idx_wg_stories_epic ON wg_stories (epic);
258
258
  -- story_dependencies (Epic 31-1) — directed dependency edges
259
259
  -- ---------------------------------------------------------------------------
260
260
  CREATE TABLE IF NOT EXISTS story_dependencies (
261
- story_key VARCHAR(20) NOT NULL,
262
- depends_on VARCHAR(20) NOT NULL,
263
- dep_type VARCHAR(20) NOT NULL,
264
- source VARCHAR(20) NOT NULL,
265
- created_at DATETIME,
261
+ story_key VARCHAR(50) NOT NULL,
262
+ depends_on VARCHAR(50) NOT NULL,
263
+ dependency_type VARCHAR(50) NOT NULL DEFAULT 'blocks',
264
+ source VARCHAR(50) NOT NULL DEFAULT 'explicit',
266
265
  PRIMARY KEY (story_key, depends_on)
267
266
  );
268
267
 
@@ -276,7 +275,7 @@ CREATE OR REPLACE VIEW ready_stories AS
276
275
  SELECT 1 FROM story_dependencies d
277
276
  JOIN wg_stories dep ON dep.story_key = d.depends_on
278
277
  WHERE d.story_key = s.story_key
279
- AND d.dep_type = 'blocks'
278
+ AND d.dependency_type = 'blocks'
280
279
  AND dep.status <> 'complete'
281
280
  );
282
281
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "substrate-ai",
3
- "version": "0.5.1",
3
+ "version": "0.5.3",
4
4
  "description": "Substrate — multi-agent orchestration daemon for AI coding agents",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -32,7 +32,7 @@ Using the context above, write a complete, implementation-ready story file for s
32
32
  - Dev Notes with file paths, import patterns, testing requirements
33
33
  5. **Apply the scope cap** — see Scope Cap Guidance below
34
34
  6. **Write the story file** to: `_bmad-output/implementation-artifacts/{{story_key}}-<kebab-title>.md`
35
- - Status must be: `ready-for-dev`
35
+ - Do NOT add a `Status:` field to the story file — story status is managed exclusively by the Dolt work graph (`wg_stories` table)
36
36
  - Dev Agent Record section must be present but left blank (to be filled by dev agent)
37
37
 
38
38
  ## Interface Contracts Guidance
@@ -1,7 +1,5 @@
1
1
  # Story {epic_num}.{story_num}: {Title}
2
2
 
3
- Status: draft
4
-
5
3
  ## Story
6
4
 
7
5
  As a {role},