substrate-ai 0.2.39 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,15 +10,15 @@ import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "
10
10
  import yaml from "js-yaml";
11
11
  import { createRequire as createRequire$1 } from "node:module";
12
12
  import { z } from "zod";
13
- import { execSync, spawn } from "node:child_process";
13
+ import { execFile, execSync, spawn, spawnSync } from "node:child_process";
14
14
  import { dirname as dirname$1, join as join$1, resolve as resolve$1 } from "node:path";
15
- import BetterSqlite3 from "better-sqlite3";
15
+ import Database from "better-sqlite3";
16
+ import { access as access$1, mkdir as mkdir$1, readFile as readFile$1, stat as stat$1, writeFile as writeFile$1 } from "node:fs/promises";
16
17
  import { fileURLToPath } from "node:url";
17
18
  import { existsSync as existsSync$1, readFileSync as readFileSync$1, readdirSync as readdirSync$1 } from "node:fs";
18
19
  import { homedir } from "os";
19
20
  import { freemem, platform } from "node:os";
20
21
  import { createHash, randomUUID } from "node:crypto";
21
- import { readFile as readFile$1, stat as stat$1 } from "node:fs/promises";
22
22
 
23
23
  //#region rolldown:runtime
24
24
  var __require = /* @__PURE__ */ createRequire(import.meta.url);
@@ -605,7 +605,7 @@ const migration010RunMetrics = {
605
605
 
606
606
  //#endregion
607
607
  //#region src/persistence/migrations/index.ts
608
- const logger$22 = createLogger("persistence:migrations");
608
+ const logger$23 = createLogger("persistence:migrations");
609
609
  const MIGRATIONS = [
610
610
  initialSchemaMigration,
611
611
  costTrackerSchemaMigration,
@@ -623,7 +623,7 @@ const MIGRATIONS = [
623
623
  * Safe to call multiple times — already-applied migrations are skipped.
624
624
  */
625
625
  function runMigrations(db) {
626
- logger$22.info("Starting migration runner");
626
+ logger$23.info("Starting migration runner");
627
627
  db.exec(`
628
628
  CREATE TABLE IF NOT EXISTS schema_migrations (
629
629
  version INTEGER PRIMARY KEY,
@@ -634,12 +634,12 @@ function runMigrations(db) {
634
634
  const appliedVersions = new Set(db.prepare("SELECT version FROM schema_migrations").all().map((row) => row.version));
635
635
  const pending = MIGRATIONS.filter((m) => !appliedVersions.has(m.version)).sort((a, b) => a.version - b.version);
636
636
  if (pending.length === 0) {
637
- logger$22.info("No pending migrations");
637
+ logger$23.info("No pending migrations");
638
638
  return;
639
639
  }
640
640
  const insertMigration = db.prepare("INSERT INTO schema_migrations (version, name) VALUES (?, ?)");
641
641
  for (const migration of pending) {
642
- logger$22.info({
642
+ logger$23.info({
643
643
  version: migration.version,
644
644
  name: migration.name
645
645
  }, "Applying migration");
@@ -653,14 +653,14 @@ function runMigrations(db) {
653
653
  });
654
654
  applyMigration();
655
655
  }
656
- logger$22.info({ version: migration.version }, "Migration applied successfully");
656
+ logger$23.info({ version: migration.version }, "Migration applied successfully");
657
657
  }
658
- logger$22.info({ count: pending.length }, "All pending migrations applied");
658
+ logger$23.info({ count: pending.length }, "All pending migrations applied");
659
659
  }
660
660
 
661
661
  //#endregion
662
662
  //#region src/persistence/database.ts
663
- const logger$21 = createLogger("persistence:database");
663
+ const logger$22 = createLogger("persistence:database");
664
664
  /**
665
665
  * Thin wrapper that opens a SQLite database, applies required PRAGMAs,
666
666
  * and exposes the raw BetterSqlite3 instance.
@@ -677,14 +677,14 @@ var DatabaseWrapper = class {
677
677
  */
678
678
  open() {
679
679
  if (this._db !== null) return;
680
- logger$21.info({ path: this._path }, "Opening SQLite database");
681
- this._db = new BetterSqlite3(this._path);
680
+ logger$22.info({ path: this._path }, "Opening SQLite database");
681
+ this._db = new Database(this._path);
682
682
  const walResult = this._db.pragma("journal_mode = WAL");
683
- if (walResult?.[0]?.journal_mode !== "wal") logger$21.warn({ result: walResult?.[0]?.journal_mode }, "WAL pragma did not return expected \"wal\" — journal_mode may be \"memory\" or unsupported");
683
+ if (walResult?.[0]?.journal_mode !== "wal") logger$22.warn({ result: walResult?.[0]?.journal_mode }, "WAL pragma did not return expected \"wal\" — journal_mode may be \"memory\" or unsupported");
684
684
  this._db.pragma("busy_timeout = 5000");
685
685
  this._db.pragma("synchronous = NORMAL");
686
686
  this._db.pragma("foreign_keys = ON");
687
- logger$21.info({ path: this._path }, "SQLite database opened with WAL mode");
687
+ logger$22.info({ path: this._path }, "SQLite database opened with WAL mode");
688
688
  }
689
689
  /**
690
690
  * Close the database. Idempotent — calling close() when already closed is a no-op.
@@ -693,7 +693,7 @@ var DatabaseWrapper = class {
693
693
  if (this._db === null) return;
694
694
  this._db.close();
695
695
  this._db = null;
696
- logger$21.info({ path: this._path }, "SQLite database closed");
696
+ logger$22.info({ path: this._path }, "SQLite database closed");
697
697
  }
698
698
  /**
699
699
  * Return the raw BetterSqlite3 instance.
@@ -938,6 +938,161 @@ function createPackLoader() {
938
938
  return new PackLoaderImpl();
939
939
  }
940
940
 
941
+ //#endregion
942
+ //#region src/modules/state/dolt-init.ts
943
+ /**
944
+ * Thrown when the `dolt` binary cannot be found in PATH.
945
+ */
946
+ var DoltNotInstalled = class extends Error {
947
+ constructor() {
948
+ super("Dolt CLI not found in PATH. Install Dolt from https://docs.dolthub.com/introduction/installation");
949
+ this.name = "DoltNotInstalled";
950
+ }
951
+ };
952
+ /**
953
+ * Thrown when a Dolt CLI command exits with a non-zero status code.
954
+ */
955
+ var DoltInitError = class extends Error {
956
+ constructor(args, exitCode, stderr) {
957
+ super(`Dolt command "dolt ${args.join(" ")}" failed with exit code ${exitCode}${stderr ? `: ${stderr}` : ""}`);
958
+ this.name = "DoltInitError";
959
+ }
960
+ };
961
+ /**
962
+ * Verify that the `dolt` binary is installed and accessible.
963
+ *
964
+ * @throws {DoltNotInstalled} If the binary is not found in PATH.
965
+ */
966
+ async function checkDoltInstalled() {
967
+ return new Promise((resolve$2, reject) => {
968
+ let child;
969
+ try {
970
+ child = spawn("dolt", ["version"], { stdio: "ignore" });
971
+ } catch (err) {
972
+ const nodeErr = err;
973
+ if (nodeErr.code === "ENOENT") reject(new DoltNotInstalled());
974
+ else reject(err);
975
+ return;
976
+ }
977
+ child.on("error", (err) => {
978
+ if (err.code === "ENOENT") reject(new DoltNotInstalled());
979
+ else reject(err);
980
+ });
981
+ child.on("close", (code) => {
982
+ if (code === 0) resolve$2();
983
+ else resolve$2();
984
+ });
985
+ });
986
+ }
987
+ /**
988
+ * Run a Dolt CLI command in the given working directory.
989
+ *
990
+ * @param args - Arguments to pass to `dolt` (e.g. `['init']`).
991
+ * @param cwd - Working directory for the command.
992
+ * @throws {DoltInitError} If the command exits with a non-zero code.
993
+ */
994
+ async function runDoltCommand(args, cwd) {
995
+ return new Promise((resolve$2, reject) => {
996
+ const stderrChunks = [];
997
+ const child = spawn("dolt", args, {
998
+ cwd,
999
+ stdio: [
1000
+ "ignore",
1001
+ "ignore",
1002
+ "pipe"
1003
+ ]
1004
+ });
1005
+ child.stderr?.on("data", (chunk) => {
1006
+ stderrChunks.push(chunk);
1007
+ });
1008
+ child.on("error", (err) => {
1009
+ reject(err);
1010
+ });
1011
+ child.on("close", (code) => {
1012
+ if (code === 0) resolve$2();
1013
+ else {
1014
+ const stderr = Buffer.concat(stderrChunks).toString("utf8").trim();
1015
+ reject(new DoltInitError(args, code ?? -1, stderr));
1016
+ }
1017
+ });
1018
+ });
1019
+ }
1020
+ /**
1021
+ * Initialize a Dolt repository for Substrate state storage.
1022
+ *
1023
+ * This function is idempotent: running it a second time on an already-
1024
+ * initialized repository is safe — `dolt init` is skipped, existing tables
1025
+ * are not re-created (IF NOT EXISTS guards), and the schema version row is
1026
+ * not duplicated (INSERT IGNORE).
1027
+ *
1028
+ * @param config - Initialization configuration.
1029
+ * @throws {DoltNotInstalled} If the `dolt` binary is not in PATH.
1030
+ * @throws {DoltInitError} If any Dolt CLI command fails.
1031
+ */
1032
+ async function initializeDolt(config) {
1033
+ const statePath = config.statePath ?? join$1(config.projectRoot, ".substrate", "state");
1034
+ const schemaPath = config.schemaPath ?? fileURLToPath(new URL("./schema.sql", import.meta.url));
1035
+ await checkDoltInstalled();
1036
+ await mkdir$1(statePath, { recursive: true });
1037
+ const doltDir = join$1(statePath, ".dolt");
1038
+ let doltDirExists = false;
1039
+ try {
1040
+ await access$1(doltDir);
1041
+ doltDirExists = true;
1042
+ } catch {
1043
+ doltDirExists = false;
1044
+ }
1045
+ if (!doltDirExists) await runDoltCommand(["init"], statePath);
1046
+ await runDoltCommand([
1047
+ "sql",
1048
+ "-f",
1049
+ schemaPath
1050
+ ], statePath);
1051
+ let hasCommits = false;
1052
+ try {
1053
+ await runDoltCommand(["log", "--oneline"], statePath);
1054
+ hasCommits = await doltLogHasCommits(statePath);
1055
+ } catch {
1056
+ hasCommits = false;
1057
+ }
1058
+ if (!hasCommits) {
1059
+ await runDoltCommand(["add", "-A"], statePath);
1060
+ await runDoltCommand([
1061
+ "commit",
1062
+ "-m",
1063
+ "Initialize substrate state schema v1"
1064
+ ], statePath);
1065
+ }
1066
+ }
1067
+ /**
1068
+ * Returns `true` if there is at least one commit in the Dolt repo.
1069
+ */
1070
+ async function doltLogHasCommits(cwd) {
1071
+ return new Promise((resolve$2) => {
1072
+ const stdoutChunks = [];
1073
+ const child = spawn("dolt", ["log", "--oneline"], {
1074
+ cwd,
1075
+ stdio: [
1076
+ "ignore",
1077
+ "pipe",
1078
+ "ignore"
1079
+ ]
1080
+ });
1081
+ child.stdout?.on("data", (chunk) => {
1082
+ stdoutChunks.push(chunk);
1083
+ });
1084
+ child.on("error", () => resolve$2(false));
1085
+ child.on("close", (code) => {
1086
+ if (code !== 0) {
1087
+ resolve$2(false);
1088
+ return;
1089
+ }
1090
+ const output = Buffer.concat(stdoutChunks).toString("utf8").trim();
1091
+ resolve$2(output.length > 0);
1092
+ });
1093
+ });
1094
+ }
1095
+
941
1096
  //#endregion
942
1097
  //#region src/modules/stop-after/types.ts
943
1098
  /**
@@ -1165,7 +1320,7 @@ const BMAD_BASELINE_TOKENS_FULL = 56800;
1165
1320
  /** BMAD baseline token total for create+dev+review comparison */
1166
1321
  const BMAD_BASELINE_TOKENS = 23800;
1167
1322
  /** Story key pattern: <epic>-<story> e.g. "10-1" */
1168
- const STORY_KEY_PATTERN = /^\d+-\d+$/;
1323
+ const STORY_KEY_PATTERN$1 = /^\d+-\d+$/;
1169
1324
  /**
1170
1325
  * Top-level keys in .claude/settings.json that substrate owns.
1171
1326
  * On init, these are set/updated unconditionally.
@@ -1225,7 +1380,7 @@ function formatTokenTelemetry(summary, baselineTokens = BMAD_BASELINE_TOKENS) {
1225
1380
  * Validate a story key has the expected format: <epic>-<story> (e.g., "10-1").
1226
1381
  */
1227
1382
  function validateStoryKey(key) {
1228
- return STORY_KEY_PATTERN.test(key);
1383
+ return STORY_KEY_PATTERN$1.test(key);
1229
1384
  }
1230
1385
  /**
1231
1386
  * Build the AC5 JSON status schema for a pipeline run.
@@ -1455,7 +1610,7 @@ function formatUnsupportedVersionError(formatType, version, supported) {
1455
1610
 
1456
1611
  //#endregion
1457
1612
  //#region src/modules/config/config-system-impl.ts
1458
- const logger$20 = createLogger("config");
1613
+ const logger$21 = createLogger("config");
1459
1614
  function deepMerge(base, override) {
1460
1615
  const result = { ...base };
1461
1616
  for (const [key, val] of Object.entries(override)) if (val !== null && val !== void 0 && typeof val === "object" && !Array.isArray(val) && typeof result[key] === "object" && result[key] !== null && !Array.isArray(result[key])) result[key] = deepMerge(result[key], val);
@@ -1500,7 +1655,7 @@ function readEnvOverrides() {
1500
1655
  }
1501
1656
  const parsed = PartialSubstrateConfigSchema.safeParse(overrides);
1502
1657
  if (!parsed.success) {
1503
- logger$20.warn({ errors: parsed.error.issues }, "Invalid environment variable overrides ignored");
1658
+ logger$21.warn({ errors: parsed.error.issues }, "Invalid environment variable overrides ignored");
1504
1659
  return {};
1505
1660
  }
1506
1661
  return parsed.data;
@@ -1564,7 +1719,7 @@ var ConfigSystemImpl = class {
1564
1719
  throw new ConfigError(`Configuration validation failed:\n${issues}`, { issues: result.error.issues });
1565
1720
  }
1566
1721
  this._config = result.data;
1567
- logger$20.debug("Configuration loaded successfully");
1722
+ logger$21.debug("Configuration loaded successfully");
1568
1723
  }
1569
1724
  getConfig() {
1570
1725
  if (this._config === null) throw new ConfigError("Configuration has not been loaded. Call load() before getConfig().", {});
@@ -1627,7 +1782,7 @@ var ConfigSystemImpl = class {
1627
1782
  if (version !== void 0 && typeof version === "string" && !isVersionSupported(version, SUPPORTED_CONFIG_FORMAT_VERSIONS)) if (defaultConfigMigrator.canMigrate(version, CURRENT_CONFIG_FORMAT_VERSION)) {
1628
1783
  const migrationOutput = defaultConfigMigrator.migrate(rawObj, version, CURRENT_CONFIG_FORMAT_VERSION, filePath);
1629
1784
  if (migrationOutput.result.success) {
1630
- logger$20.info({
1785
+ logger$21.info({
1631
1786
  from: version,
1632
1787
  to: CURRENT_CONFIG_FORMAT_VERSION,
1633
1788
  backup: migrationOutput.result.backupPath
@@ -3036,7 +3191,7 @@ function truncateToTokens(text, maxTokens) {
3036
3191
 
3037
3192
  //#endregion
3038
3193
  //#region src/modules/context-compiler/context-compiler-impl.ts
3039
- const logger$19 = createLogger("context-compiler");
3194
+ const logger$20 = createLogger("context-compiler");
3040
3195
  /**
3041
3196
  * Fraction of the original token budget that must remain (after required +
3042
3197
  * important sections) before an optional section is included.
@@ -3128,7 +3283,7 @@ var ContextCompilerImpl = class {
3128
3283
  includedParts.push(truncated);
3129
3284
  remainingBudget -= truncatedTokens;
3130
3285
  anyTruncated = true;
3131
- logger$19.warn({
3286
+ logger$20.warn({
3132
3287
  section: section.name,
3133
3288
  originalTokens: tokens,
3134
3289
  budgetTokens: truncatedTokens
@@ -3142,7 +3297,7 @@ var ContextCompilerImpl = class {
3142
3297
  });
3143
3298
  } else {
3144
3299
  anyTruncated = true;
3145
- logger$19.warn({
3300
+ logger$20.warn({
3146
3301
  section: section.name,
3147
3302
  tokens
3148
3303
  }, "Context compiler: omitted \"important\" section — no budget remaining");
@@ -3169,7 +3324,7 @@ var ContextCompilerImpl = class {
3169
3324
  } else {
3170
3325
  if (tokens > 0) {
3171
3326
  anyTruncated = true;
3172
- logger$19.warn({
3327
+ logger$20.warn({
3173
3328
  section: section.name,
3174
3329
  tokens,
3175
3330
  budgetFractionRemaining: budgetFractionRemaining.toFixed(2)
@@ -3454,7 +3609,7 @@ function parseYamlResult(yamlText, schema) {
3454
3609
 
3455
3610
  //#endregion
3456
3611
  //#region src/modules/agent-dispatch/dispatcher-impl.ts
3457
- const logger$18 = createLogger("agent-dispatch");
3612
+ const logger$19 = createLogger("agent-dispatch");
3458
3613
  const SHUTDOWN_GRACE_MS = 1e4;
3459
3614
  const SHUTDOWN_MAX_WAIT_MS = 3e4;
3460
3615
  const CHARS_PER_TOKEN = 4;
@@ -3499,7 +3654,7 @@ function getAvailableMemory() {
3499
3654
  }).trim(), 10);
3500
3655
  _lastKnownPressureLevel = pressureLevel;
3501
3656
  if (pressureLevel >= 4) {
3502
- logger$18.warn({ pressureLevel }, "macOS kernel reports critical memory pressure");
3657
+ logger$19.warn({ pressureLevel }, "macOS kernel reports critical memory pressure");
3503
3658
  return 0;
3504
3659
  }
3505
3660
  } catch {}
@@ -3514,7 +3669,7 @@ function getAvailableMemory() {
3514
3669
  const speculative = parseInt(vmstat.match(/Pages speculative:\s+(\d+)/)?.[1] ?? "0", 10);
3515
3670
  const available = (free + purgeable + speculative) * pageSize;
3516
3671
  if (pressureLevel >= 2) {
3517
- logger$18.warn({
3672
+ logger$19.warn({
3518
3673
  pressureLevel,
3519
3674
  availableBeforeDiscount: available
3520
3675
  }, "macOS kernel reports memory pressure — discounting estimate");
@@ -3594,7 +3749,7 @@ var DispatcherImpl = class {
3594
3749
  resolve: typedResolve,
3595
3750
  reject
3596
3751
  });
3597
- logger$18.debug({
3752
+ logger$19.debug({
3598
3753
  id,
3599
3754
  queueLength: this._queue.length
3600
3755
  }, "Dispatch queued");
@@ -3625,7 +3780,7 @@ var DispatcherImpl = class {
3625
3780
  async shutdown() {
3626
3781
  this._shuttingDown = true;
3627
3782
  this._stopMemoryPressureTimer();
3628
- logger$18.info({
3783
+ logger$19.info({
3629
3784
  running: this._running.size,
3630
3785
  queued: this._queue.length
3631
3786
  }, "Dispatcher shutting down");
@@ -3658,13 +3813,13 @@ var DispatcherImpl = class {
3658
3813
  }
3659
3814
  }, 50);
3660
3815
  });
3661
- logger$18.info("Dispatcher shutdown complete");
3816
+ logger$19.info("Dispatcher shutdown complete");
3662
3817
  }
3663
3818
  async _startDispatch(id, request, resolve$2) {
3664
3819
  const { prompt, agent, taskType, timeout, outputSchema, workingDirectory, model, maxTurns } = request;
3665
3820
  const adapter = this._adapterRegistry.get(agent);
3666
3821
  if (adapter === void 0) {
3667
- logger$18.warn({
3822
+ logger$19.warn({
3668
3823
  id,
3669
3824
  agent
3670
3825
  }, "No adapter found for agent");
@@ -3710,7 +3865,7 @@ var DispatcherImpl = class {
3710
3865
  });
3711
3866
  const startedAt = Date.now();
3712
3867
  proc.on("error", (err) => {
3713
- logger$18.error({
3868
+ logger$19.error({
3714
3869
  id,
3715
3870
  binary: cmd.binary,
3716
3871
  error: err.message
@@ -3718,7 +3873,7 @@ var DispatcherImpl = class {
3718
3873
  });
3719
3874
  if (proc.stdin !== null) {
3720
3875
  proc.stdin.on("error", (err) => {
3721
- if (err.code !== "EPIPE") logger$18.warn({
3876
+ if (err.code !== "EPIPE") logger$19.warn({
3722
3877
  id,
3723
3878
  error: err.message
3724
3879
  }, "stdin write error");
@@ -3760,7 +3915,7 @@ var DispatcherImpl = class {
3760
3915
  agent,
3761
3916
  taskType
3762
3917
  });
3763
- logger$18.debug({
3918
+ logger$19.debug({
3764
3919
  id,
3765
3920
  agent,
3766
3921
  taskType,
@@ -3777,7 +3932,7 @@ var DispatcherImpl = class {
3777
3932
  dispatchId: id,
3778
3933
  timeoutMs
3779
3934
  });
3780
- logger$18.warn({
3935
+ logger$19.warn({
3781
3936
  id,
3782
3937
  agent,
3783
3938
  taskType,
@@ -3831,7 +3986,7 @@ var DispatcherImpl = class {
3831
3986
  exitCode: code,
3832
3987
  output: stdout
3833
3988
  });
3834
- logger$18.debug({
3989
+ logger$19.debug({
3835
3990
  id,
3836
3991
  agent,
3837
3992
  taskType,
@@ -3857,7 +4012,7 @@ var DispatcherImpl = class {
3857
4012
  error: stderr || `Process exited with code ${String(code)}`,
3858
4013
  exitCode: code
3859
4014
  });
3860
- logger$18.debug({
4015
+ logger$19.debug({
3861
4016
  id,
3862
4017
  agent,
3863
4018
  taskType,
@@ -3916,7 +4071,7 @@ var DispatcherImpl = class {
3916
4071
  const next = this._queue.shift();
3917
4072
  if (next === void 0) return;
3918
4073
  next.handle.status = "running";
3919
- logger$18.debug({
4074
+ logger$19.debug({
3920
4075
  id: next.id,
3921
4076
  queueLength: this._queue.length
3922
4077
  }, "Dequeued dispatch");
@@ -3929,7 +4084,7 @@ var DispatcherImpl = class {
3929
4084
  _isMemoryPressured() {
3930
4085
  const free = getAvailableMemory();
3931
4086
  if (free < MIN_FREE_MEMORY_BYTES) {
3932
- logger$18.warn({
4087
+ logger$19.warn({
3933
4088
  freeMB: Math.round(free / 1024 / 1024),
3934
4089
  thresholdMB: Math.round(MIN_FREE_MEMORY_BYTES / 1024 / 1024),
3935
4090
  pressureLevel: _lastKnownPressureLevel
@@ -4045,7 +4200,7 @@ function runBuildVerification(options) {
4045
4200
  let cmd;
4046
4201
  if (verifyCommand === void 0) {
4047
4202
  const detection = detectPackageManager(projectRoot);
4048
- logger$18.info({
4203
+ logger$19.info({
4049
4204
  packageManager: detection.packageManager,
4050
4205
  lockfile: detection.lockfile,
4051
4206
  resolvedCommand: detection.command
@@ -4244,7 +4399,7 @@ function pickRecommendation(distribution, profile, totalIssues, reviewCycles, la
4244
4399
 
4245
4400
  //#endregion
4246
4401
  //#region src/modules/compiled-workflows/prompt-assembler.ts
4247
- const logger$17 = createLogger("compiled-workflows:prompt-assembler");
4402
+ const logger$18 = createLogger("compiled-workflows:prompt-assembler");
4248
4403
  /**
4249
4404
  * Assemble a final prompt from a template and sections map.
4250
4405
  *
@@ -4269,7 +4424,7 @@ function assemblePrompt(template, sections, tokenCeiling = 2200) {
4269
4424
  tokenCount,
4270
4425
  truncated: false
4271
4426
  };
4272
- logger$17.warn({
4427
+ logger$18.warn({
4273
4428
  tokenCount,
4274
4429
  ceiling: tokenCeiling
4275
4430
  }, "Prompt exceeds token ceiling — truncating optional sections");
@@ -4285,10 +4440,10 @@ function assemblePrompt(template, sections, tokenCeiling = 2200) {
4285
4440
  const targetSectionTokens = Math.max(0, currentSectionTokens - overBy);
4286
4441
  if (targetSectionTokens === 0) {
4287
4442
  contentMap[section.name] = "";
4288
- logger$17.warn({ sectionName: section.name }, "Section eliminated to fit token budget");
4443
+ logger$18.warn({ sectionName: section.name }, "Section eliminated to fit token budget");
4289
4444
  } else {
4290
4445
  contentMap[section.name] = truncateToTokens(section.content, targetSectionTokens);
4291
- logger$17.warn({
4446
+ logger$18.warn({
4292
4447
  sectionName: section.name,
4293
4448
  targetSectionTokens
4294
4449
  }, "Section truncated to fit token budget");
@@ -4299,7 +4454,7 @@ function assemblePrompt(template, sections, tokenCeiling = 2200) {
4299
4454
  }
4300
4455
  if (tokenCount <= tokenCeiling) break;
4301
4456
  }
4302
- if (tokenCount > tokenCeiling) logger$17.warn({
4457
+ if (tokenCount > tokenCeiling) logger$18.warn({
4303
4458
  tokenCount,
4304
4459
  ceiling: tokenCeiling
4305
4460
  }, "Required sections alone exceed token ceiling — returning over-budget prompt");
@@ -4597,7 +4752,7 @@ function getTokenCeiling(workflowType, tokenCeilings) {
4597
4752
 
4598
4753
  //#endregion
4599
4754
  //#region src/modules/compiled-workflows/create-story.ts
4600
- const logger$16 = createLogger("compiled-workflows:create-story");
4755
+ const logger$17 = createLogger("compiled-workflows:create-story");
4601
4756
  /**
4602
4757
  * Execute the compiled create-story workflow.
4603
4758
  *
@@ -4617,13 +4772,13 @@ const logger$16 = createLogger("compiled-workflows:create-story");
4617
4772
  */
4618
4773
  async function runCreateStory(deps, params) {
4619
4774
  const { epicId, storyKey, pipelineRunId } = params;
4620
- logger$16.debug({
4775
+ logger$17.debug({
4621
4776
  epicId,
4622
4777
  storyKey,
4623
4778
  pipelineRunId
4624
4779
  }, "Starting create-story workflow");
4625
4780
  const { ceiling: TOKEN_CEILING, source: tokenCeilingSource } = getTokenCeiling("create-story", deps.tokenCeilings);
4626
- logger$16.info({
4781
+ logger$17.info({
4627
4782
  workflow: "create-story",
4628
4783
  ceiling: TOKEN_CEILING,
4629
4784
  source: tokenCeilingSource
@@ -4633,7 +4788,7 @@ async function runCreateStory(deps, params) {
4633
4788
  template = await deps.pack.getPrompt("create-story");
4634
4789
  } catch (err) {
4635
4790
  const error = err instanceof Error ? err.message : String(err);
4636
- logger$16.error({ error }, "Failed to retrieve create-story prompt template");
4791
+ logger$17.error({ error }, "Failed to retrieve create-story prompt template");
4637
4792
  return {
4638
4793
  result: "failed",
4639
4794
  error: `Failed to retrieve prompt template: ${error}`,
@@ -4675,7 +4830,7 @@ async function runCreateStory(deps, params) {
4675
4830
  priority: "important"
4676
4831
  }
4677
4832
  ], TOKEN_CEILING);
4678
- logger$16.debug({
4833
+ logger$17.debug({
4679
4834
  tokenCount,
4680
4835
  truncated,
4681
4836
  tokenCeiling: TOKEN_CEILING
@@ -4692,7 +4847,7 @@ async function runCreateStory(deps, params) {
4692
4847
  dispatchResult = await handle.result;
4693
4848
  } catch (err) {
4694
4849
  const error = err instanceof Error ? err.message : String(err);
4695
- logger$16.error({
4850
+ logger$17.error({
4696
4851
  epicId,
4697
4852
  storyKey,
4698
4853
  error
@@ -4713,7 +4868,7 @@ async function runCreateStory(deps, params) {
4713
4868
  if (dispatchResult.status === "failed") {
4714
4869
  const errorMsg = dispatchResult.parseError ?? `Dispatch failed with exit code ${dispatchResult.exitCode}`;
4715
4870
  const stderrDetail = dispatchResult.output ? ` Output: ${dispatchResult.output}` : "";
4716
- logger$16.warn({
4871
+ logger$17.warn({
4717
4872
  epicId,
4718
4873
  storyKey,
4719
4874
  exitCode: dispatchResult.exitCode
@@ -4725,7 +4880,7 @@ async function runCreateStory(deps, params) {
4725
4880
  };
4726
4881
  }
4727
4882
  if (dispatchResult.status === "timeout") {
4728
- logger$16.warn({
4883
+ logger$17.warn({
4729
4884
  epicId,
4730
4885
  storyKey
4731
4886
  }, "Create-story dispatch timed out");
@@ -4738,7 +4893,7 @@ async function runCreateStory(deps, params) {
4738
4893
  if (dispatchResult.parsed === null) {
4739
4894
  const details = dispatchResult.parseError ?? "No YAML block found in output";
4740
4895
  const rawSnippet = dispatchResult.output ? dispatchResult.output.slice(0, 1e3) : "(empty)";
4741
- logger$16.warn({
4896
+ logger$17.warn({
4742
4897
  epicId,
4743
4898
  storyKey,
4744
4899
  details,
@@ -4754,7 +4909,7 @@ async function runCreateStory(deps, params) {
4754
4909
  const parseResult = CreateStoryResultSchema.safeParse(dispatchResult.parsed);
4755
4910
  if (!parseResult.success) {
4756
4911
  const details = parseResult.error.message;
4757
- logger$16.warn({
4912
+ logger$17.warn({
4758
4913
  epicId,
4759
4914
  storyKey,
4760
4915
  details
@@ -4767,7 +4922,7 @@ async function runCreateStory(deps, params) {
4767
4922
  };
4768
4923
  }
4769
4924
  const parsed = parseResult.data;
4770
- logger$16.info({
4925
+ logger$17.info({
4771
4926
  epicId,
4772
4927
  storyKey,
4773
4928
  storyFile: parsed.story_file,
@@ -4789,7 +4944,7 @@ function getImplementationDecisions(deps) {
4789
4944
  try {
4790
4945
  return getDecisionsByPhase(deps.db, "implementation");
4791
4946
  } catch (err) {
4792
- logger$16.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve implementation decisions");
4947
+ logger$17.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve implementation decisions");
4793
4948
  return [];
4794
4949
  }
4795
4950
  }
@@ -4832,13 +4987,13 @@ function getEpicShard(decisions, epicId, projectRoot, storyKey) {
4832
4987
  if (storyKey) {
4833
4988
  const storySection = extractStorySection(shardContent, storyKey);
4834
4989
  if (storySection) {
4835
- logger$16.debug({
4990
+ logger$17.debug({
4836
4991
  epicId,
4837
4992
  storyKey
4838
4993
  }, "Extracted per-story section from epic shard");
4839
4994
  return storySection;
4840
4995
  }
4841
- logger$16.debug({
4996
+ logger$17.debug({
4842
4997
  epicId,
4843
4998
  storyKey
4844
4999
  }, "No matching story section found — using full epic shard");
@@ -4848,11 +5003,11 @@ function getEpicShard(decisions, epicId, projectRoot, storyKey) {
4848
5003
  if (projectRoot) {
4849
5004
  const fallback = readEpicShardFromFile(projectRoot, epicId);
4850
5005
  if (fallback) {
4851
- logger$16.info({ epicId }, "Using file-based fallback for epic shard (decisions table empty)");
5006
+ logger$17.info({ epicId }, "Using file-based fallback for epic shard (decisions table empty)");
4852
5007
  if (storyKey) {
4853
5008
  const storySection = extractStorySection(fallback, storyKey);
4854
5009
  if (storySection) {
4855
- logger$16.debug({
5010
+ logger$17.debug({
4856
5011
  epicId,
4857
5012
  storyKey
4858
5013
  }, "Extracted per-story section from file-based epic shard");
@@ -4864,7 +5019,7 @@ function getEpicShard(decisions, epicId, projectRoot, storyKey) {
4864
5019
  }
4865
5020
  return "";
4866
5021
  } catch (err) {
4867
- logger$16.warn({
5022
+ logger$17.warn({
4868
5023
  epicId,
4869
5024
  error: err instanceof Error ? err.message : String(err)
4870
5025
  }, "Failed to retrieve epic shard");
@@ -4881,7 +5036,7 @@ function getPrevDevNotes(decisions, epicId) {
4881
5036
  if (devNotes.length === 0) return "";
4882
5037
  return devNotes[devNotes.length - 1].value;
4883
5038
  } catch (err) {
4884
- logger$16.warn({
5039
+ logger$17.warn({
4885
5040
  epicId,
4886
5041
  error: err instanceof Error ? err.message : String(err)
4887
5042
  }, "Failed to retrieve prev dev notes");
@@ -4901,13 +5056,13 @@ function getArchConstraints$3(deps) {
4901
5056
  if (deps.projectRoot) {
4902
5057
  const fallback = readArchConstraintsFromFile(deps.projectRoot);
4903
5058
  if (fallback) {
4904
- logger$16.info("Using file-based fallback for architecture constraints (decisions table empty)");
5059
+ logger$17.info("Using file-based fallback for architecture constraints (decisions table empty)");
4905
5060
  return fallback;
4906
5061
  }
4907
5062
  }
4908
5063
  return "";
4909
5064
  } catch (err) {
4910
- logger$16.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints");
5065
+ logger$17.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints");
4911
5066
  return "";
4912
5067
  }
4913
5068
  }
@@ -4927,7 +5082,7 @@ function readEpicShardFromFile(projectRoot, epicId) {
4927
5082
  const match = pattern.exec(content);
4928
5083
  return match ? match[0].trim() : "";
4929
5084
  } catch (err) {
4930
- logger$16.warn({
5085
+ logger$17.warn({
4931
5086
  epicId,
4932
5087
  error: err instanceof Error ? err.message : String(err)
4933
5088
  }, "File-based epic shard fallback failed");
@@ -4950,7 +5105,7 @@ function readArchConstraintsFromFile(projectRoot) {
4950
5105
  const content = readFileSync$1(archPath, "utf-8");
4951
5106
  return content.slice(0, 1500);
4952
5107
  } catch (err) {
4953
- logger$16.warn({ error: err instanceof Error ? err.message : String(err) }, "File-based architecture fallback failed");
5108
+ logger$17.warn({ error: err instanceof Error ? err.message : String(err) }, "File-based architecture fallback failed");
4954
5109
  return "";
4955
5110
  }
4956
5111
  }
@@ -4963,7 +5118,7 @@ async function getStoryTemplate(deps) {
4963
5118
  try {
4964
5119
  return await deps.pack.getTemplate("story");
4965
5120
  } catch (err) {
4966
- logger$16.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve story template from pack");
5121
+ logger$17.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve story template from pack");
4967
5122
  return "";
4968
5123
  }
4969
5124
  }
@@ -5000,7 +5155,7 @@ async function isValidStoryFile(filePath) {
5000
5155
 
5001
5156
  //#endregion
5002
5157
  //#region src/modules/compiled-workflows/git-helpers.ts
5003
- const logger$15 = createLogger("compiled-workflows:git-helpers");
5158
+ const logger$16 = createLogger("compiled-workflows:git-helpers");
5004
5159
  /**
5005
5160
  * Capture the full git diff for HEAD (working tree vs current commit).
5006
5161
  *
@@ -5096,7 +5251,7 @@ async function stageIntentToAdd(files, workingDirectory) {
5096
5251
  if (files.length === 0) return;
5097
5252
  const existing = files.filter((f) => {
5098
5253
  const exists = existsSync$1(f);
5099
- if (!exists) logger$15.debug({ file: f }, "Skipping nonexistent file in stageIntentToAdd");
5254
+ if (!exists) logger$16.debug({ file: f }, "Skipping nonexistent file in stageIntentToAdd");
5100
5255
  return exists;
5101
5256
  });
5102
5257
  if (existing.length === 0) return;
@@ -5130,7 +5285,7 @@ async function runGitCommand(args, cwd, logLabel) {
5130
5285
  stderr += chunk.toString("utf-8");
5131
5286
  });
5132
5287
  proc.on("error", (err) => {
5133
- logger$15.warn({
5288
+ logger$16.warn({
5134
5289
  label: logLabel,
5135
5290
  cwd,
5136
5291
  error: err.message
@@ -5139,7 +5294,7 @@ async function runGitCommand(args, cwd, logLabel) {
5139
5294
  });
5140
5295
  proc.on("close", (code) => {
5141
5296
  if (code !== 0) {
5142
- logger$15.warn({
5297
+ logger$16.warn({
5143
5298
  label: logLabel,
5144
5299
  cwd,
5145
5300
  code,
@@ -5155,7 +5310,7 @@ async function runGitCommand(args, cwd, logLabel) {
5155
5310
 
5156
5311
  //#endregion
5157
5312
  //#region src/modules/implementation-orchestrator/project-findings.ts
5158
- const logger$14 = createLogger("project-findings");
5313
+ const logger$15 = createLogger("project-findings");
5159
5314
  /** Maximum character length for the findings summary */
5160
5315
  const MAX_CHARS = 2e3;
5161
5316
  /**
@@ -5221,7 +5376,7 @@ function getProjectFindings(db) {
5221
5376
  if (summary.length > MAX_CHARS) summary = summary.slice(0, MAX_CHARS - 3) + "...";
5222
5377
  return summary;
5223
5378
  } catch (err) {
5224
- logger$14.warn({ err }, "Failed to query project findings (graceful fallback)");
5379
+ logger$15.warn({ err }, "Failed to query project findings (graceful fallback)");
5225
5380
  return "";
5226
5381
  }
5227
5382
  }
@@ -5244,7 +5399,7 @@ function extractRecurringPatterns(outcomes) {
5244
5399
 
5245
5400
  //#endregion
5246
5401
  //#region src/modules/compiled-workflows/story-complexity.ts
5247
- const logger$13 = createLogger("compiled-workflows:story-complexity");
5402
+ const logger$14 = createLogger("compiled-workflows:story-complexity");
5248
5403
  /**
5249
5404
  * Compute a complexity score from story markdown content.
5250
5405
  *
@@ -5296,7 +5451,7 @@ function resolveFixStoryMaxTurns(complexityScore) {
5296
5451
  * @param resolvedMaxTurns - Turn limit resolved for this dispatch
5297
5452
  */
5298
5453
  function logComplexityResult(storyKey, complexity, resolvedMaxTurns) {
5299
- logger$13.info({
5454
+ logger$14.info({
5300
5455
  storyKey,
5301
5456
  taskCount: complexity.taskCount,
5302
5457
  subtaskCount: complexity.subtaskCount,
@@ -5354,7 +5509,7 @@ function countFilesInLayout(content) {
5354
5509
 
5355
5510
  //#endregion
5356
5511
  //#region src/modules/compiled-workflows/dev-story.ts
5357
- const logger$12 = createLogger("compiled-workflows:dev-story");
5512
+ const logger$13 = createLogger("compiled-workflows:dev-story");
5358
5513
  /** Default timeout for dev-story dispatches in milliseconds (30 min) */
5359
5514
  const DEFAULT_TIMEOUT_MS$1 = 18e5;
5360
5515
  /** Default Vitest test patterns injected when no test-pattern decisions exist */
@@ -5377,12 +5532,12 @@ const DEFAULT_VITEST_PATTERNS = `## Test Patterns (defaults)
5377
5532
  */
5378
5533
  async function runDevStory(deps, params) {
5379
5534
  const { storyKey, storyFilePath, taskScope, priorFiles } = params;
5380
- logger$12.info({
5535
+ logger$13.info({
5381
5536
  storyKey,
5382
5537
  storyFilePath
5383
5538
  }, "Starting compiled dev-story workflow");
5384
5539
  const { ceiling: TOKEN_CEILING, source: tokenCeilingSource } = getTokenCeiling("dev-story", deps.tokenCeilings);
5385
- logger$12.info({
5540
+ logger$13.info({
5386
5541
  workflow: "dev-story",
5387
5542
  ceiling: TOKEN_CEILING,
5388
5543
  source: tokenCeilingSource
@@ -5425,10 +5580,10 @@ async function runDevStory(deps, params) {
5425
5580
  let template;
5426
5581
  try {
5427
5582
  template = await deps.pack.getPrompt("dev-story");
5428
- logger$12.debug({ storyKey }, "Retrieved dev-story prompt template from pack");
5583
+ logger$13.debug({ storyKey }, "Retrieved dev-story prompt template from pack");
5429
5584
  } catch (err) {
5430
5585
  const error = err instanceof Error ? err.message : String(err);
5431
- logger$12.error({
5586
+ logger$13.error({
5432
5587
  storyKey,
5433
5588
  error
5434
5589
  }, "Failed to retrieve dev-story prompt template");
@@ -5439,14 +5594,14 @@ async function runDevStory(deps, params) {
5439
5594
  storyContent = await readFile$1(storyFilePath, "utf-8");
5440
5595
  } catch (err) {
5441
5596
  if (err.code === "ENOENT") {
5442
- logger$12.error({
5597
+ logger$13.error({
5443
5598
  storyKey,
5444
5599
  storyFilePath
5445
5600
  }, "Story file not found");
5446
5601
  return makeFailureResult("story_file_not_found");
5447
5602
  }
5448
5603
  const error = err instanceof Error ? err.message : String(err);
5449
- logger$12.error({
5604
+ logger$13.error({
5450
5605
  storyKey,
5451
5606
  storyFilePath,
5452
5607
  error
@@ -5454,7 +5609,7 @@ async function runDevStory(deps, params) {
5454
5609
  return makeFailureResult(`story_file_read_error: ${error}`);
5455
5610
  }
5456
5611
  if (storyContent.trim().length === 0) {
5457
- logger$12.error({
5612
+ logger$13.error({
5458
5613
  storyKey,
5459
5614
  storyFilePath
5460
5615
  }, "Story file is empty");
@@ -5469,17 +5624,17 @@ async function runDevStory(deps, params) {
5469
5624
  const testPatternDecisions = solutioningDecisions.filter((d) => d.category === "test-patterns");
5470
5625
  if (testPatternDecisions.length > 0) {
5471
5626
  testPatternsContent = "## Test Patterns\n" + testPatternDecisions.map((d) => `- ${d.key}: ${d.value}`).join("\n");
5472
- logger$12.debug({
5627
+ logger$13.debug({
5473
5628
  storyKey,
5474
5629
  count: testPatternDecisions.length
5475
5630
  }, "Loaded test patterns from decision store");
5476
5631
  } else {
5477
5632
  testPatternsContent = DEFAULT_VITEST_PATTERNS;
5478
- logger$12.debug({ storyKey }, "No test-pattern decisions found — using default Vitest patterns");
5633
+ logger$13.debug({ storyKey }, "No test-pattern decisions found — using default Vitest patterns");
5479
5634
  }
5480
5635
  } catch (err) {
5481
5636
  const error = err instanceof Error ? err.message : String(err);
5482
- logger$12.warn({
5637
+ logger$13.warn({
5483
5638
  storyKey,
5484
5639
  error
5485
5640
  }, "Failed to load test patterns — using defaults");
@@ -5494,7 +5649,7 @@ async function runDevStory(deps, params) {
5494
5649
  const findings = getProjectFindings(deps.db);
5495
5650
  if (findings.length > 0) {
5496
5651
  priorFindingsContent = "Previous pipeline runs encountered these issues — avoid repeating them:\n\n" + findings;
5497
- logger$12.debug({
5652
+ logger$13.debug({
5498
5653
  storyKey,
5499
5654
  findingsLen: findings.length
5500
5655
  }, "Injecting prior findings into dev-story prompt");
@@ -5514,7 +5669,7 @@ async function runDevStory(deps, params) {
5514
5669
  if (plan.test_categories && plan.test_categories.length > 0) parts.push(`\n### Categories: ${plan.test_categories.join(", ")}`);
5515
5670
  if (plan.coverage_notes) parts.push(`\n### Coverage Notes\n${plan.coverage_notes}`);
5516
5671
  testPlanContent = parts.join("\n");
5517
- logger$12.debug({ storyKey }, "Injecting test plan into dev-story prompt");
5672
+ logger$13.debug({ storyKey }, "Injecting test plan into dev-story prompt");
5518
5673
  }
5519
5674
  } catch {}
5520
5675
  const sections = [
@@ -5560,7 +5715,7 @@ async function runDevStory(deps, params) {
5560
5715
  }
5561
5716
  ];
5562
5717
  const { prompt, tokenCount, truncated } = assemblePrompt(template, sections, TOKEN_CEILING);
5563
- logger$12.info({
5718
+ logger$13.info({
5564
5719
  storyKey,
5565
5720
  tokenCount,
5566
5721
  ceiling: TOKEN_CEILING,
@@ -5580,7 +5735,7 @@ async function runDevStory(deps, params) {
5580
5735
  dispatchResult = await handle.result;
5581
5736
  } catch (err) {
5582
5737
  const error = err instanceof Error ? err.message : String(err);
5583
- logger$12.error({
5738
+ logger$13.error({
5584
5739
  storyKey,
5585
5740
  error
5586
5741
  }, "Dispatch threw an unexpected error");
@@ -5591,11 +5746,11 @@ async function runDevStory(deps, params) {
5591
5746
  output: dispatchResult.tokenEstimate.output
5592
5747
  };
5593
5748
  if (dispatchResult.status === "timeout") {
5594
- logger$12.error({
5749
+ logger$13.error({
5595
5750
  storyKey,
5596
5751
  durationMs: dispatchResult.durationMs
5597
5752
  }, "Dev-story dispatch timed out");
5598
- if (dispatchResult.output.length > 0) logger$12.info({
5753
+ if (dispatchResult.output.length > 0) logger$13.info({
5599
5754
  storyKey,
5600
5755
  partialOutput: dispatchResult.output.slice(0, 500)
5601
5756
  }, "Partial output before timeout");
@@ -5605,12 +5760,12 @@ async function runDevStory(deps, params) {
5605
5760
  };
5606
5761
  }
5607
5762
  if (dispatchResult.status === "failed" || dispatchResult.exitCode !== 0) {
5608
- logger$12.error({
5763
+ logger$13.error({
5609
5764
  storyKey,
5610
5765
  exitCode: dispatchResult.exitCode,
5611
5766
  status: dispatchResult.status
5612
5767
  }, "Dev-story dispatch failed");
5613
- if (dispatchResult.output.length > 0) logger$12.info({
5768
+ if (dispatchResult.output.length > 0) logger$13.info({
5614
5769
  storyKey,
5615
5770
  partialOutput: dispatchResult.output.slice(0, 500)
5616
5771
  }, "Partial output from failed dispatch");
@@ -5622,7 +5777,7 @@ async function runDevStory(deps, params) {
5622
5777
  if (dispatchResult.parseError !== null || dispatchResult.parsed === null) {
5623
5778
  const details = dispatchResult.parseError ?? "parsed result was null";
5624
5779
  const rawSnippet = dispatchResult.output ? dispatchResult.output.slice(0, 1e3) : "(empty)";
5625
- logger$12.error({
5780
+ logger$13.error({
5626
5781
  storyKey,
5627
5782
  parseError: details,
5628
5783
  rawOutputSnippet: rawSnippet
@@ -5630,12 +5785,12 @@ async function runDevStory(deps, params) {
5630
5785
  let filesModified = [];
5631
5786
  try {
5632
5787
  filesModified = await getGitChangedFiles(deps.projectRoot ?? process.cwd());
5633
- if (filesModified.length > 0) logger$12.info({
5788
+ if (filesModified.length > 0) logger$13.info({
5634
5789
  storyKey,
5635
5790
  fileCount: filesModified.length
5636
5791
  }, "Recovered files_modified from git status (YAML fallback)");
5637
5792
  } catch (err) {
5638
- logger$12.warn({
5793
+ logger$13.warn({
5639
5794
  storyKey,
5640
5795
  error: err instanceof Error ? err.message : String(err)
5641
5796
  }, "Failed to recover files_modified from git");
@@ -5652,7 +5807,7 @@ async function runDevStory(deps, params) {
5652
5807
  };
5653
5808
  }
5654
5809
  const parsed = dispatchResult.parsed;
5655
- logger$12.info({
5810
+ logger$13.info({
5656
5811
  storyKey,
5657
5812
  result: parsed.result,
5658
5813
  acMet: parsed.ac_met.length
@@ -5791,7 +5946,7 @@ function extractFilesInScope(storyContent) {
5791
5946
 
5792
5947
  //#endregion
5793
5948
  //#region src/modules/compiled-workflows/code-review.ts
5794
- const logger$11 = createLogger("compiled-workflows:code-review");
5949
+ const logger$12 = createLogger("compiled-workflows:code-review");
5795
5950
  /**
5796
5951
  * Default fallback result when dispatch fails or times out.
5797
5952
  * Uses NEEDS_MINOR_FIXES (not NEEDS_MAJOR_REWORK) so a parse/schema failure
@@ -5829,14 +5984,14 @@ function defaultFailResult(error, tokenUsage) {
5829
5984
  async function runCodeReview(deps, params) {
5830
5985
  const { storyKey, storyFilePath, workingDirectory, pipelineRunId, filesModified, previousIssues } = params;
5831
5986
  const cwd = workingDirectory ?? process.cwd();
5832
- logger$11.debug({
5987
+ logger$12.debug({
5833
5988
  storyKey,
5834
5989
  storyFilePath,
5835
5990
  cwd,
5836
5991
  pipelineRunId
5837
5992
  }, "Starting code-review workflow");
5838
5993
  const { ceiling: TOKEN_CEILING, source: tokenCeilingSource } = getTokenCeiling("code-review", deps.tokenCeilings);
5839
- logger$11.info({
5994
+ logger$12.info({
5840
5995
  workflow: "code-review",
5841
5996
  ceiling: TOKEN_CEILING,
5842
5997
  source: tokenCeilingSource
@@ -5846,7 +6001,7 @@ async function runCodeReview(deps, params) {
5846
6001
  template = await deps.pack.getPrompt("code-review");
5847
6002
  } catch (err) {
5848
6003
  const error = err instanceof Error ? err.message : String(err);
5849
- logger$11.error({ error }, "Failed to retrieve code-review prompt template");
6004
+ logger$12.error({ error }, "Failed to retrieve code-review prompt template");
5850
6005
  return defaultFailResult(`Failed to retrieve prompt template: ${error}`, {
5851
6006
  input: 0,
5852
6007
  output: 0
@@ -5857,7 +6012,7 @@ async function runCodeReview(deps, params) {
5857
6012
  storyContent = await readFile$1(storyFilePath, "utf-8");
5858
6013
  } catch (err) {
5859
6014
  const error = err instanceof Error ? err.message : String(err);
5860
- logger$11.error({
6015
+ logger$12.error({
5861
6016
  storyFilePath,
5862
6017
  error
5863
6018
  }, "Failed to read story file");
@@ -5877,12 +6032,12 @@ async function runCodeReview(deps, params) {
5877
6032
  const scopedTotal = nonDiffTokens + countTokens(scopedDiff);
5878
6033
  if (scopedTotal <= TOKEN_CEILING) {
5879
6034
  gitDiffContent = scopedDiff;
5880
- logger$11.debug({
6035
+ logger$12.debug({
5881
6036
  fileCount: filesModified.length,
5882
6037
  tokenCount: scopedTotal
5883
6038
  }, "Using scoped file diff");
5884
6039
  } else {
5885
- logger$11.warn({
6040
+ logger$12.warn({
5886
6041
  estimatedTotal: scopedTotal,
5887
6042
  ceiling: TOKEN_CEILING,
5888
6043
  fileCount: filesModified.length
@@ -5896,7 +6051,7 @@ async function runCodeReview(deps, params) {
5896
6051
  const fullTotal = nonDiffTokens + countTokens(fullDiff);
5897
6052
  if (fullTotal <= TOKEN_CEILING) gitDiffContent = fullDiff;
5898
6053
  else {
5899
- logger$11.warn({
6054
+ logger$12.warn({
5900
6055
  estimatedTotal: fullTotal,
5901
6056
  ceiling: TOKEN_CEILING
5902
6057
  }, "Full git diff would exceed token ceiling — using stat-only summary");
@@ -5904,7 +6059,7 @@ async function runCodeReview(deps, params) {
5904
6059
  }
5905
6060
  }
5906
6061
  if (gitDiffContent.trim().length === 0) {
5907
- logger$11.info({ storyKey }, "Empty git diff — skipping review with SHIP_IT");
6062
+ logger$12.info({ storyKey }, "Empty git diff — skipping review with SHIP_IT");
5908
6063
  return {
5909
6064
  verdict: "SHIP_IT",
5910
6065
  issues: 0,
@@ -5929,7 +6084,7 @@ async function runCodeReview(deps, params) {
5929
6084
  const findings = getProjectFindings(deps.db);
5930
6085
  if (findings.length > 0) {
5931
6086
  priorFindingsContent = "Previous reviews found these recurring patterns — pay special attention:\n\n" + findings;
5932
- logger$11.debug({
6087
+ logger$12.debug({
5933
6088
  storyKey,
5934
6089
  findingsLen: findings.length
5935
6090
  }, "Injecting prior findings into code-review prompt");
@@ -5963,11 +6118,11 @@ async function runCodeReview(deps, params) {
5963
6118
  }
5964
6119
  ];
5965
6120
  const assembleResult = assemblePrompt(template, sections, TOKEN_CEILING);
5966
- if (assembleResult.truncated) logger$11.warn({
6121
+ if (assembleResult.truncated) logger$12.warn({
5967
6122
  storyKey,
5968
6123
  tokenCount: assembleResult.tokenCount
5969
6124
  }, "Code-review prompt truncated to fit token ceiling");
5970
- logger$11.debug({
6125
+ logger$12.debug({
5971
6126
  storyKey,
5972
6127
  tokenCount: assembleResult.tokenCount,
5973
6128
  truncated: assembleResult.truncated
@@ -5985,7 +6140,7 @@ async function runCodeReview(deps, params) {
5985
6140
  dispatchResult = await handle.result;
5986
6141
  } catch (err) {
5987
6142
  const error = err instanceof Error ? err.message : String(err);
5988
- logger$11.error({
6143
+ logger$12.error({
5989
6144
  storyKey,
5990
6145
  error
5991
6146
  }, "Code-review dispatch threw unexpected error");
@@ -6001,7 +6156,7 @@ async function runCodeReview(deps, params) {
6001
6156
  const rawOutput = dispatchResult.output ?? void 0;
6002
6157
  if (dispatchResult.status === "failed") {
6003
6158
  const errorMsg = `Dispatch status: failed. Exit code: ${dispatchResult.exitCode}. ${dispatchResult.parseError ?? ""} ${dispatchResult.output ? `Stderr: ${dispatchResult.output}` : ""}`.trim();
6004
- logger$11.warn({
6159
+ logger$12.warn({
6005
6160
  storyKey,
6006
6161
  exitCode: dispatchResult.exitCode
6007
6162
  }, "Code-review dispatch failed");
@@ -6011,7 +6166,7 @@ async function runCodeReview(deps, params) {
6011
6166
  };
6012
6167
  }
6013
6168
  if (dispatchResult.status === "timeout") {
6014
- logger$11.warn({ storyKey }, "Code-review dispatch timed out");
6169
+ logger$12.warn({ storyKey }, "Code-review dispatch timed out");
6015
6170
  return {
6016
6171
  ...defaultFailResult("Dispatch status: timeout. The agent did not complete within the allowed time.", tokenUsage),
6017
6172
  rawOutput
@@ -6019,7 +6174,7 @@ async function runCodeReview(deps, params) {
6019
6174
  }
6020
6175
  if (dispatchResult.parsed === null) {
6021
6176
  const details = dispatchResult.parseError ?? "No YAML block found in output";
6022
- logger$11.warn({
6177
+ logger$12.warn({
6023
6178
  storyKey,
6024
6179
  details
6025
6180
  }, "Code-review output schema validation failed");
@@ -6036,7 +6191,7 @@ async function runCodeReview(deps, params) {
6036
6191
  const parseResult = CodeReviewResultSchema.safeParse(dispatchResult.parsed);
6037
6192
  if (!parseResult.success) {
6038
6193
  const details = parseResult.error.message;
6039
- logger$11.warn({
6194
+ logger$12.warn({
6040
6195
  storyKey,
6041
6196
  details
6042
6197
  }, "Code-review output failed schema validation");
@@ -6051,13 +6206,13 @@ async function runCodeReview(deps, params) {
6051
6206
  };
6052
6207
  }
6053
6208
  const parsed = parseResult.data;
6054
- if (parsed.agentVerdict !== parsed.verdict) logger$11.info({
6209
+ if (parsed.agentVerdict !== parsed.verdict) logger$12.info({
6055
6210
  storyKey,
6056
6211
  agentVerdict: parsed.agentVerdict,
6057
6212
  pipelineVerdict: parsed.verdict,
6058
6213
  issues: parsed.issues
6059
6214
  }, "Pipeline overrode agent verdict based on issue severities");
6060
- logger$11.info({
6215
+ logger$12.info({
6061
6216
  storyKey,
6062
6217
  verdict: parsed.verdict,
6063
6218
  issues: parsed.issues
@@ -6082,14 +6237,14 @@ function getArchConstraints$2(deps) {
6082
6237
  if (constraints.length === 0) return "";
6083
6238
  return constraints.map((d) => `${d.key}: ${d.value}`).join("\n");
6084
6239
  } catch (err) {
6085
- logger$11.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints");
6240
+ logger$12.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints");
6086
6241
  return "";
6087
6242
  }
6088
6243
  }
6089
6244
 
6090
6245
  //#endregion
6091
6246
  //#region src/modules/compiled-workflows/test-plan.ts
6092
- const logger$10 = createLogger("compiled-workflows:test-plan");
6247
+ const logger$11 = createLogger("compiled-workflows:test-plan");
6093
6248
  /** Default timeout for test-plan dispatches in milliseconds (5 min — lightweight call) */
6094
6249
  const DEFAULT_TIMEOUT_MS = 3e5;
6095
6250
  /**
@@ -6101,12 +6256,12 @@ const DEFAULT_TIMEOUT_MS = 3e5;
6101
6256
  */
6102
6257
  async function runTestPlan(deps, params) {
6103
6258
  const { storyKey, storyFilePath, pipelineRunId } = params;
6104
- logger$10.info({
6259
+ logger$11.info({
6105
6260
  storyKey,
6106
6261
  storyFilePath
6107
6262
  }, "Starting compiled test-plan workflow");
6108
6263
  const { ceiling: TOKEN_CEILING, source: tokenCeilingSource } = getTokenCeiling("test-plan", deps.tokenCeilings);
6109
- logger$10.info({
6264
+ logger$11.info({
6110
6265
  workflow: "test-plan",
6111
6266
  ceiling: TOKEN_CEILING,
6112
6267
  source: tokenCeilingSource
@@ -6114,10 +6269,10 @@ async function runTestPlan(deps, params) {
6114
6269
  let template;
6115
6270
  try {
6116
6271
  template = await deps.pack.getPrompt("test-plan");
6117
- logger$10.debug({ storyKey }, "Retrieved test-plan prompt template from pack");
6272
+ logger$11.debug({ storyKey }, "Retrieved test-plan prompt template from pack");
6118
6273
  } catch (err) {
6119
6274
  const error = err instanceof Error ? err.message : String(err);
6120
- logger$10.warn({
6275
+ logger$11.warn({
6121
6276
  storyKey,
6122
6277
  error
6123
6278
  }, "Failed to retrieve test-plan prompt template");
@@ -6128,14 +6283,14 @@ async function runTestPlan(deps, params) {
6128
6283
  storyContent = await readFile$1(storyFilePath, "utf-8");
6129
6284
  } catch (err) {
6130
6285
  if (err.code === "ENOENT") {
6131
- logger$10.warn({
6286
+ logger$11.warn({
6132
6287
  storyKey,
6133
6288
  storyFilePath
6134
6289
  }, "Story file not found for test planning");
6135
6290
  return makeTestPlanFailureResult("story_file_not_found");
6136
6291
  }
6137
6292
  const error = err instanceof Error ? err.message : String(err);
6138
- logger$10.warn({
6293
+ logger$11.warn({
6139
6294
  storyKey,
6140
6295
  storyFilePath,
6141
6296
  error
@@ -6152,7 +6307,7 @@ async function runTestPlan(deps, params) {
6152
6307
  content: archConstraintsContent,
6153
6308
  priority: "optional"
6154
6309
  }], TOKEN_CEILING);
6155
- logger$10.info({
6310
+ logger$11.info({
6156
6311
  storyKey,
6157
6312
  tokenCount,
6158
6313
  ceiling: TOKEN_CEILING,
@@ -6171,7 +6326,7 @@ async function runTestPlan(deps, params) {
6171
6326
  dispatchResult = await handle.result;
6172
6327
  } catch (err) {
6173
6328
  const error = err instanceof Error ? err.message : String(err);
6174
- logger$10.warn({
6329
+ logger$11.warn({
6175
6330
  storyKey,
6176
6331
  error
6177
6332
  }, "Test-plan dispatch threw an unexpected error");
@@ -6182,7 +6337,7 @@ async function runTestPlan(deps, params) {
6182
6337
  output: dispatchResult.tokenEstimate.output
6183
6338
  };
6184
6339
  if (dispatchResult.status === "timeout") {
6185
- logger$10.warn({
6340
+ logger$11.warn({
6186
6341
  storyKey,
6187
6342
  durationMs: dispatchResult.durationMs
6188
6343
  }, "Test-plan dispatch timed out");
@@ -6192,7 +6347,7 @@ async function runTestPlan(deps, params) {
6192
6347
  };
6193
6348
  }
6194
6349
  if (dispatchResult.status === "failed" || dispatchResult.exitCode !== 0) {
6195
- logger$10.warn({
6350
+ logger$11.warn({
6196
6351
  storyKey,
6197
6352
  exitCode: dispatchResult.exitCode,
6198
6353
  status: dispatchResult.status
@@ -6204,7 +6359,7 @@ async function runTestPlan(deps, params) {
6204
6359
  }
6205
6360
  if (dispatchResult.parseError !== null || dispatchResult.parsed === null) {
6206
6361
  const details = dispatchResult.parseError ?? "parsed result was null";
6207
- logger$10.warn({
6362
+ logger$11.warn({
6208
6363
  storyKey,
6209
6364
  parseError: details
6210
6365
  }, "Test-plan YAML schema validation failed");
@@ -6227,19 +6382,19 @@ async function runTestPlan(deps, params) {
6227
6382
  }),
6228
6383
  rationale: `Test plan for ${storyKey}: ${parsed.test_files.length} test files, categories: ${parsed.test_categories.join(", ")}`
6229
6384
  });
6230
- logger$10.info({
6385
+ logger$11.info({
6231
6386
  storyKey,
6232
6387
  fileCount: parsed.test_files.length,
6233
6388
  categories: parsed.test_categories
6234
6389
  }, "Test plan stored in decision store");
6235
6390
  } catch (err) {
6236
6391
  const error = err instanceof Error ? err.message : String(err);
6237
- logger$10.warn({
6392
+ logger$11.warn({
6238
6393
  storyKey,
6239
6394
  error
6240
6395
  }, "Failed to store test plan in decision store — proceeding anyway");
6241
6396
  }
6242
- logger$10.info({
6397
+ logger$11.info({
6243
6398
  storyKey,
6244
6399
  result: parsed.result
6245
6400
  }, "Test-plan workflow completed");
@@ -6279,14 +6434,14 @@ function getArchConstraints$1(deps) {
6279
6434
  if (constraints.length === 0) return "";
6280
6435
  return constraints.map((d) => `${d.key}: ${d.value}`).join("\n");
6281
6436
  } catch (err) {
6282
- logger$10.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints for test-plan — proceeding without them");
6437
+ logger$11.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints for test-plan — proceeding without them");
6283
6438
  return "";
6284
6439
  }
6285
6440
  }
6286
6441
 
6287
6442
  //#endregion
6288
6443
  //#region src/modules/compiled-workflows/test-expansion.ts
6289
- const logger$9 = createLogger("compiled-workflows:test-expansion");
6444
+ const logger$10 = createLogger("compiled-workflows:test-expansion");
6290
6445
  function defaultFallbackResult(error, tokenUsage) {
6291
6446
  return {
6292
6447
  expansion_priority: "low",
@@ -6316,14 +6471,14 @@ function defaultFallbackResult(error, tokenUsage) {
6316
6471
  async function runTestExpansion(deps, params) {
6317
6472
  const { storyKey, storyFilePath, pipelineRunId, filesModified, workingDirectory } = params;
6318
6473
  const cwd = workingDirectory ?? process.cwd();
6319
- logger$9.debug({
6474
+ logger$10.debug({
6320
6475
  storyKey,
6321
6476
  storyFilePath,
6322
6477
  cwd,
6323
6478
  pipelineRunId
6324
6479
  }, "Starting test-expansion workflow");
6325
6480
  const { ceiling: TOKEN_CEILING, source: tokenCeilingSource } = getTokenCeiling("test-expansion", deps.tokenCeilings);
6326
- logger$9.info({
6481
+ logger$10.info({
6327
6482
  workflow: "test-expansion",
6328
6483
  ceiling: TOKEN_CEILING,
6329
6484
  source: tokenCeilingSource
@@ -6333,7 +6488,7 @@ async function runTestExpansion(deps, params) {
6333
6488
  template = await deps.pack.getPrompt("test-expansion");
6334
6489
  } catch (err) {
6335
6490
  const error = err instanceof Error ? err.message : String(err);
6336
- logger$9.warn({ error }, "Failed to retrieve test-expansion prompt template");
6491
+ logger$10.warn({ error }, "Failed to retrieve test-expansion prompt template");
6337
6492
  return defaultFallbackResult(`Failed to retrieve prompt template: ${error}`, {
6338
6493
  input: 0,
6339
6494
  output: 0
@@ -6344,7 +6499,7 @@ async function runTestExpansion(deps, params) {
6344
6499
  storyContent = await readFile$1(storyFilePath, "utf-8");
6345
6500
  } catch (err) {
6346
6501
  const error = err instanceof Error ? err.message : String(err);
6347
- logger$9.warn({
6502
+ logger$10.warn({
6348
6503
  storyFilePath,
6349
6504
  error
6350
6505
  }, "Failed to read story file");
@@ -6364,12 +6519,12 @@ async function runTestExpansion(deps, params) {
6364
6519
  const scopedTotal = nonDiffTokens + countTokens(scopedDiff);
6365
6520
  if (scopedTotal <= TOKEN_CEILING) {
6366
6521
  gitDiffContent = scopedDiff;
6367
- logger$9.debug({
6522
+ logger$10.debug({
6368
6523
  fileCount: filesModified.length,
6369
6524
  tokenCount: scopedTotal
6370
6525
  }, "Using scoped file diff");
6371
6526
  } else {
6372
- logger$9.warn({
6527
+ logger$10.warn({
6373
6528
  estimatedTotal: scopedTotal,
6374
6529
  ceiling: TOKEN_CEILING,
6375
6530
  fileCount: filesModified.length
@@ -6377,7 +6532,7 @@ async function runTestExpansion(deps, params) {
6377
6532
  gitDiffContent = await getGitDiffStatSummary(cwd);
6378
6533
  }
6379
6534
  } catch (err) {
6380
- logger$9.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to get git diff — proceeding with empty diff");
6535
+ logger$10.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to get git diff — proceeding with empty diff");
6381
6536
  }
6382
6537
  const sections = [
6383
6538
  {
@@ -6397,11 +6552,11 @@ async function runTestExpansion(deps, params) {
6397
6552
  }
6398
6553
  ];
6399
6554
  const assembleResult = assemblePrompt(template, sections, TOKEN_CEILING);
6400
- if (assembleResult.truncated) logger$9.warn({
6555
+ if (assembleResult.truncated) logger$10.warn({
6401
6556
  storyKey,
6402
6557
  tokenCount: assembleResult.tokenCount
6403
6558
  }, "Test-expansion prompt truncated to fit token ceiling");
6404
- logger$9.debug({
6559
+ logger$10.debug({
6405
6560
  storyKey,
6406
6561
  tokenCount: assembleResult.tokenCount,
6407
6562
  truncated: assembleResult.truncated
@@ -6419,7 +6574,7 @@ async function runTestExpansion(deps, params) {
6419
6574
  dispatchResult = await handle.result;
6420
6575
  } catch (err) {
6421
6576
  const error = err instanceof Error ? err.message : String(err);
6422
- logger$9.warn({
6577
+ logger$10.warn({
6423
6578
  storyKey,
6424
6579
  error
6425
6580
  }, "Test-expansion dispatch threw unexpected error");
@@ -6434,19 +6589,19 @@ async function runTestExpansion(deps, params) {
6434
6589
  };
6435
6590
  if (dispatchResult.status === "failed") {
6436
6591
  const errorMsg = `Dispatch status: failed. Exit code: ${dispatchResult.exitCode}. ${dispatchResult.parseError ?? ""}`.trim();
6437
- logger$9.warn({
6592
+ logger$10.warn({
6438
6593
  storyKey,
6439
6594
  exitCode: dispatchResult.exitCode
6440
6595
  }, "Test-expansion dispatch failed");
6441
6596
  return defaultFallbackResult(errorMsg, tokenUsage);
6442
6597
  }
6443
6598
  if (dispatchResult.status === "timeout") {
6444
- logger$9.warn({ storyKey }, "Test-expansion dispatch timed out");
6599
+ logger$10.warn({ storyKey }, "Test-expansion dispatch timed out");
6445
6600
  return defaultFallbackResult("Dispatch status: timeout. The agent did not complete within the allowed time.", tokenUsage);
6446
6601
  }
6447
6602
  if (dispatchResult.parsed === null) {
6448
6603
  const details = dispatchResult.parseError ?? "No YAML block found in output";
6449
- logger$9.warn({
6604
+ logger$10.warn({
6450
6605
  storyKey,
6451
6606
  details
6452
6607
  }, "Test-expansion output has no parseable YAML");
@@ -6455,14 +6610,14 @@ async function runTestExpansion(deps, params) {
6455
6610
  const parseResult = TestExpansionResultSchema.safeParse(dispatchResult.parsed);
6456
6611
  if (!parseResult.success) {
6457
6612
  const details = parseResult.error.message;
6458
- logger$9.warn({
6613
+ logger$10.warn({
6459
6614
  storyKey,
6460
6615
  details
6461
6616
  }, "Test-expansion output failed schema validation");
6462
6617
  return defaultFallbackResult(`schema_validation_failed: ${details}`, tokenUsage);
6463
6618
  }
6464
6619
  const parsed = parseResult.data;
6465
- logger$9.info({
6620
+ logger$10.info({
6466
6621
  storyKey,
6467
6622
  expansion_priority: parsed.expansion_priority,
6468
6623
  coverage_gaps: parsed.coverage_gaps.length,
@@ -6487,7 +6642,7 @@ function getArchConstraints(deps) {
6487
6642
  if (constraints.length === 0) return "";
6488
6643
  return constraints.map((d) => `${d.key}: ${d.value}`).join("\n");
6489
6644
  } catch (err) {
6490
- logger$9.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints");
6645
+ logger$10.warn({ error: err instanceof Error ? err.message : String(err) }, "Failed to retrieve architecture constraints");
6491
6646
  return "";
6492
6647
  }
6493
6648
  }
@@ -6917,6 +7072,924 @@ function detectConflictGroupsWithContracts(storyKeys, config, declarations) {
6917
7072
  };
6918
7073
  }
6919
7074
 
7075
+ //#endregion
7076
//#region src/modules/state/file-store.ts
/**
 * In-memory / file-backed StateStore implementation.
 *
 * Suitable for the current pipeline where orchestrator state is ephemeral and
 * metrics can optionally be flushed to SQLite. Replace with DoltStateStore
 * (story 26-3) to gain branch-per-story isolation and versioned history.
 */
var FileStateStore = class {
	// Optional SQLite handle; when present, recordMetric() mirrors each metric
	// into the run-metrics table via writeStoryMetrics().
	_db;
	// Optional directory for the JSON snapshot of contract verifications.
	_basePath;
	_stories = new Map();
	_metrics = [];
	_contracts = new Map();
	_contractVerifications = new Map();
	constructor(options = {}) {
		this._db = options.db;
		this._basePath = options.basePath;
	}
	/** No-op: all state lives in memory. */
	async initialize() {}
	/** No-op: nothing to release. */
	async close() {}
	/** @returns The story record, or undefined when the key is unknown. */
	async getStoryState(storyKey) {
		return this._stories.get(storyKey);
	}
	/** Upsert a story record; the stored copy always carries `storyKey`. */
	async setStoryState(storyKey, state) {
		this._stories.set(storyKey, {
			...state,
			storyKey
		});
	}
	/**
	 * Filter stored stories by phase (scalar or array), sprint, and storyKey.
	 * Omitted filter fields match everything.
	 */
	async queryStories(filter) {
		const all = Array.from(this._stories.values());
		return all.filter((record) => {
			if (filter.phase !== void 0) {
				const phases = Array.isArray(filter.phase) ? filter.phase : [filter.phase];
				if (!phases.includes(record.phase)) return false;
			}
			if (filter.sprint !== void 0 && record.sprint !== filter.sprint) return false;
			if (filter.storyKey !== void 0 && record.storyKey !== filter.storyKey) return false;
			return true;
		});
	}
	/**
	 * Append a metric (stamping recordedAt when absent) and, if a SQLite
	 * handle was supplied, mirror it into the run-metrics table.
	 */
	async recordMetric(metric) {
		const record = {
			...metric,
			recordedAt: metric.recordedAt ?? new Date().toISOString()
		};
		this._metrics.push(record);
		if (this._db) writeStoryMetrics(this._db, {
			run_id: "default",
			story_key: metric.storyKey,
			result: metric.result ?? "unknown",
			// SQLite schema stores seconds; in-memory records keep milliseconds.
			wall_clock_seconds: metric.wallClockMs !== void 0 ? metric.wallClockMs / 1e3 : void 0,
			input_tokens: metric.tokensIn,
			output_tokens: metric.tokensOut,
			cost_usd: metric.costUsd,
			review_cycles: metric.reviewCycles
		});
	}
	/**
	 * Filter recorded metrics. Accepts both camelCase and snake_case keys for
	 * storyKey/taskType; date filters compare ISO-8601 strings lexically, and
	 * records without a recordedAt pass the date filters.
	 */
	async queryMetrics(filter) {
		const storyKey = filter.storyKey ?? filter.story_key;
		const taskType = filter.taskType ?? filter.task_type;
		return this._metrics.filter((m) => {
			if (storyKey !== void 0 && m.storyKey !== storyKey) return false;
			if (taskType !== void 0 && m.taskType !== taskType) return false;
			if (filter.sprint !== void 0 && m.sprint !== filter.sprint) return false;
			if (filter.dateFrom !== void 0 && m.recordedAt !== void 0 && m.recordedAt < filter.dateFrom) return false;
			if (filter.dateTo !== void 0 && m.recordedAt !== void 0 && m.recordedAt > filter.dateTo) return false;
			if (filter.since !== void 0 && m.recordedAt !== void 0 && m.recordedAt < filter.since) return false;
			return true;
		});
	}
	async getContracts(storyKey) {
		return this._contracts.get(storyKey) ?? [];
	}
	/** Replace a story's contracts with shallow copies of the given records. */
	async setContracts(storyKey, contracts) {
		this._contracts.set(storyKey, contracts.map((c) => ({ ...c })));
	}
	/** Flatten all stories' contracts, then filter by storyKey/direction. */
	async queryContracts(filter) {
		const all = [];
		for (const records of this._contracts.values()) for (const r of records) all.push(r);
		return all.filter((r) => {
			if (filter?.storyKey !== void 0 && r.storyKey !== filter.storyKey) return false;
			if (filter?.direction !== void 0 && r.direction !== filter.direction) return false;
			return true;
		});
	}
	/**
	 * Replace a story's verification results and, when basePath is configured,
	 * persist ALL stories' verifications as one JSON snapshot file.
	 */
	async setContractVerification(storyKey, results) {
		this._contractVerifications.set(storyKey, results.map((r) => ({ ...r })));
		if (this._basePath !== void 0) {
			// Fix: ensure the snapshot directory exists before the first write;
			// writeFile otherwise rejects with ENOENT on a fresh checkout.
			await mkdir$1(this._basePath, { recursive: true });
			const serialized = {};
			for (const [key, records] of this._contractVerifications) serialized[key] = records;
			const filePath = join$1(this._basePath, "contract-verifications.json");
			await writeFile$1(filePath, JSON.stringify(serialized, null, 2), "utf-8");
		}
	}
	async getContractVerification(storyKey) {
		return this._contractVerifications.get(storyKey) ?? [];
	}
	// Branch lifecycle is a no-op here: the file store has no versioning backend.
	async branchForStory(_storyKey) {}
	async mergeStory(_storyKey) {}
	async rollbackStory(_storyKey) {}
	/** Always reports an empty diff (no history to diff against). */
	async diffStory(storyKey) {
		return {
			storyKey,
			tables: []
		};
	}
	/** Always empty: no commit history without a versioning backend. */
	async getHistory(_limit) {
		return [];
	}
};
7188
+
7189
+ //#endregion
7190
//#region src/modules/state/errors.ts
/**
 * Typed error classes for the Dolt state store.
 *
 * Every error carries a machine-readable `code` so callers can branch on
 * failure category without matching message strings.
 */
var StateStoreError = class extends Error {
	code;
	constructor(code, message) {
		super(message);
		this.code = code;
		this.name = "StateStoreError";
	}
};
/** Raised when a Dolt SQL statement or CLI invocation fails. */
var DoltQueryError = class extends StateStoreError {
	sql;
	detail;
	constructor(sql, detail) {
		super("DOLT_QUERY_ERROR", `Dolt query failed: ${detail}`);
		this.sql = sql;
		this.detail = detail;
		this.name = "DoltQueryError";
	}
};
/** Raised when merging a story branch back into main hits row conflicts. */
var DoltMergeConflictError = class extends StateStoreError {
	table;
	conflictingKeys;
	rowKey;
	ourValue;
	theirValue;
	constructor(table, conflictingKeys, options) {
		super("DOLT_MERGE_CONFLICT", `Merge conflict in table '${table}' on keys: ${conflictingKeys.join(", ")}`);
		this.name = "DoltMergeConflictError";
		this.table = table;
		this.conflictingKeys = conflictingKeys;
		if (options) {
			const { rowKey, ourValue, theirValue } = options;
			this.rowKey = rowKey;
			this.ourValue = ourValue;
			this.theirValue = theirValue;
		}
	}
};
/** Alias for DoltMergeConflictError — used by orchestrator branch lifecycle. */
const DoltMergeConflict = DoltMergeConflictError;
7232
+
7233
+ //#endregion
7234
//#region src/modules/state/dolt-store.ts
const log$1 = createLogger("modules:state:dolt");
/**
 * Validate that a story key matches the expected pattern (e.g. "26-7").
 * Prevents SQL injection via string-interpolated identifiers.
 */
const STORY_KEY_PATTERN = /^[0-9]+-[0-9]+$/;
function assertValidStoryKey(storyKey) {
	if (STORY_KEY_PATTERN.test(storyKey)) return;
	throw new DoltQueryError("assertValidStoryKey", `Invalid story key: '${storyKey}'. Must match pattern <number>-<number>.`);
}
7244
/**
 * Dolt-backed implementation of the StateStore interface.
 *
 * Constructor accepts a deps object for DI: `{ repoPath, client }`.
 * Call `initialize()` before any CRUD operations.
 *
 * Story isolation: branchForStory() creates a `story/<key>` Dolt branch; reads
 * stay on main, but writes for that story target the branch until
 * mergeStory() or rollbackStory() clears the mapping.
 */
var DoltStateStore = class DoltStateStore {
	// Filesystem path of the Dolt repository (used for logging here; the
	// client owns actual socket/CLI access).
	_repoPath;
	// DoltClient-compatible object used for every SQL / CLI call.
	_client;
	// storyKey -> branch name, populated by branchForStory().
	_storyBranches = new Map();
	constructor(options) {
		this._repoPath = options.repoPath;
		this._client = options.client;
	}
	/**
	 * Return the branch name for a story if one has been created via branchForStory(),
	 * or undefined to use the default (main) branch.
	 */
	_branchFor(storyKey) {
		return this._storyBranches.get(storyKey);
	}
	// Connect the client, apply idempotent DDL, and commit the schema.
	async initialize() {
		await this._client.connect();
		await this._runMigrations();
		await this.flush("substrate: schema migrations");
		log$1.debug("DoltStateStore initialized at %s", this._repoPath);
	}
	async close() {
		await this._client.close();
	}
	// CREATE TABLE IF NOT EXISTS for every table this store reads or writes;
	// safe to re-run on every startup. Timestamps are stored as ISO strings.
	async _runMigrations() {
		const ddl = [
			`CREATE TABLE IF NOT EXISTS stories (
				story_key VARCHAR(100) NOT NULL,
				phase VARCHAR(30) NOT NULL DEFAULT 'PENDING',
				review_cycles INT NOT NULL DEFAULT 0,
				last_verdict VARCHAR(64) NULL,
				error TEXT NULL,
				started_at VARCHAR(64) NULL,
				completed_at VARCHAR(64) NULL,
				sprint VARCHAR(50) NULL,
				PRIMARY KEY (story_key)
			)`,
			`CREATE TABLE IF NOT EXISTS metrics (
				id BIGINT NOT NULL AUTO_INCREMENT,
				story_key VARCHAR(100) NOT NULL,
				task_type VARCHAR(100) NOT NULL,
				model VARCHAR(100) NULL,
				tokens_in BIGINT NULL,
				tokens_out BIGINT NULL,
				cache_read_tokens BIGINT NULL,
				cost_usd DOUBLE NULL,
				wall_clock_ms BIGINT NULL,
				review_cycles INT NULL,
				stall_count INT NULL,
				result VARCHAR(30) NULL,
				recorded_at VARCHAR(64) NULL,
				sprint VARCHAR(50) NULL,
				PRIMARY KEY (id)
			)`,
			`CREATE TABLE IF NOT EXISTS contracts (
				story_key VARCHAR(100) NOT NULL,
				contract_name VARCHAR(200) NOT NULL,
				direction VARCHAR(20) NOT NULL,
				schema_path VARCHAR(500) NULL,
				transport VARCHAR(200) NULL,
				PRIMARY KEY (story_key, contract_name, direction)
			)`,
			`CREATE TABLE IF NOT EXISTS review_verdicts (
				id BIGINT NOT NULL AUTO_INCREMENT,
				story_key VARCHAR(100) NOT NULL,
				task_type VARCHAR(100) NOT NULL,
				verdict VARCHAR(64) NOT NULL,
				issues_count INT NULL,
				notes TEXT NULL,
				timestamp VARCHAR(64) NULL,
				PRIMARY KEY (id)
			)`
		];
		for (const sql of ddl) await this._client.query(sql);
		log$1.debug("Schema migrations applied");
	}
	/**
	 * Commit pending Dolt changes on the current branch.
	 * Callers can invoke this after a batch of writes for explicit durability.
	 * Failures are logged and swallowed — flush is best-effort, never fatal.
	 */
	async flush(message = "substrate: auto-commit") {
		try {
			await this._client.execArgs(["add", "."]);
			await this._client.execArgs([
				"commit",
				"--allow-empty",
				"-m",
				message
			]);
			log$1.debug("Dolt flush committed: %s", message);
		} catch (err) {
			const detail = err instanceof Error ? err.message : String(err);
			log$1.warn({ detail }, "Dolt flush failed (non-fatal)");
		}
	}
	/** @returns The story record, or undefined when no row exists. */
	async getStoryState(storyKey) {
		const rows = await this._client.query("SELECT * FROM stories WHERE story_key = ?", [storyKey]);
		if (rows.length === 0) return void 0;
		return this._rowToStory(rows[0]);
	}
	// Upsert via REPLACE INTO; writes go to the story's branch when one exists.
	async setStoryState(storyKey, state) {
		const branch = this._branchFor(storyKey);
		const sql = `REPLACE INTO stories
			(story_key, phase, review_cycles, last_verdict, error, started_at, completed_at, sprint)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?)`;
		await this._client.query(sql, [
			storyKey,
			state.phase,
			state.reviewCycles,
			state.lastVerdict ?? null,
			state.error ?? null,
			state.startedAt ?? null,
			state.completedAt ?? null,
			state.sprint ?? null
		], branch);
	}
	// Build a parameterized WHERE clause from the filter; omitted fields match all.
	async queryStories(filter) {
		const conditions = [];
		const params = [];
		if (filter.phase !== void 0) {
			const phases = Array.isArray(filter.phase) ? filter.phase : [filter.phase];
			const placeholders = phases.map(() => "?").join(", ");
			conditions.push(`phase IN (${placeholders})`);
			params.push(...phases);
		}
		if (filter.sprint !== void 0) {
			conditions.push("sprint = ?");
			params.push(filter.sprint);
		}
		if (filter.storyKey !== void 0) {
			conditions.push("story_key = ?");
			params.push(filter.storyKey);
		}
		const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
		const sql = `SELECT * FROM stories ${where} ORDER BY story_key`;
		const rows = await this._client.query(sql, params);
		return rows.map((r) => this._rowToStory(r));
	}
	// Map a snake_case DB row to the camelCase story record; NULL -> undefined.
	_rowToStory(row) {
		return {
			storyKey: row.story_key,
			phase: row.phase,
			reviewCycles: Number(row.review_cycles),
			lastVerdict: row.last_verdict ?? void 0,
			error: row.error ?? void 0,
			startedAt: row.started_at ?? void 0,
			completedAt: row.completed_at ?? void 0,
			sprint: row.sprint ?? void 0
		};
	}
	// Insert a metric row; recordedAt falls back to metric.timestamp, then now.
	async recordMetric(metric) {
		const branch = this._branchFor(metric.storyKey);
		const recordedAt = metric.recordedAt ?? metric.timestamp ?? new Date().toISOString();
		const sql = `INSERT INTO metrics
			(story_key, task_type, model, tokens_in, tokens_out, cache_read_tokens,
			cost_usd, wall_clock_ms, review_cycles, stall_count, result, recorded_at, sprint)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
		await this._client.query(sql, [
			metric.storyKey,
			metric.taskType,
			metric.model ?? null,
			metric.tokensIn ?? null,
			metric.tokensOut ?? null,
			metric.cacheReadTokens ?? null,
			metric.costUsd ?? null,
			metric.wallClockMs ?? null,
			metric.reviewCycles ?? null,
			metric.stallCount ?? null,
			metric.result ?? null,
			recordedAt,
			metric.sprint ?? null
		], branch);
	}
	/**
	 * Query metrics with optional filters (both camelCase and snake_case keys
	 * accepted for storyKey/taskType). When `filter.aggregate` is set, returns
	 * per-task-type aggregates (avg cost, summed tokens, count) instead of rows.
	 * Note: dateFrom and since both map to `recorded_at >= ?`.
	 */
	async queryMetrics(filter) {
		const conditions = [];
		const params = [];
		const storyKey = filter.storyKey ?? filter.story_key;
		const taskType = filter.taskType ?? filter.task_type;
		if (storyKey !== void 0) {
			conditions.push("story_key = ?");
			params.push(storyKey);
		}
		if (taskType !== void 0) {
			conditions.push("task_type = ?");
			params.push(taskType);
		}
		if (filter.sprint !== void 0) {
			conditions.push("sprint = ?");
			params.push(filter.sprint);
		}
		if (filter.dateFrom !== void 0) {
			conditions.push("recorded_at >= ?");
			params.push(filter.dateFrom);
		}
		if (filter.dateTo !== void 0) {
			conditions.push("recorded_at <= ?");
			params.push(filter.dateTo);
		}
		if (filter.since !== void 0) {
			conditions.push("recorded_at >= ?");
			params.push(filter.since);
		}
		const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
		if (filter.aggregate) {
			const sql$1 = `SELECT task_type,
				AVG(cost_usd) AS avg_cost_usd,
				SUM(tokens_in) AS sum_tokens_in,
				SUM(tokens_out) AS sum_tokens_out,
				COUNT(*) AS count
				FROM metrics ${where} GROUP BY task_type ORDER BY task_type`;
			const aggRows = await this._client.query(sql$1, params);
			return aggRows.map((r) => this._aggregateRowToMetric(r));
		}
		const sql = `SELECT * FROM metrics ${where} ORDER BY id`;
		const rows = await this._client.query(sql, params);
		return rows.map((r) => this._rowToMetric(r));
	}
	// Aggregate rows carry result: "aggregate" and an empty storyKey as markers.
	_aggregateRowToMetric(row) {
		return {
			storyKey: "",
			taskType: row.task_type,
			costUsd: row.avg_cost_usd ?? void 0,
			tokensIn: row.sum_tokens_in ?? void 0,
			tokensOut: row.sum_tokens_out ?? void 0,
			count: row.count,
			result: "aggregate"
		};
	}
	// Map a metrics DB row to the camelCase record; NULL -> undefined.
	_rowToMetric(row) {
		return {
			storyKey: row.story_key,
			taskType: row.task_type,
			model: row.model ?? void 0,
			tokensIn: row.tokens_in ?? void 0,
			tokensOut: row.tokens_out ?? void 0,
			cacheReadTokens: row.cache_read_tokens ?? void 0,
			costUsd: row.cost_usd ?? void 0,
			wallClockMs: row.wall_clock_ms ?? void 0,
			reviewCycles: row.review_cycles ?? void 0,
			stallCount: row.stall_count ?? void 0,
			result: row.result ?? void 0,
			recordedAt: row.recorded_at ?? void 0,
			sprint: row.sprint ?? void 0,
			timestamp: row.timestamp ?? row.recorded_at ?? void 0
		};
	}
	async getContracts(storyKey) {
		const rows = await this._client.query("SELECT * FROM contracts WHERE story_key = ? ORDER BY contract_name", [storyKey]);
		return rows.map((r) => this._rowToContract(r));
	}
	// Replace-all semantics: delete the story's contracts, then re-insert.
	// Note the inserts use c.storyKey from each record, not the argument.
	async setContracts(storyKey, contracts) {
		const branch = this._branchFor(storyKey);
		await this._client.query("DELETE FROM contracts WHERE story_key = ?", [storyKey], branch);
		for (const c of contracts) await this._client.query(`INSERT INTO contracts (story_key, contract_name, direction, schema_path, transport)
			VALUES (?, ?, ?, ?, ?)`, [
			c.storyKey,
			c.contractName,
			c.direction,
			c.schemaPath,
			c.transport ?? null
		], branch);
	}
	_rowToContract(row) {
		return {
			storyKey: row.story_key,
			contractName: row.contract_name,
			direction: row.direction,
			schemaPath: row.schema_path,
			transport: row.transport ?? void 0
		};
	}
	async queryContracts(filter) {
		const conditions = [];
		const params = [];
		if (filter?.storyKey !== void 0) {
			conditions.push("story_key = ?");
			params.push(filter.storyKey);
		}
		if (filter?.direction !== void 0) {
			conditions.push("direction = ?");
			params.push(filter.direction);
		}
		const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
		const sql = `SELECT * FROM contracts ${where} ORDER BY story_key, contract_name`;
		const rows = await this._client.query(sql, params);
		return rows.map((r) => this._rowToContract(r));
	}
	/**
	 * Replace the story's contract-verification verdicts, stored as
	 * review_verdicts rows with task_type 'contract-verification'.
	 * issues_count holds the TOTAL number of failing contracts for the story,
	 * repeated on every row; per-contract detail lives in the notes JSON.
	 */
	async setContractVerification(storyKey, results) {
		const branch = this._branchFor(storyKey);
		await this._client.query(`DELETE FROM review_verdicts WHERE story_key = ? AND task_type = 'contract-verification'`, [storyKey], branch);
		const failCount = results.filter((r) => r.verdict === "fail").length;
		for (const r of results) await this._client.query(`INSERT INTO review_verdicts (story_key, task_type, verdict, issues_count, notes, timestamp)
			VALUES (?, 'contract-verification', ?, ?, ?, ?)`, [
			storyKey,
			r.verdict,
			failCount,
			JSON.stringify({
				contractName: r.contractName,
				mismatchDescription: r.mismatchDescription
			}),
			r.verifiedAt
		], branch);
	}
	// Inverse of setContractVerification: rebuild result objects from the notes
	// JSON; malformed notes are tolerated (contractName falls back to "").
	async getContractVerification(storyKey) {
		const rows = await this._client.query(`SELECT * FROM review_verdicts WHERE story_key = ? AND task_type = 'contract-verification' ORDER BY timestamp DESC`, [storyKey]);
		return rows.map((row) => {
			let contractName = "";
			let mismatchDescription;
			if (row.notes !== null) try {
				const parsed = JSON.parse(row.notes);
				if (typeof parsed.contractName === "string") contractName = parsed.contractName;
				if (typeof parsed.mismatchDescription === "string") mismatchDescription = parsed.mismatchDescription;
			} catch {}
			return {
				storyKey: row.story_key,
				contractName,
				verdict: row.verdict,
				...mismatchDescription !== void 0 ? { mismatchDescription } : {},
				verifiedAt: row.timestamp ?? new Date().toISOString()
			};
		});
	}
	// Create a story/<key> branch off main and register it so subsequent
	// writes for this story target the branch. Interpolating branchName is
	// safe only because assertValidStoryKey restricts it to digits and '-'.
	async branchForStory(storyKey) {
		assertValidStoryKey(storyKey);
		const branchName = `story/${storyKey}`;
		try {
			await this._client.query(`CALL DOLT_BRANCH('${branchName}')`, [], "main");
			this._storyBranches.set(storyKey, branchName);
			log$1.debug("Created Dolt branch %s for story %s", branchName, storyKey);
		} catch (err) {
			const detail = err instanceof Error ? err.message : String(err);
			throw new DoltQueryError(`CALL DOLT_BRANCH('${branchName}')`, detail);
		}
	}
	/**
	 * Merge a story's branch back into main.
	 *
	 * Steps: best-effort pre-merge commits on both branches (errors swallowed,
	 * e.g. nothing to commit), then DOLT_MERGE on main. On conflict, a single
	 * sample row is read from dolt_conflicts_stories — only the stories table
	 * is probed — the branch mapping is dropped, and DoltMergeConflictError is
	 * thrown. Other failures are wrapped in DoltQueryError.
	 */
	async mergeStory(storyKey) {
		assertValidStoryKey(storyKey);
		const branchName = this._storyBranches.get(storyKey);
		if (branchName === void 0) {
			log$1.warn({ storyKey }, "mergeStory called but no branch registered — no-op");
			return;
		}
		try {
			try {
				await this._client.query(`CALL DOLT_ADD('-A')`, [], branchName);
				await this._client.query(`CALL DOLT_COMMIT('-m', 'Story ${storyKey}: pre-merge commit', '--allow-empty')`, [], branchName);
			} catch {}
			try {
				await this._client.query(`CALL DOLT_ADD('-A')`, [], "main");
				await this._client.query(`CALL DOLT_COMMIT('-m', 'substrate: pre-merge auto-commit', '--allow-empty')`, [], "main");
			} catch {}
			const mergeRows = await this._client.query(`CALL DOLT_MERGE('${branchName}')`, [], "main");
			const mergeResult = mergeRows[0];
			if (mergeResult && (mergeResult.conflicts ?? 0) > 0) {
				let table = "stories";
				let rowKey = "unknown";
				let ourValue;
				let theirValue;
				try {
					const conflictRows = await this._client.query(`SELECT * FROM dolt_conflicts_stories LIMIT 1`, [], "main");
					if (conflictRows.length > 0) {
						const row = conflictRows[0];
						rowKey = String(row["base_story_key"] ?? row["our_story_key"] ?? "unknown");
						ourValue = JSON.stringify(row["our_status"] ?? row);
						theirValue = JSON.stringify(row["their_status"] ?? row);
					}
				} catch {}
				this._storyBranches.delete(storyKey);
				throw new DoltMergeConflictError(table, [rowKey], {
					rowKey,
					ourValue,
					theirValue
				});
			}
			try {
				await this._client.query(`CALL DOLT_COMMIT('-m', 'Merge story ${storyKey}: COMPLETE')`, [], "main");
			} catch (commitErr) {
				// A fast-forward merge leaves nothing to commit; that is fine.
				const msg = commitErr instanceof Error ? commitErr.message : String(commitErr);
				if (!msg.includes("nothing to commit")) throw commitErr;
			}
			this._storyBranches.delete(storyKey);
			log$1.debug("Merged branch %s into main for story %s", branchName, storyKey);
		} catch (err) {
			if (err instanceof DoltMergeConflictError) throw err;
			const detail = err instanceof Error ? err.message : String(err);
			throw new DoltQueryError(`CALL DOLT_MERGE('${branchName}')`, detail);
		}
	}
	// Discard a story's branch (force-delete). Failures are logged, not thrown;
	// the branch mapping is cleared either way so the store stays consistent.
	async rollbackStory(storyKey) {
		assertValidStoryKey(storyKey);
		const branchName = this._storyBranches.get(storyKey);
		if (branchName === void 0) {
			log$1.warn({ storyKey }, "rollbackStory called but no branch registered — no-op");
			return;
		}
		try {
			await this._client.query(`CALL DOLT_BRANCH('-D', '${branchName}')`, [], "main");
			this._storyBranches.delete(storyKey);
			log$1.debug("Rolled back (deleted) branch %s for story %s", branchName, storyKey);
		} catch (err) {
			const detail = err instanceof Error ? err.message : String(err);
			log$1.warn({
				detail,
				storyKey,
				branchName
			}, "rollbackStory failed (non-fatal)");
			this._storyBranches.delete(storyKey);
		}
	}
	/**
	 * Tables queried by diffStory(). Each table is checked for row-level changes
	 * via SELECT * FROM DOLT_DIFF('main', branchName, tableName).
	 */
	static DIFF_TABLES = [
		"stories",
		"contracts",
		"metrics",
		"dispatch_log",
		"build_results",
		"review_verdicts"
	];
	// Diff a story's branch against main; for already-merged stories (no branch
	// registered) fall back to locating the merge commit in the log.
	async diffStory(storyKey) {
		assertValidStoryKey(storyKey);
		const branchName = this._storyBranches.get(storyKey);
		if (branchName === void 0) return this._diffMergedStory(storyKey);
		try {
			// Snapshot uncommitted work so the diff reflects the latest writes.
			await this._client.query(`CALL DOLT_ADD('-A')`, [], branchName);
			await this._client.query(`CALL DOLT_COMMIT('-m', 'Story ${storyKey}: pre-diff snapshot', '--allow-empty')`, [], branchName);
		} catch {}
		return this._diffRange("main", branchName, storyKey);
	}
	/**
	 * Diff a merged story by finding its merge commit in the Dolt log.
	 * Queries the `dolt_log` system table for commits referencing the story,
	 * then diffs `<hash>~1` vs `<hash>` for row-level changes.
	 * Any failure yields an empty diff rather than throwing.
	 */
	async _diffMergedStory(storyKey) {
		try {
			const rows = await this._client.query(`SELECT commit_hash FROM dolt_log WHERE message LIKE ? LIMIT 1`, [`%${storyKey}%`]);
			if (rows.length === 0) return {
				storyKey,
				tables: []
			};
			const hash = String(rows[0].commit_hash);
			if (!hash) return {
				storyKey,
				tables: []
			};
			return this._diffRange(`${hash}~1`, hash, storyKey);
		} catch {
			return {
				storyKey,
				tables: []
			};
		}
	}
	/**
	 * Compute row-level diffs between two Dolt revisions (branches or commit hashes)
	 * across all tracked tables. Per-table errors are swallowed — presumably
	 * some DIFF_TABLES may not exist in every repo (TODO confirm).
	 */
	async _diffRange(fromRef, toRef, storyKey) {
		const tableDiffs = [];
		for (const table of DoltStateStore.DIFF_TABLES) try {
			const rows = await this._client.query(`SELECT * FROM DOLT_DIFF('${fromRef}', '${toRef}', '${table}')`, [], "main");
			if (rows.length === 0) continue;
			const added = [];
			const modified = [];
			const deleted = [];
			for (const row of rows) {
				const diffType = row["diff_type"];
				const rowKey = this._extractRowKey(row);
				const before = this._extractPrefixedFields(row, "before_");
				const after = this._extractPrefixedFields(row, "after_");
				const diffRow = {
					rowKey,
					...before !== void 0 && { before },
					...after !== void 0 && { after }
				};
				if (diffType === "added") added.push(diffRow);
				else if (diffType === "modified") modified.push(diffRow);
				else if (diffType === "removed") deleted.push(diffRow);
			}
			if (added.length > 0 || modified.length > 0 || deleted.length > 0) tableDiffs.push({
				table,
				added,
				modified,
				deleted
			});
		} catch {}
		return {
			storyKey,
			tables: tableDiffs
		};
	}
	/**
	 * Extract a human-readable row key from a DOLT_DIFF result row.
	 * Tries after_ fields first (for added/modified rows), then before_ fields
	 * (for removed rows). Skips commit_hash pseudo-columns.
	 */
	_extractRowKey(row) {
		for (const prefix of ["after_", "before_"]) for (const [key, val] of Object.entries(row)) if (key.startsWith(prefix) && !key.endsWith("_commit_hash") && val !== null && val !== void 0) return String(val);
		return "unknown";
	}
	/**
	 * Extract all fields with a given prefix from a DOLT_DIFF result row,
	 * stripping the prefix from the key names. Returns undefined if no matching
	 * fields are found.
	 */
	_extractPrefixedFields(row, prefix) {
		const result = {};
		for (const [key, val] of Object.entries(row)) if (key.startsWith(prefix)) result[key.slice(prefix.length)] = val;
		return Object.keys(result).length > 0 ? result : void 0;
	}
	/**
	 * Read the most recent commits from dolt_log, extracting a story key from
	 * messages that mention `story/<n>-<m>`. Defaults to 20 entries.
	 * NOTE(review): `LIMIT ?` as a bound parameter can be rejected by mysql2's
	 * execute() on the pool path — confirm against the running server version.
	 */
	async getHistory(limit) {
		const effectiveLimit = limit ?? 20;
		try {
			const rows = await this._client.query(`SELECT commit_hash, date, message, committer FROM dolt_log LIMIT ?`, [effectiveLimit]);
			const entries = [];
			for (const row of rows) {
				const hash = String(row.commit_hash ?? "");
				const timestamp = row.date instanceof Date ? row.date.toISOString() : String(row.date ?? "");
				const message = String(row.message ?? "");
				const author = row.committer ? String(row.committer) : void 0;
				const storyKeyMatch = /story\/([0-9]+-[0-9]+)/i.exec(message);
				entries.push({
					hash,
					timestamp,
					storyKey: storyKeyMatch ? storyKeyMatch[1] : null,
					message,
					author
				});
			}
			return entries;
		} catch (err) {
			const detail = err instanceof Error ? err.message : String(err);
			throw new DoltQueryError("getHistory", detail);
		}
	}
};
7788
+
7789
+ //#endregion
7790
//#region src/modules/state/dolt-client.ts
/**
 * Promise-wrapper around execFile that always resolves to { stdout, stderr }.
 * Using an explicit wrapper rather than promisify() avoids the util.promisify.custom
 * symbol complexity when mocking in tests.
 */
function runExecFile(cmd, args, opts) {
	return new Promise((done, fail) => {
		execFile(cmd, args, opts, (err, stdout, stderr) => {
			if (err) {
				fail(err);
				return;
			}
			done({ stdout, stderr });
		});
	});
}
7807
+ const log = createLogger("modules:state:dolt");
7808
/**
 * Client for a Dolt repository with two transports:
 *  - a mysql2 connection pool over the repo's unix socket (dolt sql-server), or
 *  - shelling out to the `dolt` CLI when the socket is absent.
 * The mode is chosen once in connect() and recorded in _useCliMode.
 */
var DoltClient = class {
	// Filesystem path of the Dolt repository (cwd for all CLI calls).
	repoPath;
	// Unix socket path probed in connect(); defaults to <repo>/.dolt/dolt.sock.
	socketPath;
	// mysql2 pool; null until connect() succeeds on the socket path.
	_pool = null;
	_useCliMode = false;
	_connected = false;
	constructor(options) {
		this.repoPath = options.repoPath;
		this.socketPath = options.socketPath ?? `${options.repoPath}/.dolt/dolt.sock`;
	}
	/**
	 * Probe the unix socket; on success create a mysql2 pool, otherwise fall
	 * back to CLI mode. Never throws — socket absence is an expected path.
	 */
	async connect() {
		try {
			await access$1(this.socketPath);
			const mysql = await import("mysql2/promise");
			this._pool = mysql.createPool({
				socketPath: this.socketPath,
				user: "root",
				// NOTE(review): assumes the server database is named "doltdb" —
				// confirm against how the sql-server is launched.
				database: "doltdb",
				waitForConnections: true,
				connectionLimit: 5
			});
			this._useCliMode = false;
			log.debug("Connected via unix socket: %s", this.socketPath);
		} catch {
			this._useCliMode = true;
			log.debug("Unix socket not available, using CLI fallback for %s", this.repoPath);
		}
		this._connected = true;
	}
	/**
	 * Run a SQL statement and return its rows.
	 * @param sql - Statement with `?` placeholders.
	 * @param params - Placeholder values.
	 * @param branch - Optional Dolt branch to run on (default: current/main).
	 * Connects lazily on first use.
	 */
	async query(sql, params, branch) {
		if (!this._connected) await this.connect();
		if (this._useCliMode) return this._queryCli(sql, params, branch);
		return this._queryPool(sql, params, branch);
	}
	// Pool path: for a non-main branch, pin a dedicated connection to the
	// branch database (`substrate/<branch>`) for the duration of the statement.
	async _queryPool(sql, params, branch) {
		try {
			if (branch !== void 0 && branch !== "main") {
				const conn = await this._pool.getConnection();
				try {
					await conn.execute(`USE \`substrate/${branch}\``);
					const [rows$1] = await conn.execute(sql, params);
					return rows$1;
				} finally {
					conn.release();
				}
			}
			const [rows] = await this._pool.execute(sql, params);
			return rows;
		} catch (err) {
			const detail = err instanceof Error ? err.message : String(err);
			throw new DoltQueryError(sql, detail);
		}
	}
	/**
	 * CLI path: substitute params into the SQL, run `dolt sql -q ... --result-format json`,
	 * and parse the last JSON line of stdout.
	 * NOTE(review): the substitution replaces every `?`, including any inside
	 * string literals, and escapes only single quotes — acceptable for the
	 * internally generated SQL in this module, not for arbitrary statements.
	 */
	async _queryCli(sql, params, branch) {
		let resolvedSql = sql;
		if (params && params.length > 0) {
			let i = 0;
			resolvedSql = sql.replace(/\?/g, () => {
				const val = params[i++];
				if (val === null || val === void 0) return "NULL";
				if (typeof val === "number") return String(val);
				return `'${String(val).replace(/'/g, "''")}'`;
			});
		}
		try {
			// Branch selection is done by prefixing a DOLT_CHECKOUT in the same
			// invocation; this changes the repo's checked-out branch.
			const branchPrefix = branch ? `CALL DOLT_CHECKOUT('${branch.replace(/'/g, "''")}'); ` : "";
			const args = [
				"sql",
				"-q",
				branchPrefix + resolvedSql,
				"--result-format",
				"json"
			];
			const { stdout } = await runExecFile("dolt", args, { cwd: this.repoPath });
			// Multiple statements emit multiple JSON lines; only the last one
			// (the actual query) is parsed.
			const lines = (stdout || "").trim().split("\n").filter(Boolean);
			const lastLine = lines.length > 0 ? lines[lines.length - 1] : "{\"rows\":[]}";
			const parsed = JSON.parse(lastLine);
			return parsed.rows ?? [];
		} catch (err) {
			const detail = err instanceof Error ? err.message : String(err);
			throw new DoltQueryError(resolvedSql, detail);
		}
	}
	/**
	 * Execute a raw Dolt CLI command (e.g. `dolt diff main...story/26-1 --stat`)
	 * and return the stdout as a string.
	 *
	 * This is distinct from `query()` which runs SQL. Use `exec()` for Dolt
	 * sub-commands like `diff`, `log`, `branch`, etc.
	 */
	async exec(command) {
		// Whitespace-split; a leading "dolt" token is stripped for convenience.
		const parts = command.trim().split(/\s+/);
		const cmdArgs = parts[0] === "dolt" ? parts.slice(1) : parts;
		return this.execArgs(cmdArgs);
	}
	/**
	 * Execute a Dolt CLI command with pre-split arguments.
	 *
	 * Use this instead of `exec()` when arguments contain spaces (e.g. commit
	 * messages) to avoid whitespace-splitting issues.
	 */
	async execArgs(args) {
		try {
			const { stdout } = await runExecFile("dolt", args, { cwd: this.repoPath });
			return stdout;
		} catch (err) {
			const detail = err instanceof Error ? err.message : String(err);
			throw new DoltQueryError(args.join(" "), detail);
		}
	}
	/** Drain the pool (if any) and mark the client disconnected. */
	async close() {
		if (this._pool) {
			await this._pool.end();
			this._pool = null;
		}
		this._connected = false;
	}
};
7926
/** Convenience factory returning a new DoltClient for the given options. */
function createDoltClient(options) {
	const client = new DoltClient(options);
	return client;
}
7929
+
7930
+ //#endregion
7931
+ //#region src/modules/state/index.ts
7932
+ const logger$9 = createLogger("state:factory");
7933
/**
 * Synchronously check whether Dolt is available and a Dolt repo exists at the
 * canonical state path under `basePath`.
 *
 * @param basePath - Project root to check (e.g. `process.cwd()`).
 * @returns `{ available: true, reason: '...' }` when both probes pass,
 *          `{ available: false, reason: '...' }` otherwise.
 */
function detectDoltAvailableSync(basePath) {
	// Probe 1: is the dolt binary runnable?
	const probe = spawnSync("dolt", ["version"], { stdio: "ignore" });
	if (probe.error != null || probe.status !== 0) return {
		available: false,
		reason: "dolt binary not found on PATH"
	};
	// Probe 2: has a repo been initialised at the canonical state path?
	const stateDoltDir = join$1(basePath, ".substrate", "state", ".dolt");
	if (!existsSync$1(stateDoltDir)) return {
		available: false,
		reason: `Dolt repo not initialised at ${stateDoltDir}`
	};
	return {
		available: true,
		reason: "dolt binary found and repo initialised"
	};
}
7959
/**
 * Create a StateStore backed by the specified backend.
 *
 * @param config - Optional configuration. Defaults to `{ backend: 'file' }`.
 * @returns A StateStore instance. Call `initialize()` before use.
 */
function createStateStore(config = {}) {
	const backend = config.backend ?? "file";
	switch (backend) {
		case "dolt": {
			// Explicit Dolt selection: no availability probing.
			const repoPath = config.basePath ?? process.cwd();
			return new DoltStateStore({
				repoPath,
				client: new DoltClient({ repoPath })
			});
		}
		case "auto": {
			// Probe for the dolt binary and an initialised repo; fall back to
			// the file-backed store when either is missing.
			const repoPath = config.basePath ?? process.cwd();
			const detection = detectDoltAvailableSync(repoPath);
			if (!detection.available) {
				logger$9.debug(`Dolt not found, using FileStateStore (reason: ${detection.reason})`);
				return new FileStateStore({ basePath: config.basePath });
			}
			logger$9.debug(`Dolt detected, using DoltStateStore (state path: ${join$1(repoPath, ".substrate", "state")})`);
			return new DoltStateStore({
				repoPath,
				client: new DoltClient({ repoPath })
			});
		}
		default:
			return new FileStateStore({ basePath: config.basePath });
	}
}
7992
+
6920
7993
  //#endregion
6921
7994
  //#region src/cli/commands/health.ts
6922
7995
  const logger$8 = createLogger("health-cmd");
@@ -7064,9 +8137,36 @@ function getAllDescendantPids(rootPids, execFileSyncOverride) {
7064
8137
  * (missing DB, missing run, terminal run status). Throws only on unexpected errors.
7065
8138
  */
7066
8139
  async function getAutoHealthData(options) {
7067
- const { runId, projectRoot } = options;
8140
+ const { runId, projectRoot, stateStore, stateStoreConfig } = options;
7068
8141
  const dbRoot = await resolveMainRepoRoot(projectRoot);
7069
8142
  const dbPath = join(dbRoot, ".substrate", "substrate.db");
8143
+ let doltStateInfo;
8144
+ if (stateStoreConfig?.backend === "dolt" && stateStore) {
8145
+ const repoPath = stateStoreConfig.basePath ?? projectRoot;
8146
+ const doltDirPath = join(repoPath, ".dolt");
8147
+ const initialized = existsSync$1(doltDirPath);
8148
+ let responsive = false;
8149
+ let version;
8150
+ try {
8151
+ await stateStore.getHistory(1);
8152
+ responsive = true;
8153
+ try {
8154
+ const { execFile: ef } = await import("node:child_process");
8155
+ const { promisify: p } = await import("node:util");
8156
+ const execFileAsync = p(ef);
8157
+ const { stdout } = await execFileAsync("dolt", ["version"]);
8158
+ const match = stdout.match(/dolt version (\S+)/);
8159
+ if (match) version = match[1];
8160
+ } catch {}
8161
+ } catch {
8162
+ responsive = false;
8163
+ }
8164
+ doltStateInfo = {
8165
+ initialized,
8166
+ responsive,
8167
+ ...version !== void 0 ? { version } : {}
8168
+ };
8169
+ }
7070
8170
  const NO_PIPELINE = {
7071
8171
  verdict: "NO_PIPELINE_RUNNING",
7072
8172
  run_id: null,
@@ -7084,7 +8184,8 @@ async function getAutoHealthData(options) {
7084
8184
  completed: 0,
7085
8185
  escalated: 0,
7086
8186
  details: {}
7087
- }
8187
+ },
8188
+ ...doltStateInfo !== void 0 ? { dolt_state: doltStateInfo } : {}
7088
8189
  };
7089
8190
  if (!existsSync$1(dbPath)) return NO_PIPELINE;
7090
8191
  const dbWrapper = new DatabaseWrapper(dbPath);
@@ -7129,7 +8230,7 @@ async function getAutoHealthData(options) {
7129
8230
  else if (processInfo.orchestrator_pid !== null && processInfo.child_pids.length === 0 && active > 0) verdict = "STALLED";
7130
8231
  else verdict = "HEALTHY";
7131
8232
  else if (run.status === "completed" || run.status === "failed" || run.status === "stopped") verdict = "NO_PIPELINE_RUNNING";
7132
- return {
8233
+ const healthOutput = {
7133
8234
  verdict,
7134
8235
  run_id: run.id,
7135
8236
  status: run.status,
@@ -7143,8 +8244,10 @@ async function getAutoHealthData(options) {
7143
8244
  escalated,
7144
8245
  pending,
7145
8246
  details: storyDetails
7146
- }
8247
+ },
8248
+ ...doltStateInfo !== void 0 ? { dolt_state: doltStateInfo } : {}
7147
8249
  };
8250
+ return healthOutput;
7148
8251
  } finally {
7149
8252
  try {
7150
8253
  dbWrapper.close();
@@ -7178,6 +8281,13 @@ async function runHealthAction(options) {
7178
8281
  process.stdout.write(`\n Summary: ${health.stories.active} active, ${health.stories.completed} completed, ${health.stories.escalated} escalated\n`);
7179
8282
  }
7180
8283
  }
8284
+ if (health.dolt_state !== void 0) {
8285
+ const ds = health.dolt_state;
8286
+ const initStr = ds.initialized ? "yes" : "no";
8287
+ const respStr = ds.responsive ? "yes" : "no";
8288
+ const verStr = ds.version !== void 0 ? ` (v${ds.version})` : "";
8289
+ process.stdout.write(`\n Dolt State: initialized=${initStr} responsive=${respStr}${verStr}\n`);
8290
+ }
7181
8291
  }
7182
8292
  return 0;
7183
8293
  } catch (err) {
@@ -7191,12 +8301,41 @@ async function runHealthAction(options) {
7191
8301
  function registerHealthCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
7192
8302
  program.command("health").description("Check pipeline health: process status, stall detection, and verdict").option("--run-id <id>", "Pipeline run ID to query (defaults to latest)").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").action(async (opts) => {
7193
8303
  const outputFormat = opts.outputFormat === "json" ? "json" : "human";
7194
- const exitCode = await runHealthAction({
7195
- outputFormat,
7196
- runId: opts.runId,
7197
- projectRoot: opts.projectRoot
7198
- });
7199
- process.exitCode = exitCode;
8304
+ const root = opts.projectRoot;
8305
+ let stateStore;
8306
+ let stateStoreConfig;
8307
+ const doltStatePath = join(root, ".substrate", "state", ".dolt");
8308
+ if (existsSync$1(doltStatePath)) {
8309
+ const basePath = join(root, ".substrate", "state");
8310
+ stateStoreConfig = {
8311
+ backend: "dolt",
8312
+ basePath
8313
+ };
8314
+ try {
8315
+ stateStore = createStateStore({
8316
+ backend: "dolt",
8317
+ basePath
8318
+ });
8319
+ await stateStore.initialize();
8320
+ } catch {
8321
+ stateStore = void 0;
8322
+ stateStoreConfig = void 0;
8323
+ }
8324
+ }
8325
+ try {
8326
+ const exitCode = await runHealthAction({
8327
+ outputFormat,
8328
+ runId: opts.runId,
8329
+ projectRoot: root,
8330
+ stateStore,
8331
+ stateStoreConfig
8332
+ });
8333
+ process.exitCode = exitCode;
8334
+ } finally {
8335
+ try {
8336
+ await stateStore?.close();
8337
+ } catch {}
8338
+ }
7200
8339
  });
7201
8340
  }
7202
8341
 
@@ -7835,8 +8974,8 @@ function buildTargetedFilesContent(issueList) {
7835
8974
  * @returns A fully-configured ImplementationOrchestrator ready to call run()
7836
8975
  */
7837
8976
  function createImplementationOrchestrator(deps) {
7838
- const { db, pack, contextCompiler, dispatcher, eventBus, config, projectRoot, tokenCeilings } = deps;
7839
- const logger$23 = createLogger("implementation-orchestrator");
8977
+ const { db, pack, contextCompiler, dispatcher, eventBus, config, projectRoot, tokenCeilings, stateStore } = deps;
8978
+ const logger$24 = createLogger("implementation-orchestrator");
7840
8979
  let _state = "IDLE";
7841
8980
  let _startedAt;
7842
8981
  let _completedAt;
@@ -7856,6 +8995,7 @@ function createImplementationOrchestrator(deps) {
7856
8995
  const _storyDispatches = new Map();
7857
8996
  let _maxConcurrentActual = 0;
7858
8997
  let _contractMismatches;
8998
+ const _stateStoreCache = new Map();
7859
8999
  const MEMORY_PRESSURE_BACKOFF_MS = [
7860
9000
  3e4,
7861
9001
  6e4,
@@ -7880,7 +9020,7 @@ function createImplementationOrchestrator(deps) {
7880
9020
  const nowMs = Date.now();
7881
9021
  for (const [phase, startMs] of starts) {
7882
9022
  const endMs = ends?.get(phase);
7883
- if (endMs === void 0) logger$23.warn({
9023
+ if (endMs === void 0) logger$24.warn({
7884
9024
  storyKey,
7885
9025
  phase
7886
9026
  }, "Phase has no end time — story may have errored mid-phase. Duration capped to now() and may be inflated.");
@@ -7912,6 +9052,26 @@ function createImplementationOrchestrator(deps) {
7912
9052
  review_cycles: reviewCycles,
7913
9053
  dispatches: _storyDispatches.get(storyKey) ?? 0
7914
9054
  });
9055
+ if (stateStore !== void 0) stateStore.recordMetric({
9056
+ storyKey,
9057
+ taskType: "dev-story",
9058
+ model: void 0,
9059
+ tokensIn: tokenAgg.input,
9060
+ tokensOut: tokenAgg.output,
9061
+ cacheReadTokens: void 0,
9062
+ costUsd: tokenAgg.cost,
9063
+ wallClockMs,
9064
+ reviewCycles,
9065
+ stallCount: _storiesWithStall.has(storyKey) ? 1 : 0,
9066
+ result,
9067
+ recordedAt: completedAt,
9068
+ timestamp: completedAt
9069
+ }).catch((storeErr) => {
9070
+ logger$24.warn({
9071
+ err: storeErr,
9072
+ storyKey
9073
+ }, "Failed to record metric to StateStore (best-effort)");
9074
+ });
7915
9075
  try {
7916
9076
  const runId = config.pipelineRunId ?? "unknown";
7917
9077
  createDecision(db, {
@@ -7929,7 +9089,7 @@ function createImplementationOrchestrator(deps) {
7929
9089
  rationale: `Story ${storyKey} completed with result=${result} in ${wallClockSeconds}s. Tokens: ${tokenAgg.input}+${tokenAgg.output}. Review cycles: ${reviewCycles}.`
7930
9090
  });
7931
9091
  } catch (decisionErr) {
7932
- logger$23.warn({
9092
+ logger$24.warn({
7933
9093
  err: decisionErr,
7934
9094
  storyKey
7935
9095
  }, "Failed to write story-metrics decision (best-effort)");
@@ -7957,13 +9117,13 @@ function createImplementationOrchestrator(deps) {
7957
9117
  dispatches: _storyDispatches.get(storyKey) ?? 0
7958
9118
  });
7959
9119
  } catch (emitErr) {
7960
- logger$23.warn({
9120
+ logger$24.warn({
7961
9121
  err: emitErr,
7962
9122
  storyKey
7963
9123
  }, "Failed to emit story:metrics event (best-effort)");
7964
9124
  }
7965
9125
  } catch (err) {
7966
- logger$23.warn({
9126
+ logger$24.warn({
7967
9127
  err,
7968
9128
  storyKey
7969
9129
  }, "Failed to write story metrics (best-effort)");
@@ -7992,7 +9152,7 @@ function createImplementationOrchestrator(deps) {
7992
9152
  rationale: `Story ${storyKey} ${outcome} after ${reviewCycles} review cycle(s).`
7993
9153
  });
7994
9154
  } catch (err) {
7995
- logger$23.warn({
9155
+ logger$24.warn({
7996
9156
  err,
7997
9157
  storyKey
7998
9158
  }, "Failed to write story-outcome decision (best-effort)");
@@ -8018,7 +9178,7 @@ function createImplementationOrchestrator(deps) {
8018
9178
  rationale: `Escalation diagnosis for ${payload.storyKey}: ${diagnosis.recommendedAction} — ${diagnosis.rationale}`
8019
9179
  });
8020
9180
  } catch (err) {
8021
- logger$23.warn({
9181
+ logger$24.warn({
8022
9182
  err,
8023
9183
  storyKey: payload.storyKey
8024
9184
  }, "Failed to persist escalation diagnosis (best-effort)");
@@ -8040,6 +9200,14 @@ function createImplementationOrchestrator(deps) {
8040
9200
  }
8041
9201
  function getStatus() {
8042
9202
  const stories = {};
9203
+ for (const [key, record] of _stateStoreCache) if (!_stories.has(key)) stories[key] = {
9204
+ phase: record.phase,
9205
+ reviewCycles: record.reviewCycles,
9206
+ lastVerdict: record.lastVerdict,
9207
+ error: record.error,
9208
+ startedAt: record.startedAt,
9209
+ completedAt: record.completedAt
9210
+ };
8043
9211
  for (const [key, s] of _stories) stories[key] = { ...s };
8044
9212
  const status = {
8045
9213
  state: _state,
@@ -8057,7 +9225,54 @@ function createImplementationOrchestrator(deps) {
8057
9225
  }
8058
9226
  function updateStory(storyKey, updates) {
8059
9227
  const existing = _stories.get(storyKey);
8060
- if (existing !== void 0) Object.assign(existing, updates);
9228
+ if (existing !== void 0) {
9229
+ Object.assign(existing, updates);
9230
+ persistStoryState(storyKey, existing).catch((err) => logger$24.warn({
9231
+ err,
9232
+ storyKey
9233
+ }, "StateStore write failed after updateStory"));
9234
+ if (updates.phase === "COMPLETE") stateStore?.mergeStory(storyKey).catch((err) => {
9235
+ if (err instanceof DoltMergeConflict) eventBus.emit("pipeline:state-conflict", {
9236
+ storyKey,
9237
+ conflict: err
9238
+ });
9239
+ else logger$24.warn({
9240
+ err,
9241
+ storyKey
9242
+ }, "mergeStory failed");
9243
+ });
9244
+ else if (updates.phase === "ESCALATED" || updates.phase === "FAILED") stateStore?.rollbackStory(storyKey).catch((err) => logger$24.warn({
9245
+ err,
9246
+ storyKey
9247
+ }, "rollbackStory failed — branch may persist"));
9248
+ }
9249
+ }
9250
+ /**
9251
+ * Persist a single story's state to the StateStore (Story 26-4, AC2).
9252
+ *
9253
+ * Best-effort: callers should `.catch()` on the returned promise.
9254
+ * Never throws — errors are swallowed so the pipeline is never blocked.
9255
+ */
9256
+ async function persistStoryState(storyKey, state) {
9257
+ if (stateStore === void 0) return;
9258
+ try {
9259
+ const record = {
9260
+ storyKey,
9261
+ phase: state.phase,
9262
+ reviewCycles: state.reviewCycles,
9263
+ lastVerdict: state.lastVerdict,
9264
+ error: state.error,
9265
+ startedAt: state.startedAt,
9266
+ completedAt: state.completedAt,
9267
+ sprint: config.sprint
9268
+ };
9269
+ await stateStore.setStoryState(storyKey, record);
9270
+ } catch (err) {
9271
+ logger$24.warn({
9272
+ err,
9273
+ storyKey
9274
+ }, "StateStore.setStoryState failed (best-effort)");
9275
+ }
8061
9276
  }
8062
9277
  function persistState() {
8063
9278
  if (config.pipelineRunId === void 0) return;
@@ -8069,7 +9284,7 @@ function createImplementationOrchestrator(deps) {
8069
9284
  token_usage_json: serialized
8070
9285
  });
8071
9286
  } catch (err) {
8072
- logger$23.warn({ err }, "Failed to persist orchestrator state");
9287
+ logger$24.warn({ err }, "Failed to persist orchestrator state");
8073
9288
  }
8074
9289
  }
8075
9290
  function recordProgress() {
@@ -8116,7 +9331,7 @@ function createImplementationOrchestrator(deps) {
8116
9331
  }
8117
9332
  if (childActive) {
8118
9333
  _lastProgressTs = Date.now();
8119
- logger$23.debug({
9334
+ logger$24.debug({
8120
9335
  storyKey: key,
8121
9336
  phase: s.phase,
8122
9337
  childPids
@@ -8125,7 +9340,7 @@ function createImplementationOrchestrator(deps) {
8125
9340
  }
8126
9341
  _stalledStories.add(key);
8127
9342
  _storiesWithStall.add(key);
8128
- logger$23.warn({
9343
+ logger$24.warn({
8129
9344
  storyKey: key,
8130
9345
  phase: s.phase,
8131
9346
  elapsedMs: elapsed,
@@ -8170,7 +9385,7 @@ function createImplementationOrchestrator(deps) {
8170
9385
  for (let attempt = 0; attempt < MEMORY_PRESSURE_BACKOFF_MS.length; attempt++) {
8171
9386
  const memState = dispatcher.getMemoryState();
8172
9387
  if (!memState.isPressured) return true;
8173
- logger$23.warn({
9388
+ logger$24.warn({
8174
9389
  storyKey,
8175
9390
  freeMB: memState.freeMB,
8176
9391
  thresholdMB: memState.thresholdMB,
@@ -8190,18 +9405,23 @@ function createImplementationOrchestrator(deps) {
8190
9405
  * exhausted retries the story is ESCALATED.
8191
9406
  */
8192
9407
  async function processStory(storyKey) {
8193
- logger$23.info({ storyKey }, "Processing story");
9408
+ logger$24.info({ storyKey }, "Processing story");
8194
9409
  {
8195
9410
  const memoryOk = await checkMemoryPressure(storyKey);
8196
9411
  if (!memoryOk) {
8197
- logger$23.warn({ storyKey }, "Memory pressure exhausted — escalating story without dispatch");
8198
- _stories.set(storyKey, {
9412
+ logger$24.warn({ storyKey }, "Memory pressure exhausted — escalating story without dispatch");
9413
+ const memPressureState = {
8199
9414
  phase: "ESCALATED",
8200
9415
  reviewCycles: 0,
8201
9416
  error: "memory_pressure_exhausted",
8202
9417
  startedAt: new Date().toISOString(),
8203
9418
  completedAt: new Date().toISOString()
8204
- });
9419
+ };
9420
+ _stories.set(storyKey, memPressureState);
9421
+ persistStoryState(storyKey, memPressureState).catch((err) => logger$24.warn({
9422
+ err,
9423
+ storyKey
9424
+ }, "StateStore write failed after memory-pressure escalation"));
8205
9425
  writeStoryMetricsBestEffort(storyKey, "escalated", 0);
8206
9426
  emitEscalation({
8207
9427
  storyKey,
@@ -8215,6 +9435,10 @@ function createImplementationOrchestrator(deps) {
8215
9435
  }
8216
9436
  await waitIfPaused();
8217
9437
  if (_state !== "RUNNING") return;
9438
+ stateStore?.branchForStory(storyKey).catch((err) => logger$24.warn({
9439
+ err,
9440
+ storyKey
9441
+ }, "branchForStory failed — continuing without branch isolation"));
8218
9442
  startPhase(storyKey, "create-story");
8219
9443
  updateStory(storyKey, {
8220
9444
  phase: "IN_STORY_CREATION",
@@ -8228,14 +9452,14 @@ function createImplementationOrchestrator(deps) {
8228
9452
  if (match) {
8229
9453
  const candidatePath = join$1(artifactsDir, match);
8230
9454
  const validation = await isValidStoryFile(candidatePath);
8231
- if (!validation.valid) logger$23.warn({
9455
+ if (!validation.valid) logger$24.warn({
8232
9456
  storyKey,
8233
9457
  storyFilePath: candidatePath,
8234
9458
  reason: validation.reason
8235
9459
  }, `Existing story file for ${storyKey} is invalid (${validation.reason}) — re-creating`);
8236
9460
  else {
8237
9461
  storyFilePath = candidatePath;
8238
- logger$23.info({
9462
+ logger$24.info({
8239
9463
  storyKey,
8240
9464
  storyFilePath
8241
9465
  }, "Found existing story file — skipping create-story");
@@ -8331,7 +9555,15 @@ function createImplementationOrchestrator(deps) {
8331
9555
  const storyContent = await readFile$1(storyFilePath, "utf-8");
8332
9556
  const contracts = parseInterfaceContracts(storyContent, storyKey);
8333
9557
  if (contracts.length > 0) {
8334
- for (const contract of contracts) createDecision(db, {
9558
+ const contractRecords = contracts.map((d) => ({
9559
+ storyKey: d.storyKey,
9560
+ contractName: d.contractName,
9561
+ direction: d.direction,
9562
+ schemaPath: d.filePath,
9563
+ ...d.transport !== void 0 ? { transport: d.transport } : {}
9564
+ }));
9565
+ if (stateStore !== void 0) await stateStore.setContracts(storyKey, contractRecords);
9566
+ else for (const contract of contracts) createDecision(db, {
8335
9567
  pipeline_run_id: config.pipelineRunId ?? null,
8336
9568
  phase: "implementation",
8337
9569
  category: "interface-contract",
@@ -8344,14 +9576,14 @@ function createImplementationOrchestrator(deps) {
8344
9576
  ...contract.transport !== void 0 ? { transport: contract.transport } : {}
8345
9577
  })
8346
9578
  });
8347
- logger$23.info({
9579
+ logger$24.info({
8348
9580
  storyKey,
8349
9581
  contractCount: contracts.length,
8350
9582
  contracts
8351
- }, "Stored interface contract declarations in decision store");
9583
+ }, "Stored interface contract declarations");
8352
9584
  }
8353
9585
  } catch (err) {
8354
- logger$23.warn({
9586
+ logger$24.warn({
8355
9587
  storyKey,
8356
9588
  error: err instanceof Error ? err.message : String(err)
8357
9589
  }, "Failed to parse interface contracts — continuing without contract declarations");
@@ -8376,10 +9608,10 @@ function createImplementationOrchestrator(deps) {
8376
9608
  pipelineRunId: config.pipelineRunId ?? ""
8377
9609
  });
8378
9610
  testPlanPhaseResult = testPlanResult.result;
8379
- if (testPlanResult.result === "success") logger$23.info({ storyKey }, "Test plan generated successfully");
8380
- else logger$23.warn({ storyKey }, "Test planning returned failed result — proceeding to dev-story without test plan");
9611
+ if (testPlanResult.result === "success") logger$24.info({ storyKey }, "Test plan generated successfully");
9612
+ else logger$24.warn({ storyKey }, "Test planning returned failed result — proceeding to dev-story without test plan");
8381
9613
  } catch (err) {
8382
- logger$23.warn({
9614
+ logger$24.warn({
8383
9615
  storyKey,
8384
9616
  err
8385
9617
  }, "Test planning failed — proceeding to dev-story without test plan");
@@ -8403,7 +9635,7 @@ function createImplementationOrchestrator(deps) {
8403
9635
  try {
8404
9636
  storyContentForAnalysis = await readFile$1(storyFilePath ?? "", "utf-8");
8405
9637
  } catch (err) {
8406
- logger$23.error({
9638
+ logger$24.error({
8407
9639
  storyKey,
8408
9640
  storyFilePath,
8409
9641
  error: err instanceof Error ? err.message : String(err)
@@ -8411,7 +9643,7 @@ function createImplementationOrchestrator(deps) {
8411
9643
  }
8412
9644
  const analysis = analyzeStoryComplexity(storyContentForAnalysis);
8413
9645
  const batches = planTaskBatches(analysis);
8414
- logger$23.info({
9646
+ logger$24.info({
8415
9647
  storyKey,
8416
9648
  estimatedScope: analysis.estimatedScope,
8417
9649
  batchCount: batches.length,
@@ -8429,7 +9661,7 @@ function createImplementationOrchestrator(deps) {
8429
9661
  if (_state !== "RUNNING") break;
8430
9662
  const taskScope = batch.taskIds.map((id, i) => `T${id}: ${batch.taskTitles[i] ?? ""}`).join("\n");
8431
9663
  const priorFiles = allFilesModified.size > 0 ? Array.from(allFilesModified) : void 0;
8432
- logger$23.info({
9664
+ logger$24.info({
8433
9665
  storyKey,
8434
9666
  batchIndex: batch.batchIndex,
8435
9667
  taskCount: batch.taskIds.length
@@ -8454,7 +9686,7 @@ function createImplementationOrchestrator(deps) {
8454
9686
  });
8455
9687
  } catch (batchErr) {
8456
9688
  const errMsg = batchErr instanceof Error ? batchErr.message : String(batchErr);
8457
- logger$23.warn({
9689
+ logger$24.warn({
8458
9690
  storyKey,
8459
9691
  batchIndex: batch.batchIndex,
8460
9692
  error: errMsg
@@ -8474,7 +9706,7 @@ function createImplementationOrchestrator(deps) {
8474
9706
  filesModified: batchFilesModified,
8475
9707
  result: batchResult.result === "success" ? "success" : "failed"
8476
9708
  };
8477
- logger$23.info(batchMetrics, "Batch dev-story metrics");
9709
+ logger$24.info(batchMetrics, "Batch dev-story metrics");
8478
9710
  for (const f of batchFilesModified) allFilesModified.add(f);
8479
9711
  if (batchFilesModified.length > 0) batchFileGroups.push({
8480
9712
  batchIndex: batch.batchIndex,
@@ -8496,13 +9728,13 @@ function createImplementationOrchestrator(deps) {
8496
9728
  })
8497
9729
  });
8498
9730
  } catch (tokenErr) {
8499
- logger$23.warn({
9731
+ logger$24.warn({
8500
9732
  storyKey,
8501
9733
  batchIndex: batch.batchIndex,
8502
9734
  err: tokenErr
8503
9735
  }, "Failed to record batch token usage");
8504
9736
  }
8505
- if (batchResult.result === "failed") logger$23.warn({
9737
+ if (batchResult.result === "failed") logger$24.warn({
8506
9738
  storyKey,
8507
9739
  batchIndex: batch.batchIndex,
8508
9740
  error: batchResult.error
@@ -8538,7 +9770,7 @@ function createImplementationOrchestrator(deps) {
8538
9770
  });
8539
9771
  persistState();
8540
9772
  if (devResult.result === "success") devStoryWasSuccess = true;
8541
- else logger$23.warn({
9773
+ else logger$24.warn({
8542
9774
  storyKey,
8543
9775
  error: devResult.error,
8544
9776
  filesModified: devFilesModified.length
@@ -8566,7 +9798,7 @@ function createImplementationOrchestrator(deps) {
8566
9798
  if (devStoryWasSuccess) {
8567
9799
  gitDiffFiles = checkGitDiffFiles(projectRoot ?? process.cwd());
8568
9800
  if (gitDiffFiles.length === 0) {
8569
- logger$23.warn({ storyKey }, "Zero-diff detected after COMPLETE dev-story — no file changes in git working tree");
9801
+ logger$24.warn({ storyKey }, "Zero-diff detected after COMPLETE dev-story — no file changes in git working tree");
8570
9802
  eventBus.emit("orchestrator:zero-diff-escalation", {
8571
9803
  storyKey,
8572
9804
  reason: "zero-diff-on-complete"
@@ -8597,7 +9829,7 @@ function createImplementationOrchestrator(deps) {
8597
9829
  });
8598
9830
  if (buildVerifyResult.status === "passed") {
8599
9831
  eventBus.emit("story:build-verification-passed", { storyKey });
8600
- logger$23.info({ storyKey }, "Build verification passed");
9832
+ logger$24.info({ storyKey }, "Build verification passed");
8601
9833
  } else if (buildVerifyResult.status === "failed" || buildVerifyResult.status === "timeout") {
8602
9834
  const truncatedOutput = (buildVerifyResult.output ?? "").slice(0, 2e3);
8603
9835
  const reason = buildVerifyResult.reason ?? "build-verification-failed";
@@ -8606,7 +9838,7 @@ function createImplementationOrchestrator(deps) {
8606
9838
  exitCode: buildVerifyResult.exitCode ?? 1,
8607
9839
  output: truncatedOutput
8608
9840
  });
8609
- logger$23.warn({
9841
+ logger$24.warn({
8610
9842
  storyKey,
8611
9843
  reason,
8612
9844
  exitCode: buildVerifyResult.exitCode
@@ -8636,7 +9868,7 @@ function createImplementationOrchestrator(deps) {
8636
9868
  storyKey
8637
9869
  });
8638
9870
  if (icResult.potentiallyAffectedTests.length > 0) {
8639
- logger$23.warn({
9871
+ logger$24.warn({
8640
9872
  storyKey,
8641
9873
  modifiedInterfaces: icResult.modifiedInterfaces,
8642
9874
  potentiallyAffectedTests: icResult.potentiallyAffectedTests
@@ -8682,7 +9914,7 @@ function createImplementationOrchestrator(deps) {
8682
9914
  "NEEDS_MAJOR_REWORK": 2
8683
9915
  };
8684
9916
  for (const group of batchFileGroups) {
8685
- logger$23.info({
9917
+ logger$24.info({
8686
9918
  storyKey,
8687
9919
  batchIndex: group.batchIndex,
8688
9920
  fileCount: group.files.length
@@ -8720,7 +9952,7 @@ function createImplementationOrchestrator(deps) {
8720
9952
  rawOutput: lastRawOutput,
8721
9953
  tokenUsage: aggregateTokens
8722
9954
  };
8723
- logger$23.info({
9955
+ logger$24.info({
8724
9956
  storyKey,
8725
9957
  batchCount: batchFileGroups.length,
8726
9958
  verdict: worstVerdict,
@@ -8747,7 +9979,7 @@ function createImplementationOrchestrator(deps) {
8747
9979
  const isPhantomReview = reviewResult.dispatchFailed === true || reviewResult.verdict !== "SHIP_IT" && reviewResult.verdict !== "LGTM_WITH_NOTES" && (reviewResult.issue_list === void 0 || reviewResult.issue_list.length === 0) && reviewResult.error !== void 0;
8748
9980
  if (isPhantomReview && !timeoutRetried) {
8749
9981
  timeoutRetried = true;
8750
- logger$23.warn({
9982
+ logger$24.warn({
8751
9983
  storyKey,
8752
9984
  reviewCycles,
8753
9985
  error: reviewResult.error
@@ -8757,7 +9989,7 @@ function createImplementationOrchestrator(deps) {
8757
9989
  verdict = reviewResult.verdict;
8758
9990
  issueList = reviewResult.issue_list ?? [];
8759
9991
  if (verdict === "NEEDS_MAJOR_REWORK" && reviewCycles > 0 && previousIssueList.length > 0 && issueList.length < previousIssueList.length) {
8760
- logger$23.info({
9992
+ logger$24.info({
8761
9993
  storyKey,
8762
9994
  originalVerdict: verdict,
8763
9995
  issuesBefore: previousIssueList.length,
@@ -8793,7 +10025,7 @@ function createImplementationOrchestrator(deps) {
8793
10025
  if (_decomposition !== void 0) parts.push(`decomposed: ${_decomposition.batchCount} batches`);
8794
10026
  parts.push(`${fileCount} files`);
8795
10027
  parts.push(`${totalTokensK} tokens`);
8796
- logger$23.info({
10028
+ logger$24.info({
8797
10029
  storyKey,
8798
10030
  verdict,
8799
10031
  agentVerdict: reviewResult.agentVerdict
@@ -8842,9 +10074,9 @@ function createImplementationOrchestrator(deps) {
8842
10074
  }),
8843
10075
  rationale: `Advisory notes from LGTM_WITH_NOTES review of ${storyKey}`
8844
10076
  });
8845
- logger$23.info({ storyKey }, "Advisory notes persisted to decision store");
10077
+ logger$24.info({ storyKey }, "Advisory notes persisted to decision store");
8846
10078
  } catch (advisoryErr) {
8847
- logger$23.warn({
10079
+ logger$24.warn({
8848
10080
  storyKey,
8849
10081
  error: advisoryErr instanceof Error ? advisoryErr.message : String(advisoryErr)
8850
10082
  }, "Failed to persist advisory notes (best-effort)");
@@ -8864,7 +10096,7 @@ function createImplementationOrchestrator(deps) {
8864
10096
  filesModified: devFilesModified,
8865
10097
  workingDirectory: projectRoot
8866
10098
  });
8867
- logger$23.debug({
10099
+ logger$24.debug({
8868
10100
  storyKey,
8869
10101
  expansion_priority: expansionResult.expansion_priority,
8870
10102
  coverage_gaps: expansionResult.coverage_gaps.length
@@ -8877,7 +10109,7 @@ function createImplementationOrchestrator(deps) {
8877
10109
  value: JSON.stringify(expansionResult)
8878
10110
  });
8879
10111
  } catch (expansionErr) {
8880
- logger$23.warn({
10112
+ logger$24.warn({
8881
10113
  storyKey,
8882
10114
  error: expansionErr instanceof Error ? expansionErr.message : String(expansionErr)
8883
10115
  }, "Test expansion failed — story verdict unchanged");
@@ -8904,7 +10136,7 @@ function createImplementationOrchestrator(deps) {
8904
10136
  persistState();
8905
10137
  return;
8906
10138
  }
8907
- logger$23.info({
10139
+ logger$24.info({
8908
10140
  storyKey,
8909
10141
  reviewCycles: finalReviewCycles,
8910
10142
  issueCount: issueList.length
@@ -8964,7 +10196,7 @@ function createImplementationOrchestrator(deps) {
8964
10196
  fixPrompt = assembled.prompt;
8965
10197
  } catch {
8966
10198
  fixPrompt = `Fix story ${storyKey}: verdict=${verdict}, minor fixes needed`;
8967
- logger$23.warn({ storyKey }, "Failed to assemble auto-approve fix prompt, using fallback");
10199
+ logger$24.warn({ storyKey }, "Failed to assemble auto-approve fix prompt, using fallback");
8968
10200
  }
8969
10201
  const handle = dispatcher.dispatch({
8970
10202
  prompt: fixPrompt,
@@ -8982,9 +10214,9 @@ function createImplementationOrchestrator(deps) {
8982
10214
  output: fixResult.tokenEstimate.output
8983
10215
  } : void 0 }
8984
10216
  });
8985
- if (fixResult.status === "timeout") logger$23.warn({ storyKey }, "Auto-approve fix timed out — approving anyway (issues were minor)");
10217
+ if (fixResult.status === "timeout") logger$24.warn({ storyKey }, "Auto-approve fix timed out — approving anyway (issues were minor)");
8986
10218
  } catch (err) {
8987
- logger$23.warn({
10219
+ logger$24.warn({
8988
10220
  storyKey,
8989
10221
  err
8990
10222
  }, "Auto-approve fix dispatch failed — approving anyway (issues were minor)");
@@ -9101,7 +10333,7 @@ function createImplementationOrchestrator(deps) {
9101
10333
  fixPrompt = assembled.prompt;
9102
10334
  } catch {
9103
10335
  fixPrompt = `Fix story ${storyKey}: verdict=${verdict}, taskType=${taskType}`;
9104
- logger$23.warn({
10336
+ logger$24.warn({
9105
10337
  storyKey,
9106
10338
  taskType
9107
10339
  }, "Failed to assemble fix prompt, using fallback");
@@ -9133,7 +10365,7 @@ function createImplementationOrchestrator(deps) {
9133
10365
  } : void 0 }
9134
10366
  });
9135
10367
  if (fixResult.status === "timeout") {
9136
- logger$23.warn({
10368
+ logger$24.warn({
9137
10369
  storyKey,
9138
10370
  taskType
9139
10371
  }, "Fix dispatch timed out — escalating story");
@@ -9155,7 +10387,7 @@ function createImplementationOrchestrator(deps) {
9155
10387
  }
9156
10388
  if (fixResult.status === "failed") {
9157
10389
  if (isMajorRework) {
9158
- logger$23.warn({
10390
+ logger$24.warn({
9159
10391
  storyKey,
9160
10392
  exitCode: fixResult.exitCode
9161
10393
  }, "Major rework dispatch failed — escalating story");
@@ -9175,14 +10407,14 @@ function createImplementationOrchestrator(deps) {
9175
10407
  persistState();
9176
10408
  return;
9177
10409
  }
9178
- logger$23.warn({
10410
+ logger$24.warn({
9179
10411
  storyKey,
9180
10412
  taskType,
9181
10413
  exitCode: fixResult.exitCode
9182
10414
  }, "Fix dispatch failed");
9183
10415
  }
9184
10416
  } catch (err) {
9185
- logger$23.warn({
10417
+ logger$24.warn({
9186
10418
  storyKey,
9187
10419
  taskType,
9188
10420
  err
@@ -9245,19 +10477,22 @@ function createImplementationOrchestrator(deps) {
9245
10477
  }
9246
10478
  async function run(storyKeys) {
9247
10479
  if (_state === "RUNNING" || _state === "PAUSED") {
9248
- logger$23.warn({ state: _state }, "run() called while orchestrator is already running or paused — ignoring");
10480
+ logger$24.warn({ state: _state }, "run() called while orchestrator is already running or paused — ignoring");
9249
10481
  return getStatus();
9250
10482
  }
9251
10483
  if (_state === "COMPLETE") {
9252
- logger$23.warn({ state: _state }, "run() called on a COMPLETE orchestrator — ignoring");
10484
+ logger$24.warn({ state: _state }, "run() called on a COMPLETE orchestrator — ignoring");
9253
10485
  return getStatus();
9254
10486
  }
9255
10487
  _state = "RUNNING";
9256
10488
  _startedAt = new Date().toISOString();
9257
- for (const key of storyKeys) _stories.set(key, {
9258
- phase: "PENDING",
9259
- reviewCycles: 0
9260
- });
10489
+ for (const key of storyKeys) {
10490
+ const pendingState = {
10491
+ phase: "PENDING",
10492
+ reviewCycles: 0
10493
+ };
10494
+ _stories.set(key, pendingState);
10495
+ }
9261
10496
  eventBus.emit("orchestrator:started", {
9262
10497
  storyKeys,
9263
10498
  pipelineRunId: config.pipelineRunId
@@ -9267,112 +10502,176 @@ function createImplementationOrchestrator(deps) {
9267
10502
  if (config.enableHeartbeat) startHeartbeat();
9268
10503
  if (projectRoot !== void 0) {
9269
10504
  const seedResult = seedMethodologyContext(db, projectRoot);
9270
- if (seedResult.decisionsCreated > 0) logger$23.info({
10505
+ if (seedResult.decisionsCreated > 0) logger$24.info({
9271
10506
  decisionsCreated: seedResult.decisionsCreated,
9272
10507
  skippedCategories: seedResult.skippedCategories
9273
10508
  }, "Methodology context seeded from planning artifacts");
9274
10509
  }
9275
- const interfaceContractDecisions = getDecisionsByCategory(db, "interface-contract");
9276
- const contractDeclarations = interfaceContractDecisions.map((d) => {
10510
+ try {
10511
+ if (stateStore !== void 0) {
10512
+ await stateStore.initialize();
10513
+ for (const key of storyKeys) {
10514
+ const pendingState = _stories.get(key);
10515
+ if (pendingState !== void 0) persistStoryState(key, pendingState).catch((err) => logger$24.warn({
10516
+ err,
10517
+ storyKey: key
10518
+ }, "StateStore write failed during PENDING init"));
10519
+ }
10520
+ try {
10521
+ const existingRecords = await stateStore.queryStories({});
10522
+ for (const record of existingRecords) _stateStoreCache.set(record.storyKey, record);
10523
+ } catch (err) {
10524
+ logger$24.warn({ err }, "StateStore.queryStories() failed during init — status merge will be empty (best-effort)");
10525
+ }
10526
+ }
10527
+ let contractDeclarations = [];
10528
+ if (stateStore !== void 0) {
10529
+ const allContractRecords = await stateStore.queryContracts();
10530
+ contractDeclarations = allContractRecords.map((r) => ({
10531
+ storyKey: r.storyKey,
10532
+ contractName: r.contractName,
10533
+ direction: r.direction,
10534
+ filePath: r.schemaPath,
10535
+ ...r.transport !== void 0 ? { transport: r.transport } : {}
10536
+ }));
10537
+ } else {
10538
+ const interfaceContractDecisions = getDecisionsByCategory(db, "interface-contract");
10539
+ contractDeclarations = interfaceContractDecisions.map((d) => {
10540
+ try {
10541
+ const parsed = JSON.parse(d.value);
10542
+ const storyKey = typeof parsed.storyKey === "string" ? parsed.storyKey : "";
10543
+ const contractName = typeof parsed.schemaName === "string" ? parsed.schemaName : "";
10544
+ const direction = parsed.direction === "export" ? "export" : "import";
10545
+ const filePath = typeof parsed.filePath === "string" ? parsed.filePath : "";
10546
+ if (!storyKey || !contractName) return null;
10547
+ return {
10548
+ storyKey,
10549
+ contractName,
10550
+ direction,
10551
+ filePath,
10552
+ ...typeof parsed.transport === "string" ? { transport: parsed.transport } : {}
10553
+ };
10554
+ } catch {
10555
+ return null;
10556
+ }
10557
+ }).filter((d) => d !== null);
10558
+ }
10559
+ const { batches, edges: contractEdges } = detectConflictGroupsWithContracts(storyKeys, { moduleMap: pack.manifest.conflictGroups }, contractDeclarations);
10560
+ if (contractEdges.length > 0) logger$24.info({
10561
+ contractEdges,
10562
+ edgeCount: contractEdges.length
10563
+ }, "Contract dependency edges detected — applying contract-aware dispatch ordering");
10564
+ logger$24.info({
10565
+ storyCount: storyKeys.length,
10566
+ groupCount: batches.reduce((sum, b) => sum + b.length, 0),
10567
+ batchCount: batches.length,
10568
+ maxConcurrency: config.maxConcurrency
10569
+ }, "Orchestrator starting");
10570
+ if (config.skipPreflight !== true) {
10571
+ const preFlightResult = runBuildVerification({
10572
+ verifyCommand: pack.manifest.verifyCommand,
10573
+ verifyTimeoutMs: pack.manifest.verifyTimeoutMs,
10574
+ projectRoot: projectRoot ?? process.cwd()
10575
+ });
10576
+ if (preFlightResult.status === "failed" || preFlightResult.status === "timeout") {
10577
+ stopHeartbeat();
10578
+ const truncatedOutput = (preFlightResult.output ?? "").slice(0, 2e3);
10579
+ const exitCode = preFlightResult.exitCode ?? 1;
10580
+ eventBus.emit("pipeline:pre-flight-failure", {
10581
+ exitCode,
10582
+ output: truncatedOutput
10583
+ });
10584
+ logger$24.error({
10585
+ exitCode,
10586
+ reason: preFlightResult.reason
10587
+ }, "Pre-flight build check failed — aborting pipeline before any story dispatch");
10588
+ _state = "FAILED";
10589
+ _completedAt = new Date().toISOString();
10590
+ persistState();
10591
+ return getStatus();
10592
+ }
10593
+ if (preFlightResult.status !== "skipped") logger$24.info("Pre-flight build check passed");
10594
+ }
9277
10595
  try {
9278
- const parsed = JSON.parse(d.value);
9279
- const storyKey = typeof parsed.storyKey === "string" ? parsed.storyKey : "";
9280
- const contractName = typeof parsed.schemaName === "string" ? parsed.schemaName : "";
9281
- const direction = parsed.direction === "export" ? "export" : "import";
9282
- const filePath = typeof parsed.filePath === "string" ? parsed.filePath : "";
9283
- if (!storyKey || !contractName) return null;
9284
- return {
9285
- storyKey,
9286
- contractName,
9287
- direction,
9288
- filePath,
9289
- ...typeof parsed.transport === "string" ? { transport: parsed.transport } : {}
9290
- };
9291
- } catch {
9292
- return null;
9293
- }
9294
- }).filter((d) => d !== null);
9295
- const { batches, edges: contractEdges } = detectConflictGroupsWithContracts(storyKeys, { moduleMap: pack.manifest.conflictGroups }, contractDeclarations);
9296
- if (contractEdges.length > 0) logger$23.info({
9297
- contractEdges,
9298
- edgeCount: contractEdges.length
9299
- }, "Contract dependency edges detected — applying contract-aware dispatch ordering");
9300
- logger$23.info({
9301
- storyCount: storyKeys.length,
9302
- groupCount: batches.reduce((sum, b) => sum + b.length, 0),
9303
- batchCount: batches.length,
9304
- maxConcurrency: config.maxConcurrency
9305
- }, "Orchestrator starting");
9306
- if (config.skipPreflight !== true) {
9307
- const preFlightResult = runBuildVerification({
9308
- verifyCommand: pack.manifest.verifyCommand,
9309
- verifyTimeoutMs: pack.manifest.verifyTimeoutMs,
9310
- projectRoot: projectRoot ?? process.cwd()
9311
- });
9312
- if (preFlightResult.status === "failed" || preFlightResult.status === "timeout") {
10596
+ for (const batchGroups of batches) await runWithConcurrency(batchGroups, config.maxConcurrency);
10597
+ } catch (err) {
9313
10598
  stopHeartbeat();
9314
- const truncatedOutput = (preFlightResult.output ?? "").slice(0, 2e3);
9315
- const exitCode = preFlightResult.exitCode ?? 1;
9316
- eventBus.emit("pipeline:pre-flight-failure", {
9317
- exitCode,
9318
- output: truncatedOutput
9319
- });
9320
- logger$23.error({
9321
- exitCode,
9322
- reason: preFlightResult.reason
9323
- }, "Pre-flight build check failed — aborting pipeline before any story dispatch");
9324
10599
  _state = "FAILED";
9325
10600
  _completedAt = new Date().toISOString();
9326
10601
  persistState();
10602
+ logger$24.error({ err }, "Orchestrator failed with unhandled error");
9327
10603
  return getStatus();
9328
10604
  }
9329
- if (preFlightResult.status !== "skipped") logger$23.info("Pre-flight build check passed");
9330
- }
9331
- try {
9332
- for (const batchGroups of batches) await runWithConcurrency(batchGroups, config.maxConcurrency);
9333
- } catch (err) {
9334
10605
  stopHeartbeat();
9335
- _state = "FAILED";
10606
+ _state = "COMPLETE";
9336
10607
  _completedAt = new Date().toISOString();
10608
+ if (projectRoot !== void 0 && contractDeclarations.length > 0) try {
10609
+ const mismatches = verifyContracts(contractDeclarations, projectRoot);
10610
+ if (mismatches.length > 0) {
10611
+ _contractMismatches = mismatches;
10612
+ for (const mismatch of mismatches) eventBus.emit("pipeline:contract-mismatch", {
10613
+ exporter: mismatch.exporter,
10614
+ importer: mismatch.importer,
10615
+ contractName: mismatch.contractName,
10616
+ mismatchDescription: mismatch.mismatchDescription
10617
+ });
10618
+ logger$24.warn({
10619
+ mismatchCount: mismatches.length,
10620
+ mismatches
10621
+ }, "Post-sprint contract verification found mismatches — manual review required");
10622
+ } else logger$24.info("Post-sprint contract verification passed — all declared contracts satisfied");
10623
+ if (stateStore !== void 0) try {
10624
+ const allContractsForVerification = await stateStore.queryContracts();
10625
+ const verifiedAt = new Date().toISOString();
10626
+ const contractsByStory = new Map();
10627
+ for (const cr of allContractsForVerification) {
10628
+ const existing = contractsByStory.get(cr.storyKey) ?? [];
10629
+ existing.push(cr);
10630
+ contractsByStory.set(cr.storyKey, existing);
10631
+ }
10632
+ for (const [sk, contracts] of contractsByStory) {
10633
+ const records = contracts.map((cr) => {
10634
+ const contractMismatches = (_contractMismatches ?? []).filter((m) => (m.exporter === sk || m.importer === sk) && m.contractName === cr.contractName);
10635
+ if (contractMismatches.length > 0) return {
10636
+ storyKey: sk,
10637
+ contractName: cr.contractName,
10638
+ verdict: "fail",
10639
+ mismatchDescription: contractMismatches[0].mismatchDescription,
10640
+ verifiedAt
10641
+ };
10642
+ return {
10643
+ storyKey: sk,
10644
+ contractName: cr.contractName,
10645
+ verdict: "pass",
10646
+ verifiedAt
10647
+ };
10648
+ });
10649
+ await stateStore.setContractVerification(sk, records);
10650
+ }
10651
+ logger$24.info({ storyCount: contractsByStory.size }, "Contract verification results persisted to StateStore");
10652
+ } catch (persistErr) {
10653
+ logger$24.warn({ err: persistErr }, "Failed to persist contract verification results to StateStore");
10654
+ }
10655
+ } catch (err) {
10656
+ logger$24.error({ err }, "Post-sprint contract verification threw an error — skipping");
10657
+ }
10658
+ let completed = 0;
10659
+ let escalated = 0;
10660
+ let failed = 0;
10661
+ for (const s of _stories.values()) if (s.phase === "COMPLETE") completed++;
10662
+ else if (s.phase === "ESCALATED") if (s.error !== void 0) failed++;
10663
+ else escalated++;
10664
+ eventBus.emit("orchestrator:complete", {
10665
+ totalStories: storyKeys.length,
10666
+ completed,
10667
+ escalated,
10668
+ failed
10669
+ });
9337
10670
  persistState();
9338
- logger$23.error({ err }, "Orchestrator failed with unhandled error");
9339
10671
  return getStatus();
10672
+ } finally {
10673
+ if (stateStore !== void 0) await stateStore.close().catch((err) => logger$24.warn({ err }, "StateStore.close() failed (best-effort)"));
9340
10674
  }
9341
- stopHeartbeat();
9342
- _state = "COMPLETE";
9343
- _completedAt = new Date().toISOString();
9344
- if (projectRoot !== void 0 && contractDeclarations.length > 0) try {
9345
- const mismatches = verifyContracts(contractDeclarations, projectRoot);
9346
- if (mismatches.length > 0) {
9347
- _contractMismatches = mismatches;
9348
- for (const mismatch of mismatches) eventBus.emit("pipeline:contract-mismatch", {
9349
- exporter: mismatch.exporter,
9350
- importer: mismatch.importer,
9351
- contractName: mismatch.contractName,
9352
- mismatchDescription: mismatch.mismatchDescription
9353
- });
9354
- logger$23.warn({
9355
- mismatchCount: mismatches.length,
9356
- mismatches
9357
- }, "Post-sprint contract verification found mismatches — manual review required");
9358
- } else logger$23.info("Post-sprint contract verification passed — all declared contracts satisfied");
9359
- } catch (err) {
9360
- logger$23.error({ err }, "Post-sprint contract verification threw an error — skipping");
9361
- }
9362
- let completed = 0;
9363
- let escalated = 0;
9364
- let failed = 0;
9365
- for (const s of _stories.values()) if (s.phase === "COMPLETE") completed++;
9366
- else if (s.phase === "ESCALATED") if (s.error !== void 0) failed++;
9367
- else escalated++;
9368
- eventBus.emit("orchestrator:complete", {
9369
- totalStories: storyKeys.length,
9370
- completed,
9371
- escalated,
9372
- failed
9373
- });
9374
- persistState();
9375
- return getStatus();
9376
10675
  }
9377
10676
  function pause() {
9378
10677
  if (_state !== "RUNNING") return;
@@ -9380,7 +10679,7 @@ function createImplementationOrchestrator(deps) {
9380
10679
  _pauseGate = createPauseGate();
9381
10680
  _state = "PAUSED";
9382
10681
  eventBus.emit("orchestrator:paused", {});
9383
- logger$23.info("Orchestrator paused");
10682
+ logger$24.info("Orchestrator paused");
9384
10683
  }
9385
10684
  function resume() {
9386
10685
  if (_state !== "PAUSED") return;
@@ -9391,7 +10690,7 @@ function createImplementationOrchestrator(deps) {
9391
10690
  }
9392
10691
  _state = "RUNNING";
9393
10692
  eventBus.emit("orchestrator:resumed", {});
9394
- logger$23.info("Orchestrator resumed");
10693
+ logger$24.info("Orchestrator resumed");
9395
10694
  }
9396
10695
  return {
9397
10696
  run,
@@ -9460,8 +10759,10 @@ function parseStoryKeysFromEpics(content) {
9460
10759
  const explicitKeyPattern = /\*\*Story key:\*\*\s*`?(\d+-\d+)(?:-[^`\s]*)?`?/g;
9461
10760
  let match;
9462
10761
  while ((match = explicitKeyPattern.exec(content)) !== null) if (match[1] !== void 0) keys.add(match[1]);
9463
- const headingPattern = /^###\s+Story\s+(\d+)\.(\d+)/gm;
10762
+ const headingPattern = /^###\s+Story\s+(\d+)[.\-](\d+)/gm;
9464
10763
  while ((match = headingPattern.exec(content)) !== null) if (match[1] !== void 0 && match[2] !== void 0) keys.add(`${match[1]}-${match[2]}`);
10764
+ const inlineStoryPattern = /Story\s+(\d+)-(\d+)[:\s]/g;
10765
+ while ((match = inlineStoryPattern.exec(content)) !== null) if (match[1] !== void 0 && match[2] !== void 0) keys.add(`${match[1]}-${match[2]}`);
9465
10766
  const filePathPattern = /_bmad-output\/implementation-artifacts\/(\d+-\d+)-/g;
9466
10767
  while ((match = filePathPattern.exec(content)) !== null) if (match[1] !== void 0) keys.add(match[1]);
9467
10768
  return sortStoryKeys(Array.from(keys));
@@ -9481,21 +10782,34 @@ function parseStoryKeysFromEpics(content) {
9481
10782
  * @returns Sorted array of pending story keys in "N-M" format
9482
10783
  */
9483
10784
  function discoverPendingStoryKeys(projectRoot) {
10785
+ let allKeys = [];
9484
10786
  const epicsPath = findEpicsFile(projectRoot);
9485
- if (epicsPath === void 0) return [];
9486
- let content;
9487
- try {
9488
- content = readFileSync$1(epicsPath, "utf-8");
9489
- } catch {
9490
- return [];
10787
+ if (epicsPath !== void 0) try {
10788
+ const content = readFileSync$1(epicsPath, "utf-8");
10789
+ allKeys = parseStoryKeysFromEpics(content);
10790
+ } catch {}
10791
+ if (allKeys.length === 0) {
10792
+ const epicFiles = findEpicFiles(projectRoot);
10793
+ for (const epicFile of epicFiles) try {
10794
+ const content = readFileSync$1(epicFile, "utf-8");
10795
+ const keys = parseStoryKeysFromEpics(content);
10796
+ allKeys.push(...keys);
10797
+ } catch {}
10798
+ allKeys = sortStoryKeys([...new Set(allKeys)]);
9491
10799
  }
9492
- const allKeys = parseStoryKeysFromEpics(content);
9493
10800
  if (allKeys.length === 0) return [];
9494
10801
  const existingKeys = collectExistingStoryKeys(projectRoot);
9495
10802
  return allKeys.filter((k) => !existingKeys.has(k));
9496
10803
  }
9497
10804
  /**
9498
- * Find epics.md from known candidate paths relative to projectRoot.
10805
+ * Find epic files from known candidate paths relative to projectRoot.
10806
+ *
10807
+ * Checks for:
10808
+ * 1. epics.md (consolidated epic file)
10809
+ * 2. Individual epic-*.md files in planning-artifacts/
10810
+ *
10811
+ * Returns a single path for epics.md, or undefined if not found.
10812
+ * For individual epic files, use findEpicFiles() instead.
9499
10813
  */
9500
10814
  function findEpicsFile(projectRoot) {
9501
10815
  const candidates = ["_bmad-output/planning-artifacts/epics.md", "_bmad-output/epics.md"];
@@ -9506,6 +10820,20 @@ function findEpicsFile(projectRoot) {
9506
10820
  return void 0;
9507
10821
  }
9508
10822
  /**
10823
+ * Find individual epic-*.md files in the planning artifacts directory.
10824
+ * Returns paths sorted alphabetically.
10825
+ */
10826
+ function findEpicFiles(projectRoot) {
10827
+ const planningDir = join$1(projectRoot, "_bmad-output", "planning-artifacts");
10828
+ if (!existsSync$1(planningDir)) return [];
10829
+ try {
10830
+ const entries = readdirSync$1(planningDir, { encoding: "utf-8" });
10831
+ return entries.filter((e) => /^epic-\d+.*\.md$/.test(e)).sort().map((e) => join$1(planningDir, e));
10832
+ } catch {
10833
+ return [];
10834
+ }
10835
+ }
10836
+ /**
9509
10837
  * Collect story keys that already have implementation artifact files.
9510
10838
  * Scans _bmad-output/implementation-artifacts/ for files matching N-M-*.md.
9511
10839
  */
@@ -13711,7 +15039,7 @@ async function runRunAction(options) {
13711
15039
  if (storyKeys.length === 0) {
13712
15040
  const activeReqs = db.prepare(`SELECT description FROM requirements WHERE status = 'active' AND type = 'story'`).all();
13713
15041
  for (const req of activeReqs) {
13714
- const match = STORY_KEY_PATTERN.exec(req.description.trim());
15042
+ const match = STORY_KEY_PATTERN$1.exec(req.description.trim());
13715
15043
  if (match !== null) storyKeys.push(match[0]);
13716
15044
  }
13717
15045
  if (storyKeys.length > 0) {
@@ -14532,5 +15860,5 @@ function registerRunCommand(program, _version = "0.0.0", projectRoot = process.c
14532
15860
  }
14533
15861
 
14534
15862
  //#endregion
14535
- export { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, createConfigSystem, createContextCompiler, createDispatcher, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
14536
- //# sourceMappingURL=run-RJ0EHbfM.js.map
15863
+ export { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltNotInstalled, FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
15864
+ //# sourceMappingURL=run-DO9n3cwy.js.map