substrate-ai 0.20.64 → 0.20.65

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/adapter-registry-BbVWH3Yv.js +4 -0
  2. package/dist/cli/index.js +93 -24
  3. package/dist/{decision-router-BA__VYIp.js → decision-router-DblHY8se.js} +1 -1
  4. package/dist/{decisions-4F91LrVD.js → decisions-DilHo99V.js} +2 -2
  5. package/dist/{dist-W2emvN3F.js → dist-K_RRWnBX.js} +2 -2
  6. package/dist/{errors-CKFu8YI9.js → errors-pSiZbn6e.js} +2 -2
  7. package/dist/{experimenter-BgpUcUaW.js → experimenter-DT9v2Pto.js} +1 -1
  8. package/dist/health-DC3y-sR6.js +1715 -0
  9. package/dist/health-qhtWYh49.js +8 -0
  10. package/dist/index-c924O9mj.d.ts +1432 -0
  11. package/dist/index.d.ts +56 -735
  12. package/dist/index.js +2 -2
  13. package/dist/interactive-prompt-C7wpE4z4.js +183 -0
  14. package/dist/{health-DudlnqXd.js → manifest-read-DDkXC3L_.js} +120 -2012
  15. package/dist/modules/interactive-prompt/index.d.ts +86 -0
  16. package/dist/modules/interactive-prompt/index.js +6 -0
  17. package/dist/recovery-engine-BKGBeBnW.js +281 -0
  18. package/dist/{routing-0ykvBl_4.js → routing-CzF0p6lI.js} +2 -2
  19. package/dist/run-DX95j4_D.js +14 -0
  20. package/dist/{run-CCxsv-9M.js → run-DzB4rgkj.js} +224 -31
  21. package/dist/src/modules/decision-router/index.js +1 -1
  22. package/dist/src/modules/recovery-engine/index.d.ts +1101 -0
  23. package/dist/src/modules/recovery-engine/index.js +5 -0
  24. package/dist/{upgrade-OFeC_NIx.js → upgrade-DxzQ1nss.js} +3 -3
  25. package/dist/{upgrade-aW7GYL2F.js → upgrade-MP9XzrI6.js} +2 -2
  26. package/dist/version-manager-impl-GZDUBt0Q.js +4 -0
  27. package/dist/work-graph-repository-DZyJv5pV.js +265 -0
  28. package/package.json +1 -1
  29. package/dist/adapter-registry-k7ZX3Bz6.js +0 -4
  30. package/dist/health-CLNmnZiw.js +0 -6
  31. package/dist/run-ChxsPICN.js +0 -10
  32. package/dist/version-manager-impl-BCSf5E3j.js +0 -4
  33. /package/dist/{decisions-C0pz9Clx.js → decisions-CzSIEeGP.js} +0 -0
  34. /package/dist/{routing-CcBOCuC9.js → routing-DFxoKHDt.js} +0 -0
  35. /package/dist/{version-manager-impl-FH4TTnXm.js → version-manager-impl-qFBiO4Eh.js} +0 -0
@@ -0,0 +1,1715 @@
1
+ import { createLogger } from "./logger-KeHncl-f.js";
2
+ import { DoltClient, DoltQueryError, createDatabaseAdapter$1 as createDatabaseAdapter, getLatestRun, getPipelineRunById, initSchema } from "./dist-K_RRWnBX.js";
3
+ import { resolveMainRepoRoot, resolveRunManifest } from "./manifest-read-DDkXC3L_.js";
4
+ import { createRequire } from "module";
5
+ import { dirname, join } from "path";
6
+ import { existsSync, readFileSync } from "node:fs";
7
+ import { spawnSync } from "node:child_process";
8
+ import { join as join$1 } from "node:path";
9
+ import { readFile, writeFile } from "node:fs/promises";
10
+ import { existsSync as existsSync$1 } from "fs";
11
+ import { createRequire as createRequire$1 } from "node:module";
12
+ import { fileURLToPath } from "node:url";
13
+
14
//#region rolldown:runtime
// Bundler-generated CommonJS/ESM interop runtime (rolldown).
// Cached references to Object intrinsics used by the helpers below.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Wrap a CommonJS module factory so it executes at most once; later calls
// return the cached `mod.exports`. `cb` is an object whose single key maps
// to the factory function; the comma expression runs the factory then
// yields the exports.
var __commonJS = (cb, mod) => function() {
	return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Copy own enumerable properties of `from` onto `to` as live getters,
// skipping the `except` key and any key already present on `to`.
var __copyProps = (to, from, except, desc) => {
	if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
		key = keys[i];
		if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
			// bind fixes the key so each getter reads the current value of from[key]
			get: ((k) => from[k]).bind(null, key),
			enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
		});
	}
	return to;
};
// Convert a CommonJS export object into an ESM-shaped namespace object,
// adding a `default` property when the source is not already an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
	value: mod,
	enumerable: true
}) : target, mod));
// require() bound to this module's URL, for loading CommonJS dependencies from ESM.
var __require = /* @__PURE__ */ createRequire(import.meta.url);

//#endregion
41
//#region src/persistence/adapter.ts
/**
 * Create a DatabaseAdapter for the specified (or auto-detected) backend.
 *
 * Wraps the core factory and supplies the concrete DoltClient constructor
 * as the doltClientFactory argument, so monolith callers get Dolt support
 * transparently.
 */
function createDatabaseAdapter$1(config) {
	const doltClientFactory = (repoPath) => new DoltClient({ repoPath });
	return createDatabaseAdapter(config, doltClientFactory);
}

//#endregion
54
//#region src/modules/stop-after/types.ts
/**
 * Stop-After Gate Module — Types
 *
 * Defines the PhaseName type and all parameter/result types for the stop-after gate.
 * VALID_PHASES is the canonical source for pipeline phase names; auto.ts imports from here.
 */
/** Canonical pipeline phase names. This is the single source of truth for all phase lists. */
// NOTE(review): list order appears to reflect pipeline execution order
// (buildPipelineStatusOutput iterates it to render phases) — confirm before reordering.
const VALID_PHASES = [
	"research",
	"analysis",
	"planning",
	"solutioning",
	"implementation"
];
/**
 * Alias for VALID_PHASES retained for backward compatibility with existing imports.
 * @deprecated Use VALID_PHASES directly.
 */
const STOP_AFTER_VALID_PHASES = VALID_PHASES;

//#endregion
76
//#region src/cli/commands/pipeline-shared.ts
/**
 * Parse a DB timestamp string to a Date, treating zone-less values as UTC.
 *
 * SQLite stores timestamps as "YYYY-MM-DD HH:MM:SS" with no timezone suffix,
 * which the Date constructor would otherwise interpret as *local* time and
 * skew staleness/duration math on non-UTC machines. Strings that already
 * carry a trailing "Z" or a ±HH:MM offset are parsed as-is.
 */
function parseDbTimestampAsUtc(ts) {
	const hasExplicitZone = ts.endsWith("Z") || /[+-]\d{2}:\d{2}$/.test(ts);
	if (hasExplicitZone) {
		return new Date(ts);
	}
	// Convert "YYYY-MM-DD HH:MM:SS" into ISO form and pin it to UTC.
	return new Date(`${ts.replace(" ", "T")}Z`);
}
92
// Absolute path of this module file (ESM replacement for CommonJS __filename).
const __filename = fileURLToPath(import.meta.url);
// Directory containing this module file (ESM replacement for CommonJS __dirname).
const __dirname = dirname(__filename);
94
/**
 * Locate the nearest ancestor directory (including startDir itself) that
 * contains a package.json. Works regardless of build output structure
 * (tsdown bundles into dist/cli/index.js, not dist/cli/commands/auto.js).
 * Falls back to startDir when no package.json is found before the
 * filesystem root (the root directory itself is not checked).
 */
function findPackageRoot(startDir) {
	for (let current = startDir; dirname(current) !== current; current = dirname(current)) {
		if (existsSync$1(join(current, "package.json"))) return current;
	}
	return startDir;
}
107
// Package root resolved relative to the build output location.
// NOTE(review): findPackageRoot() above is not used here; this assumes a fixed
// three-levels-up layout from __dirname — confirm against the dist structure.
const PACKAGE_ROOT = join(__dirname, "..", "..", "..");
108
/**
 * Resolve the absolute path to the installed bmad-method package's src/
 * directory. Uses createRequire so resolution works from ESM without
 * import.meta.resolve polyfills.
 *
 * @param fromDir directory to resolve from (defaults to this module's dir)
 * @returns the src/ path, or null when bmad-method is not installed
 */
function resolveBmadMethodSrcPath(fromDir = __dirname) {
	try {
		// synthetic.js is a phantom filename anchoring resolution inside fromDir.
		const localRequire = createRequire$1(join(fromDir, "synthetic.js"));
		const packageJsonPath = localRequire.resolve("bmad-method/package.json");
		return join(dirname(packageJsonPath), "src");
	} catch {
		return null;
	}
}
122
/**
 * Read the version field from the installed bmad-method's package.json.
 *
 * @param fromDir directory to resolve from (defaults to this module's dir)
 * @returns the version string, or 'unknown' when not resolvable
 */
function resolveBmadMethodVersion(fromDir = __dirname) {
	try {
		// synthetic.js is a phantom filename anchoring resolution inside fromDir.
		const localRequire = createRequire$1(join(fromDir, "synthetic.js"));
		const packageJsonPath = localRequire.resolve("bmad-method/package.json");
		const manifest = localRequire(packageJsonPath);
		return manifest.version ?? "unknown";
	} catch {
		return "unknown";
	}
}
136
/** BMAD baseline token total for full pipeline comparison (analysis+planning+solutioning+implementation) */
const BMAD_BASELINE_TOKENS_FULL = 56800;
/** BMAD baseline token total for create+dev+review comparison */
const BMAD_BASELINE_TOKENS = 23800;
/** Story key pattern: e.g. "10-1", "1-1a", "NEW-26", "E6" */
// One alphanumeric segment, optionally followed by "-" and a second segment.
// Duplicated as STORY_KEY_PATTERN in the dolt-store region below — keep in sync.
const STORY_KEY_PATTERN$1 = /^[A-Za-z0-9]+(-[A-Za-z0-9]+)?$/;
/**
 * Top-level keys in .claude/settings.json that substrate owns.
 * On init, these are set/updated unconditionally.
 * User-defined keys outside this set are never touched.
 */
const SUBSTRATE_OWNED_SETTINGS_KEYS = ["statusLine"];
148
/**
 * Default .claude/settings.json values that substrate owns (see
 * SUBSTRATE_OWNED_SETTINGS_KEYS). A fresh object is returned on every call
 * so callers may mutate it safely.
 */
function getSubstrateDefaultSettings() {
	const statusLine = {
		type: "command",
		command: "bash \"$CLAUDE_PROJECT_DIR\"/.claude/statusline.sh",
		padding: 0
	};
	return { statusLine };
}
155
/**
 * Format output according to the requested format.
 *
 * For "json": a success/error envelope is serialized. For anything else:
 * strings pass through untouched and other data is pretty-printed JSON.
 */
function formatOutput(data, format, success = true, errorMessage) {
	if (format === "json") {
		const envelope = success
			? { success: true, data }
			: { success: false, error: errorMessage ?? "Unknown error" };
		return JSON.stringify(envelope);
	}
	return typeof data === "string" ? data : JSON.stringify(data, null, 2);
}
172
/**
 * Build a human-readable token telemetry display from summary rows.
 *
 * Lists per-phase/agent token counts and cost, a grand total, and the
 * savings (or overhead) relative to the BMAD baseline.
 */
function formatTokenTelemetry(summary, baselineTokens = BMAD_BASELINE_TOKENS) {
	if (summary.length === 0) return "No token usage recorded.";
	let inputSum = 0;
	let outputSum = 0;
	let costSum = 0;
	const out = ["Pipeline Token Usage:"];
	for (const row of summary) {
		inputSum += row.total_input_tokens;
		outputSum += row.total_output_tokens;
		costSum += row.total_cost_usd;
		out.push(` ${row.phase} (${row.agent}): ${row.total_input_tokens.toLocaleString()} input / ${row.total_output_tokens.toLocaleString()} output ($${row.total_cost_usd.toFixed(4)})`);
	}
	out.push(" " + "─".repeat(55));
	out.push(` Total: ${inputSum.toLocaleString()} input / ${outputSum.toLocaleString()} output ($${costSum.toFixed(4)})`);
	const combinedTokens = inputSum + outputSum;
	let savingsPct = 0;
	if (baselineTokens > 0) {
		savingsPct = Math.round((baselineTokens - combinedTokens) / baselineTokens * 100);
	}
	// Negative savings means the run exceeded the baseline.
	const savingsLabel = savingsPct >= 0 ? `Savings: ${savingsPct}%` : `Overhead: +${Math.abs(savingsPct)}%`;
	out.push(` BMAD Baseline: ${baselineTokens.toLocaleString()} tokens → ${savingsLabel}`);
	return out.join("\n");
}
197
/**
 * Validate a story key: a single alphanumeric token or an <epic>-<story>
 * pair (e.g. "10-1", "1-1a", "NEW-26", "E6").
 */
function validateStoryKey(key) {
	// Inlined STORY_KEY_PATTERN$1: one alphanumeric segment, optionally
	// followed by a hyphen and a second alphanumeric segment.
	return /^[A-Za-z0-9]+(-[A-Za-z0-9]+)?$/.test(key);
}
203
/**
 * Build the AC5 JSON status schema for a pipeline run.
 *
 * @param run pipeline run row (id, current_phase, config_json, token_usage_json, updated_at, …)
 * @param tokenSummary per-phase/agent token aggregate rows
 * @param decisionsCount number of decisions recorded for the run
 * @param storiesCount fallback story count when no sprint state is embedded in the run
 * @returns status object with per-phase state, token totals, and sprint summary
 *
 * Fix: previously, a missing run.updated_at produced parseDbTimestampAsUtc("")
 * → Invalid Date → staleness_seconds = NaN. NaN serializes to null in JSON, so
 * we now emit null explicitly (same serialized output, no NaN leaking to
 * non-JSON consumers).
 */
function buildPipelineStatusOutput(run, tokenSummary, decisionsCount, storiesCount) {
	const phases = {};
	// Aggregate token usage per phase across all agents.
	const phaseTokenMap = {};
	for (const row of tokenSummary) {
		if (!phaseTokenMap[row.phase]) phaseTokenMap[row.phase] = {
			input: 0,
			output: 0
		};
		phaseTokenMap[row.phase].input += row.total_input_tokens;
		phaseTokenMap[row.phase].output += row.total_output_tokens;
	}
	// Phase start/completion history is embedded in the run's config JSON;
	// a malformed document degrades gracefully to an empty history.
	let phaseHistory = [];
	try {
		if (run.config_json) {
			const config = JSON.parse(run.config_json);
			phaseHistory = config.phaseHistory ?? [];
		}
	} catch {}
	const currentPhase = run.current_phase ?? null;
	// Classify every canonical phase as complete / running / pending.
	for (const phaseName of VALID_PHASES) {
		const historyEntry = phaseHistory.find((h) => h.phase === phaseName);
		const tokenUsage = phaseTokenMap[phaseName] ?? {
			input: 0,
			output: 0
		};
		if (historyEntry?.completedAt) {
			phases[phaseName] = {
				status: "complete",
				completed_at: historyEntry.completedAt,
				token_usage: tokenUsage
			};
			if (historyEntry.startedAt) phases[phaseName].started_at = historyEntry.startedAt;
		} else if (phaseName === currentPhase || historyEntry?.startedAt) {
			phases[phaseName] = {
				status: "running",
				started_at: historyEntry?.startedAt,
				token_usage: tokenUsage
			};
		} else {
			phases[phaseName] = { status: "pending" };
		}
	}
	// Grand totals across all rows (not just canonical phases).
	let totalInput = 0;
	let totalOutput = 0;
	let totalCost = 0;
	for (const row of tokenSummary) {
		totalInput += row.total_input_tokens;
		totalOutput += row.total_output_tokens;
		totalCost += row.total_cost_usd;
	}
	// Sprint/story state is embedded in token_usage_json; parse best-effort.
	let activeDispatches = 0;
	let storiesSummary;
	try {
		if (run.token_usage_json) {
			const state = JSON.parse(run.token_usage_json);
			if (state.stories && Object.keys(state.stories).length > 0) {
				const now = Date.now();
				let completed = 0;
				let inProgress = 0;
				let escalated = 0;
				let pending = 0;
				const details = {};
				for (const [key, s] of Object.entries(state.stories)) {
					const phase = s.phase ?? "PENDING";
					// Anything between PENDING and a terminal state is an active dispatch.
					if (phase !== "PENDING" && phase !== "COMPLETE" && phase !== "ESCALATED") activeDispatches++;
					if (phase === "COMPLETE") completed++;
					else if (phase === "ESCALATED") escalated++;
					else if (phase === "PENDING") pending++;
					else inProgress++;
					const elapsed = s.startedAt != null ? Math.max(0, Math.round((now - new Date(s.startedAt).getTime()) / 1e3)) : 0;
					details[key] = {
						phase,
						review_cycles: s.reviewCycles ?? 0,
						elapsed_seconds: elapsed
					};
				}
				storiesSummary = {
					completed,
					in_progress: inProgress,
					escalated,
					pending,
					details
				};
			}
		}
	} catch {}
	// Prefer counts derived from embedded sprint state over the caller's figure.
	const derivedStoriesCount = storiesSummary !== void 0 ? storiesSummary.completed + storiesSummary.in_progress + storiesSummary.escalated + storiesSummary.pending : storiesCount;
	const derivedStoriesCompleted = storiesSummary !== void 0 ? storiesSummary.completed : 0;
	const lastActivity = run.updated_at ?? "";
	return {
		run_id: run.id,
		current_phase: currentPhase,
		phases,
		total_tokens: {
			input: totalInput,
			output: totalOutput,
			cost_usd: totalCost
		},
		decisions_count: decisionsCount,
		stories_count: derivedStoriesCount,
		stories_completed: derivedStoriesCompleted,
		last_activity: lastActivity,
		// Guard against a missing timestamp: emit null rather than NaN.
		staleness_seconds: lastActivity !== "" ? Math.round((Date.now() - parseDbTimestampAsUtc(lastActivity).getTime()) / 1e3) : null,
		last_event_ts: lastActivity,
		active_dispatches: activeDispatches,
		...storiesSummary !== void 0 ? { stories: storiesSummary } : {}
	};
}
310
/**
 * Format a pipeline status summary (see buildPipelineStatusOutput) as a
 * human-readable multi-line report.
 */
function formatPipelineStatusHuman(status) {
	const out = [];
	out.push(`Pipeline Run: ${status.run_id}`);
	out.push(` Current Phase: ${status.current_phase ?? "N/A"}`);
	out.push("");
	out.push(" Phase Status:");
	const icons = {
		complete: "[DONE]",
		running: "[RUN] ",
		pending: "[ ]"
	};
	for (const [phaseName, phaseInfo] of Object.entries(status.phases)) {
		const parts = [` ${icons[phaseInfo.status] ?? "[?]"} ${phaseName}`];
		if (phaseInfo.status === "complete" && phaseInfo.completed_at) {
			parts.push(` (completed: ${phaseInfo.completed_at})`);
		}
		const usage = phaseInfo.token_usage;
		if (usage && (usage.input > 0 || usage.output > 0)) {
			parts.push(` — tokens: ${usage.input.toLocaleString()} in / ${usage.output.toLocaleString()} out`);
		}
		out.push(parts.join(""));
	}
	out.push("");
	out.push(` Total Tokens: ${(status.total_tokens.input + status.total_tokens.output).toLocaleString()} (in: ${status.total_tokens.input.toLocaleString()}, out: ${status.total_tokens.output.toLocaleString()})`);
	out.push(` Total Cost: $${status.total_tokens.cost_usd.toFixed(4)}`);
	out.push(` Decisions: ${status.decisions_count}`);
	out.push(` Stories: ${status.stories_count}`);
	// Sprint table only appears when per-story details are present.
	if (status.stories !== void 0 && Object.keys(status.stories.details).length > 0) {
		const divider = " " + "─".repeat(68);
		out.push("");
		out.push(" Sprint Progress:");
		out.push(divider);
		out.push(` ${"STORY".padEnd(10)} ${"PHASE".padEnd(24)} ${"CYCLES".padEnd(8)} ELAPSED`);
		out.push(divider);
		for (const [key, detail] of Object.entries(status.stories.details)) {
			const elapsed = detail.elapsed_seconds > 0 ? `${detail.elapsed_seconds}s` : "-";
			out.push(` ${key.padEnd(10)} ${detail.phase.padEnd(24)} ${String(detail.review_cycles).padEnd(8)} ${elapsed}`);
		}
		out.push(divider);
		out.push(` Completed: ${status.stories.completed} In Progress: ${status.stories.in_progress} Escalated: ${status.stories.escalated} Pending: ${status.stories.pending}`);
	}
	return out.join("\n");
}
351
/**
 * Format a complete pipeline run summary.
 *
 * Aggregates token totals from `tokenSummary`, computes savings relative to
 * the full BMAD baseline, and renders either a JSON document or a boxed
 * human-readable report depending on `format`.
 */
function formatPipelineSummary(run, tokenSummary, decisionsCount, storiesCount, durationMs, format) {
	let totalInput = 0;
	let totalOutput = 0;
	let totalCost = 0;
	for (const row of tokenSummary) {
		totalInput += row.total_input_tokens;
		totalOutput += row.total_output_tokens;
		totalCost += row.total_cost_usd;
	}
	const totalTokens = totalInput + totalOutput;
	// Negative savings means the run used more tokens than the baseline.
	const savingsPct = BMAD_BASELINE_TOKENS_FULL > 0 ? Math.round((BMAD_BASELINE_TOKENS_FULL - totalTokens) / BMAD_BASELINE_TOKENS_FULL * 100) : 0;
	const durationSec = Math.round(durationMs / 1e3);
	if (format === "json") return JSON.stringify({
		run_id: run.id,
		status: run.status,
		duration_ms: durationMs,
		// NOTE(review): reports the total number of canonical phases, which
		// assumes the run completed all of them — confirm for partial runs.
		phases_completed: VALID_PHASES.length,
		decisions_count: decisionsCount,
		stories_count: storiesCount,
		token_usage: {
			input: totalInput,
			output: totalOutput,
			total: totalTokens,
			cost_usd: totalCost,
			bmad_baseline: BMAD_BASELINE_TOKENS_FULL,
			savings_pct: savingsPct
		}
	});
	const lines = [
		"┌─────────────────────────────────────────────────────┐",
		"│ Pipeline Run Summary │",
		"└─────────────────────────────────────────────────────┘",
		` Run ID: ${run.id}`,
		` Status: ${run.status}`,
		` Duration: ${durationSec}s`,
		` Phases Complete: ${VALID_PHASES.length}`,
		` Decisions: ${decisionsCount}`,
		` Stories: ${storiesCount}`,
		"",
		` Token Usage: ${totalTokens.toLocaleString()} total`,
		` Input: ${totalInput.toLocaleString()}`,
		` Output: ${totalOutput.toLocaleString()}`,
		` Cost: $${totalCost.toFixed(4)}`,
		"",
		` BMAD Baseline: ${BMAD_BASELINE_TOKENS_FULL.toLocaleString()} tokens`,
		` Token Savings: ${savingsPct >= 0 ? savingsPct + "%" : "N/A (overhead)"}`
	];
	return lines.join("\n");
}
403
+
404
+ //#endregion
405
//#region src/modules/state/file-store.ts
/**
 * In-memory / file-backed StateStore implementation.
 *
 * Suitable for CI environments and testing where orchestrator state is
 * ephemeral. Use DoltStateStore for branch-per-story isolation and versioned
 * history in production.
 */
var FileStateStore = class {
	// Optional directory for JSON persistence; when undefined everything stays in memory.
	_basePath;
	// storyKey -> story state record
	_stories = new Map();
	// Append-only list of recorded metric rows.
	_metrics = [];
	// storyKey -> contract records
	_contracts = new Map();
	// storyKey -> contract verification results
	_contractVerifications = new Map();
	/** Key-value metrics store: outer key = runId, inner key = metric key */
	_kvMetrics = new Map();
	constructor(options = {}) {
		this._basePath = options.basePath;
	}
	// No-op: nothing to open for the in-memory store.
	async initialize() {}
	// No-op: nothing to release.
	async close() {}
	/** Return the stored state for a story, or undefined when unknown. */
	async getStoryState(storyKey) {
		return this._stories.get(storyKey);
	}
	/** Store (or overwrite) a story's state; the storyKey argument always wins. */
	async setStoryState(storyKey, state) {
		this._stories.set(storyKey, {
			...state,
			storyKey
		});
	}
	/** Filter stored stories by phase (single value or list), sprint, and storyKey. */
	async queryStories(filter) {
		const all = Array.from(this._stories.values());
		return all.filter((record) => {
			if (filter.phase !== void 0) {
				const phases = Array.isArray(filter.phase) ? filter.phase : [filter.phase];
				if (!phases.includes(record.phase)) return false;
			}
			if (filter.sprint !== void 0 && record.sprint !== filter.sprint) return false;
			if (filter.storyKey !== void 0 && record.storyKey !== filter.storyKey) return false;
			return true;
		});
	}
	/** Append a metric row, defaulting recordedAt to now (ISO 8601). */
	async recordMetric(metric) {
		const record = {
			...metric,
			recordedAt: metric.recordedAt ?? new Date().toISOString()
		};
		this._metrics.push(record);
	}
	/**
	 * Filter recorded metrics. Accepts both camelCase and snake_case filter
	 * keys (storyKey/story_key, taskType/task_type). Note: `dateFrom` and
	 * `since` both act as lower bounds on recordedAt; rows with no
	 * recordedAt pass every date filter.
	 */
	async queryMetrics(filter) {
		const storyKey = filter.storyKey ?? filter.story_key;
		const taskType = filter.taskType ?? filter.task_type;
		return this._metrics.filter((m) => {
			if (storyKey !== void 0 && m.storyKey !== storyKey) return false;
			if (taskType !== void 0 && m.taskType !== taskType) return false;
			if (filter.sprint !== void 0 && m.sprint !== filter.sprint) return false;
			if (filter.dateFrom !== void 0 && m.recordedAt !== void 0 && m.recordedAt < filter.dateFrom) return false;
			if (filter.dateTo !== void 0 && m.recordedAt !== void 0 && m.recordedAt > filter.dateTo) return false;
			if (filter.since !== void 0 && m.recordedAt !== void 0 && m.recordedAt < filter.since) return false;
			return true;
		});
	}
	/**
	 * Persist an arbitrary key-value metric for a run.
	 * Stored in memory AND written to `{basePath}/kv-metrics.json` when basePath is set.
	 */
	async setMetric(runId, key, value) {
		let runMap = this._kvMetrics.get(runId);
		if (runMap === void 0) {
			runMap = new Map();
			this._kvMetrics.set(runId, runMap);
		}
		runMap.set(key, value);
		if (this._basePath !== void 0) await this._flushKvMetrics();
	}
	/**
	 * Retrieve a previously stored key-value metric for a run.
	 * Reads from in-memory cache, falling back to the JSON file when basePath is set.
	 */
	async getMetric(runId, key) {
		const inMemory = this._kvMetrics.get(runId)?.get(key);
		if (inMemory !== void 0) return inMemory;
		// Disk fallback: any read/parse error is treated as "not found".
		if (this._basePath !== void 0) try {
			const filePath = join$1(this._basePath, "kv-metrics.json");
			const content = await readFile(filePath, "utf-8");
			const parsed = JSON.parse(content);
			return parsed[runId]?.[key] ?? void 0;
		} catch {}
		return void 0;
	}
	/** Serialize the in-memory kv metrics map to JSON on disk. */
	async _flushKvMetrics() {
		if (this._basePath === void 0) return;
		const serialized = {};
		for (const [runId, runMap] of this._kvMetrics) {
			serialized[runId] = {};
			for (const [key, value] of runMap) serialized[runId][key] = value;
		}
		const filePath = join$1(this._basePath, "kv-metrics.json");
		await writeFile(filePath, JSON.stringify(serialized, null, 2), "utf-8");
	}
	/** All contracts registered for a story (empty list when none). */
	async getContracts(storyKey) {
		return this._contracts.get(storyKey) ?? [];
	}
	/** Replace a story's contracts; records are shallow-copied on store. */
	async setContracts(storyKey, contracts) {
		this._contracts.set(storyKey, contracts.map((c) => ({ ...c })));
	}
	/** Flatten all stored contracts and filter by storyKey/direction. */
	async queryContracts(filter) {
		const all = [];
		for (const records of this._contracts.values()) for (const r of records) all.push(r);
		return all.filter((r) => {
			if (filter?.storyKey !== void 0 && r.storyKey !== filter.storyKey) return false;
			if (filter?.direction !== void 0 && r.direction !== filter.direction) return false;
			return true;
		});
	}
	/**
	 * Replace a story's contract verification results (shallow-copied) and,
	 * when basePath is set, persist the full verification map to
	 * `{basePath}/contract-verifications.json`.
	 */
	async setContractVerification(storyKey, results) {
		this._contractVerifications.set(storyKey, results.map((r) => ({ ...r })));
		if (this._basePath !== void 0) {
			const serialized = {};
			for (const [key, records] of this._contractVerifications) serialized[key] = records;
			const filePath = join$1(this._basePath, "contract-verifications.json");
			await writeFile(filePath, JSON.stringify(serialized, null, 2), "utf-8");
		}
	}
	/** Verification results previously stored for a story (empty list when none). */
	async getContractVerification(storyKey) {
		return this._contractVerifications.get(storyKey) ?? [];
	}
	// Branch lifecycle is a no-op for the file store (no versioning backend).
	async branchForStory(_storyKey) {}
	async mergeStory(_storyKey) {}
	async rollbackStory(_storyKey) {}
	/** Diff is always empty: the file store keeps no branch history. */
	async diffStory(storyKey) {
		return {
			storyKey,
			tables: []
		};
	}
	/** History is always empty: the file store keeps no commit log. */
	async getHistory(_limit) {
		return [];
	}
};
546
+
547
+ //#endregion
548
//#region src/modules/state/errors.ts
/**
 * Typed error classes for the Dolt state store.
 */
/** Base error carrying a machine-readable `code` alongside the message. */
var StateStoreError = class extends Error {
	code;
	constructor(code, message) {
		super(message);
		this.name = "StateStoreError";
		this.code = code;
	}
};
/**
 * Raised when a Dolt merge hits conflicting rows. Optionally carries the
 * conflicting row key and both sides' values for diagnostics.
 */
var DoltMergeConflictError = class extends StateStoreError {
	table;
	conflictingKeys;
	rowKey;
	ourValue;
	theirValue;
	constructor(table, conflictingKeys, options) {
		const keyList = conflictingKeys.join(", ");
		super("DOLT_MERGE_CONFLICT", `Merge conflict in table '${table}' on keys: ${keyList}`);
		this.name = "DoltMergeConflictError";
		this.table = table;
		this.conflictingKeys = conflictingKeys;
		if (options) {
			this.rowKey = options.rowKey;
			this.ourValue = options.ourValue;
			this.theirValue = options.theirValue;
		}
	}
};
/** Alias for DoltMergeConflictError — used by orchestrator branch lifecycle. */
const DoltMergeConflict = DoltMergeConflictError;
580
+
581
+ //#endregion
582
//#region src/modules/state/dolt-store.ts
// Module-scoped logger for the Dolt state store.
const log = createLogger("modules:state:dolt");
/**
 * Validate that a story key matches the expected pattern (e.g. "26-7", "1-1a", "NEW-26", "E6").
 * Prevents SQL injection via string-interpolated identifiers.
 */
// Duplicate of STORY_KEY_PATTERN$1 in the pipeline-shared region — keep in sync.
const STORY_KEY_PATTERN = /^[A-Za-z0-9]+(-[A-Za-z0-9]+)?$/;
// Throws DoltQueryError when the key does not match STORY_KEY_PATTERN;
// callers use this as the safety gate before interpolating identifiers into SQL.
function assertValidStoryKey(storyKey) {
	if (!STORY_KEY_PATTERN.test(storyKey)) throw new DoltQueryError("assertValidStoryKey", `Invalid story key: '${storyKey}'. Must match pattern <key> or <epic>-<story> (e.g. "E6", "10-1", "1-1a", "NEW-26").`);
}
592
+ /**
593
+ * Dolt-backed implementation of the StateStore interface.
594
+ *
595
+ * Constructor accepts a deps object for DI: `{ repoPath, client }`.
596
+ * Call `initialize()` before any CRUD operations.
597
+ */
598
+ var DoltStateStore = class DoltStateStore {
599
+ _repoPath;
600
+ _client;
601
+ _storyBranches = new Map();
602
+ constructor(options) {
603
+ this._repoPath = options.repoPath;
604
+ this._client = options.client;
605
+ }
606
+ /**
607
+ * Return the branch name for a story if one has been created via branchForStory(),
608
+ * or undefined to use the default (main) branch.
609
+ */
610
+ _branchFor(storyKey) {
611
+ return this._storyBranches.get(storyKey);
612
+ }
613
+ async initialize() {
614
+ await this._client.connect();
615
+ await this._runMigrations();
616
+ await this.flush("substrate: schema migrations");
617
+ log.debug("DoltStateStore initialized at %s", this._repoPath);
618
+ }
619
+ async close() {
620
+ await this._client.close();
621
+ }
622
+ async _runMigrations() {
623
+ const ddl = [
624
+ `CREATE TABLE IF NOT EXISTS stories (
625
+ story_key VARCHAR(100) NOT NULL,
626
+ phase VARCHAR(30) NOT NULL DEFAULT 'PENDING',
627
+ review_cycles INT NOT NULL DEFAULT 0,
628
+ last_verdict VARCHAR(64) NULL,
629
+ error TEXT NULL,
630
+ started_at VARCHAR(64) NULL,
631
+ completed_at VARCHAR(64) NULL,
632
+ sprint VARCHAR(50) NULL,
633
+ PRIMARY KEY (story_key)
634
+ )`,
635
+ `CREATE TABLE IF NOT EXISTS metrics (
636
+ id BIGINT NOT NULL AUTO_INCREMENT,
637
+ story_key VARCHAR(100) NOT NULL,
638
+ task_type VARCHAR(100) NOT NULL,
639
+ model VARCHAR(100) NULL,
640
+ tokens_in BIGINT NULL,
641
+ tokens_out BIGINT NULL,
642
+ cache_read_tokens BIGINT NULL,
643
+ cost_usd DOUBLE NULL,
644
+ wall_clock_ms BIGINT NULL,
645
+ review_cycles INT NULL,
646
+ stall_count INT NULL,
647
+ result VARCHAR(30) NULL,
648
+ recorded_at VARCHAR(64) NULL,
649
+ sprint VARCHAR(50) NULL,
650
+ PRIMARY KEY (id)
651
+ )`,
652
+ `CREATE TABLE IF NOT EXISTS contracts (
653
+ story_key VARCHAR(100) NOT NULL,
654
+ contract_name VARCHAR(200) NOT NULL,
655
+ direction VARCHAR(20) NOT NULL,
656
+ schema_path VARCHAR(500) NULL,
657
+ transport VARCHAR(200) NULL,
658
+ PRIMARY KEY (story_key, contract_name, direction)
659
+ )`,
660
+ `CREATE TABLE IF NOT EXISTS review_verdicts (
661
+ id BIGINT NOT NULL AUTO_INCREMENT,
662
+ story_key VARCHAR(100) NOT NULL,
663
+ task_type VARCHAR(100) NOT NULL,
664
+ verdict VARCHAR(64) NOT NULL,
665
+ issues_count INT NULL,
666
+ notes TEXT NULL,
667
+ timestamp VARCHAR(64) NULL,
668
+ PRIMARY KEY (id)
669
+ )`
670
+ ];
671
+ for (const sql of ddl) await this._client.query(sql);
672
+ try {
673
+ const colRows = await this._client.query(`SHOW COLUMNS FROM repo_map_symbols LIKE 'dependencies'`);
674
+ if (colRows.length === 0) {
675
+ await this._client.query(`ALTER TABLE repo_map_symbols ADD COLUMN dependencies JSON`);
676
+ await this._client.query(`INSERT IGNORE INTO _schema_version (version, description) VALUES (6, 'Add dependencies JSON column to repo_map_symbols (Epic 28-3)')`);
677
+ log.info({
678
+ component: "dolt-state",
679
+ migration: "v5-to-v6",
680
+ column: "dependencies",
681
+ table: "repo_map_symbols"
682
+ }, "Applied migration v5-to-v6: added dependencies column to repo_map_symbols");
683
+ }
684
+ } catch {
685
+ log.debug("Skipping repo_map_symbols migration: table not yet created");
686
+ }
687
+ log.debug("Schema migrations applied");
688
+ }
689
+ /**
690
+ * Commit pending Dolt changes on the current branch.
691
+ * Callers can invoke this after a batch of writes for explicit durability.
692
+ */
693
+ async flush(message = "substrate: auto-commit") {
694
+ try {
695
+ await this._client.execArgs(["add", "."]);
696
+ await this._client.execArgs([
697
+ "commit",
698
+ "--allow-empty",
699
+ "-m",
700
+ message
701
+ ]);
702
+ log.debug("Dolt flush committed: %s", message);
703
+ } catch (err) {
704
+ const detail = err instanceof Error ? err.message : String(err);
705
+ log.warn({ detail }, "Dolt flush failed (non-fatal)");
706
+ }
707
+ }
708
+ async getStoryState(storyKey) {
709
+ const rows = await this._client.query("SELECT * FROM stories WHERE story_key = ?", [storyKey]);
710
+ if (rows.length === 0) return void 0;
711
+ return this._rowToStory(rows[0]);
712
+ }
713
+ async setStoryState(storyKey, state) {
714
+ const branch = this._branchFor(storyKey);
715
+ const sql = `REPLACE INTO stories
716
+ (story_key, phase, review_cycles, last_verdict, error, started_at, completed_at, sprint)
717
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)`;
718
+ await this._client.query(sql, [
719
+ storyKey,
720
+ state.phase,
721
+ state.reviewCycles,
722
+ state.lastVerdict ?? null,
723
+ state.error ?? null,
724
+ state.startedAt ?? null,
725
+ state.completedAt ?? null,
726
+ state.sprint ?? null
727
+ ], branch);
728
+ }
729
+ async queryStories(filter) {
730
+ const conditions = [];
731
+ const params = [];
732
+ if (filter.phase !== void 0) {
733
+ const phases = Array.isArray(filter.phase) ? filter.phase : [filter.phase];
734
+ const placeholders = phases.map(() => "?").join(", ");
735
+ conditions.push(`phase IN (${placeholders})`);
736
+ params.push(...phases);
737
+ }
738
+ if (filter.sprint !== void 0) {
739
+ conditions.push("sprint = ?");
740
+ params.push(filter.sprint);
741
+ }
742
+ if (filter.storyKey !== void 0) {
743
+ conditions.push("story_key = ?");
744
+ params.push(filter.storyKey);
745
+ }
746
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
747
+ const sql = `SELECT * FROM stories ${where} ORDER BY story_key`;
748
+ const rows = await this._client.query(sql, params);
749
+ return rows.map((r) => this._rowToStory(r));
750
+ }
751
+ _rowToStory(row) {
752
+ return {
753
+ storyKey: row.story_key,
754
+ phase: row.phase,
755
+ reviewCycles: Number(row.review_cycles),
756
+ lastVerdict: row.last_verdict ?? void 0,
757
+ error: row.error ?? void 0,
758
+ startedAt: row.started_at ?? void 0,
759
+ completedAt: row.completed_at ?? void 0,
760
+ sprint: row.sprint ?? void 0
761
+ };
762
+ }
763
+ async recordMetric(metric) {
764
+ const branch = this._branchFor(metric.storyKey);
765
+ const recordedAt = metric.recordedAt ?? metric.timestamp ?? new Date().toISOString();
766
+ const sql = `INSERT INTO metrics
767
+ (story_key, task_type, model, tokens_in, tokens_out, cache_read_tokens,
768
+ cost_usd, wall_clock_ms, review_cycles, stall_count, result, recorded_at, sprint)
769
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
770
+ await this._client.query(sql, [
771
+ metric.storyKey,
772
+ metric.taskType,
773
+ metric.model ?? null,
774
+ metric.tokensIn ?? null,
775
+ metric.tokensOut ?? null,
776
+ metric.cacheReadTokens ?? null,
777
+ metric.costUsd ?? null,
778
+ metric.wallClockMs ?? null,
779
+ metric.reviewCycles ?? null,
780
+ metric.stallCount ?? null,
781
+ metric.result ?? null,
782
+ recordedAt,
783
+ metric.sprint ?? null
784
+ ], branch);
785
+ }
786
+ async queryMetrics(filter) {
787
+ const conditions = [];
788
+ const params = [];
789
+ const storyKey = filter.storyKey ?? filter.story_key;
790
+ const taskType = filter.taskType ?? filter.task_type;
791
+ if (storyKey !== void 0) {
792
+ conditions.push("story_key = ?");
793
+ params.push(storyKey);
794
+ }
795
+ if (taskType !== void 0) {
796
+ conditions.push("task_type = ?");
797
+ params.push(taskType);
798
+ }
799
+ if (filter.sprint !== void 0) {
800
+ conditions.push("sprint = ?");
801
+ params.push(filter.sprint);
802
+ }
803
+ if (filter.dateFrom !== void 0) {
804
+ conditions.push("recorded_at >= ?");
805
+ params.push(filter.dateFrom);
806
+ }
807
+ if (filter.dateTo !== void 0) {
808
+ conditions.push("recorded_at <= ?");
809
+ params.push(filter.dateTo);
810
+ }
811
+ if (filter.since !== void 0) {
812
+ conditions.push("recorded_at >= ?");
813
+ params.push(filter.since);
814
+ }
815
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
816
+ if (filter.aggregate) {
817
+ const sql$1 = `SELECT task_type,
818
+ AVG(cost_usd) AS avg_cost_usd,
819
+ SUM(tokens_in) AS sum_tokens_in,
820
+ SUM(tokens_out) AS sum_tokens_out,
821
+ COUNT(*) AS count
822
+ FROM metrics ${where} GROUP BY task_type ORDER BY task_type`;
823
+ const aggRows = await this._client.query(sql$1, params);
824
+ return aggRows.map((r) => this._aggregateRowToMetric(r));
825
+ }
826
+ const sql = `SELECT * FROM metrics ${where} ORDER BY id`;
827
+ const rows = await this._client.query(sql, params);
828
+ return rows.map((r) => this._rowToMetric(r));
829
+ }
830
+ _aggregateRowToMetric(row) {
831
+ return {
832
+ storyKey: "",
833
+ taskType: row.task_type,
834
+ costUsd: row.avg_cost_usd ?? void 0,
835
+ tokensIn: row.sum_tokens_in ?? void 0,
836
+ tokensOut: row.sum_tokens_out ?? void 0,
837
+ count: row.count,
838
+ result: "aggregate"
839
+ };
840
+ }
841
+ _rowToMetric(row) {
842
+ return {
843
+ storyKey: row.story_key,
844
+ taskType: row.task_type,
845
+ model: row.model ?? void 0,
846
+ tokensIn: row.tokens_in ?? void 0,
847
+ tokensOut: row.tokens_out ?? void 0,
848
+ cacheReadTokens: row.cache_read_tokens ?? void 0,
849
+ costUsd: row.cost_usd ?? void 0,
850
+ wallClockMs: row.wall_clock_ms ?? void 0,
851
+ reviewCycles: row.review_cycles ?? void 0,
852
+ stallCount: row.stall_count ?? void 0,
853
+ result: row.result ?? void 0,
854
+ recordedAt: row.recorded_at ?? void 0,
855
+ sprint: row.sprint ?? void 0,
856
+ timestamp: row.timestamp ?? row.recorded_at ?? void 0
857
+ };
858
+ }
859
+ async getContracts(storyKey) {
860
+ const rows = await this._client.query("SELECT * FROM contracts WHERE story_key = ? ORDER BY contract_name", [storyKey]);
861
+ return rows.map((r) => this._rowToContract(r));
862
+ }
863
+ async setContracts(storyKey, contracts) {
864
+ const branch = this._branchFor(storyKey);
865
+ await this._client.query("DELETE FROM contracts WHERE story_key = ?", [storyKey], branch);
866
+ for (const c of contracts) await this._client.query(`INSERT INTO contracts (story_key, contract_name, direction, schema_path, transport)
867
+ VALUES (?, ?, ?, ?, ?)`, [
868
+ c.storyKey,
869
+ c.contractName,
870
+ c.direction,
871
+ c.schemaPath,
872
+ c.transport ?? null
873
+ ], branch);
874
+ }
875
+ _rowToContract(row) {
876
+ return {
877
+ storyKey: row.story_key,
878
+ contractName: row.contract_name,
879
+ direction: row.direction,
880
+ schemaPath: row.schema_path,
881
+ transport: row.transport ?? void 0
882
+ };
883
+ }
884
+ async queryContracts(filter) {
885
+ const conditions = [];
886
+ const params = [];
887
+ if (filter?.storyKey !== void 0) {
888
+ conditions.push("story_key = ?");
889
+ params.push(filter.storyKey);
890
+ }
891
+ if (filter?.direction !== void 0) {
892
+ conditions.push("direction = ?");
893
+ params.push(filter.direction);
894
+ }
895
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
896
+ const sql = `SELECT * FROM contracts ${where} ORDER BY story_key, contract_name`;
897
+ const rows = await this._client.query(sql, params);
898
+ return rows.map((r) => this._rowToContract(r));
899
+ }
900
/**
 * Replace the story's contract-verification verdicts with the given results.
 * Prior 'contract-verification' rows are deleted first, then one row is
 * inserted per result on the story's branch.
 */
async setContractVerification(storyKey, results) {
  const branch = this._branchFor(storyKey);
  await this._client.query(`DELETE FROM review_verdicts WHERE story_key = ? AND task_type = 'contract-verification'`, [storyKey], branch);
  // issues_count stores the TOTAL number of failing contracts in this batch,
  // repeated on every inserted row (not a per-contract count).
  const failCount = results.filter((r) => r.verdict === "fail").length;
  // Contract identity and mismatch details are packed as JSON into the notes
  // column; getContractVerification() unpacks them.
  for (const r of results) await this._client.query(`INSERT INTO review_verdicts (story_key, task_type, verdict, issues_count, notes, timestamp)
          VALUES (?, 'contract-verification', ?, ?, ?, ?)`, [
    storyKey,
    r.verdict,
    failCount,
    JSON.stringify({
      contractName: r.contractName,
      mismatchDescription: r.mismatchDescription
    }),
    r.verifiedAt
  ], branch);
}
916
+ async getContractVerification(storyKey) {
917
+ const rows = await this._client.query(`SELECT * FROM review_verdicts WHERE story_key = ? AND task_type = 'contract-verification' ORDER BY timestamp DESC`, [storyKey]);
918
+ return rows.map((row) => {
919
+ let contractName = "";
920
+ let mismatchDescription;
921
+ if (row.notes !== null) try {
922
+ const parsed = JSON.parse(row.notes);
923
+ if (typeof parsed.contractName === "string") contractName = parsed.contractName;
924
+ if (typeof parsed.mismatchDescription === "string") mismatchDescription = parsed.mismatchDescription;
925
+ } catch {}
926
+ return {
927
+ storyKey: row.story_key,
928
+ contractName,
929
+ verdict: row.verdict,
930
+ ...mismatchDescription !== void 0 ? { mismatchDescription } : {},
931
+ verifiedAt: row.timestamp ?? new Date().toISOString()
932
+ };
933
+ });
934
+ }
935
/**
 * Create an isolated Dolt branch `story/<storyKey>` off main and register it
 * for later mergeStory()/rollbackStory() calls.
 * Throws DoltQueryError when branch creation fails.
 */
async branchForStory(storyKey) {
  // Validates the key before it is interpolated into the SQL below —
  // presumably rejects characters unsafe for branch names; confirm upstream.
  assertValidStoryKey(storyKey);
  const branchName = `story/${storyKey}`;
  try {
    await this._client.query(`CALL DOLT_BRANCH('${branchName}')`, [], "main");
    // Remember the branch so merge/rollback/diff can find it later.
    this._storyBranches.set(storyKey, branchName);
    log.debug("Created Dolt branch %s for story %s", branchName, storyKey);
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new DoltQueryError(`CALL DOLT_BRANCH('${branchName}')`, detail);
  }
}
947
/**
 * Merge a story's branch back into main.
 *
 * Steps: best-effort pre-merge commits on both the story branch and main
 * (so DOLT_MERGE sees clean working sets), then DOLT_MERGE on main.
 * On conflict, a sample conflicting row is extracted (best-effort) and
 * DoltMergeConflictError is thrown; otherwise the merge is committed.
 * The branch registration is always dropped once the outcome is known.
 *
 * @throws DoltMergeConflictError on merge conflicts.
 * @throws DoltQueryError on any other merge failure.
 */
async mergeStory(storyKey) {
  assertValidStoryKey(storyKey);
  const branchName = this._storyBranches.get(storyKey);
  if (branchName === void 0) {
    // Nothing registered for this story (already merged or never branched).
    log.warn({ storyKey }, "mergeStory called but no branch registered — no-op");
    return;
  }
  try {
    // Best-effort: snapshot any uncommitted rows on the story branch.
    try {
      await this._client.query(`CALL DOLT_ADD('-A')`, [], branchName);
      await this._client.query(`CALL DOLT_COMMIT('-m', 'Story ${storyKey}: pre-merge commit', '--allow-empty')`, [], branchName);
    } catch {}
    // Best-effort: likewise snapshot main so the merge target is clean.
    try {
      await this._client.query(`CALL DOLT_ADD('-A')`, [], "main");
      await this._client.query(`CALL DOLT_COMMIT('-m', 'substrate: pre-merge auto-commit', '--allow-empty')`, [], "main");
    } catch {}
    const mergeRows = await this._client.query(`CALL DOLT_MERGE('${branchName}')`, [], "main");
    const mergeResult = mergeRows[0];
    if (mergeResult && (mergeResult.conflicts ?? 0) > 0) {
      // Conflict reporting only inspects the stories conflict table; `table`
      // is hard-coded accordingly (other tables' conflicts are not sampled).
      let table = "stories";
      let rowKey = "unknown";
      let ourValue;
      let theirValue;
      try {
        const conflictRows = await this._client.query(`SELECT * FROM dolt_conflicts_stories LIMIT 1`, [], "main");
        if (conflictRows.length > 0) {
          const row = conflictRows[0];
          rowKey = String(row["base_story_key"] ?? row["our_story_key"] ?? "unknown");
          ourValue = JSON.stringify(row["our_status"] ?? row);
          theirValue = JSON.stringify(row["their_status"] ?? row);
        }
      } catch {}
      this._storyBranches.delete(storyKey);
      throw new DoltMergeConflictError(table, [rowKey], {
        rowKey,
        ourValue,
        theirValue
      });
    }
    try {
      await this._client.query(`CALL DOLT_COMMIT('-m', 'Merge story ${storyKey}: COMPLETE')`, [], "main");
    } catch (commitErr) {
      // A fast-forward merge may leave nothing to commit — that is fine.
      const msg = commitErr instanceof Error ? commitErr.message : String(commitErr);
      if (!msg.includes("nothing to commit")) throw commitErr;
    }
    this._storyBranches.delete(storyKey);
    log.debug("Merged branch %s into main for story %s", branchName, storyKey);
  } catch (err) {
    // Re-throw conflict errors untouched; wrap everything else.
    if (err instanceof DoltMergeConflictError) throw err;
    const detail = err instanceof Error ? err.message : String(err);
    throw new DoltQueryError(`CALL DOLT_MERGE('${branchName}')`, detail);
  }
}
1000
+ async rollbackStory(storyKey) {
1001
+ assertValidStoryKey(storyKey);
1002
+ const branchName = this._storyBranches.get(storyKey);
1003
+ if (branchName === void 0) {
1004
+ log.warn({ storyKey }, "rollbackStory called but no branch registered — no-op");
1005
+ return;
1006
+ }
1007
+ try {
1008
+ await this._client.query(`CALL DOLT_BRANCH('-D', '${branchName}')`, [], "main");
1009
+ this._storyBranches.delete(storyKey);
1010
+ log.debug("Rolled back (deleted) branch %s for story %s", branchName, storyKey);
1011
+ } catch (err) {
1012
+ const detail = err instanceof Error ? err.message : String(err);
1013
+ log.warn({
1014
+ detail,
1015
+ storyKey,
1016
+ branchName
1017
+ }, "rollbackStory failed (non-fatal)");
1018
+ this._storyBranches.delete(storyKey);
1019
+ }
1020
+ }
1021
/**
 * Tables queried by diffStory(). Each table is checked for row-level changes
 * via SELECT * FROM DOLT_DIFF('main', branchName, tableName).
 * Tables missing on either revision are skipped silently by _diffRange().
 */
static DIFF_TABLES = [
  "stories",
  "contracts",
  "metrics",
  "dispatch_log",
  "build_results",
  "review_verdicts"
];
1033
/**
 * Compute the row-level diff for a story against main.
 * For an active story this diffs main vs the story branch; for a story with
 * no registered branch it falls back to diffing the story's merge commit.
 */
async diffStory(storyKey) {
  assertValidStoryKey(storyKey);
  const branchName = this._storyBranches.get(storyKey);
  // No registered branch: the story was presumably already merged — locate
  // its merge commit in the Dolt log instead.
  if (branchName === void 0) return this._diffMergedStory(storyKey);
  try {
    // Best-effort snapshot so DOLT_DIFF sees uncommitted working-set rows.
    await this._client.query(`CALL DOLT_ADD('-A')`, [], branchName);
    await this._client.query(`CALL DOLT_COMMIT('-m', 'Story ${storyKey}: pre-diff snapshot', '--allow-empty')`, [], branchName);
  } catch {}
  return this._diffRange("main", branchName, storyKey);
}
1043
+ /**
1044
+ * Diff a merged story by finding its merge commit in the Dolt log.
1045
+ * Queries the `dolt_log` system table for commits referencing the story,
1046
+ * then diffs `<hash>~1` vs `<hash>` for row-level changes.
1047
+ */
1048
+ async _diffMergedStory(storyKey) {
1049
+ try {
1050
+ const rows = await this._client.query(`SELECT commit_hash FROM dolt_log WHERE message LIKE ? LIMIT 1`, [`%${storyKey}%`]);
1051
+ if (rows.length === 0) return {
1052
+ storyKey,
1053
+ tables: []
1054
+ };
1055
+ const hash = String(rows[0].commit_hash);
1056
+ if (!hash) return {
1057
+ storyKey,
1058
+ tables: []
1059
+ };
1060
+ return this._diffRange(`${hash}~1`, hash, storyKey);
1061
+ } catch {
1062
+ return {
1063
+ storyKey,
1064
+ tables: []
1065
+ };
1066
+ }
1067
+ }
1068
+ /**
1069
+ * Compute row-level diffs between two Dolt revisions (branches or commit hashes)
1070
+ * across all tracked tables.
1071
+ */
1072
+ async _diffRange(fromRef, toRef, storyKey) {
1073
+ const tableDiffs = [];
1074
+ for (const table of DoltStateStore.DIFF_TABLES) try {
1075
+ const rows = await this._client.query(`SELECT * FROM DOLT_DIFF('${fromRef}', '${toRef}', '${table}')`, [], "main");
1076
+ if (rows.length === 0) continue;
1077
+ const added = [];
1078
+ const modified = [];
1079
+ const deleted = [];
1080
+ for (const row of rows) {
1081
+ const diffType = row["diff_type"];
1082
+ const rowKey = this._extractRowKey(row);
1083
+ const before = this._extractPrefixedFields(row, "before_");
1084
+ const after = this._extractPrefixedFields(row, "after_");
1085
+ const diffRow = {
1086
+ rowKey,
1087
+ ...before !== void 0 && { before },
1088
+ ...after !== void 0 && { after }
1089
+ };
1090
+ if (diffType === "added") added.push(diffRow);
1091
+ else if (diffType === "modified") modified.push(diffRow);
1092
+ else if (diffType === "removed") deleted.push(diffRow);
1093
+ }
1094
+ if (added.length > 0 || modified.length > 0 || deleted.length > 0) tableDiffs.push({
1095
+ table,
1096
+ added,
1097
+ modified,
1098
+ deleted
1099
+ });
1100
+ } catch {}
1101
+ return {
1102
+ storyKey,
1103
+ tables: tableDiffs
1104
+ };
1105
+ }
1106
+ /**
1107
+ * Extract a human-readable row key from a DOLT_DIFF result row.
1108
+ * Tries after_ fields first (for added/modified rows), then before_ fields
1109
+ * (for removed rows). Skips commit_hash pseudo-columns.
1110
+ */
1111
+ _extractRowKey(row) {
1112
+ for (const prefix of ["after_", "before_"]) for (const [key, val] of Object.entries(row)) if (key.startsWith(prefix) && !key.endsWith("_commit_hash") && val !== null && val !== void 0) return String(val);
1113
+ return "unknown";
1114
+ }
1115
+ /**
1116
+ * Extract all fields with a given prefix from a DOLT_DIFF result row,
1117
+ * stripping the prefix from the key names. Returns undefined if no matching
1118
+ * fields are found.
1119
+ */
1120
+ _extractPrefixedFields(row, prefix) {
1121
+ const result = {};
1122
+ for (const [key, val] of Object.entries(row)) if (key.startsWith(prefix)) result[key.slice(prefix.length)] = val;
1123
+ return Object.keys(result).length > 0 ? result : void 0;
1124
+ }
1125
/**
 * In-memory KV store for per-run arbitrary metrics. Not persisted to Dolt.
 * Shape: Map<runId, Map<key, value>> — contents are lost on process exit.
 */
_kvMetrics = new Map();
1127
+ async setMetric(runId, key, value) {
1128
+ let runMap = this._kvMetrics.get(runId);
1129
+ if (runMap === void 0) {
1130
+ runMap = new Map();
1131
+ this._kvMetrics.set(runId, runMap);
1132
+ }
1133
+ runMap.set(key, value);
1134
+ }
1135
+ async getMetric(runId, key) {
1136
+ return this._kvMetrics.get(runId)?.get(key);
1137
+ }
1138
+ async getHistory(limit) {
1139
+ const effectiveLimit = limit ?? 20;
1140
+ try {
1141
+ const rows = await this._client.query(`SELECT commit_hash, date, message, committer FROM dolt_log LIMIT ?`, [effectiveLimit]);
1142
+ const entries = [];
1143
+ for (const row of rows) {
1144
+ const hash = String(row.commit_hash ?? "");
1145
+ const dateVal = row.date;
1146
+ const timestamp = dateVal instanceof Date ? dateVal.toISOString() : String(dateVal ?? "");
1147
+ const message = String(row.message ?? "");
1148
+ const author = row.committer ? String(row.committer) : void 0;
1149
+ const storyKeyMatch = /story\/([0-9]+-[0-9]+)/i.exec(message);
1150
+ entries.push({
1151
+ hash,
1152
+ timestamp,
1153
+ storyKey: storyKeyMatch ? storyKeyMatch[1] : null,
1154
+ message,
1155
+ author
1156
+ });
1157
+ }
1158
+ return entries;
1159
+ } catch (err) {
1160
+ const detail = err instanceof Error ? err.message : String(err);
1161
+ throw new DoltQueryError("getHistory", detail);
1162
+ }
1163
+ }
1164
+ };
1165
+
1166
+ //#endregion
1167
+ //#region src/modules/state/index.ts
1168
// Module-scoped logger for the state-store factory below.
const logger$1 = createLogger("state:factory");
1169
/**
 * Synchronously check whether Dolt is available and a Dolt repo exists at the
 * canonical state path under `basePath`.
 *
 * @param basePath - Project root to check (e.g. `process.cwd()`).
 * @returns `{ available: true, reason: '...' }` when both probes pass,
 *          `{ available: false, reason: '...' }` otherwise.
 */
function detectDoltAvailableSync(basePath) {
  // Probe 1: is the dolt binary runnable at all?
  const probe = spawnSync("dolt", ["version"], { stdio: "ignore" });
  if (probe.error != null || probe.status !== 0) {
    return {
      available: false,
      reason: "dolt binary not found on PATH"
    };
  }
  // Probe 2: has a repo been initialised under .substrate/state?
  const stateDoltDir = join$1(basePath, ".substrate", "state", ".dolt");
  if (!existsSync(stateDoltDir)) {
    return {
      available: false,
      reason: `Dolt repo not initialised at ${stateDoltDir}`
    };
  }
  return {
    available: true,
    reason: "dolt binary found and repo initialised"
  };
}
1195
/**
 * Create a StateStore backed by the specified backend.
 *
 * Backends: "dolt" forces DoltStateStore; "auto" probes for Dolt and falls
 * back to FileStateStore; anything else yields FileStateStore.
 *
 * @param config - Optional configuration. Defaults to `{ backend: 'auto' }`.
 * @returns A StateStore instance. Call `initialize()` before use.
 */
function createStateStore(config = {}) {
  const backend = config.backend ?? "auto";
  if (backend === "dolt") {
    const repoPath = config.basePath ?? process.cwd();
    return new DoltStateStore({
      repoPath,
      client: new DoltClient({ repoPath })
    });
  }
  if (backend === "auto") {
    const repoPath = config.basePath ?? process.cwd();
    const detection = detectDoltAvailableSync(repoPath);
    if (!detection.available) {
      logger$1.debug(`Dolt not found, using FileStateStore (reason: ${detection.reason})`);
      return new FileStateStore({ basePath: config.basePath });
    }
    logger$1.debug(`Dolt detected, using DoltStateStore (state path: ${join$1(repoPath, ".substrate", "state")})`);
    return new DoltStateStore({
      repoPath,
      client: new DoltClient({ repoPath })
    });
  }
  // Unknown/explicit file backend: file-based store.
  return new FileStateStore({ basePath: config.basePath });
}
1228
+
1229
+ //#endregion
1230
+ //#region src/cli/commands/health.ts
1231
// Module-scoped logger for the health command.
const logger = createLogger("health-cmd");
/** Default stall threshold in seconds — also used by supervisor default */
const DEFAULT_STALL_THRESHOLD_SECONDS = 600;
1234
/**
 * Determine whether a ps output line represents the substrate pipeline orchestrator.
 * Handles invocation via:
 * - `substrate run` (globally installed)
 * - `substrate-ai run`
 * - `node dist/cli/index.js run` (npm run substrate:dev)
 * - `npx substrate run`
 * - any node process whose command contains `run` with `--events` or `--stories`
 *
 * When `projectRoot` is provided, additionally checks that the command line
 * contains that path (via `--project-root` flag or as part of the binary/CWD path).
 * This ensures multi-project environments match the correct orchestrator.
 */
function isOrchestratorProcessLine(line, projectRoot) {
  // Never match the grep that may itself be searching for the orchestrator.
  if (line.includes("grep")) return false;
  const nodeRunHeuristic =
    line.includes("node") &&
    /\srun(\s|$)/.test(line) &&
    (line.includes("substrate") || line.includes("--events") || line.includes("--stories"));
  const matched =
    line.includes("substrate run") ||
    line.includes("substrate-ai run") ||
    line.includes("index.js run") ||
    nodeRunHeuristic;
  if (!matched) return false;
  // Multi-project disambiguation: require the project path on the command line.
  return projectRoot === void 0 || line.includes(projectRoot);
}
1258
/**
 * Inspect the process table for the substrate orchestrator and its children.
 *
 * Detection order:
 *  1. If `substrateDirPath` is given, read `orchestrator.pid` and accept the
 *     PID only if a live (non-zombie) process with that PID exists; a dead
 *     pidfile sets `pid_file_dead: true` instead.
 *  2. Otherwise fall back to scanning ps output with isOrchestratorProcessLine().
 * Once a PID is found, direct children are collected and zombie children
 * (STAT containing "Z") are reported separately.
 *
 * `execFileSync` / `readFileSync` overrides exist for testing. All failures
 * are swallowed — the function always returns a result object.
 */
function inspectProcessTree(opts) {
  const { projectRoot, substrateDirPath, execFileSync: execFileSyncOverride, readFileSync: readFileSyncOverride } = opts ?? {};
  const result = {
    orchestrator_pid: null,
    child_pids: [],
    zombies: []
  };
  try {
    let psOutput;
    // Use the injected ps runner in tests; lazily require the real one otherwise.
    if (execFileSyncOverride !== void 0) psOutput = execFileSyncOverride("ps", ["-eo", "pid,ppid,stat,command"], {
      encoding: "utf-8",
      timeout: 5e3
    });
    else {
      const { execFileSync: execFileSync$1 } = __require("node:child_process");
      psOutput = execFileSync$1("ps", ["-eo", "pid,ppid,stat,command"], {
        encoding: "utf-8",
        timeout: 5e3
      });
    }
    const lines = psOutput.split("\n");
    // Preferred source: the orchestrator.pid file, cross-checked against ps.
    if (substrateDirPath !== void 0) try {
      const readFileSyncFn = readFileSyncOverride ?? ((path$1, encoding) => readFileSync(path$1, encoding));
      const pidContent = readFileSyncFn(join(substrateDirPath, "orchestrator.pid"), "utf-8");
      const pid = parseInt(pidContent.trim(), 10);
      if (!isNaN(pid) && pid > 0) {
        // Alive means: PID appears in ps and is not a zombie.
        const isAlive = lines.some((line) => {
          const parts = line.trim().split(/\s+/);
          if (parts.length < 3) return false;
          return parseInt(parts[0], 10) === pid && !parts[2].includes("Z");
        });
        if (isAlive) result.orchestrator_pid = pid;
        else result.pid_file_dead = true;
      }
    } catch {}
    // Fallback: scan command lines for an orchestrator-looking process.
    if (result.orchestrator_pid === null) {
      for (const line of lines) if (isOrchestratorProcessLine(line, projectRoot)) {
        const match = line.trim().match(/^(\d+)/);
        if (match) {
          result.orchestrator_pid = parseInt(match[1], 10);
          break;
        }
      }
    }
    // Collect direct children of the orchestrator; flag zombies.
    if (result.orchestrator_pid !== null) for (const line of lines) {
      const parts = line.trim().split(/\s+/);
      if (parts.length >= 3) {
        const pid = parseInt(parts[0], 10);
        const ppid = parseInt(parts[1], 10);
        const stat$1 = parts[2];
        if (ppid === result.orchestrator_pid && pid !== result.orchestrator_pid) {
          result.child_pids.push(pid);
          if (stat$1.includes("Z")) result.zombies.push(pid);
        }
      }
    }
  } catch {}
  return result;
}
1317
/**
 * Collect all descendant PIDs of the given root PIDs by walking the process
 * tree recursively. This ensures that grandchildren of the orchestrator
 * (e.g. node subprocesses spawned by `claude -p`) are also killed during
 * stall recovery, leaving no orphan processes.
 *
 * Returns only the descendants — the root PIDs themselves are NOT included.
 * Any failure (ps unavailable, timeout) yields an empty array.
 */
function getAllDescendantPids(rootPids, execFileSyncOverride) {
  if (rootPids.length === 0) return [];
  try {
    let psText;
    // Use the injected ps runner in tests; lazily require the real one otherwise.
    if (execFileSyncOverride !== void 0) {
      psText = execFileSyncOverride("ps", ["-eo", "pid,ppid"], {
        encoding: "utf-8",
        timeout: 5e3
      });
    } else {
      const { execFileSync: realExecFileSync } = __require("node:child_process");
      psText = realExecFileSync("ps", ["-eo", "pid,ppid"], {
        encoding: "utf-8",
        timeout: 5e3
      });
    }
    // Build a ppid -> [child pids] index; non-numeric lines (header) are skipped.
    const childIndex = new Map();
    for (const rawLine of psText.split("\n")) {
      const fields = rawLine.trim().split(/\s+/);
      if (fields.length < 2) continue;
      const pid = parseInt(fields[0], 10);
      const ppid = parseInt(fields[1], 10);
      if (isNaN(pid) || isNaN(ppid) || pid <= 0) continue;
      const siblings = childIndex.get(ppid);
      if (siblings === void 0) childIndex.set(ppid, [pid]);
      else siblings.push(pid);
    }
    // Breadth-first walk from the roots; `visited` guards against cycles
    // and keeps the roots themselves out of the result.
    const visited = new Set(rootPids);
    const pending = [...rootPids];
    const found = [];
    while (pending.length > 0) {
      const current = pending.shift();
      for (const child of childIndex.get(current) ?? []) {
        if (visited.has(child)) continue;
        visited.add(child);
        found.push(child);
        pending.push(child);
      }
    }
    return found;
  } catch {
    return [];
  }
}
1369
/**
 * Derive health story counts from manifest `per_story_state`.
 * Maps manifest status strings to health output buckets; any status not
 * explicitly mapped (dispatched, in-review, recovered, unknown) counts as active.
 */
function buildHealthStoryCountsFromManifest(perStoryState) {
  // Statuses that land in a bucket other than "active".
  const bucketFor = new Map([
    ["complete", "completed"],
    ["escalated", "escalated"],
    ["failed", "failed"],
    ["verification-failed", "failed"],
    ["pending", "pending"],
    ["gated", "pending"]
  ]);
  const totals = {
    active: 0,
    completed: 0,
    escalated: 0,
    pending: 0,
    failed: 0
  };
  for (const entry of Object.values(perStoryState)) {
    totals[bucketFor.get(entry.status) ?? "active"] += 1;
  }
  return totals;
}
1405
/**
 * Fetch pipeline health data as a structured object without any stdout side-effects.
 * Used by runSupervisorAction to poll health without formatting overhead.
 *
 * Returns a NO_PIPELINE_RUNNING health object for all graceful "no data" cases
 * (missing DB, missing run, terminal run status). Throws only on unexpected errors.
 */
async function getAutoHealthData(options) {
  const { runId, projectRoot, stateStore, stateStoreConfig } = options;
  const dbRoot = await resolveMainRepoRoot(projectRoot);
  const dbPath = join(dbRoot, ".substrate", "substrate.db");
  // Optional Dolt diagnostics — only gathered when a dolt-backed store is configured.
  let doltStateInfo;
  if (stateStoreConfig?.backend === "dolt" && stateStore) {
    const repoPath = stateStoreConfig.basePath ?? projectRoot;
    const doltDirPath = join(repoPath, ".dolt");
    const initialized = existsSync(doltDirPath);
    let responsive = false;
    let version;
    let branches;
    let currentBranch;
    try {
      // A cheap log read doubles as a liveness probe for the store.
      await stateStore.getHistory(1);
      responsive = true;
      // Best-effort: dolt binary version (failures ignored).
      try {
        const { execFile: ef } = await import("node:child_process");
        const { promisify: p } = await import("node:util");
        const execFileAsync = p(ef);
        const { stdout } = await execFileAsync("dolt", ["version"]);
        const match = stdout.match(/dolt version (\S+)/);
        if (match) version = match[1];
      } catch {}
      // Best-effort: branch list; the "* " marker denotes the current branch.
      try {
        const { execFile: ef } = await import("node:child_process");
        const { promisify: p } = await import("node:util");
        const execFileAsync = p(ef);
        const { stdout } = await execFileAsync("dolt", ["branch", "--list"], { cwd: repoPath });
        const lines = stdout.split("\n").filter((l) => l.trim().length > 0);
        branches = lines.map((l) => {
          const trimmed = l.trim();
          if (trimmed.startsWith("* ")) {
            currentBranch = trimmed.slice(2).trim();
            return currentBranch;
          }
          return trimmed;
        });
      } catch {}
    } catch {
      responsive = false;
    }
    doltStateInfo = {
      initialized,
      responsive,
      ...version !== void 0 ? { version } : {},
      ...branches !== void 0 ? { branches } : {},
      ...currentBranch !== void 0 ? { current_branch: currentBranch } : {}
    };
  }
  // Canonical "nothing running" payload, reused by all graceful exits below.
  const NO_PIPELINE = {
    verdict: "NO_PIPELINE_RUNNING",
    run_id: null,
    status: null,
    current_phase: null,
    staleness_seconds: 0,
    last_activity: "",
    process: {
      orchestrator_pid: null,
      child_pids: [],
      zombies: []
    },
    stories: {
      active: 0,
      completed: 0,
      escalated: 0,
      details: {}
    },
    ...doltStateInfo !== void 0 ? { dolt_state: doltStateInfo } : {}
  };
  // Neither the sqlite DB nor a Dolt state repo exists: nothing to report.
  const doltDir = join(dbRoot, ".substrate", "state", ".dolt");
  if (!existsSync(dbPath) && !existsSync(doltDir)) return NO_PIPELINE;
  const adapter = createDatabaseAdapter$1({
    backend: "auto",
    basePath: dbRoot
  });
  try {
    await initSchema(adapter);
    // Run resolution: explicit runId > current-run-id file (UUID-validated) > latest run.
    let run;
    if (runId !== void 0) run = await getPipelineRunById(adapter, runId);
    else {
      let currentRunId;
      try {
        const currentRunIdPath = join(dbRoot, ".substrate", "current-run-id");
        const content = readFileSync(currentRunIdPath, "utf-8").trim();
        const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
        if (UUID_RE.test(content)) currentRunId = content;
      } catch {}
      if (currentRunId !== void 0) run = await getPipelineRunById(adapter, currentRunId);
      if (run === void 0) run = await getLatestRun(adapter);
    }
    if (run === void 0) {
      // No DB record, but an orchestrator process may still exist (e.g. DB
      // not yet written): report HEALTHY-without-run in that case.
      const substrateDirPath$1 = join(dbRoot, ".substrate");
      const fallbackProcessInfo = inspectProcessTree({
        projectRoot: dbRoot,
        substrateDirPath: substrateDirPath$1
      });
      if (fallbackProcessInfo.orchestrator_pid !== null) return {
        verdict: "HEALTHY",
        run_id: null,
        status: "running",
        current_phase: "implementation",
        staleness_seconds: 0,
        last_activity: new Date().toISOString(),
        process: fallbackProcessInfo,
        stories: {
          active: 0,
          completed: 0,
          escalated: 0,
          details: {}
        },
        ...doltStateInfo !== void 0 ? { dolt_state: doltStateInfo } : {}
      };
      return NO_PIPELINE;
    }
    const updatedAt = parseDbTimestampAsUtc(run.updated_at ?? "");
    const stalenessSeconds = Math.round((Date.now() - updatedAt.getTime()) / 1e3);
    // First source of story counts: the run's token_usage_json snapshot.
    let storyDetails = {};
    let active = 0;
    let completed = 0;
    let escalated = 0;
    let pending = 0;
    try {
      if (run.token_usage_json) {
        const state = JSON.parse(run.token_usage_json);
        if (state.stories) for (const [key, s] of Object.entries(state.stories)) {
          storyDetails[key] = {
            phase: s.phase,
            review_cycles: s.reviewCycles
          };
          if (s.phase === "COMPLETE") completed++;
          else if (s.phase === "ESCALATED") escalated++;
          else if (s.phase === "PENDING") pending++;
          else active++;
        }
      }
    } catch {}
    // Second (preferred) source: the run manifest, when it resolves and reads.
    let manifestSupervisor;
    let manifestStoryCounts;
    try {
      const { manifest: resolvedManifest } = await resolveRunManifest(dbRoot, run.id);
      if (resolvedManifest !== null) {
        const manifestData = await resolvedManifest.read();
        manifestSupervisor = {
          pid: manifestData.supervisor_pid,
          session_id: manifestData.supervisor_session_id
        };
        manifestStoryCounts = buildHealthStoryCountsFromManifest(manifestData.per_story_state);
        logger.debug({ runId: run.id }, "health: story counts and supervisor read from manifest");
      }
    } catch {
      logger.debug({ runId: run.id }, "health: manifest read failed — using token_usage_json counts");
    }
    // Manifest counts win when present; otherwise fall back to the JSON snapshot.
    const finalActive = manifestStoryCounts?.active ?? active;
    const finalCompleted = manifestStoryCounts?.completed ?? completed;
    const finalEscalated = manifestStoryCounts?.escalated ?? escalated;
    const finalPending = manifestStoryCounts?.pending ?? pending;
    const finalFailed = manifestStoryCounts?.failed;
    const substrateDirPath = join(dbRoot, ".substrate");
    const processInfo = options._processInfoOverride ?? inspectProcessTree({
      projectRoot,
      substrateDirPath
    });
    // Verdict: a "running" run is HEALTHY only when the orchestrator process
    // is visible; otherwise any dead pidfile, zombie, staleness beyond the
    // threshold, or still-active stories marks it STALLED.
    let verdict = "NO_PIPELINE_RUNNING";
    if (run.status === "running") if (processInfo.orchestrator_pid !== null) verdict = "HEALTHY";
    else if (processInfo.pid_file_dead === true) verdict = "STALLED";
    else if (processInfo.zombies.length > 0) verdict = "STALLED";
    else if (stalenessSeconds > DEFAULT_STALL_THRESHOLD_SECONDS) verdict = "STALLED";
    else if (finalActive > 0) verdict = "STALLED";
    else verdict = "HEALTHY";
    else if (run.status === "completed" || run.status === "failed" || run.status === "stopped") verdict = "NO_PIPELINE_RUNNING";
    const warnings = [];
    if (doltStateInfo !== void 0 && doltStateInfo.responsive === false) warnings.push("Dolt not connected — decision store queries may fail, story context will be degraded");
    if (finalEscalated > 0) warnings.push(`${finalEscalated} story(ies) escalated — operator intervention may be needed`);
    const healthOutput = {
      verdict,
      run_id: run.id,
      status: run.status,
      current_phase: run.current_phase ?? null,
      staleness_seconds: stalenessSeconds,
      last_activity: run.updated_at ?? "",
      process: processInfo,
      stories: {
        active: finalActive,
        completed: finalCompleted,
        escalated: finalEscalated,
        pending: finalPending,
        ...finalFailed !== void 0 ? { failed: finalFailed } : {},
        details: storyDetails
      },
      ...manifestSupervisor !== void 0 ? { manifest_supervisor: manifestSupervisor } : {},
      ...doltStateInfo !== void 0 ? { dolt_state: doltStateInfo } : {},
      ...warnings.length > 0 ? { warnings } : {}
    };
    return healthOutput;
  } finally {
    // Always release the DB handle, even on the graceful early returns.
    try {
      await adapter.close();
    } catch {}
  }
}
1613
/**
 * Execute the `health` CLI action: collect pipeline health data via
 * getAutoHealthData() and render it either as JSON or as a human-readable
 * report on stdout.
 *
 * Fix: the "Recommended Actions" list under a STALLED verdict is now
 * numbered with a running counter. The previous hard-coded ternaries
 * produced gaps ("1., 3., 4." when an orchestrator PID existed but no
 * zombies) or started at "2." (when neither applied).
 *
 * @param {object} options - Options forwarded to getAutoHealthData();
 *   `options.outputFormat` selects "json" or human-readable output.
 * @returns {Promise<number>} Process exit code: 0 on success, 1 on failure.
 */
async function runHealthAction(options) {
	const { outputFormat } = options;
	try {
		const health = await getAutoHealthData(options);
		if (outputFormat === "json") process.stdout.write(formatOutput(health, "json", true) + "\n");
		else {
			const verdictLabel = health.verdict === "HEALTHY" ? "HEALTHY" : health.verdict === "STALLED" ? "STALLED" : "NO PIPELINE RUNNING";
			process.stdout.write(`\nPipeline Health: ${verdictLabel}\n`);
			if (health.run_id !== null) {
				process.stdout.write(` Run: ${health.run_id}\n`);
				process.stdout.write(` Status: ${health.status}\n`);
				process.stdout.write(` Phase: ${health.current_phase ?? "N/A"}\n`);
				process.stdout.write(` Last Active: ${health.last_activity} (${health.staleness_seconds}s ago)\n`);
				const processInfo = health.process;
				if (processInfo.orchestrator_pid !== null) {
					process.stdout.write(` Orchestrator: PID ${processInfo.orchestrator_pid}\n`);
					process.stdout.write(` Children: ${processInfo.child_pids.length} active`);
					if (processInfo.zombies.length > 0) process.stdout.write(` (${processInfo.zombies.length} ZOMBIE)`);
					process.stdout.write("\n");
				} else process.stdout.write(" Orchestrator: not running\n");
				const storyDetails = health.stories.details;
				if (Object.keys(storyDetails).length > 0) {
					process.stdout.write("\n Stories:\n");
					for (const [key, s] of Object.entries(storyDetails)) process.stdout.write(` ${key}: ${s.phase} (${s.review_cycles} review cycles)\n`);
					process.stdout.write(`\n Summary: ${health.stories.active} active, ${health.stories.completed} completed, ${health.stories.escalated} escalated\n`);
				}
			}
			if (health.verdict === "STALLED") {
				process.stdout.write("\n Recommended Actions:\n");
				// Running counter keeps the action list contiguous (1, 2, 3, ...)
				// no matter which of the optional actions apply.
				let step = 0;
				if (health.process.orchestrator_pid !== null) process.stdout.write(` ${++step}. Kill stalled orchestrator: kill ${health.process.orchestrator_pid}\n`);
				if (health.process.zombies.length > 0) process.stdout.write(` ${++step}. Kill zombie processes: kill ${health.process.zombies.join(" ")}\n`);
				process.stdout.write(` ${++step}. Resume the run: substrate resume\n`);
				process.stdout.write(` ${++step}. Or start fresh: substrate run --events --stories <keys>\n`);
			} else if (health.verdict === "NO_PIPELINE_RUNNING" && health.stories.escalated > 0) {
				process.stdout.write("\n Recommended Actions:\n");
				process.stdout.write(" 1. Retry escalated stories: substrate retry-escalated\n");
				process.stdout.write(" 2. Or start a new run: substrate run --events\n");
			}
			if (health.warnings !== void 0 && health.warnings.length > 0) {
				process.stdout.write("\n Warnings:\n");
				for (const w of health.warnings) process.stdout.write(` ⚠ ${w}\n`);
			}
			if (health.dolt_state !== void 0) {
				const ds = health.dolt_state;
				const initStr = ds.initialized ? "yes" : "no";
				const respStr = ds.responsive ? "yes" : "no";
				const verStr = ds.version !== void 0 ? ` (v${ds.version})` : "";
				process.stdout.write(`\n Dolt State: initialized=${initStr} responsive=${respStr}${verStr}\n`);
			}
		}
		return 0;
	} catch (err) {
		// Report the failure in the caller-selected format, log it, and map
		// it to a non-zero exit code instead of letting it propagate.
		const msg = err instanceof Error ? err.message : String(err);
		if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
		else process.stderr.write(`Error: ${msg}\n`);
		logger.error({ err }, "health action failed");
		return 1;
	}
}
1672
/**
 * Register the `substrate health` command on the given commander program.
 *
 * When `.substrate/state/.dolt` exists under the project root, a Dolt-backed
 * state store is opened and passed through to the health action; any failure
 * to create or initialize it silently degrades to running without a store.
 * The store, if opened, is always closed after the action completes.
 *
 * @param {object} program - Commander program to extend.
 * @param {string} [_version="0.0.0"] - Unused; kept for call-site compatibility.
 * @param {string} [projectRoot=process.cwd()] - Default for --project-root.
 */
function registerHealthCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
	program.command("health").description("Check pipeline health: process status, stall detection, and verdict").option("--run-id <id>", "Pipeline run ID to query (defaults to latest)").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").action(async (opts) => {
		const outputFormat = opts.outputFormat === "json" ? "json" : "human";
		const root = opts.projectRoot;
		let stateStore;
		let stateStoreConfig;
		const doltStatePath = join(root, ".substrate", "state", ".dolt");
		if (existsSync(doltStatePath)) {
			const basePath = join(root, ".substrate", "state");
			stateStoreConfig = {
				backend: "dolt",
				basePath
			};
			try {
				stateStore = createStateStore({
					backend: "dolt",
					basePath
				});
				await stateStore.initialize();
			} catch {
				// Fix: best-effort close of a store whose initialize() failed,
				// so the underlying connection is not leaked when we discard
				// the reference and fall back to running without a store.
				try {
					await stateStore?.close();
				} catch {}
				stateStore = void 0;
				stateStoreConfig = void 0;
			}
		}
		try {
			const exitCode = await runHealthAction({
				outputFormat,
				runId: opts.runId,
				projectRoot: root,
				stateStore,
				stateStoreConfig
			});
			process.exitCode = exitCode;
		} finally {
			// Always release the store, even if the health action threw.
			try {
				await stateStore?.close();
			} catch {}
		}
	});
}
1712
+
1713
+ //#endregion
1714
+ export { BMAD_BASELINE_TOKENS_FULL, DEFAULT_STALL_THRESHOLD_SECONDS, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN$1 as STORY_KEY_PATTERN, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, __commonJS, __require, __toESM, buildPipelineStatusOutput, createDatabaseAdapter$1 as createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, isOrchestratorProcessLine, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, runHealthAction, validateStoryKey };
1715
+ //# sourceMappingURL=health-DC3y-sR6.js.map