substrate-ai 0.6.9 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -1,16 +1,16 @@
1
1
  #!/usr/bin/env node
2
+ import { DoltClient, DoltNotInstalled, FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createDatabaseAdapter, createDoltClient, createStateStore, detectCycles, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot } from "../health-Dnx-FGva.js";
2
3
  import { createLogger } from "../logger-D2fS2ccL.js";
3
4
  import { AdapterRegistry } from "../adapter-registry-D2zdMwVu.js";
4
- import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createConfigSystem, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, registerRunCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-D3wByPiv.js";
5
+ import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createConfigSystem, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, registerRunCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-BW8_vcTi.js";
5
6
  import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-CtGelIsG.js";
6
- import { DoltClient, DoltNotInstalled, FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createDatabaseAdapter, createDoltClient, createStateStore, detectCycles, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot } from "../health-D0lhVJ5C.js";
7
- import { ConfigError, createEventBus } from "../helpers-DtzxPR0u.js";
8
- import { RoutingRecommender } from "../routing-DCWkD4MK.js";
9
- import { addTokenUsage, createDecision, createPipelineRun, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-BtgXk_My.js";
10
- import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-ChLdRgux.js";
11
- import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-DeIYLwa4.js";
12
- import "../version-manager-impl-CjDZblzf.js";
13
- import { registerUpgradeCommand } from "../upgrade-CTGnrq33.js";
7
+ import { ConfigError, createEventBus } from "../helpers-CpMs8VZX.js";
8
+ import { RoutingRecommender } from "../routing-BVrxrM6v.js";
9
+ import { addTokenUsage, createDecision, createPipelineRun, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-DhAA2HG2.js";
10
+ import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-BdcdmDqS.js";
11
+ import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-DxPx6erV.js";
12
+ import "../version-manager-impl-BIxOe7gZ.js";
13
+ import { registerUpgradeCommand } from "../upgrade-B1S61VXJ.js";
14
14
  import { Command } from "commander";
15
15
  import { fileURLToPath } from "url";
16
16
  import { dirname, join, resolve } from "path";
@@ -1209,7 +1209,7 @@ async function runInitAction(options) {
1209
1209
  process.stderr.write(`✗ Dolt initialization failed: ${msg}\n`);
1210
1210
  return INIT_EXIT_ERROR;
1211
1211
  }
1212
- logger$18.warn({ error: msg }, "Dolt auto-init failed (non-blocking)");
1212
+ process.stderr.write(`⚠ Dolt state store initialization failed: ${msg}\n Pipeline metrics, cost tracking, and health monitoring will not persist.\n Fix the issue and re-run: substrate init --dolt\n`);
1213
1213
  }
1214
1214
  }
1215
1215
  else logger$18.debug("Dolt step was skipped (--no-dolt)");
@@ -2076,7 +2076,7 @@ async function runStatusAction(options) {
2076
2076
  if (runId !== void 0 && runId !== "") run = await getPipelineRunById(adapter, runId);
2077
2077
  else run = await getLatestRun(adapter);
2078
2078
  if (run === void 0) {
2079
- const { inspectProcessTree } = await import("../health-gUqQUzBJ.js");
2079
+ const { inspectProcessTree } = await import("../health-4fyhDU6T.js");
2080
2080
  const substrateDirPath = join(projectRoot, ".substrate");
2081
2081
  const processInfo = inspectProcessTree({
2082
2082
  projectRoot,
@@ -3489,11 +3489,11 @@ async function runSupervisorAction(options, deps = {}) {
3489
3489
  try {
3490
3490
  const { createExperimenter } = await import(
3491
3491
  /* @vite-ignore */
3492
- "../experimenter-CJ421I-b.js"
3492
+ "../experimenter-D_N_7ZF3.js"
3493
3493
  );
3494
3494
  const { getLatestRun: getLatest } = await import(
3495
3495
  /* @vite-ignore */
3496
- "../decisions-D7TA7Utm.js"
3496
+ "../decisions-BDLp3tJB.js"
3497
3497
  );
3498
3498
  const expAdapter = createDatabaseAdapter({
3499
3499
  backend: "auto",
@@ -3503,7 +3503,7 @@ async function runSupervisorAction(options, deps = {}) {
3503
3503
  await initSchema(expAdapter);
3504
3504
  const { runRunAction: runPipeline } = await import(
3505
3505
  /* @vite-ignore */
3506
- "../run-BQ8lXoyf.js"
3506
+ "../run-CZokQlIi.js"
3507
3507
  );
3508
3508
  const runStoryFn = async (opts) => {
3509
3509
  const exitCode = await runPipeline({
@@ -4019,7 +4019,7 @@ async function runMetricsAction(options) {
4019
4019
  const routingConfigPath = join(dbDir, "routing.yml");
4020
4020
  let routingConfig = null;
4021
4021
  if (existsSync(routingConfigPath)) try {
4022
- const { loadModelRoutingConfig } = await import("../routing-Cl1TgNvZ.js");
4022
+ const { loadModelRoutingConfig } = await import("../routing-CD8bIci_.js");
4023
4023
  routingConfig = loadModelRoutingConfig(routingConfigPath);
4024
4024
  } catch {}
4025
4025
  if (routingConfig === null) routingConfig = {
@@ -8926,8 +8926,8 @@ async function createProgram() {
8926
8926
  /** Fire-and-forget startup version check (story 8.3, AC3/AC5) */
8927
8927
  function checkForUpdatesInBackground(currentVersion) {
8928
8928
  if (process.env.SUBSTRATE_NO_UPDATE_CHECK === "1") return;
8929
- import("../upgrade-CBSusHh-.js").then(async () => {
8930
- const { createVersionManager } = await import("../version-manager-impl-AcT89lmM.js");
8929
+ import("../upgrade-BK0HrKA6.js").then(async () => {
8930
+ const { createVersionManager } = await import("../version-manager-impl-RrWs-CI6.js");
8931
8931
  const vm = createVersionManager();
8932
8932
  const result = await vm.checkForUpdates();
8933
8933
  if (result.updateAvailable) {
@@ -1,3 +1,3 @@
1
- import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, listRequirements, registerArtifact, updateDecision, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./decisions-BtgXk_My.js";
1
+ import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, listRequirements, registerArtifact, updateDecision, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./decisions-DhAA2HG2.js";
2
2
 
3
3
  export { getLatestRun };
@@ -394,4 +394,4 @@ async function getTokenUsageSummary(adapter, runId) {
394
394
 
395
395
  //#endregion
396
396
  export { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, listRequirements, registerArtifact, updateDecision, updatePipelineRun, updatePipelineRunConfig, upsertDecision };
397
- //# sourceMappingURL=decisions-BtgXk_My.js.map
397
+ //# sourceMappingURL=decisions-DhAA2HG2.js.map
@@ -1,7 +1,7 @@
1
1
  import "./logger-D2fS2ccL.js";
2
- import { createDecision } from "./decisions-BtgXk_My.js";
3
- import { EXPERIMENT_RESULT, getRunMetrics, getStoryMetricsForRun } from "./operational-ChLdRgux.js";
4
- import { spawnGit } from "./git-utils-DeIYLwa4.js";
2
+ import { createDecision } from "./decisions-DhAA2HG2.js";
3
+ import { EXPERIMENT_RESULT, getRunMetrics, getStoryMetricsForRun } from "./operational-BdcdmDqS.js";
4
+ import { spawnGit } from "./git-utils-DxPx6erV.js";
5
5
  import { spawn } from "node:child_process";
6
6
  import { join } from "node:path";
7
7
  import { mkdir, readFile, writeFile } from "node:fs/promises";
@@ -500,4 +500,4 @@ function createExperimenter(config, deps) {
500
500
 
501
501
  //#endregion
502
502
  export { createExperimenter };
503
- //# sourceMappingURL=experimenter-CJ421I-b.js.map
503
+ //# sourceMappingURL=experimenter-D_N_7ZF3.js.map
@@ -362,4 +362,4 @@ async function getMergedFiles(cwd) {
362
362
 
363
363
  //#endregion
364
364
  export { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, spawnGit, verifyGitVersion };
365
- //# sourceMappingURL=git-utils-DeIYLwa4.js.map
365
+ //# sourceMappingURL=git-utils-DxPx6erV.js.map
@@ -1,5 +1,5 @@
1
+ import { DEFAULT_STALL_THRESHOLD_SECONDS, getAllDescendantPids, getAutoHealthData, inspectProcessTree, isOrchestratorProcessLine, registerHealthCommand, runHealthAction } from "./health-Dnx-FGva.js";
1
2
  import "./logger-D2fS2ccL.js";
2
- import { DEFAULT_STALL_THRESHOLD_SECONDS, getAllDescendantPids, getAutoHealthData, inspectProcessTree, isOrchestratorProcessLine, registerHealthCommand, runHealthAction } from "./health-D0lhVJ5C.js";
3
- import "./decisions-BtgXk_My.js";
3
+ import "./decisions-DhAA2HG2.js";
4
4
 
5
5
  export { inspectProcessTree };
@@ -1,15 +1,42 @@
1
- import { __require } from "./chunk-CsIcISPO.js";
2
1
  import { createLogger } from "./logger-D2fS2ccL.js";
3
- import { getLatestRun, getPipelineRunById } from "./decisions-BtgXk_My.js";
2
+ import { getLatestRun, getPipelineRunById } from "./decisions-DhAA2HG2.js";
3
+ import { createRequire } from "module";
4
4
  import { dirname, join } from "path";
5
5
  import { existsSync } from "fs";
6
- import { createRequire } from "node:module";
6
+ import { createRequire as createRequire$1 } from "node:module";
7
7
  import { execFile, spawn, spawnSync } from "node:child_process";
8
8
  import { dirname as dirname$1, join as join$1, resolve as resolve$1 } from "node:path";
9
- import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, readFileSync as readFileSync$1 } from "node:fs";
9
+ import { existsSync as existsSync$1, readFileSync as readFileSync$1 } from "node:fs";
10
10
  import { access, mkdir, readFile, writeFile } from "node:fs/promises";
11
11
  import { fileURLToPath } from "node:url";
12
12
 
13
+ //#region rolldown:runtime
14
+ var __create = Object.create;
15
+ var __defProp = Object.defineProperty;
16
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
17
+ var __getOwnPropNames = Object.getOwnPropertyNames;
18
+ var __getProtoOf = Object.getPrototypeOf;
19
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
20
+ var __commonJS = (cb, mod) => function() {
21
+ return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
22
+ };
23
+ var __copyProps = (to, from, except, desc) => {
24
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
25
+ key = keys[i];
26
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
27
+ get: ((k) => from[k]).bind(null, key),
28
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
29
+ });
30
+ }
31
+ return to;
32
+ };
33
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
34
+ value: mod,
35
+ enumerable: true
36
+ }) : target, mod));
37
+ var __require = /* @__PURE__ */ createRequire(import.meta.url);
38
+
39
+ //#endregion
13
40
  //#region src/utils/git-root.ts
14
41
  /**
15
42
  * Resolve the main git repository root, even from a linked worktree.
@@ -481,63 +508,6 @@ var InMemoryDatabaseAdapter = class {
481
508
  }
482
509
  };
483
510
 
484
- //#endregion
485
- //#region src/persistence/wasm-sqlite-adapter.ts
486
- /**
487
- * DatabaseAdapter that wraps any object implementing the synchronous
488
- * prepare/exec API. Does NOT own the database lifecycle —
489
- * close() is a no-op; the caller manages open/close.
490
- *
491
- * Used in test code that bridges legacy synchronous database objects.
492
- */
493
- var SyncDatabaseAdapter = class {
494
- _db;
495
- constructor(db) {
496
- this._db = db;
497
- }
498
- /** Normalize MySQL/Dolt-flavored SQL to SQLite equivalents */
499
- _normalizeSql(sql) {
500
- return sql.replace(/\bAUTO_INCREMENT\b/g, "AUTOINCREMENT");
501
- }
502
- querySync(sql, params) {
503
- const stmt = this._db.prepare(this._normalizeSql(sql));
504
- if (stmt.reader) return params && params.length > 0 ? stmt.all(...params) : stmt.all();
505
- if (params && params.length > 0) stmt.run(...params);
506
- else stmt.run();
507
- return [];
508
- }
509
- execSync(sql) {
510
- this._db.exec(this._normalizeSql(sql));
511
- }
512
- async query(sql, params) {
513
- return this.querySync(sql, params);
514
- }
515
- async exec(sql) {
516
- this.execSync(sql);
517
- }
518
- async transaction(fn) {
519
- this._db.exec("BEGIN");
520
- try {
521
- const result = await fn(this);
522
- this._db.exec("COMMIT");
523
- return result;
524
- } catch (err) {
525
- try {
526
- this._db.exec("ROLLBACK");
527
- } catch {}
528
- throw err;
529
- }
530
- }
531
- async close() {}
532
- /**
533
- * Work graph not supported in SyncDatabaseAdapter.
534
- * Returns `[]` to signal the caller to use the legacy discovery path.
535
- */
536
- async queryReadyStories() {
537
- return [];
538
- }
539
- };
540
-
541
511
  //#endregion
542
512
  //#region src/modules/state/errors.ts
543
513
  /**
@@ -785,21 +755,8 @@ function createDatabaseAdapter(config = { backend: "auto" }) {
785
755
  const client = new DoltClient({ repoPath: doltRepoPath });
786
756
  return new DoltDatabaseAdapter(client);
787
757
  }
788
- const sqliteDbPath = join$1(basePath, ".substrate", "substrate.db");
789
- try {
790
- const require$1 = createRequire(import.meta.url);
791
- const BetterSqlite3 = require$1("better-sqlite3");
792
- const substrateDir = join$1(basePath, ".substrate");
793
- if (!existsSync$1(substrateDir)) mkdirSync$1(substrateDir, { recursive: true });
794
- const db = new BetterSqlite3(sqliteDbPath);
795
- db.pragma("journal_mode = WAL");
796
- db.pragma("foreign_keys = ON");
797
- logger$2.debug({ path: sqliteDbPath }, "Using file-backed SQLite via better-sqlite3");
798
- return new SyncDatabaseAdapter(db);
799
- } catch (err) {
800
- logger$2.debug({ err }, "better-sqlite3 not available, using InMemoryDatabaseAdapter");
801
- return new InMemoryDatabaseAdapter();
802
- }
758
+ logger$2.debug("Dolt not available, using InMemoryDatabaseAdapter");
759
+ return new InMemoryDatabaseAdapter();
803
760
  }
804
761
 
805
762
  //#endregion
@@ -1354,6 +1311,71 @@ async function runDoltCommand(args, cwd) {
1354
1311
  });
1355
1312
  }
1356
1313
  /**
1314
+ * Ensure that Dolt has a global user identity configured.
1315
+ * `dolt init` and `dolt commit` fail with "empty ident name not allowed"
1316
+ * when no identity exists. This function checks for an existing identity
1317
+ * and configures a default one if absent.
1318
+ */
1319
+ async function ensureDoltIdentity() {
1320
+ const hasIdentity = await doltConfigGet("user.name");
1321
+ if (hasIdentity) return;
1322
+ await runDoltConfigSet("user.name", "substrate");
1323
+ await runDoltConfigSet("user.email", "substrate@localhost");
1324
+ }
1325
+ /**
1326
+ * Check if a Dolt global config key has a value set.
1327
+ */
1328
+ async function doltConfigGet(key) {
1329
+ return new Promise((resolve$2) => {
1330
+ const child = spawn("dolt", [
1331
+ "config",
1332
+ "--global",
1333
+ "--get",
1334
+ key
1335
+ ], { stdio: [
1336
+ "ignore",
1337
+ "ignore",
1338
+ "ignore"
1339
+ ] });
1340
+ child.on("error", () => resolve$2(false));
1341
+ child.on("close", (code) => resolve$2(code === 0));
1342
+ });
1343
+ }
1344
+ /**
1345
+ * Set a Dolt global config value.
1346
+ */
1347
+ async function runDoltConfigSet(key, value) {
1348
+ return new Promise((resolve$2, reject) => {
1349
+ const child = spawn("dolt", [
1350
+ "config",
1351
+ "--global",
1352
+ "--add",
1353
+ key,
1354
+ value
1355
+ ], { stdio: [
1356
+ "ignore",
1357
+ "ignore",
1358
+ "pipe"
1359
+ ] });
1360
+ const stderrChunks = [];
1361
+ child.stderr?.on("data", (chunk) => stderrChunks.push(chunk));
1362
+ child.on("error", reject);
1363
+ child.on("close", (code) => {
1364
+ if (code === 0) resolve$2();
1365
+ else {
1366
+ const stderr = Buffer.concat(stderrChunks).toString("utf8").trim();
1367
+ reject(new DoltInitError([
1368
+ "config",
1369
+ "--global",
1370
+ "--add",
1371
+ key,
1372
+ value
1373
+ ], code ?? -1, stderr));
1374
+ }
1375
+ });
1376
+ });
1377
+ }
1378
+ /**
1357
1379
  * Initialize a Dolt repository for Substrate state storage.
1358
1380
  *
1359
1381
  * This function is idempotent: running it a second time on an already-
@@ -1370,6 +1392,7 @@ async function initializeDolt(config) {
1370
1392
  const schemaPath = config.schemaPath ?? fileURLToPath(new URL("./schema.sql", import.meta.url));
1371
1393
  await checkDoltInstalled();
1372
1394
  await mkdir(statePath, { recursive: true });
1395
+ await ensureDoltIdentity();
1373
1396
  const doltDir = join$1(statePath, ".dolt");
1374
1397
  let doltDirExists = false;
1375
1398
  try {
@@ -1491,7 +1514,7 @@ const PACKAGE_ROOT = join(__dirname, "..", "..", "..");
1491
1514
  */
1492
1515
  function resolveBmadMethodSrcPath(fromDir = __dirname) {
1493
1516
  try {
1494
- const require$1 = createRequire(join(fromDir, "synthetic.js"));
1517
+ const require$1 = createRequire$1(join(fromDir, "synthetic.js"));
1495
1518
  const pkgJsonPath = require$1.resolve("bmad-method/package.json");
1496
1519
  return join(dirname(pkgJsonPath), "src");
1497
1520
  } catch {
@@ -1504,7 +1527,7 @@ function resolveBmadMethodSrcPath(fromDir = __dirname) {
1504
1527
  */
1505
1528
  function resolveBmadMethodVersion(fromDir = __dirname) {
1506
1529
  try {
1507
- const require$1 = createRequire(join(fromDir, "synthetic.js"));
1530
+ const require$1 = createRequire$1(join(fromDir, "synthetic.js"));
1508
1531
  const pkgJsonPath = require$1.resolve("bmad-method/package.json");
1509
1532
  const pkg = require$1(pkgJsonPath);
1510
1533
  return pkg.version ?? "unknown";
@@ -3220,5 +3243,5 @@ function registerHealthCommand(program, _version = "0.0.0", projectRoot = proces
3220
3243
  }
3221
3244
 
3222
3245
  //#endregion
3223
- export { BMAD_BASELINE_TOKENS_FULL, DEFAULT_STALL_THRESHOLD_SECONDS, DoltClient, DoltMergeConflict, DoltNotInstalled, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN$1 as STORY_KEY_PATTERN, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createDatabaseAdapter, createDoltClient, createStateStore, detectCycles, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, inspectProcessTree, isOrchestratorProcessLine, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runHealthAction, validateStoryKey };
3224
- //# sourceMappingURL=health-D0lhVJ5C.js.map
3246
+ export { BMAD_BASELINE_TOKENS_FULL, DEFAULT_STALL_THRESHOLD_SECONDS, DoltClient, DoltMergeConflict, DoltNotInstalled, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN$1 as STORY_KEY_PATTERN, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, buildPipelineStatusOutput, checkDoltInstalled, createDatabaseAdapter, createDoltClient, createStateStore, detectCycles, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, inspectProcessTree, isOrchestratorProcessLine, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runHealthAction, validateStoryKey };
3247
+ //# sourceMappingURL=health-Dnx-FGva.js.map
@@ -917,4 +917,4 @@ async function withRetry(fn, maxRetries = 3, baseDelayMs = 100) {
917
917
 
918
918
  //#endregion
919
919
  export { AdtError, BudgetExceededError, ConfigError, ConfigIncompatibleFormatError, GitError, RecoveryError, TaskConfigError, TaskGraphCycleError, TaskGraphError, TaskGraphIncompatibleFormatError, WorkerError, WorkerNotFoundError, assertDefined, createEventBus, createTuiApp, deepClone, formatDuration, generateId, isPlainObject, isTuiCapable, printNonTtyWarning, sleep, withRetry };
920
- //# sourceMappingURL=helpers-DtzxPR0u.js.map
920
+ //# sourceMappingURL=helpers-CpMs8VZX.js.map
package/dist/index.js CHANGED
@@ -1,6 +1,6 @@
1
1
  import { childLogger, createLogger, logger } from "./logger-D2fS2ccL.js";
2
2
  import { AdapterRegistry, ClaudeCodeAdapter, CodexCLIAdapter, GeminiCLIAdapter } from "./adapter-registry-D2zdMwVu.js";
3
- import { AdtError, BudgetExceededError, ConfigError, ConfigIncompatibleFormatError, GitError, RecoveryError, TaskConfigError, TaskGraphCycleError, TaskGraphError, TaskGraphIncompatibleFormatError, WorkerError, WorkerNotFoundError, assertDefined, createEventBus, createTuiApp, deepClone, formatDuration, generateId, isPlainObject, isTuiCapable, printNonTtyWarning, sleep, withRetry } from "./helpers-DtzxPR0u.js";
3
+ import { AdtError, BudgetExceededError, ConfigError, ConfigIncompatibleFormatError, GitError, RecoveryError, TaskConfigError, TaskGraphCycleError, TaskGraphError, TaskGraphIncompatibleFormatError, WorkerError, WorkerNotFoundError, assertDefined, createEventBus, createTuiApp, deepClone, formatDuration, generateId, isPlainObject, isTuiCapable, printNonTtyWarning, sleep, withRetry } from "./helpers-CpMs8VZX.js";
4
4
 
5
5
  //#region src/core/di.ts
6
6
  /**
@@ -371,4 +371,4 @@ const ADVISORY_NOTES = "advisory-notes";
371
371
 
372
372
  //#endregion
373
373
  export { ADVISORY_NOTES, ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, STORY_OUTCOME, TEST_EXPANSION_FINDING, TEST_PLAN, aggregateTokenUsageForRun, aggregateTokenUsageForStory, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline, writeRunMetrics, writeStoryMetrics };
374
- //# sourceMappingURL=operational-ChLdRgux.js.map
374
+ //# sourceMappingURL=operational-BdcdmDqS.js.map
@@ -829,4 +829,4 @@ var RoutingTuner = class {
829
829
 
830
830
  //#endregion
831
831
  export { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, TASK_TYPE_PHASE_MAP, getModelTier, loadModelRoutingConfig };
832
- //# sourceMappingURL=routing-DCWkD4MK.js.map
832
+ //# sourceMappingURL=routing-BVrxrM6v.js.map
@@ -1,4 +1,4 @@
1
1
  import "./logger-D2fS2ccL.js";
2
- import { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, TASK_TYPE_PHASE_MAP, getModelTier, loadModelRoutingConfig } from "./routing-DCWkD4MK.js";
2
+ import { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, TASK_TYPE_PHASE_MAP, getModelTier, loadModelRoutingConfig } from "./routing-BVrxrM6v.js";
3
3
 
4
4
  export { loadModelRoutingConfig };
@@ -1,11 +1,10 @@
1
- import { __commonJS, __require, __toESM } from "./chunk-CsIcISPO.js";
1
+ import { BMAD_BASELINE_TOKENS_FULL, DoltClient, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, buildPipelineStatusOutput, createDatabaseAdapter, formatOutput, formatPipelineSummary, formatTokenTelemetry, initSchema, inspectProcessTree, parseDbTimestampAsUtc, resolveMainRepoRoot, validateStoryKey } from "./health-Dnx-FGva.js";
2
2
  import { createLogger, deepMask } from "./logger-D2fS2ccL.js";
3
3
  import { CURRENT_CONFIG_FORMAT_VERSION, PartialSubstrateConfigSchema, SUPPORTED_CONFIG_FORMAT_VERSIONS, SubstrateConfigSchema, defaultConfigMigrator } from "./config-migrator-CtGelIsG.js";
4
- import { BMAD_BASELINE_TOKENS_FULL, DoltClient, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, createDatabaseAdapter, formatOutput, formatPipelineSummary, formatTokenTelemetry, initSchema, inspectProcessTree, parseDbTimestampAsUtc, resolveMainRepoRoot, validateStoryKey } from "./health-D0lhVJ5C.js";
5
- import { ConfigError, ConfigIncompatibleFormatError, createEventBus, createTuiApp, isTuiCapable, printNonTtyWarning, sleep } from "./helpers-DtzxPR0u.js";
6
- import { RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, loadModelRoutingConfig } from "./routing-DCWkD4MK.js";
7
- import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./decisions-BtgXk_My.js";
8
- import { ADVISORY_NOTES, ESCALATION_DIAGNOSIS, OPERATIONAL_FINDING, STORY_METRICS, STORY_OUTCOME, TEST_EXPANSION_FINDING, TEST_PLAN, aggregateTokenUsageForRun, aggregateTokenUsageForStory, getStoryMetricsForRun, writeRunMetrics, writeStoryMetrics } from "./operational-ChLdRgux.js";
4
+ import { ConfigError, ConfigIncompatibleFormatError, createEventBus, createTuiApp, isTuiCapable, printNonTtyWarning, sleep } from "./helpers-CpMs8VZX.js";
5
+ import { RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, loadModelRoutingConfig } from "./routing-BVrxrM6v.js";
6
+ import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./decisions-DhAA2HG2.js";
7
+ import { ADVISORY_NOTES, ESCALATION_DIAGNOSIS, OPERATIONAL_FINDING, STORY_METRICS, STORY_OUTCOME, TEST_EXPANSION_FINDING, TEST_PLAN, aggregateTokenUsageForRun, aggregateTokenUsageForStory, getStoryMetricsForRun, writeRunMetrics, writeStoryMetrics } from "./operational-BdcdmDqS.js";
9
8
  import { dirname, join, resolve } from "path";
10
9
  import { access, mkdir, readFile, readdir, stat, writeFile } from "fs/promises";
11
10
  import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "fs";
@@ -4666,7 +4665,7 @@ const DEFAULT_TIMEOUTS = {
4666
4665
  "create-story": 6e5,
4667
4666
  "dev-story": 18e5,
4668
4667
  "code-review": 9e5,
4669
- "minor-fixes": 6e5,
4668
+ "minor-fixes": 12e5,
4670
4669
  "major-rework": 9e5,
4671
4670
  "readiness-check": 6e5,
4672
4671
  "elicitation": 9e5,
@@ -5670,7 +5669,22 @@ function runBuildVerification(options) {
5670
5669
  */
5671
5670
  function checkGitDiffFiles(workingDir = process.cwd()) {
5672
5671
  const results = new Set();
5672
+ let repoHasCommits = true;
5673
5673
  try {
5674
+ execSync("git rev-parse --verify HEAD", {
5675
+ cwd: workingDir,
5676
+ stdio: [
5677
+ "ignore",
5678
+ "pipe",
5679
+ "pipe"
5680
+ ],
5681
+ timeout: 3e3
5682
+ });
5683
+ } catch {
5684
+ repoHasCommits = false;
5685
+ }
5686
+ try {
5687
+ if (!repoHasCommits) throw new Error("no commits — skip HEAD diff");
5674
5688
  const unstaged = execSync("git diff --name-only HEAD", {
5675
5689
  cwd: workingDir,
5676
5690
  encoding: "utf-8",
@@ -6601,6 +6615,27 @@ async function isValidStoryFile(filePath) {
6601
6615
  //#region src/modules/compiled-workflows/git-helpers.ts
6602
6616
  const logger$19 = createLogger("compiled-workflows:git-helpers");
6603
6617
  /**
6618
+ * Check whether the repo at `cwd` has at least one commit (HEAD resolves).
6619
+ * Returns false for fresh repos with no commits, avoiding `fatal: bad revision 'HEAD'`.
6620
+ * Synchronous (execSync) to keep it simple — this is a fast local check.
6621
+ */
6622
+ function hasCommits(cwd) {
6623
+ try {
6624
+ execSync("git rev-parse --verify HEAD", {
6625
+ cwd,
6626
+ stdio: [
6627
+ "ignore",
6628
+ "pipe",
6629
+ "pipe"
6630
+ ],
6631
+ timeout: 3e3
6632
+ });
6633
+ return true;
6634
+ } catch {
6635
+ return false;
6636
+ }
6637
+ }
6638
+ /**
6604
6639
  * Capture the full git diff for HEAD (working tree vs current commit).
6605
6640
  *
6606
6641
  * Runs `git diff HEAD` in the specified working directory and returns
@@ -6614,6 +6649,10 @@ const logger$19 = createLogger("compiled-workflows:git-helpers");
6614
6649
  * @returns The diff output string, or '' on error
6615
6650
  */
6616
6651
async function getGitDiffSummary(workingDirectory = process.cwd()) {
	// HEAD-relative diff only makes sense once the repo has a commit.
	const repoHasHead = hasCommits(workingDirectory);
	if (repoHasHead) {
		return runGitCommand(["diff", "HEAD"], workingDirectory, "git-diff-summary");
	}
	logger$19.debug({ cwd: workingDirectory }, "No commits in repo — returning empty diff");
	return "";
}
6619
6658
  /**
@@ -6627,6 +6666,10 @@ async function getGitDiffSummary(workingDirectory = process.cwd()) {
6627
6666
  * @returns The stat summary string, or '' on error
6628
6667
  */
6629
6668
  async function getGitDiffStatSummary(workingDirectory = process.cwd()) {
6669
+ if (!hasCommits(workingDirectory)) {
6670
+ logger$19.debug({ cwd: workingDirectory }, "No commits in repo — returning empty stat");
6671
+ return "";
6672
+ }
6630
6673
  return runGitCommand([
6631
6674
  "diff",
6632
6675
  "--stat",
@@ -6649,6 +6692,10 @@ async function getGitDiffStatSummary(workingDirectory = process.cwd()) {
6649
6692
  */
6650
6693
  async function getGitDiffForFiles(files, workingDirectory = process.cwd()) {
6651
6694
  if (files.length === 0) return "";
6695
+ if (!hasCommits(workingDirectory)) {
6696
+ logger$19.debug({ cwd: workingDirectory }, "No commits in repo — returning empty diff for files");
6697
+ return "";
6698
+ }
6652
6699
  await stageIntentToAdd(files, workingDirectory);
6653
6700
  return runGitCommand([
6654
6701
  "diff",
@@ -6672,6 +6719,10 @@ async function getGitDiffForFiles(files, workingDirectory = process.cwd()) {
6672
6719
  */
6673
6720
  async function getGitDiffStatForFiles(files, workingDirectory = process.cwd()) {
6674
6721
  if (files.length === 0) return "";
6722
+ if (!hasCommits(workingDirectory)) {
6723
+ logger$19.debug({ cwd: workingDirectory }, "No commits in repo — returning empty stat for files");
6724
+ return "";
6725
+ }
6675
6726
  return runGitCommand([
6676
6727
  "diff",
6677
6728
  "--stat",
@@ -7335,6 +7386,11 @@ async function runDevStory(deps, params) {
7335
7386
  name: "prior_findings",
7336
7387
  content: priorFindingsContent,
7337
7388
  priority: "optional"
7389
+ },
7390
+ {
7391
+ name: "verify_command",
7392
+ content: deps.pack.manifest.verifyCommand !== false ? deps.pack.manifest.verifyCommand ?? "npx turbo build" : "",
7393
+ priority: "optional"
7338
7394
  }
7339
7395
  ];
7340
7396
  const { prompt, tokenCount, truncated } = assemblePrompt(template, sections, TOKEN_CEILING);
@@ -8904,7 +8960,7 @@ async function seedEpicShards(db, projectRoot) {
8904
8960
  count++;
8905
8961
  }
8906
8962
  }
8907
- await db.exec("DELETE FROM decisions WHERE phase = 'implementation' AND category = 'epic-shard-hash' AND key = 'epics-file'");
8963
+ await db.exec("DELETE FROM decisions WHERE phase = 'implementation' AND category = 'epic-shard-hash' AND `key` = 'epics-file'");
8908
8964
  await createDecision(db, {
8909
8965
  pipeline_run_id: null,
8910
8966
  phase: "implementation",
@@ -10702,22 +10758,49 @@ var IngestionServer = class {
10702
10758
  /**
10703
10759
  * Start the HTTP ingestion server.
10704
10760
  * Resolves when the server is listening and ready to accept connections.
10761
+ * On EADDRINUSE, retries up to 10 consecutive ports before failing.
10705
10762
  */
10706
10763
  async start() {
10707
10764
  if (this._server !== null) {
10708
10765
  logger$8.warn("IngestionServer.start() called while already started — ignoring");
10709
10766
  return;
10710
10767
  }
10768
+ const maxRetries = 10;
10769
+ let lastErr;
10770
+ for (let attempt = 0; attempt <= maxRetries; attempt++) {
10771
+ const port = this._port + attempt;
10772
+ try {
10773
+ await this._tryListen(port);
10774
+ return;
10775
+ } catch (err) {
10776
+ const nodeErr = err;
10777
+ if (nodeErr.code === "EADDRINUSE" && attempt < maxRetries) {
10778
+ logger$8.warn({
10779
+ port,
10780
+ attempt: attempt + 1
10781
+ }, `Port ${port} in use — trying ${port + 1}`);
10782
+ lastErr = nodeErr;
10783
+ continue;
10784
+ }
10785
+ throw err;
10786
+ }
10787
+ }
10788
+ throw lastErr ?? new Error("IngestionServer: exhausted port retry attempts");
10789
+ }
10790
+ _tryListen(port) {
10711
10791
  return new Promise((resolve$2, reject) => {
10712
10792
  const server = createServer(this._handleRequest.bind(this));
10713
10793
  server.on("error", (err) => {
10714
- logger$8.error({ err }, "IngestionServer failed to start");
10794
+ server.close();
10715
10795
  reject(err);
10716
10796
  });
10717
- server.listen(this._port, "127.0.0.1", () => {
10797
+ server.listen(port, "127.0.0.1", () => {
10718
10798
  this._server = server;
10719
10799
  const addr = server.address();
10720
- logger$8.info({ port: addr.port }, "IngestionServer listening");
10800
+ logger$8.info({
10801
+ port: addr.port,
10802
+ requestedPort: this._port
10803
+ }, "IngestionServer listening");
10721
10804
  this._buffer?.start();
10722
10805
  resolve$2();
10723
10806
  });
@@ -14302,30 +14385,79 @@ function createImplementationOrchestrator(deps) {
14302
14385
  }
14303
14386
  }
14304
14387
  if (!retryPassed) {
14305
- eventBus.emit("story:build-verification-failed", {
14306
- storyKey,
14307
- exitCode: buildVerifyResult.exitCode ?? 1,
14308
- output: truncatedOutput
14309
- });
14310
- logger$25.warn({
14311
- storyKey,
14312
- reason,
14313
- exitCode: buildVerifyResult.exitCode
14314
- }, "Build verification failed — escalating story");
14315
- updateStory(storyKey, {
14316
- phase: "ESCALATED",
14317
- error: reason,
14318
- completedAt: new Date().toISOString()
14319
- });
14320
- await writeStoryMetricsBestEffort(storyKey, "escalated", 0);
14321
- await emitEscalation({
14322
- storyKey,
14323
- lastVerdict: reason,
14324
- reviewCycles: 0,
14325
- issues: [truncatedOutput]
14326
- });
14327
- await persistState();
14328
- return;
14388
+ let buildFixPassed = false;
14389
+ if (buildVerifyResult.status === "failed" && storyFilePath !== void 0) try {
14390
+ logger$25.info({ storyKey }, "Dispatching build-fix agent");
14391
+ startPhase(storyKey, "build-fix");
14392
+ const storyContent = await readFile$1(storyFilePath, "utf-8");
14393
+ let buildFixTemplate;
14394
+ try {
14395
+ buildFixTemplate = await pack.getPrompt("build-fix");
14396
+ } catch {
14397
+ buildFixTemplate = [
14398
+ "## Build Error Output\n{{build_errors}}",
14399
+ "## Story File Content\n{{story_content}}",
14400
+ "---",
14401
+ "Fix the build errors above. Make minimal changes. Run the build to verify."
14402
+ ].join("\n\n");
14403
+ }
14404
+ const buildFixPrompt = buildFixTemplate.replace("{{build_errors}}", truncatedOutput).replace("{{story_content}}", storyContent.slice(0, 4e3));
14405
+ incrementDispatches(storyKey);
14406
+ const fixHandle = dispatcher.dispatch({
14407
+ prompt: buildFixPrompt,
14408
+ agent: "claude-code",
14409
+ taskType: "build-fix",
14410
+ maxTurns: 15,
14411
+ workingDirectory: projectRoot ?? process.cwd(),
14412
+ ...config.perStoryContextCeilings?.[storyKey] !== void 0 ? { maxContextTokens: config.perStoryContextCeilings[storyKey] } : {},
14413
+ ..._otlpEndpoint !== void 0 ? { otlpEndpoint: _otlpEndpoint } : {}
14414
+ });
14415
+ await fixHandle.result;
14416
+ endPhase(storyKey, "build-fix");
14417
+ const retryAfterFix = runBuildVerification({
14418
+ verifyCommand: pack.manifest.verifyCommand,
14419
+ verifyTimeoutMs: pack.manifest.verifyTimeoutMs,
14420
+ projectRoot: projectRoot ?? process.cwd(),
14421
+ changedFiles: gitDiffFiles
14422
+ });
14423
+ if (retryAfterFix.status === "passed") {
14424
+ buildFixPassed = true;
14425
+ eventBus.emit("story:build-verification-passed", { storyKey });
14426
+ logger$25.info({ storyKey }, "Build passed after build-fix dispatch");
14427
+ } else logger$25.warn({ storyKey }, "Build still fails after build-fix dispatch — escalating");
14428
+ } catch (fixErr) {
14429
+ const fixMsg = fixErr instanceof Error ? fixErr.message : String(fixErr);
14430
+ logger$25.warn({
14431
+ storyKey,
14432
+ error: fixMsg
14433
+ }, "Build-fix dispatch failed — escalating");
14434
+ }
14435
+ if (!buildFixPassed) {
14436
+ eventBus.emit("story:build-verification-failed", {
14437
+ storyKey,
14438
+ exitCode: buildVerifyResult.exitCode ?? 1,
14439
+ output: truncatedOutput
14440
+ });
14441
+ logger$25.warn({
14442
+ storyKey,
14443
+ reason,
14444
+ exitCode: buildVerifyResult.exitCode
14445
+ }, "Build verification failed — escalating story");
14446
+ updateStory(storyKey, {
14447
+ phase: "ESCALATED",
14448
+ error: reason,
14449
+ completedAt: new Date().toISOString()
14450
+ });
14451
+ await writeStoryMetricsBestEffort(storyKey, "escalated", 0);
14452
+ await emitEscalation({
14453
+ storyKey,
14454
+ lastVerdict: reason,
14455
+ reviewCycles: 0,
14456
+ issues: [truncatedOutput]
14457
+ });
14458
+ await persistState();
14459
+ return;
14460
+ }
14329
14461
  }
14330
14462
  }
14331
14463
  }
@@ -19821,6 +19953,7 @@ async function runRunAction(options) {
19821
19953
  return 1;
19822
19954
  }
19823
19955
  const telemetryPersistence = telemetryEnabled ? new AdapterTelemetryPersistence(adapter) : void 0;
19956
+ if (telemetryPersistence !== void 0) await telemetryPersistence.initSchema();
19824
19957
  const packLoader = createPackLoader();
19825
19958
  let pack;
19826
19959
  try {
@@ -20724,6 +20857,7 @@ async function runFullPipeline(options) {
20724
20857
  });
20725
20858
  }
20726
20859
  const fpTelemetryPersistence = fullTelemetryEnabled ? new AdapterTelemetryPersistence(adapter) : void 0;
20860
+ if (fpTelemetryPersistence !== void 0) await fpTelemetryPersistence.initSchema();
20727
20861
  const orchestrator = createImplementationOrchestrator({
20728
20862
  db: adapter,
20729
20863
  pack,
@@ -20886,4 +21020,4 @@ function registerRunCommand(program, _version = "0.0.0", projectRoot = process.c
20886
21020
 
20887
21021
  //#endregion
20888
21022
  export { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createConfigSystem, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, registerRunCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
20889
- //# sourceMappingURL=run-D3wByPiv.js.map
21023
+ //# sourceMappingURL=run-BW8_vcTi.js.map
@@ -0,0 +1,10 @@
1
+ import "./health-Dnx-FGva.js";
2
+ import "./logger-D2fS2ccL.js";
3
+ import { registerRunCommand, runRunAction } from "./run-BW8_vcTi.js";
4
+ import "./config-migrator-CtGelIsG.js";
5
+ import "./helpers-CpMs8VZX.js";
6
+ import "./routing-BVrxrM6v.js";
7
+ import "./decisions-DhAA2HG2.js";
8
+ import "./operational-BdcdmDqS.js";
9
+
10
+ export { runRunAction };
package/dist/schema.sql CHANGED
@@ -142,6 +142,8 @@ CREATE TABLE IF NOT EXISTS efficiency_scores (
142
142
  cache_hit_sub_score DOUBLE NOT NULL DEFAULT 0,
143
143
  io_ratio_sub_score DOUBLE NOT NULL DEFAULT 0,
144
144
  context_management_sub_score DOUBLE NOT NULL DEFAULT 0,
145
+ token_density_sub_score DOUBLE NOT NULL DEFAULT 0,
146
+ cold_start_turns_excluded INTEGER NOT NULL DEFAULT 0,
145
147
  avg_cache_hit_rate DOUBLE NOT NULL DEFAULT 0,
146
148
  avg_io_ratio DOUBLE NOT NULL DEFAULT 0,
147
149
  context_spike_count INTEGER NOT NULL DEFAULT 0,
@@ -1,4 +1,4 @@
1
- import { createVersionManager } from "./version-manager-impl-CjDZblzf.js";
1
+ import { createVersionManager } from "./version-manager-impl-BIxOe7gZ.js";
2
2
  import { execSync, spawn } from "child_process";
3
3
  import * as readline from "readline";
4
4
 
@@ -123,4 +123,4 @@ function registerUpgradeCommand(program) {
123
123
 
124
124
  //#endregion
125
125
  export { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand };
126
- //# sourceMappingURL=upgrade-CTGnrq33.js.map
126
+ //# sourceMappingURL=upgrade-B1S61VXJ.js.map
@@ -1,5 +1,5 @@
1
1
  import "./config-migrator-CtGelIsG.js";
2
- import "./version-manager-impl-CjDZblzf.js";
3
- import { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand } from "./upgrade-CTGnrq33.js";
2
+ import "./version-manager-impl-BIxOe7gZ.js";
3
+ import { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand } from "./upgrade-B1S61VXJ.js";
4
4
 
5
5
  export { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand };
@@ -369,4 +369,4 @@ function createVersionManager(deps = {}) {
369
369
 
370
370
  //#endregion
371
371
  export { VersionManagerImpl, createVersionManager };
372
- //# sourceMappingURL=version-manager-impl-CjDZblzf.js.map
372
+ //# sourceMappingURL=version-manager-impl-BIxOe7gZ.js.map
@@ -1,4 +1,4 @@
1
1
  import "./config-migrator-CtGelIsG.js";
2
- import { VersionManagerImpl, createVersionManager } from "./version-manager-impl-CjDZblzf.js";
2
+ import { VersionManagerImpl, createVersionManager } from "./version-manager-impl-BIxOe7gZ.js";
3
3
 
4
4
  export { createVersionManager };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "substrate-ai",
3
- "version": "0.6.9",
3
+ "version": "0.7.1",
4
4
  "description": "Substrate — multi-agent orchestration daemon for AI coding agents",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -0,0 +1,54 @@
1
+ # BMAD Compiled Build-Fix Agent
2
+
3
+ ## Context (pre-assembled by pipeline)
4
+
5
+ ### Story File Content
6
+ {{story_content}}
7
+
8
+ ### Build Error Output
9
+ {{build_errors}}
10
+
11
+ ---
12
+
13
+ ## Mission
14
+
15
+ The build verification gate failed after dev-story completed. Fix the build errors shown above so the project compiles cleanly.
16
+
17
+ ## Instructions
18
+
19
+ 1. **Read the build error output** carefully to identify:
20
+ - Which file(s) have errors
21
+ - The exact error type (type mismatch, missing import, syntax error, etc.)
22
+ - The line number(s) involved
23
+
24
+ 2. **Read only the affected file(s)** — do not scan the full codebase.
25
+
26
+ 3. **Fix each error** with the minimal change needed:
27
+ - Type errors: fix the type annotation or cast
28
+ - Missing imports: add the import
29
+ - Missing exports: add the export
30
+ - Do NOT refactor surrounding code or add features
31
+
32
+ 4. **Run the build command** to verify the fix compiles cleanly.
33
+
34
+ 5. **Run tests** to verify no regressions.
35
+
36
+ ## CRITICAL: Output Contract Emission
37
+
38
+ **You MUST emit the YAML output block as the very last thing you produce.**
39
+
40
+ ```yaml
41
+ result: success
42
+ files_modified:
43
+ - <absolute path to each file you modified>
44
+ tests: pass
45
+ ```
46
+
47
+ If you cannot fix the build errors:
48
+
49
+ ```yaml
50
+ result: failed
51
+ files_modified: []
52
+ tests: fail
53
+ notes: <reason the build cannot be fixed>
54
+ ```
@@ -38,6 +38,7 @@ Implement the story above completely. Follow tasks in exact order. Do not stop u
38
38
  - Write failing tests first
39
39
  - Make tests pass with minimal code
40
40
  - Refactor while keeping tests green
41
+ - **Use exact names from the story spec.** When the story specifies a field, variable, class, or method name, use that exact name in your implementation. Only deviate if the name would cause a compilation error (e.g., conflicts with a reserved word or inherited property), and add a code comment explaining why.
41
42
  - **If you import a new package that is not already in package.json, install it immediately** (`npm install <package>` or the appropriate workspace command). The build verification gate runs after dev-story — missing dependencies will fail the build and escalate the story.
42
43
 
43
44
  3. **After each task**:
@@ -47,6 +48,8 @@ Implement the story above completely. Follow tasks in exact order. Do not stop u
47
48
  - Update the story File List with all new/modified files
48
49
 
49
50
  4. **After all tasks complete**:
51
+ - Run the project build to verify type checking: `{{verify_command}}`
52
+ - Fix any type errors or compilation failures before proceeding
50
53
  - Run the full test suite one final time
51
54
  - Update story Status to `review`
52
55
 
@@ -1,30 +0,0 @@
1
- import { createRequire } from "module";
2
-
3
- //#region rolldown:runtime
4
- var __create = Object.create;
5
- var __defProp = Object.defineProperty;
6
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
7
- var __getOwnPropNames = Object.getOwnPropertyNames;
8
- var __getProtoOf = Object.getPrototypeOf;
9
- var __hasOwnProp = Object.prototype.hasOwnProperty;
10
- var __commonJS = (cb, mod) => function() {
11
- return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
12
- };
13
- var __copyProps = (to, from, except, desc) => {
14
- if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
15
- key = keys[i];
16
- if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
17
- get: ((k) => from[k]).bind(null, key),
18
- enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
19
- });
20
- }
21
- return to;
22
- };
23
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
24
- value: mod,
25
- enumerable: true
26
- }) : target, mod));
27
- var __require = /* @__PURE__ */ createRequire(import.meta.url);
28
-
29
- //#endregion
30
- export { __commonJS, __require, __toESM };
@@ -1,10 +0,0 @@
1
- import "./logger-D2fS2ccL.js";
2
- import { registerRunCommand, runRunAction } from "./run-D3wByPiv.js";
3
- import "./config-migrator-CtGelIsG.js";
4
- import "./health-D0lhVJ5C.js";
5
- import "./helpers-DtzxPR0u.js";
6
- import "./routing-DCWkD4MK.js";
7
- import "./decisions-BtgXk_My.js";
8
- import "./operational-ChLdRgux.js";
9
-
10
- export { runRunAction };