substrate-ai 0.17.2 → 0.18.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4 @@
1
+ import { AdapterRegistry } from "./dist-Bm0qSZer.js";
2
+ import "./adapter-registry-DXLMTmfD.js";
3
+
4
+ export { AdapterRegistry };
package/dist/cli/index.js CHANGED
@@ -1,15 +1,15 @@
1
1
  #!/usr/bin/env node
2
- import { FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, detectCycles, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot } from "../health-DswaC1q5.js";
2
+ import { FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, detectCycles, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot } from "../health-Cx2ZhRNT.js";
3
3
  import { createLogger } from "../logger-KeHncl-f.js";
4
4
  import { createEventBus } from "../helpers-CElYrONe.js";
5
- import { AdapterRegistry, BudgetConfigSchema, CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, ConfigError, CostTrackerConfigSchema, DEFAULT_CONFIG, DoltClient, DoltNotInstalled, EXPERIMENT_RESULT, GlobalSettingsSchema, IngestionServer, MonitorDatabaseImpl, OPERATIONAL_FINDING, PartialGlobalSettingsSchema, PartialProviderConfigSchema, ProvidersSchema, RoutingRecommender, STORY_METRICS, TelemetryConfigSchema, addTokenUsage, aggregateTokenUsageForRun, checkDoltInstalled, compareRunMetrics, createAmendmentRun, createConfigSystem, createDecision, createDoltClient, createPipelineRun, getActiveDecisions, getAllCostEntriesFiltered, getBaselineRunMetrics, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestCompletedRun, getLatestRun, getPipelineRunById, getPlanningCostTotal, getRetryableEscalations, getRunMetrics, getSessionCostSummary, getSessionCostSummaryFiltered, getStoryMetricsForRun, getTokenUsageSummary, incrementRunRestarts, initSchema, initializeDolt, listRequirements, listRunMetrics, loadParentRunDecisions, supersedeDecision, tagRunAsBaseline, updatePipelineRun } from "../dist-CLvAwmT7.js";
5
+ import { AdapterRegistry, BudgetConfigSchema, CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, ConfigError, CostTrackerConfigSchema, DEFAULT_CONFIG, DoltClient, DoltNotInstalled, EXPERIMENT_RESULT, GlobalSettingsSchema, IngestionServer, MonitorDatabaseImpl, OPERATIONAL_FINDING, PartialGlobalSettingsSchema, PartialProviderConfigSchema, ProvidersSchema, RoutingRecommender, STORY_METRICS, TelemetryConfigSchema, addTokenUsage, aggregateTokenUsageForRun, checkDoltInstalled, compareRunMetrics, createAmendmentRun, createConfigSystem, createDecision, createDoltClient, createPipelineRun, getActiveDecisions, getAllCostEntriesFiltered, getBaselineRunMetrics, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestCompletedRun, getLatestRun, getPipelineRunById, getPlanningCostTotal, getRetryableEscalations, getRunMetrics, getSessionCostSummary, getSessionCostSummaryFiltered, getStoryMetricsForRun, getTokenUsageSummary, incrementRunRestarts, initSchema, initializeDolt, listRequirements, listRunMetrics, loadParentRunDecisions, supersedeDecision, tagRunAsBaseline, updatePipelineRun } from "../dist-Bm0qSZer.js";
6
6
  import "../adapter-registry-DXLMTmfD.js";
7
- import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-Bku8fmY3.js";
8
- import "../errors-D1LU8CZ9.js";
7
+ import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-gXtnH8lO.js";
8
+ import "../errors-BSpu7pIv.js";
9
9
  import "../routing-CcBOCuC9.js";
10
10
  import "../decisions-C0pz9Clx.js";
11
11
  import "../version-manager-impl-BmOWu8ml.js";
12
- import { registerUpgradeCommand } from "../upgrade-DT0I_-1E.js";
12
+ import { registerUpgradeCommand } from "../upgrade-BcL4ZV3o.js";
13
13
  import { Command } from "commander";
14
14
  import { fileURLToPath } from "url";
15
15
  import { dirname, join, resolve } from "path";
@@ -24,7 +24,7 @@ import { isAbsolute, join as join$1 } from "node:path";
24
24
  import { z } from "zod";
25
25
  import * as fs from "node:fs/promises";
26
26
  import { access as access$1, readFile as readFile$1, readdir as readdir$1 } from "node:fs/promises";
27
- import { appendFileSync, chmodSync, cpSync, existsSync as existsSync$1, mkdirSync as mkdirSync$1, readFileSync as readFileSync$1, readdirSync as readdirSync$1, realpathSync, statSync, unlinkSync, writeFileSync as writeFileSync$1 } from "fs";
27
+ import { appendFileSync, chmodSync, cpSync, existsSync as existsSync$1, mkdirSync as mkdirSync$1, readFileSync as readFileSync$1, readdirSync as readdirSync$1, realpathSync, rmSync as rmSync$1, statSync, unlinkSync, writeFileSync as writeFileSync$1 } from "fs";
28
28
  import { homedir } from "os";
29
29
  import { createRequire } from "node:module";
30
30
  import { fileURLToPath as fileURLToPath$1 } from "node:url";
@@ -1802,6 +1802,132 @@ async function compileBmadAgents(bmadDir) {
1802
1802
  } catch {}
1803
1803
  return compiled;
1804
1804
  }
1805
+ /**
1806
+ * Parse a single CSV line, respecting double-quoted fields that may contain
1807
+ * commas and escaped quotes (RFC 4180). Returns an array of field values.
1808
+ */
1809
+ function parseCSVLine(line) {
1810
+ const fields = [];
1811
+ let current = "";
1812
+ let inQuotes = false;
1813
+ for (let i = 0; i < line.length; i++) {
1814
+ const ch = line[i];
1815
+ if (inQuotes) if (ch === "\"") if (i + 1 < line.length && line[i + 1] === "\"") {
1816
+ current += "\"";
1817
+ i++;
1818
+ } else inQuotes = false;
1819
+ else current += ch;
1820
+ else if (ch === "\"") inQuotes = true;
1821
+ else if (ch === ",") {
1822
+ fields.push(current);
1823
+ current = "";
1824
+ } else current += ch;
1825
+ }
1826
+ fields.push(current);
1827
+ return fields;
1828
+ }
1829
+ /**
1830
+ * Prepare the `.claude/skills/` directory by cleaning stale bmad-prefixed entries.
1831
+ * Returns the skills directory path.
1832
+ */
1833
+ function prepareSkillsDir(projectRoot) {
1834
+ const skillsDir = join(projectRoot, ".claude", "skills");
1835
+ mkdirSync$1(skillsDir, { recursive: true });
1836
+ try {
1837
+ for (const entry of readdirSync$1(skillsDir, { withFileTypes: true })) if (entry.isDirectory() && entry.name.startsWith("bmad")) rmSync$1(join(skillsDir, entry.name), {
1838
+ recursive: true,
1839
+ force: true
1840
+ });
1841
+ } catch {}
1842
+ return skillsDir;
1843
+ }
1844
+ /**
1845
+ * Install skills from `_bmad/_config/skill-manifest.csv` into `.claude/skills/`.
1846
+ *
1847
+ * Each row in the CSV specifies a canonicalId and a path to the SKILL.md file.
1848
+ * The entire source directory (dirname of the path) is copied to
1849
+ * `.claude/skills/<canonicalId>/`, matching bmad-method's installVerbatimSkills.
1850
+ *
1851
+ * @returns Number of skills installed.
1852
+ */
1853
+ function installSkillsFromManifest(projectRoot, bmadDir) {
1854
+ const csvPath = join(bmadDir, "_config", "skill-manifest.csv");
1855
+ if (!existsSync$1(csvPath)) return 0;
1856
+ const csvContent = readFileSync$1(csvPath, "utf-8");
1857
+ const lines = csvContent.split("\n").filter((l) => l.trim() !== "");
1858
+ if (lines.length < 2) return 0;
1859
+ const headers = parseCSVLine(lines[0]);
1860
+ const canonicalIdIdx = headers.indexOf("canonicalId");
1861
+ const pathIdx = headers.indexOf("path");
1862
+ if (canonicalIdIdx < 0 || pathIdx < 0) return 0;
1863
+ const bmadFolderName = "_bmad";
1864
+ const bmadPrefix = bmadFolderName + "/";
1865
+ const skillsDir = prepareSkillsDir(projectRoot);
1866
+ let count = 0;
1867
+ for (let i = 1; i < lines.length; i++) {
1868
+ const fields = parseCSVLine(lines[i]);
1869
+ const canonicalId = fields[canonicalIdIdx]?.trim();
1870
+ const skillPath = fields[pathIdx]?.trim();
1871
+ if (!canonicalId || !skillPath) continue;
1872
+ const relativePath = skillPath.startsWith(bmadPrefix) ? skillPath.slice(bmadPrefix.length) : skillPath;
1873
+ const sourceFile = join(bmadDir, relativePath);
1874
+ const sourceDir = dirname(sourceFile);
1875
+ if (!existsSync$1(sourceDir)) continue;
1876
+ const destDir = join(skillsDir, canonicalId);
1877
+ mkdirSync$1(destDir, { recursive: true });
1878
+ cpSync(sourceDir, destDir, { recursive: true });
1879
+ count++;
1880
+ }
1881
+ return count;
1882
+ }
1883
+ /**
1884
+ * Install skills directly from bmad-method source directories.
1885
+ *
1886
+ * Scans `src/core-skills/` and `src/bmm-skills/` (recursively) in the
1887
+ * bmad-method package for directories containing SKILL.md. Each directory
1888
+ * name is used as the canonicalId.
1889
+ *
1890
+ * This is the primary installation path for bmad-method v6.2.0+ where
1891
+ * skill-manifest.csv may be empty (it's populated by the full IDE installer,
1892
+ * which substrate doesn't call).
1893
+ *
1894
+ * @param installerLibPath - Path to bmad-method's tools/cli/installers/lib/
1895
+ * @returns Number of skills installed.
1896
+ */
1897
+ function installSkillsFromSource(projectRoot, installerLibPath) {
1898
+ const bmadMethodRoot = resolve(installerLibPath, "..", "..", "..", "..");
1899
+ const skillRoots = [join(bmadMethodRoot, "src", "core-skills"), join(bmadMethodRoot, "src", "bmm-skills")];
1900
+ const skillsDir = prepareSkillsDir(projectRoot);
1901
+ let count = 0;
1902
+ for (const root of skillRoots) {
1903
+ if (!existsSync$1(root)) continue;
1904
+ count += copySkillDirsRecursive(root, skillsDir);
1905
+ }
1906
+ return count;
1907
+ }
1908
+ /**
1909
+ * Recursively find directories containing SKILL.md and copy them to destRoot.
1910
+ * The directory name becomes the canonicalId (skill target directory name).
1911
+ */
1912
+ function copySkillDirsRecursive(dir, destRoot) {
1913
+ if (!existsSync$1(dir)) return 0;
1914
+ let count = 0;
1915
+ try {
1916
+ const entries = readdirSync$1(dir, { withFileTypes: true });
1917
+ for (const entry of entries) {
1918
+ if (!entry.isDirectory()) continue;
1919
+ const childPath = join(dir, entry.name);
1920
+ const skillFile = join(childPath, "SKILL.md");
1921
+ if (existsSync$1(skillFile)) {
1922
+ const destDir = join(destRoot, entry.name);
1923
+ mkdirSync$1(destDir, { recursive: true });
1924
+ cpSync(childPath, destDir, { recursive: true });
1925
+ count++;
1926
+ } else count += copySkillDirsRecursive(childPath, destRoot);
1927
+ }
1928
+ } catch {}
1929
+ return count;
1930
+ }
1805
1931
  async function scaffoldClaudeCommands(projectRoot, outputFormat) {
1806
1932
  const bmadDir = join(projectRoot, "_bmad");
1807
1933
  if (!existsSync$1(bmadDir)) return;
@@ -1840,11 +1966,11 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
1840
1966
  if (existsSync$1(workflowGenPath)) {
1841
1967
  const workflowMod = _require(workflowGenPath);
1842
1968
  WorkflowCommandGenerator = resolveExport(workflowMod, "WorkflowCommandGenerator");
1843
- } else logger$15.info("bmad-method workflow-command-generator not available; skipping workflow commands");
1969
+ } else logger$15.info("bmad-method workflow-command-generator not available; will try skill-based installation");
1844
1970
  if (existsSync$1(taskToolGenPath)) {
1845
1971
  const taskToolMod = _require(taskToolGenPath);
1846
1972
  TaskToolCommandGenerator = resolveExport(taskToolMod, "TaskToolCommandGenerator");
1847
- } else logger$15.info("bmad-method task-tool-command-generator not available; skipping task/tool commands");
1973
+ } else logger$15.info("bmad-method task-tool-command-generator not available; will try skill-based installation");
1848
1974
  let ManifestGenerator = null;
1849
1975
  if (existsSync$1(manifestGenPath)) {
1850
1976
  const manifestMod = _require(manifestGenPath);
@@ -1896,12 +2022,19 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
1896
2022
  const { artifacts: taskToolArtifacts } = await taskToolGen.collectTaskToolArtifacts(bmadDir);
1897
2023
  taskToolCount = typeof taskToolGen.writeDashArtifacts === "function" ? await taskToolGen.writeDashArtifacts(commandsDir, taskToolArtifacts) : await writeDashFallback(commandsDir, taskToolArtifacts, ["task", "tool"]);
1898
2024
  }
1899
- const total = agentCount + workflowCount + taskToolCount;
1900
- if (outputFormat !== "json") process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
2025
+ let skillCount = 0;
2026
+ if (!WorkflowCommandGenerator && !TaskToolCommandGenerator) {
2027
+ skillCount = installSkillsFromSource(projectRoot, installerLibPath);
2028
+ if (skillCount === 0) skillCount = installSkillsFromManifest(projectRoot, bmadDir);
2029
+ }
2030
+ const total = agentCount + workflowCount + taskToolCount + skillCount;
2031
+ if (outputFormat !== "json") if (skillCount > 0) process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(skillCount)} skills)\n`);
2032
+ else process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
1901
2033
  logger$15.info({
1902
2034
  agentCount,
1903
2035
  workflowCount,
1904
2036
  taskToolCount,
2037
+ skillCount,
1905
2038
  total,
1906
2039
  commandsDir
1907
2040
  }, "Generated .claude/commands/");
@@ -3055,7 +3188,7 @@ async function runStatusAction(options) {
3055
3188
  if (run === void 0) run = await getLatestRun(adapter);
3056
3189
  }
3057
3190
  if (run === void 0) {
3058
- const { inspectProcessTree } = await import("../health-GEDGgGan.js");
3191
+ const { inspectProcessTree } = await import("../health-DRywcMxP.js");
3059
3192
  const substrateDirPath = join(projectRoot, ".substrate");
3060
3193
  const processInfo = inspectProcessTree({
3061
3194
  projectRoot,
@@ -3940,7 +4073,7 @@ function defaultSupervisorDeps() {
3940
4073
  if (cached === null) {
3941
4074
  const { AdapterRegistry: AR } = await import(
3942
4075
  /* @vite-ignore */
3943
- "../adapter-registry-DbLuI3IA.js"
4076
+ "../adapter-registry-bsm0SCZW.js"
3944
4077
  );
3945
4078
  cached = new AR();
3946
4079
  await cached.discoverAndRegister();
@@ -4294,10 +4427,17 @@ async function runSupervisorAction(options, deps = {}) {
4294
4427
  }
4295
4428
  while (true) {
4296
4429
  const health = await getHealth({
4297
- runId,
4430
+ runId: state.runId ?? runId,
4298
4431
  projectRoot
4299
4432
  });
4300
4433
  const ts = new Date().toISOString();
4434
+ if (state.runId === void 0 && health.run_id !== null) {
4435
+ state = {
4436
+ ...state,
4437
+ runId: health.run_id
4438
+ };
4439
+ log(`Supervisor: auto-bound to active run ${health.run_id}`);
4440
+ }
4301
4441
  if (outputFormat === "json") {
4302
4442
  const tokenSnapshot = health.run_id !== null ? await getTokenSnapshot(health.run_id, projectRoot) : {
4303
4443
  input: 0,
@@ -4375,11 +4515,11 @@ async function runSupervisorAction(options, deps = {}) {
4375
4515
  try {
4376
4516
  const { createExperimenter } = await import(
4377
4517
  /* @vite-ignore */
4378
- "../experimenter-D0k2wT3I.js"
4518
+ "../experimenter-DV-ZC-uX.js"
4379
4519
  );
4380
4520
  const { getLatestRun: getLatest } = await import(
4381
4521
  /* @vite-ignore */
4382
- "../decisions-CGNEausW.js"
4522
+ "../decisions-DMpHsVUn.js"
4383
4523
  );
4384
4524
  const expAdapter = createDatabaseAdapter({
4385
4525
  backend: "auto",
@@ -4389,7 +4529,7 @@ async function runSupervisorAction(options, deps = {}) {
4389
4529
  await initSchema(expAdapter);
4390
4530
  const { runRunAction: runPipeline } = await import(
4391
4531
  /* @vite-ignore */
4392
- "../run-N5rHJu-Y.js"
4532
+ "../run-m7RCkpmx.js"
4393
4533
  );
4394
4534
  const runStoryFn = async (opts) => {
4395
4535
  const exitCode = await runPipeline({
@@ -4919,7 +5059,7 @@ async function runMetricsAction(options) {
4919
5059
  const routingConfigPath = join(dbDir, "routing.yml");
4920
5060
  let routingConfig = null;
4921
5061
  if (existsSync$1(routingConfigPath)) try {
4922
- const { loadModelRoutingConfig } = await import("../routing-B1aoIz7L.js");
5062
+ const { loadModelRoutingConfig } = await import("../routing-CXLeUI-t.js");
4923
5063
  routingConfig = loadModelRoutingConfig(routingConfigPath);
4924
5064
  } catch {}
4925
5065
  if (routingConfig === null) routingConfig = {
@@ -8756,8 +8896,8 @@ async function createProgram() {
8756
8896
  /** Fire-and-forget startup version check (story 8.3, AC3/AC5) */
8757
8897
  function checkForUpdatesInBackground(currentVersion) {
8758
8898
  if (process.env.SUBSTRATE_NO_UPDATE_CHECK === "1") return;
8759
- import("../upgrade-DPdh5w4p.js").then(async () => {
8760
- const { createVersionManager } = await import("../version-manager-impl-BHnUB2tl.js");
8899
+ import("../upgrade-CbwnF2hv.js").then(async () => {
8900
+ const { createVersionManager } = await import("../version-manager-impl-DBx3ovp9.js");
8761
8901
  const vm = createVersionManager();
8762
8902
  const result = await vm.checkForUpdates();
8763
8903
  if (result.updateAvailable) {
@@ -1,4 +1,4 @@
1
- import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, listRequirements, registerArtifact, updateDecision, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./dist-CLvAwmT7.js";
1
+ import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, listRequirements, registerArtifact, updateDecision, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./dist-Bm0qSZer.js";
2
2
  import "./decisions-C0pz9Clx.js";
3
3
 
4
4
  export { getLatestRun };
@@ -3004,12 +3004,15 @@ async function updatePipelineRunConfig(adapter, id, configJson) {
3004
3004
  async function createPipelineRun(adapter, input) {
3005
3005
  const validated = CreatePipelineRunInputSchema.parse(input);
3006
3006
  const id = crypto.randomUUID();
3007
- await adapter.query(`INSERT INTO pipeline_runs (id, methodology, current_phase, status, config_json)
3008
- VALUES (?, ?, ?, 'running', ?)`, [
3007
+ const nowUtc = new Date().toISOString();
3008
+ await adapter.query(`INSERT INTO pipeline_runs (id, methodology, current_phase, status, config_json, created_at, updated_at)
3009
+ VALUES (?, ?, ?, 'running', ?, ?, ?)`, [
3009
3010
  id,
3010
3011
  validated.methodology,
3011
3012
  validated.start_phase ?? null,
3012
- validated.config_json ?? null
3013
+ validated.config_json ?? null,
3014
+ nowUtc,
3015
+ nowUtc
3013
3016
  ]);
3014
3017
  const rows = await adapter.query("SELECT * FROM pipeline_runs WHERE id = ?", [id]);
3015
3018
  return rows[0];
@@ -10236,4 +10239,4 @@ async function callLLM(params) {
10236
10239
 
10237
10240
  //#endregion
10238
10241
  export { ADVISORY_NOTES, AdapterRegistry, AdtError, BudgetConfigSchema, CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, Categorizer, ClaudeCodeAdapter, CodexCLIAdapter, ConfigError, ConfigIncompatibleFormatError, ConsumerAnalyzer, CostTrackerConfigSchema, DEFAULT_CONFIG, DEFAULT_GLOBAL_SETTINGS, DispatcherImpl, DoltClient, DoltNotInstalled, DoltQueryError, ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, EfficiencyScorer, GeminiCLIAdapter, GlobalSettingsSchema, IngestionServer, LogTurnAnalyzer, ModelRoutingConfigSchema, MonitorDatabaseImpl, OPERATIONAL_FINDING, PartialGlobalSettingsSchema, PartialProviderConfigSchema, ProviderPolicySchema, ProvidersSchema, Recommender, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, STORY_METRICS, STORY_OUTCOME, SubstrateConfigSchema, TASK_TYPE_PHASE_MAP, TEST_EXPANSION_FINDING, TEST_PLAN, TelemetryConfigSchema, TelemetryNormalizer, TelemetryPipeline, TurnAnalyzer, VersionManagerImpl, addTokenUsage, aggregateTokenUsageForRun, aggregateTokenUsageForStory, buildAuditLogEntry, buildBranchName, buildModificationDirective, buildPRBody, buildWorktreePath, callLLM, checkDoltInstalled, compareRunMetrics, createAmendmentRun, createConfigSystem, createDatabaseAdapter as createDatabaseAdapter$1, createDecision, createDoltClient, createExperimenter, createPipelineRun, createRequirement, createVersionManager, detectInterfaceChanges, determineVerdict, getActiveDecisions, getAllCostEntriesFiltered, getArtifactByTypeForRun, getArtifactsByRun, getBaselineRunMetrics, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestCompletedRun, getLatestRun, getModelTier, getPipelineRunById, getPlanningCostTotal, getRetryableEscalations, getRunMetrics, getRunningPipelineRuns, getSessionCostSummary, getSessionCostSummaryFiltered, getStoryMetricsForRun, getTokenUsageSummary, incrementRunRestarts, initSchema, initializeDolt, listRequirements, listRunMetrics, 
loadModelRoutingConfig, loadParentRunDecisions, registerArtifact, resolvePromptFile, supersedeDecision, tagRunAsBaseline, updateDecision, updatePipelineRun, updatePipelineRunConfig, upsertDecision, writeRunMetrics, writeStoryMetrics };
10239
- //# sourceMappingURL=dist-CLvAwmT7.js.map
10242
+ //# sourceMappingURL=dist-Bm0qSZer.js.map
@@ -1,4 +1,4 @@
1
- import { AdtError } from "./dist-CLvAwmT7.js";
1
+ import { AdtError } from "./dist-Bm0qSZer.js";
2
2
 
3
3
  //#region src/core/errors.ts
4
4
  /** Error thrown when task configuration is invalid */
@@ -71,4 +71,4 @@ var TaskGraphIncompatibleFormatError = class extends AdtError {
71
71
 
72
72
  //#endregion
73
73
  export { BudgetExceededError, GitError, RecoveryError, TaskConfigError, TaskGraphCycleError, TaskGraphError, TaskGraphIncompatibleFormatError, WorkerError, WorkerNotFoundError };
74
- //# sourceMappingURL=errors-D1LU8CZ9.js.map
74
+ //# sourceMappingURL=errors-BSpu7pIv.js.map
@@ -1,3 +1,3 @@
1
- import { buildAuditLogEntry, buildBranchName, buildModificationDirective, buildPRBody, buildWorktreePath, createExperimenter, determineVerdict, resolvePromptFile } from "./dist-CLvAwmT7.js";
1
+ import { buildAuditLogEntry, buildBranchName, buildModificationDirective, buildPRBody, buildWorktreePath, createExperimenter, determineVerdict, resolvePromptFile } from "./dist-Bm0qSZer.js";
2
2
 
3
3
  export { createExperimenter };
@@ -1,5 +1,5 @@
1
1
  import { createLogger } from "./logger-KeHncl-f.js";
2
- import { DoltClient, DoltQueryError, createDatabaseAdapter$1 as createDatabaseAdapter, getLatestRun, getPipelineRunById, initSchema } from "./dist-CLvAwmT7.js";
2
+ import { DoltClient, DoltQueryError, createDatabaseAdapter$1 as createDatabaseAdapter, getLatestRun, getPipelineRunById, initSchema } from "./dist-Bm0qSZer.js";
3
3
  import { createRequire } from "module";
4
4
  import { dirname, join } from "path";
5
5
  import { existsSync, readFileSync } from "node:fs";
@@ -1930,4 +1930,4 @@ function registerHealthCommand(program, _version = "0.0.0", projectRoot = proces
1930
1930
 
1931
1931
  //#endregion
1932
1932
  export { BMAD_BASELINE_TOKENS_FULL, DEFAULT_STALL_THRESHOLD_SECONDS, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN$1 as STORY_KEY_PATTERN, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, buildPipelineStatusOutput, createDatabaseAdapter$1 as createDatabaseAdapter, createStateStore, detectCycles, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, isOrchestratorProcessLine, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, runHealthAction, validateStoryKey };
1933
- //# sourceMappingURL=health-DswaC1q5.js.map
1933
+ //# sourceMappingURL=health-Cx2ZhRNT.js.map
@@ -1,6 +1,6 @@
1
- import { DEFAULT_STALL_THRESHOLD_SECONDS, getAllDescendantPids, getAutoHealthData, inspectProcessTree, isOrchestratorProcessLine, registerHealthCommand, runHealthAction } from "./health-DswaC1q5.js";
1
+ import { DEFAULT_STALL_THRESHOLD_SECONDS, getAllDescendantPids, getAutoHealthData, inspectProcessTree, isOrchestratorProcessLine, registerHealthCommand, runHealthAction } from "./health-Cx2ZhRNT.js";
2
2
  import "./logger-KeHncl-f.js";
3
- import "./dist-CLvAwmT7.js";
3
+ import "./dist-Bm0qSZer.js";
4
4
  import "./decisions-C0pz9Clx.js";
5
5
 
6
6
  export { inspectProcessTree };
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
1
1
  import { childLogger, createLogger, logger } from "./logger-KeHncl-f.js";
2
2
  import { assertDefined, createEventBus, createTuiApp, deepClone, formatDuration, generateId, isPlainObject, isTuiCapable, printNonTtyWarning, sleep, withRetry } from "./helpers-CElYrONe.js";
3
- import { AdapterRegistry, AdtError, ClaudeCodeAdapter, CodexCLIAdapter, ConfigError, ConfigIncompatibleFormatError, GeminiCLIAdapter } from "./dist-CLvAwmT7.js";
3
+ import { AdapterRegistry, AdtError, ClaudeCodeAdapter, CodexCLIAdapter, ConfigError, ConfigIncompatibleFormatError, GeminiCLIAdapter } from "./dist-Bm0qSZer.js";
4
4
  import "./adapter-registry-DXLMTmfD.js";
5
- import { BudgetExceededError, GitError, RecoveryError, TaskConfigError, TaskGraphCycleError, TaskGraphError, TaskGraphIncompatibleFormatError, WorkerError, WorkerNotFoundError } from "./errors-D1LU8CZ9.js";
5
+ import { BudgetExceededError, GitError, RecoveryError, TaskConfigError, TaskGraphCycleError, TaskGraphError, TaskGraphIncompatibleFormatError, WorkerError, WorkerNotFoundError } from "./errors-BSpu7pIv.js";
6
6
 
7
7
  //#region src/core/di.ts
8
8
  /**
@@ -1,4 +1,4 @@
1
- import { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, TASK_TYPE_PHASE_MAP, getModelTier, loadModelRoutingConfig } from "./dist-CLvAwmT7.js";
1
+ import { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, TASK_TYPE_PHASE_MAP, getModelTier, loadModelRoutingConfig } from "./dist-Bm0qSZer.js";
2
2
  import "./routing-CcBOCuC9.js";
3
3
 
4
4
  export { loadModelRoutingConfig };
@@ -1,7 +1,7 @@
1
- import { BMAD_BASELINE_TOKENS_FULL, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, buildPipelineStatusOutput, createDatabaseAdapter, formatOutput, formatPipelineSummary, formatTokenTelemetry, inspectProcessTree, parseDbTimestampAsUtc, resolveMainRepoRoot, validateStoryKey } from "./health-DswaC1q5.js";
1
+ import { BMAD_BASELINE_TOKENS_FULL, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, buildPipelineStatusOutput, createDatabaseAdapter, formatOutput, formatPipelineSummary, formatTokenTelemetry, inspectProcessTree, parseDbTimestampAsUtc, resolveMainRepoRoot, validateStoryKey } from "./health-Cx2ZhRNT.js";
2
2
  import { createLogger } from "./logger-KeHncl-f.js";
3
3
  import { TypedEventBusImpl, createEventBus, createTuiApp, isTuiCapable, printNonTtyWarning, sleep } from "./helpers-CElYrONe.js";
4
- import { ADVISORY_NOTES, Categorizer, ConsumerAnalyzer, DEFAULT_GLOBAL_SETTINGS, DispatcherImpl, DoltClient, ESCALATION_DIAGNOSIS, EfficiencyScorer, IngestionServer, LogTurnAnalyzer, OPERATIONAL_FINDING, Recommender, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, STORY_METRICS, STORY_OUTCOME, SubstrateConfigSchema, TEST_EXPANSION_FINDING, TEST_PLAN, TelemetryNormalizer, TelemetryPipeline, TurnAnalyzer, addTokenUsage, aggregateTokenUsageForRun, aggregateTokenUsageForStory, callLLM, createConfigSystem, createDatabaseAdapter$1, createDecision, createPipelineRun, createRequirement, detectInterfaceChanges, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getPipelineRunById, getRunningPipelineRuns, getStoryMetricsForRun, getTokenUsageSummary, initSchema, loadModelRoutingConfig, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision, writeRunMetrics, writeStoryMetrics } from "./dist-CLvAwmT7.js";
4
+ import { ADVISORY_NOTES, Categorizer, ConsumerAnalyzer, DEFAULT_GLOBAL_SETTINGS, DispatcherImpl, DoltClient, ESCALATION_DIAGNOSIS, EfficiencyScorer, IngestionServer, LogTurnAnalyzer, OPERATIONAL_FINDING, Recommender, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, STORY_METRICS, STORY_OUTCOME, SubstrateConfigSchema, TEST_EXPANSION_FINDING, TEST_PLAN, TelemetryNormalizer, TelemetryPipeline, TurnAnalyzer, addTokenUsage, aggregateTokenUsageForRun, aggregateTokenUsageForStory, callLLM, createConfigSystem, createDatabaseAdapter$1, createDecision, createPipelineRun, createRequirement, detectInterfaceChanges, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getPipelineRunById, getRunningPipelineRuns, getStoryMetricsForRun, getTokenUsageSummary, initSchema, loadModelRoutingConfig, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision, writeRunMetrics, writeStoryMetrics } from "./dist-Bm0qSZer.js";
5
5
  import { basename, dirname, extname, join } from "path";
6
6
  import { access, readFile, readdir, stat } from "fs/promises";
7
7
  import { EventEmitter } from "node:events";
@@ -10149,6 +10149,9 @@ function createImplementationOrchestrator(deps) {
10149
10149
  completedDispatches: completed,
10150
10150
  queuedDispatches: queued
10151
10151
  });
10152
+ if (config.pipelineRunId !== void 0) updatePipelineRun(db, config.pipelineRunId, { current_phase: "implementation" }).catch((err) => {
10153
+ logger$21.debug({ err }, "Heartbeat: failed to touch updated_at (non-fatal)");
10154
+ });
10152
10155
  const elapsed = Date.now() - _lastProgressTs;
10153
10156
  let childPids = [];
10154
10157
  let childActive = false;
@@ -22250,12 +22253,17 @@ var RunStateManager = class {
22250
22253
  * or `PARTIAL_SUCCESS` for the pipeline to exit normally.
22251
22254
  *
22252
22255
  * Story 42-16.
22256
+ * Story 49-3: AutoSummarizer integration for long-running convergence loops.
22253
22257
  */
22254
22258
  /**
22255
22259
  * Create a new `ConvergenceController` instance backed by an in-memory outcome map.
22260
+ *
22261
+ * @param config - Optional configuration. Pass `{ autoSummarizer }` to enable
22262
+ * automatic context compression in long-running convergence loops.
22256
22263
  */
22257
- function createConvergenceController() {
22264
+ function createConvergenceController(config) {
22258
22265
  const outcomes = new Map();
22266
+ let storedContexts = [];
22259
22267
  /** Returns true only when id is non-empty AND exists in graph.nodes. */
22260
22268
  function isValidTarget(id, graph) {
22261
22269
  return id !== "" && graph.nodes.has(id);
@@ -22315,6 +22323,23 @@ function createConvergenceController() {
22315
22323
  ];
22316
22324
  for (const candidate of candidates) if (isValidTarget(candidate, graph)) return candidate;
22317
22325
  return null;
22326
+ },
22327
+ recordIterationContext(ctx) {
22328
+ storedContexts.push(ctx);
22329
+ },
22330
+ async prepareForIteration(currentIndex) {
22331
+ if (!config?.autoSummarizer || storedContexts.length === 0) return storedContexts;
22332
+ const uncompressedContexts = storedContexts.filter((c) => !("compressed" in c));
22333
+ if (config.autoSummarizer.shouldTrigger(uncompressedContexts)) {
22334
+ const compressionResult = await config.autoSummarizer.compress(uncompressedContexts, currentIndex);
22335
+ const alreadyCompressed = storedContexts.filter((c) => "compressed" in c);
22336
+ const merged = [...alreadyCompressed, ...compressionResult.iterations].sort((a, b) => a.index - b.index);
22337
+ storedContexts = merged;
22338
+ }
22339
+ return storedContexts;
22340
+ },
22341
+ getStoredContexts() {
22342
+ return storedContexts;
22318
22343
  }
22319
22344
  };
22320
22345
  }
@@ -22925,6 +22950,43 @@ async function getFactoryRunSummaries(adapter, limit = 20) {
22925
22950
  });
22926
22951
  }
22927
22952
 
22953
+ //#endregion
22954
+ //#region packages/factory/dist/graph/fidelity.js
22955
/**
 * Map a raw fidelity string to a SummaryLevel for context compression.
 *
 * Returns null when no summarization should be applied (fidelity is 'full',
 * empty, or an unrecognized value). Used by the executor before every node
 * dispatch to determine whether to call summaryEngine.summarize().
 *
 * Fix: the previous object-literal lookup (`FIDELITY_MAP[fidelity] ?? null`)
 * inherited from Object.prototype, so inputs like "toString" or "constructor"
 * resolved to prototype members instead of null. A switch statement has no
 * inherited keys, so every unknown string now yields null.
 *
 * @param fidelity - Raw fidelity string from an edge, node, or graph default.
 * @returns "high" | "medium" | "low" for recognized values, otherwise null.
 */
function parseFidelityLevel(fidelity) {
	switch (fidelity) {
		case "high":
		case "summary:high":
			return "high";
		case "medium":
		case "summary:medium":
			return "medium";
		case "low":
		case "draft":
		case "summary:low":
			return "low";
		default:
			// "full", "", and anything unrecognized → no summarization.
			return null;
	}
}
22974
/**
 * Resolve the effective fidelity string for a node about to be dispatched.
 *
 * Precedence (highest to lowest):
 * 1. incomingEdge.fidelity (non-empty)
 * 2. node.fidelity (non-empty)
 * 3. graph.defaultFidelity (non-empty)
 * 4. '' (no fidelity set — parseFidelityLevel will return null)
 */
function resolveFidelity(node, incomingEdge, graph) {
	const precedence = [incomingEdge?.fidelity, node.fidelity, graph.defaultFidelity];
	for (const candidate of precedence) {
		if (candidate) return candidate;
	}
	return "";
}
22989
+
22928
22990
  //#endregion
22929
22991
  //#region packages/factory/dist/graph/executor.js
22930
22992
  /**
@@ -23060,6 +23122,7 @@ function createGraphExecutor() {
23060
23122
  let firstResumedFidelity = "";
23061
23123
  let skipCycleCheck = false;
23062
23124
  let skipCompletedPush = false;
23125
+ let lastIncomingEdge = void 0;
23063
23126
  let scenarioManifest = null;
23064
23127
  if (config.scenarioStore) scenarioManifest = await config.scenarioStore.discover();
23065
23128
  const runStateManager = config.dotSource ? new RunStateManager({ runDir: config.logsRoot }) : null;
@@ -23207,6 +23270,7 @@ function createGraphExecutor() {
23207
23270
  satisfactionScoreHistory: plateauResult.scores
23208
23271
  });
23209
23272
  injectRemediationContext(context, remediation);
23273
+ await controller.prepareForIteration(convergenceIteration);
23210
23274
  skipCycleCheck = true;
23211
23275
  currentNode = retryNode;
23212
23276
  continue;
@@ -23273,6 +23337,25 @@ function createGraphExecutor() {
23273
23337
  fidelity: firstResumedFidelity
23274
23338
  } : currentNode;
23275
23339
  firstResumedFidelity = "";
23340
+ if (config.summaryEngine) {
23341
+ const effectiveFidelity = resolveFidelity(nodeToDispatch, lastIncomingEdge, graph);
23342
+ const summaryLevel = parseFidelityLevel(effectiveFidelity);
23343
+ if (summaryLevel !== null) {
23344
+ const nodeContextContent = context.getString("factory.nodeContext", "");
23345
+ if (nodeContextContent !== "") {
23346
+ const summary = await config.summaryEngine.summarize(nodeContextContent, summaryLevel);
23347
+ context.set("factory.compressedNodeContext", summary.content);
23348
+ context.set("factory.nodeContext", summary.content);
23349
+ config.eventBus?.emit("graph:context-summarized", {
23350
+ runId: config.runId,
23351
+ nodeId: nodeToDispatch.id,
23352
+ level: summaryLevel,
23353
+ originalTokenCount: summary.originalTokenCount ?? 0,
23354
+ summaryTokenCount: summary.summaryTokenCount ?? 0
23355
+ });
23356
+ }
23357
+ }
23358
+ }
23276
23359
  const startedAt = Date.now();
23277
23360
  let outcome = await dispatchWithRetry(nodeToDispatch, context, graph, config, nodeRetries);
23278
23361
  if (Date.now() - startedAt < 50) await new Promise((r) => setTimeout(r, 50));
@@ -23311,6 +23394,11 @@ function createGraphExecutor() {
23311
23394
  {
23312
23395
  const controllerStatus = outcome.status === "SUCCESS" ? "SUCCESS" : outcome.status === "PARTIAL_SUCCESS" ? "PARTIAL_SUCCESS" : "FAILURE";
23313
23396
  controller.recordOutcome(nodeToDispatch.id, controllerStatus);
23397
+ const iterContent = context.getString("factory.nodeContext", "");
23398
+ controller.recordIterationContext({
23399
+ index: convergenceIteration,
23400
+ content: iterContent
23401
+ });
23314
23402
  }
23315
23403
  if (outcome.contextUpdates) for (const [key, value] of Object.entries(outcome.contextUpdates)) context.set(key, value);
23316
23404
  context.set("outcome", outcome.status.toLowerCase());
@@ -23398,6 +23486,7 @@ function createGraphExecutor() {
23398
23486
  }
23399
23487
  const nextNode = graph.nodes.get(edge.toNode);
23400
23488
  if (!nextNode) throw new Error(`Edge target node "${edge.toNode}" not found in graph`);
23489
+ lastIncomingEdge = edge;
23401
23490
  currentNode = nextNode;
23402
23491
  }
23403
23492
  } };
@@ -36236,6 +36325,419 @@ function registerScenariosCommand(program) {
36236
36325
  });
36237
36326
  }
36238
36327
 
36328
+ //#endregion
36329
+ //#region packages/factory/dist/context/summary-cache.js
36330
/**
 * Disk-backed cache of pyramid summaries for a single run.
 * Layout: {storageDir}/{runId}/summaries/{hash}-{level}.json plus an optional
 * {hash}.orig file holding the uncompressed original content.
 */
var SummaryCache = class {
	config;
	constructor(config) {
		this.config = config;
	}
	/** Absolute path of the cached summary JSON for (hash, level). */
	summaryPath(hash, level) {
		const { storageDir, runId } = this.config;
		return join$1(storageDir, runId, "summaries", `${hash}-${level}.json`);
	}
	/** Absolute path of the stored original content for a hash. */
	originalPath(hash) {
		const { storageDir, runId } = this.config;
		return join$1(storageDir, runId, "summaries", `${hash}.orig`);
	}
	/**
	 * Persist a summary record (and, unless config.storeOriginals === false,
	 * the original content) to disk, creating the summaries dir if needed.
	 */
	async put(summary, originalContent) {
		const { storageDir, runId, storeOriginals } = this.config;
		await mkdir$1(join$1(storageDir, runId, "summaries"), { recursive: true });
		const payload = JSON.stringify({
			summary,
			cachedAt: new Date().toISOString()
		}, null, 2);
		await writeFile$1(this.summaryPath(summary.originalHash, summary.level), payload);
		const wantsOriginal = originalContent !== void 0 && storeOriginals !== false;
		if (wantsOriginal) {
			await writeFile$1(this.originalPath(summary.originalHash), originalContent);
		}
	}
	/** Load a cached summary, or null when none exists for (hash, level). */
	async get(originalHash, level) {
		let raw;
		try {
			raw = await readFile$1(this.summaryPath(originalHash, level), "utf-8");
		} catch (err) {
			if (err.code === "ENOENT") return null;
			throw err;
		}
		return JSON.parse(raw).summary;
	}
	/** Load the stored original content, or null when no .orig file exists. */
	async getOriginal(originalHash) {
		try {
			return await readFile$1(this.originalPath(originalHash), "utf-8");
		} catch (err) {
			if (err.code === "ENOENT") return null;
			throw err;
		}
	}
};
36370
/**
 * SummaryEngine decorator that memoizes results in a SummaryCache, keyed by
 * the sha256 of the content plus the target level. Expansion prefers the
 * lossless stored original over the inner engine's reconstruction.
 */
var CachingSummaryEngine = class {
	inner;
	cache;
	name;
	constructor(inner, cache) {
		this.inner = inner;
		this.cache = cache;
		this.name = `caching(${inner.name})`;
	}
	/**
	 * Return a cached summary when one exists for (sha256(content), level);
	 * otherwise delegate to the inner engine and store the fresh result
	 * alongside the original content.
	 */
	async summarize(content, targetLevel, opts) {
		const contentHash = createHash("sha256").update(content).digest("hex");
		const hit = await this.cache.get(contentHash, targetLevel);
		if (hit !== null) {
			return hit;
		}
		const fresh = await this.inner.summarize(content, targetLevel, opts);
		await this.cache.put(fresh, content);
		return fresh;
	}
	/**
	 * Expand losslessly from the stored .orig when available; fall back to
	 * the inner engine otherwise.
	 */
	async expand(summary, targetLevel, opts) {
		const stored = await this.cache.getOriginal(summary.originalHash);
		return stored !== null ? stored : this.inner.expand(summary, targetLevel, opts);
	}
};
36393
+
36394
+ //#endregion
36395
+ //#region packages/factory/dist/context/summary-metrics.js
36396
/**
 * Compute the compression ratio of a summary.
 * Returns summaryTokenCount / originalTokenCount in [0, 1].
 * Returns -1 as a sentinel when token counts are absent or originalTokenCount is zero.
 */
function computeCompressionRatio(summary) {
	const { originalTokenCount, summaryTokenCount } = summary;
	const unavailable = originalTokenCount === void 0 || originalTokenCount === 0 || summaryTokenCount === void 0;
	return unavailable ? -1 : summaryTokenCount / originalTokenCount;
}
36405
/**
 * Extract key facts from content:
 * - Fenced code blocks (triple-backtick)
 * - File-path tokens matching known extensions
 * - Error-type names
 */
function extractKeyFacts(content) {
	const facts = new Set();
	// Fenced code blocks are kept whole (trimmed), so retention checks compare
	// the entire block rather than individual lines.
	for (const m of content.matchAll(/```[\s\S]*?```/g)) {
		facts.add(m[0].trim());
	}
	for (const m of content.matchAll(/[\w./]+(\.ts|\.js|\.json|\.md|\.go|\.py|\.yaml|\.yml)\b/g)) {
		facts.add(m[0]);
	}
	for (const m of content.matchAll(/\b(Error|Exception|ENOENT|ETIMEDOUT|TypeError|SyntaxError)\b/g)) {
		facts.add(m[0]);
	}
	return facts;
}
36422
/**
 * Compute the key-fact retention rate between original and summarized content.
 * Returns the fraction of original key facts that appear in the summarized content.
 * Returns 1.0 when the original has no key facts.
 */
function computeKeyFactRetentionRate(original, summarized) {
	const before = extractKeyFacts(original);
	if (before.size === 0) return 1;
	const after = extractKeyFacts(summarized);
	let preserved = 0;
	for (const fact of before) {
		if (after.has(fact)) preserved += 1;
	}
	return preserved / before.size;
}
36434
+
36435
+ //#endregion
36436
+ //#region packages/factory/dist/context/cli-command.js
36437
/** Build a CLIJsonOutput envelope wrapping a command's payload with metadata. */
function buildJsonOutput(command, data, version) {
	const timestamp = new Date().toISOString();
	return { timestamp, version, command, data };
}
36446
/** Minimal stub SummaryEngine used when no engineFactory is provided. */
var StubSummaryEngine = class {
	name = "stub";
	/** "Summarize" by echoing the content unchanged, tagged with its sha256. */
	async summarize(content, targetLevel) {
		return {
			level: targetLevel,
			content,
			originalHash: createHash("sha256").update(content).digest("hex"),
			createdAt: new Date().toISOString()
		};
	}
	/** Expanding a stub summary simply returns its stored content. */
	async expand(summary) {
		return summary.content;
	}
};
36462
/**
 * Scan all `*.json` summary files in summariesDir to find the most recently
 * stored summary matching the given iteration number.
 *
 * Matches on `record.summary.iterationIndex === iteration`.
 * Returns null if no match is found.
 */
async function findSummaryForIteration(summariesDir, iteration) {
	let entries;
	try {
		entries = await readdir$1(summariesDir);
	} catch {
		// Missing/unreadable directory → no summaries.
		return null;
	}
	const candidates = [];
	for (const file of entries) {
		if (!file.endsWith(".json")) continue;
		try {
			const record = JSON.parse(await readFile$1(join$1(summariesDir, file), "utf-8"));
			if (record.summary.iterationIndex === iteration) {
				candidates.push({
					hash: record.summary.originalHash,
					record,
					cachedAt: new Date(record.cachedAt).getTime()
				});
			}
		} catch {
			// Malformed or unreadable summary files are skipped silently.
		}
	}
	if (candidates.length === 0) return null;
	candidates.sort((left, right) => right.cachedAt - left.cachedAt);
	const { hash, record } = candidates[0];
	return { hash, record };
}
36495
/**
 * Core logic for `factory context summarize`.
 *
 * Loads the original content, runs it through CachingSummaryEngine, stores the
 * result, and prints compression statistics.
 *
 * @returns Exit code: 0 on success, 1 on error.
 */
async function summarizeAction(opts, deps, output = {
	stdout: process.stdout,
	stderr: process.stderr
}) {
	const { stdout, stderr } = output;
	// --iteration must be a plain non-negative integer string.
	if (!/^\d+$/.test(opts.iteration)) {
		stderr.write(`Error: --iteration must be a non-negative integer, got: ${opts.iteration}\n`);
		return 1;
	}
	const iterationNumber = parseInt(opts.iteration, 10);
	const validLevels = ["full", "high", "medium", "low"];
	if (!validLevels.includes(opts.level)) {
		stderr.write(`Error: --level must be one of: full, high, medium, low. Got: ${opts.level}\n`);
		return 1;
	}
	const targetLevel = opts.level;
	const summariesDir = join$1(deps.storageDir, "runs", opts.run, "summaries");
	try {
		await stat$1(summariesDir);
	} catch {
		stderr.write(`Error: Run directory not found: ${summariesDir}\n`);
		return 1;
	}
	const found = await findSummaryForIteration(summariesDir, iterationNumber);
	if (!found) {
		stderr.write(`Error: No summary found for iteration ${iterationNumber} in run ${opts.run}\n`);
		return 1;
	}
	const cache = new SummaryCache({
		runId: opts.run,
		storageDir: join$1(deps.storageDir, "runs")
	});
	const originalContent = await cache.getOriginal(found.hash);
	if (originalContent === null) {
		stderr.write(`Error: Original content (.orig) not found for hash ${found.hash.slice(0, 8)} in run ${opts.run}\n`);
		return 1;
	}
	// Injected engineFactory wins (tests); otherwise fall back to the stub.
	const innerEngine = deps.engineFactory ? deps.engineFactory() : new StubSummaryEngine();
	const engine = new CachingSummaryEngine(innerEngine, cache);
	const summary = await engine.summarize(originalContent, targetLevel);
	await cache.put(summary, originalContent);
	const compressionRatio = computeCompressionRatio(summary);
	if (opts.outputFormat === "json") {
		const payload = {
			hash: summary.originalHash,
			level: summary.level,
			compressionRatio,
			summaryTokenCount: summary.summaryTokenCount,
			originalTokenCount: summary.originalTokenCount
		};
		stdout.write(JSON.stringify(buildJsonOutput("factory context summarize", payload, deps.version)) + "\n");
		return 0;
	}
	const ratioStr = compressionRatio >= 0 ? compressionRatio.toFixed(2) : "n/a";
	stdout.write(`Summarized iteration ${iterationNumber} → level ${targetLevel} | hash: ${summary.originalHash.slice(0, 8)} | compression: ${ratioStr}\n`);
	return 0;
}
36565
/**
 * Core logic for `factory context expand`.
 *
 * Finds the stored summary for the given iteration, then expands it back
 * toward full content (lossless if .orig exists, LLM fallback otherwise).
 *
 * @returns Exit code: 0 on success, 1 on error.
 */
async function expandAction(opts, deps, output = {
	stdout: process.stdout,
	stderr: process.stderr
}) {
	const { stdout, stderr } = output;
	if (!/^\d+$/.test(opts.iteration)) {
		stderr.write(`Error: --iteration must be a non-negative integer, got: ${opts.iteration}\n`);
		return 1;
	}
	const iterationNumber = parseInt(opts.iteration, 10);
	const summariesDir = join$1(deps.storageDir, "runs", opts.run, "summaries");
	try {
		await stat$1(summariesDir);
	} catch {
		stderr.write(`Error: Run directory not found: ${summariesDir}\n`);
		return 1;
	}
	const found = await findSummaryForIteration(summariesDir, iterationNumber);
	if (!found) {
		stderr.write(`Error: No summary found for iteration ${iterationNumber} in run ${opts.run}\n`);
		return 1;
	}
	const cache = new SummaryCache({
		runId: opts.run,
		storageDir: join$1(deps.storageDir, "runs")
	});
	// Injected engineFactory wins (tests); otherwise fall back to the stub.
	const innerEngine = deps.engineFactory ? deps.engineFactory() : new StubSummaryEngine();
	const engine = new CachingSummaryEngine(innerEngine, cache);
	const expandedContent = await engine.expand(found.record.summary, "full");
	if (opts.outputFormat === "json") {
		const payload = {
			hash: found.record.summary.originalHash,
			level: found.record.summary.level,
			expandedLength: expandedContent.length,
			content: expandedContent
		};
		stdout.write(JSON.stringify(buildJsonOutput("factory context expand", payload, deps.version)) + "\n");
	} else stdout.write(expandedContent + "\n");
	return 0;
}
36613
/**
 * Core logic for `factory context stats`.
 *
 * Reads all stored summary records for a run, computes compression metrics,
 * and prints a formatted table (or JSON array).
 *
 * @returns Exit code: 0 on success, 1 on error.
 */
async function statsAction(opts, deps, output = {
	stdout: process.stdout,
	stderr: process.stderr
}) {
	const { stdout, stderr } = output;
	const summariesDir = join$1(deps.storageDir, "runs", opts.run, "summaries");
	try {
		await stat$1(summariesDir);
	} catch {
		stderr.write(`Error: Run directory not found: ${summariesDir}\n`);
		return 1;
	}
	let entries;
	try {
		entries = await readdir$1(summariesDir);
	} catch {
		stderr.write(`Error: Could not read summaries directory: ${summariesDir}\n`);
		return 1;
	}
	const rows = [];
	for (const file of entries) {
		if (!file.endsWith(".json")) continue;
		try {
			const record = JSON.parse(await readFile$1(join$1(summariesDir, file), "utf-8"));
			let keyFactRetentionRate = -1;
			try {
				const originalContent = await readFile$1(join$1(summariesDir, `${record.summary.originalHash}.orig`), "utf-8");
				keyFactRetentionRate = computeKeyFactRetentionRate(originalContent, record.summary.content);
			} catch {
				// No readable .orig on disk — retention reported as "n/a".
			}
			rows.push({
				hash: record.summary.originalHash,
				level: record.summary.level,
				compressionRatio: computeCompressionRatio(record.summary),
				keyFactRetentionRate,
				cachedAt: record.cachedAt
			});
		} catch {
			// Malformed summary files are skipped.
		}
	}
	// Oldest first, by cache timestamp.
	rows.sort((a, b) => new Date(a.cachedAt).getTime() - new Date(b.cachedAt).getTime());
	if (opts.outputFormat === "json") {
		stdout.write(JSON.stringify(buildJsonOutput("factory context stats", rows, deps.version)) + "\n");
		return 0;
	}
	if (rows.length === 0) {
		stdout.write("No summaries found.\n");
		return 0;
	}
	const headers = ["Hash", "Level", "CompRatio", "KeyRetention", "CachedAt"];
	const tableRows = rows.map((row) => ({
		Hash: row.hash.slice(0, 8),
		Level: row.level,
		CompRatio: row.compressionRatio >= 0 ? row.compressionRatio.toFixed(4) : "n/a",
		KeyRetention: row.keyFactRetentionRate >= 0 ? row.keyFactRetentionRate.toFixed(4) : "n/a",
		CachedAt: row.cachedAt
	}));
	// Column width = max(header, widest cell) so padEnd aligns every row.
	const widths = headers.map((header) => {
		let width = header.length;
		for (const row of tableRows) width = Math.max(width, (row[header] ?? "").length);
		return width;
	});
	stdout.write(headers.map((h, i) => h.padEnd(widths[i])).join(" | ") + "\n");
	stdout.write(widths.map((w) => "-".repeat(w)).join("-+-") + "\n");
	for (const row of tableRows) {
		stdout.write(headers.map((h, i) => (row[h] ?? "").padEnd(widths[i])).join(" | ") + "\n");
	}
	return 0;
}
36700
/**
 * Register the `context` subcommand group on the provided factory command.
 *
 * Subcommands registered:
 *   factory context summarize --run <id> --iteration <n> --level <level>
 *   factory context expand --run <id> --iteration <n>
 *   factory context stats --run <id>
 *
 * @param factoryCmd - The factory Commander command to attach to
 * @param version - CLI version string for JSON output (e.g. "1.2.3")
 * @param storageDir - Base storage directory (defaults to `{cwd}/.substrate`)
 * @param engineFactory - Optional SummaryEngine factory for testing injection
 */
function registerContextCommand(factoryCmd, version, storageDir, engineFactory) {
	const resolvedStorageDir = storageDir ?? join$1(process.cwd(), ".substrate");
	// Shared deps for summarize/expand; engineFactory is included only when provided.
	const makeEngineDeps = () => ({
		storageDir: resolvedStorageDir,
		version,
		...engineFactory !== void 0 ? { engineFactory } : {}
	});
	const exitOnFailure = (code) => {
		if (code !== 0) process.exit(code);
	};
	const contextCmd = factoryCmd.command("context").description("Inspect and manage pyramid summaries for factory runs");
	contextCmd
		.command("summarize")
		.description("Compress and store a run iteration to the target summary level")
		.requiredOption("--run <id>", "Run ID")
		.requiredOption("--iteration <n>", "Iteration number")
		.requiredOption("--level <level>", "Summary level: high | medium | low")
		.option("--output-format <format>", "Output format: text | json", "text")
		.action(async (opts) => {
			exitOnFailure(await summarizeAction(opts, makeEngineDeps()));
		});
	contextCmd
		.command("expand")
		.description("Expand a stored summary for a run iteration back to full content")
		.requiredOption("--run <id>", "Run ID")
		.requiredOption("--iteration <n>", "Iteration number")
		.option("--output-format <format>", "Output format: text | json", "text")
		.action(async (opts) => {
			exitOnFailure(await expandAction(opts, makeEngineDeps()));
		});
	contextCmd
		.command("stats")
		.description("Report per-run compression statistics for all stored summaries")
		.requiredOption("--run <id>", "Run ID")
		.option("--output-format <format>", "Output format: text | json", "text")
		.action(async (opts) => {
			// stats never uses an engine, so engineFactory is intentionally omitted.
			exitOnFailure(await statsAction(opts, {
				storageDir: resolvedStorageDir,
				version
			}));
		});
}
36740
+
36239
36741
  //#endregion
36240
36742
  //#region packages/factory/dist/twins/schema.js
36241
36743
  /**
@@ -37119,6 +37621,7 @@ function registerFactoryCommand(program, options) {
37119
37621
  else process.stdout.write(`✗ ${passedCount}/${TOTAL_RULE_COUNT} rules passed, ${errors.length} ${errLabel}, ${warnings.length} ${warnLabel}\n`);
37120
37622
  if (errors.length > 0) process.exit(1);
37121
37623
  });
37624
+ registerContextCommand(factoryCmd, "0.0.0");
37122
37625
  const twinsCmd = factoryCmd.command("twins").description("Digital twin template management");
37123
37626
  twinsCmd.command("templates").description("List available built-in twin templates").action(() => {
37124
37627
  const templates = listTwinTemplates();
@@ -37757,9 +38260,8 @@ async function runRunAction(options) {
37757
38260
  }
37758
38261
  const staleRuns = await getRunningPipelineRuns(adapter) ?? [];
37759
38262
  if (staleRuns.length > 0) {
37760
- const processInfo = inspectProcessTree({ projectRoot });
37761
38263
  let swept = 0;
37762
- for (const stale of staleRuns) if (processInfo.orchestrator_pid === null) {
38264
+ for (const stale of staleRuns) {
37763
38265
  await updatePipelineRun(adapter, stale.id, { status: "failed" });
37764
38266
  swept++;
37765
38267
  }
@@ -38439,6 +38941,8 @@ async function runFullPipeline(options) {
38439
38941
  input_tokens: result.tokenUsage.input,
38440
38942
  output_tokens: result.tokenUsage.output,
38441
38943
  cost_usd: costUsd
38944
+ }).catch((err) => {
38945
+ logger.warn({ err }, "Failed to record analysis token usage (non-fatal)");
38442
38946
  });
38443
38947
  }
38444
38948
  if (result.result === "failed") {
@@ -38462,6 +38966,8 @@ async function runFullPipeline(options) {
38462
38966
  input_tokens: result.tokenUsage.input,
38463
38967
  output_tokens: result.tokenUsage.output,
38464
38968
  cost_usd: costUsd
38969
+ }).catch((err) => {
38970
+ logger.warn({ err }, "Failed to record planning token usage (non-fatal)");
38465
38971
  });
38466
38972
  }
38467
38973
  if (result.result === "failed") {
@@ -38488,6 +38994,8 @@ async function runFullPipeline(options) {
38488
38994
  input_tokens: result.tokenUsage.input,
38489
38995
  output_tokens: result.tokenUsage.output,
38490
38996
  cost_usd: costUsd
38997
+ }).catch((err) => {
38998
+ logger.warn({ err }, "Failed to record research token usage (non-fatal)");
38491
38999
  });
38492
39000
  }
38493
39001
  if (result.result === "failed") {
@@ -38511,6 +39019,8 @@ async function runFullPipeline(options) {
38511
39019
  input_tokens: result.tokenUsage.input,
38512
39020
  output_tokens: result.tokenUsage.output,
38513
39021
  cost_usd: costUsd
39022
+ }).catch((err) => {
39023
+ logger.warn({ err }, "Failed to record ux-design token usage (non-fatal)");
38514
39024
  });
38515
39025
  }
38516
39026
  if (result.result === "failed") {
@@ -38534,6 +39044,8 @@ async function runFullPipeline(options) {
38534
39044
  input_tokens: result.tokenUsage.input,
38535
39045
  output_tokens: result.tokenUsage.output,
38536
39046
  cost_usd: costUsd
39047
+ }).catch((err) => {
39048
+ logger.warn({ err }, "Failed to record solutioning token usage (non-fatal)");
38537
39049
  });
38538
39050
  }
38539
39051
  if (result.result === "failed") {
@@ -38752,4 +39264,4 @@ function registerRunCommand(program, _version = "0.0.0", projectRoot = process.c
38752
39264
 
38753
39265
  //#endregion
38754
39266
  export { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, normalizeGraphSummaryToStatus, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
38755
- //# sourceMappingURL=run-Bku8fmY3.js.map
39267
+ //# sourceMappingURL=run-gXtnH8lO.js.map
@@ -1,8 +1,8 @@
1
- import "./health-DswaC1q5.js";
1
+ import "./health-Cx2ZhRNT.js";
2
2
  import "./logger-KeHncl-f.js";
3
3
  import "./helpers-CElYrONe.js";
4
- import "./dist-CLvAwmT7.js";
5
- import { normalizeGraphSummaryToStatus, registerRunCommand, runRunAction } from "./run-Bku8fmY3.js";
4
+ import "./dist-Bm0qSZer.js";
5
+ import { normalizeGraphSummaryToStatus, registerRunCommand, runRunAction } from "./run-gXtnH8lO.js";
6
6
  import "./routing-CcBOCuC9.js";
7
7
  import "./decisions-C0pz9Clx.js";
8
8
 
@@ -1,4 +1,4 @@
1
- import { createVersionManager } from "./dist-CLvAwmT7.js";
1
+ import { createVersionManager } from "./dist-Bm0qSZer.js";
2
2
  import { execSync, spawn } from "child_process";
3
3
  import * as readline from "readline";
4
4
 
@@ -123,4 +123,4 @@ function registerUpgradeCommand(program) {
123
123
 
124
124
  //#endregion
125
125
  export { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand };
126
- //# sourceMappingURL=upgrade-DT0I_-1E.js.map
126
+ //# sourceMappingURL=upgrade-BcL4ZV3o.js.map
@@ -1,5 +1,5 @@
1
- import "./dist-CLvAwmT7.js";
1
+ import "./dist-Bm0qSZer.js";
2
2
  import "./version-manager-impl-BmOWu8ml.js";
3
- import { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand } from "./upgrade-DT0I_-1E.js";
3
+ import { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand } from "./upgrade-BcL4ZV3o.js";
4
4
 
5
5
  export { isGlobalInstall, registerUpgradeCommand, runUpgradeCommand };
@@ -1,4 +1,4 @@
1
- import { VersionManagerImpl, createVersionManager } from "./dist-CLvAwmT7.js";
1
+ import { VersionManagerImpl, createVersionManager } from "./dist-Bm0qSZer.js";
2
2
  import "./version-manager-impl-BmOWu8ml.js";
3
3
 
4
4
  export { createVersionManager };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "substrate-ai",
3
- "version": "0.17.2",
3
+ "version": "0.18.1",
4
4
  "description": "Substrate — multi-agent orchestration daemon for AI coding agents",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -1,4 +0,0 @@
1
- import { AdapterRegistry } from "./dist-CLvAwmT7.js";
2
- import "./adapter-registry-DXLMTmfD.js";
3
-
4
- export { AdapterRegistry };