substrate-ai 0.20.58 → 0.20.60
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +529 -127
- package/dist/{health-CNqQFdaT.js → health-BoXxsFSF.js} +166 -3
- package/dist/{health-BX84L5Qe.js → health-FZVOBYND.js} +1 -1
- package/dist/index.d.ts +40 -0
- package/dist/{run-ChqBlPYZ.js → run-BxfeSz6G.js} +2 -2
- package/dist/{run-DE5xoB9U.js → run-CRz08RrU.js} +132 -3
- package/package.json +1 -1
package/dist/cli/index.js
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { FileStateStore, RunManifest, SUBSTRATE_OWNED_SETTINGS_KEYS, SupervisorLock, VALID_PHASES, WorkGraphRepository, ZERO_FINDINGS_BY_AUTHOR, ZERO_FINDING_COUNTS, ZERO_PROBE_AUTHOR_METRICS, aggregateProbeAuthorMetrics, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, parseDbTimestampAsUtc, parseRuntimeProbes, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveRunManifest, rollupFindingCounts, rollupFindingsByAuthor, rollupProbeAuthorByClass, rollupProbeAuthorMetrics } from "../health-
|
|
2
|
+
import { FileStateStore, RunManifest, SUBSTRATE_OWNED_SETTINGS_KEYS, SupervisorLock, VALID_PHASES, WorkGraphRepository, ZERO_FINDINGS_BY_AUTHOR, ZERO_FINDING_COUNTS, ZERO_PROBE_AUTHOR_METRICS, aggregateProbeAuthorMetrics, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, parseDbTimestampAsUtc, parseRuntimeProbes, readCurrentRunId, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveRunManifest, rollupFindingCounts, rollupFindingsByAuthor, rollupProbeAuthorByClass, rollupProbeAuthorMetrics } from "../health-BoXxsFSF.js";
|
|
3
3
|
import { createLogger } from "../logger-KeHncl-f.js";
|
|
4
4
|
import { createEventBus } from "../helpers-CElYrONe.js";
|
|
5
5
|
import { AdapterRegistry, BudgetConfigSchema, CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, ConfigError, CostTrackerConfigSchema, DEFAULT_CONFIG, DoltClient, DoltNotInstalled, GlobalSettingsSchema, InMemoryDatabaseAdapter, IngestionServer, MonitorDatabaseImpl, OPERATIONAL_FINDING, PartialGlobalSettingsSchema, PartialProviderConfigSchema, ProvidersSchema, RoutingRecommender, STORY_METRICS, TelemetryConfigSchema, addTokenUsage, aggregateTokenUsageForRun, checkDoltInstalled, compareRunMetrics, createAmendmentRun, createConfigSystem, createDecision, createDoltClient, createPipelineRun, getActiveDecisions, getAllCostEntriesFiltered, getBaselineRunMetrics, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestCompletedRun, getLatestRun, getPipelineRunById, getPlanningCostTotal, getRetryableEscalations, getRunMetrics, getRunningPipelineRuns, getSessionCostSummary, getSessionCostSummaryFiltered, getStoryMetricsForRun, getTokenUsageSummary, incrementRunRestarts, initSchema, initializeDolt, listRunMetrics, loadParentRunDecisions, supersedeDecision, tagRunAsBaseline, updatePipelineRun } from "../dist-W2emvN3F.js";
|
|
6
6
|
import "../adapter-registry-DXLMTmfD.js";
|
|
7
|
-
import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GLOBSTAR, GitClient, GrammarLoader, Minimatch, Minipass, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, escape, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerExportCommand, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runProbeAuthor, runSolutioningPhase, unescape, validateStopAfterFromConflict } from "../run-
|
|
7
|
+
import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GLOBSTAR, GitClient, GrammarLoader, Minimatch, Minipass, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, escape, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerExportCommand, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runProbeAuthor, runSolutioningPhase, unescape, validateStopAfterFromConflict } from "../run-CRz08RrU.js";
|
|
8
8
|
import "../errors-CKFu8YI9.js";
|
|
9
9
|
import "../routing-CcBOCuC9.js";
|
|
10
10
|
import "../decisions-C0pz9Clx.js";
|
|
@@ -14,10 +14,11 @@ import { Command } from "commander";
|
|
|
14
14
|
import { fileURLToPath } from "url";
|
|
15
15
|
import { dirname, join, resolve } from "path";
|
|
16
16
|
import { access, mkdir, readFile, writeFile } from "fs/promises";
|
|
17
|
+
import { EventEmitter } from "node:events";
|
|
17
18
|
import yaml from "js-yaml";
|
|
18
19
|
import * as actualFS from "node:fs";
|
|
19
20
|
import { existsSync, promises, readFileSync, writeFileSync } from "node:fs";
|
|
20
|
-
import { execFile, spawn } from "node:child_process";
|
|
21
|
+
import { execFile, spawn, spawnSync } from "node:child_process";
|
|
21
22
|
import * as path$3 from "node:path";
|
|
22
23
|
import * as path$2 from "node:path";
|
|
23
24
|
import * as path$1 from "node:path";
|
|
@@ -30,6 +31,7 @@ import { appendFileSync, chmodSync, cpSync, existsSync as existsSync$1, lstatSyn
|
|
|
30
31
|
import { homedir } from "os";
|
|
31
32
|
import { createRequire } from "node:module";
|
|
32
33
|
import { fileURLToPath as fileURLToPath$1 } from "node:url";
|
|
34
|
+
import * as readline from "node:readline";
|
|
33
35
|
import { createInterface } from "node:readline";
|
|
34
36
|
import { randomUUID as randomUUID$1 } from "crypto";
|
|
35
37
|
import { createInterface as createInterface$1 } from "readline";
|
|
@@ -334,12 +336,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
334
336
|
_onTaskReady;
|
|
335
337
|
_onTaskComplete;
|
|
336
338
|
_onTaskFailed;
|
|
337
|
-
constructor(eventBus, projectRoot, baseDirectory = DEFAULT_WORKTREE_BASE, db = null, logger$
|
|
339
|
+
constructor(eventBus, projectRoot, baseDirectory = DEFAULT_WORKTREE_BASE, db = null, logger$18) {
|
|
338
340
|
this._eventBus = eventBus;
|
|
339
341
|
this._projectRoot = projectRoot;
|
|
340
342
|
this._baseDirectory = baseDirectory;
|
|
341
343
|
this._db = db;
|
|
342
|
-
this._logger = logger$
|
|
344
|
+
this._logger = logger$18 ?? console;
|
|
343
345
|
this._onTaskReady = ({ taskId }) => {
|
|
344
346
|
this._handleTaskReady(taskId).catch((err) => {
|
|
345
347
|
this._logger.error({
|
|
@@ -643,14 +645,14 @@ var RecommendationEngine = class {
|
|
|
643
645
|
_filters;
|
|
644
646
|
_historyDays;
|
|
645
647
|
_logger;
|
|
646
|
-
constructor(monitorDb, config = {}, logger$
|
|
648
|
+
constructor(monitorDb, config = {}, logger$18) {
|
|
647
649
|
this._monitorDb = monitorDb;
|
|
648
650
|
this._filters = {
|
|
649
651
|
threshold_percentage: config.recommendation_threshold_percentage ?? 5,
|
|
650
652
|
min_sample_size: config.min_sample_size ?? 10
|
|
651
653
|
};
|
|
652
654
|
this._historyDays = config.recommendation_history_days ?? 90;
|
|
653
|
-
this._logger = logger$
|
|
655
|
+
this._logger = logger$18 ?? console;
|
|
654
656
|
}
|
|
655
657
|
generateRecommendations() {
|
|
656
658
|
const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
@@ -1740,7 +1742,7 @@ function buildStackAwareDevNotes(profile) {
|
|
|
1740
1742
|
|
|
1741
1743
|
//#endregion
|
|
1742
1744
|
//#region src/cli/commands/init.ts
|
|
1743
|
-
const logger$
|
|
1745
|
+
const logger$17 = createLogger("init");
|
|
1744
1746
|
const __dirname = dirname(new URL(import.meta.url).pathname);
|
|
1745
1747
|
const SCAFFOLD_VERSION_REGEX = /<!-- substrate:version=([\d.]+) -->/;
|
|
1746
1748
|
/**
|
|
@@ -1781,7 +1783,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
1781
1783
|
const version = resolveBmadMethodVersion();
|
|
1782
1784
|
if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
|
|
1783
1785
|
process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
|
|
1784
|
-
logger$
|
|
1786
|
+
logger$17.info({
|
|
1785
1787
|
version,
|
|
1786
1788
|
dest: bmadDest
|
|
1787
1789
|
}, "Scaffolding BMAD framework");
|
|
@@ -1791,7 +1793,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
1791
1793
|
const destDir = join(bmadDest, dir);
|
|
1792
1794
|
mkdirSync$1(destDir, { recursive: true });
|
|
1793
1795
|
cpSync(srcDir, destDir, { recursive: true });
|
|
1794
|
-
logger$
|
|
1796
|
+
logger$17.info({
|
|
1795
1797
|
dir,
|
|
1796
1798
|
dest: destDir
|
|
1797
1799
|
}, "Scaffolded BMAD framework directory");
|
|
@@ -1810,7 +1812,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
1810
1812
|
"document_output_language: English"
|
|
1811
1813
|
].join("\n") + "\n";
|
|
1812
1814
|
await writeFile(configFile, configStub, "utf8");
|
|
1813
|
-
logger$
|
|
1815
|
+
logger$17.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
|
|
1814
1816
|
}
|
|
1815
1817
|
}
|
|
1816
1818
|
const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
|
|
@@ -1825,7 +1827,7 @@ async function scaffoldClaudeMd(projectRoot, profile) {
|
|
|
1825
1827
|
try {
|
|
1826
1828
|
sectionContent = await readFile(templatePath, "utf8");
|
|
1827
1829
|
} catch {
|
|
1828
|
-
logger$
|
|
1830
|
+
logger$17.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
|
|
1829
1831
|
return;
|
|
1830
1832
|
}
|
|
1831
1833
|
const substrateVersion = readSubstrateVersion(pkgRoot);
|
|
@@ -1861,7 +1863,7 @@ async function scaffoldClaudeMd(projectRoot, profile) {
|
|
|
1861
1863
|
else newContent = updatedExisting;
|
|
1862
1864
|
}
|
|
1863
1865
|
await writeFile(claudeMdPath, newContent, "utf8");
|
|
1864
|
-
logger$
|
|
1866
|
+
logger$17.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
|
|
1865
1867
|
}
|
|
1866
1868
|
async function scaffoldAgentsMd(projectRoot, profile) {
|
|
1867
1869
|
const agentsMdPath = join(projectRoot, "AGENTS.md");
|
|
@@ -1873,7 +1875,7 @@ async function scaffoldAgentsMd(projectRoot, profile) {
|
|
|
1873
1875
|
try {
|
|
1874
1876
|
sectionContent = await readFile(templatePath, "utf8");
|
|
1875
1877
|
} catch {
|
|
1876
|
-
logger$
|
|
1878
|
+
logger$17.warn({ templatePath }, "AGENTS.md substrate section template not found; skipping");
|
|
1877
1879
|
return;
|
|
1878
1880
|
}
|
|
1879
1881
|
const substrateVersion = readSubstrateVersion(pkgRoot);
|
|
@@ -1894,7 +1896,7 @@ async function scaffoldAgentsMd(projectRoot, profile) {
|
|
|
1894
1896
|
newContent = existingContent + separator + sectionContent;
|
|
1895
1897
|
}
|
|
1896
1898
|
await writeFile(agentsMdPath, newContent, "utf8");
|
|
1897
|
-
logger$
|
|
1899
|
+
logger$17.info({ agentsMdPath }, "Wrote substrate section to AGENTS.md");
|
|
1898
1900
|
}
|
|
1899
1901
|
async function scaffoldGeminiMd(projectRoot, profile) {
|
|
1900
1902
|
const geminiMdPath = join(projectRoot, "GEMINI.md");
|
|
@@ -1906,7 +1908,7 @@ async function scaffoldGeminiMd(projectRoot, profile) {
|
|
|
1906
1908
|
try {
|
|
1907
1909
|
sectionContent = await readFile(templatePath, "utf8");
|
|
1908
1910
|
} catch {
|
|
1909
|
-
logger$
|
|
1911
|
+
logger$17.warn({ templatePath }, "GEMINI.md substrate section template not found; skipping");
|
|
1910
1912
|
return;
|
|
1911
1913
|
}
|
|
1912
1914
|
const substrateVersion = readSubstrateVersion(pkgRoot);
|
|
@@ -1927,7 +1929,7 @@ async function scaffoldGeminiMd(projectRoot, profile) {
|
|
|
1927
1929
|
newContent = existingContent + separator + sectionContent;
|
|
1928
1930
|
}
|
|
1929
1931
|
await writeFile(geminiMdPath, newContent, "utf8");
|
|
1930
|
-
logger$
|
|
1932
|
+
logger$17.info({ geminiMdPath }, "Wrote substrate section to GEMINI.md");
|
|
1931
1933
|
}
|
|
1932
1934
|
async function scaffoldStatuslineScript(projectRoot) {
|
|
1933
1935
|
const pkgRoot = findPackageRoot(__dirname);
|
|
@@ -1938,7 +1940,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
1938
1940
|
try {
|
|
1939
1941
|
content = await readFile(templatePath, "utf8");
|
|
1940
1942
|
} catch {
|
|
1941
|
-
logger$
|
|
1943
|
+
logger$17.warn({ templatePath }, "statusline.sh template not found; skipping");
|
|
1942
1944
|
return;
|
|
1943
1945
|
}
|
|
1944
1946
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -1946,7 +1948,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
1946
1948
|
mkdirSync$1(claudeDir, { recursive: true });
|
|
1947
1949
|
await writeFile(statuslinePath, content, "utf8");
|
|
1948
1950
|
chmodSync(statuslinePath, 493);
|
|
1949
|
-
logger$
|
|
1951
|
+
logger$17.info({ statuslinePath }, "Wrote .claude/statusline.sh");
|
|
1950
1952
|
}
|
|
1951
1953
|
async function scaffoldClaudeSettings(projectRoot) {
|
|
1952
1954
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -1962,7 +1964,7 @@ async function scaffoldClaudeSettings(projectRoot) {
|
|
|
1962
1964
|
if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
|
|
1963
1965
|
mkdirSync$1(claudeDir, { recursive: true });
|
|
1964
1966
|
await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
|
|
1965
|
-
logger$
|
|
1967
|
+
logger$17.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
|
|
1966
1968
|
}
|
|
1967
1969
|
function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
|
|
1968
1970
|
try {
|
|
@@ -2034,7 +2036,7 @@ async function compileBmadAgents(bmadDir) {
|
|
|
2034
2036
|
writeFileSync$1(mdPath, result.xml, "utf-8");
|
|
2035
2037
|
compiled++;
|
|
2036
2038
|
} catch (compileErr) {
|
|
2037
|
-
logger$
|
|
2039
|
+
logger$17.debug({
|
|
2038
2040
|
err: compileErr,
|
|
2039
2041
|
file
|
|
2040
2042
|
}, "Failed to compile agent YAML");
|
|
@@ -2181,9 +2183,9 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2181
2183
|
const _require = createRequire(join(__dirname, "synthetic.js"));
|
|
2182
2184
|
try {
|
|
2183
2185
|
const compiledCount = await compileBmadAgents(bmadDir);
|
|
2184
|
-
if (compiledCount > 0) logger$
|
|
2186
|
+
if (compiledCount > 0) logger$17.info({ compiledCount }, "Compiled agent YAML files to MD");
|
|
2185
2187
|
} catch (compileErr) {
|
|
2186
|
-
logger$
|
|
2188
|
+
logger$17.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
|
|
2187
2189
|
}
|
|
2188
2190
|
const resolveExport = (mod, name) => {
|
|
2189
2191
|
if (typeof mod[name] === "function") return mod[name];
|
|
@@ -2197,7 +2199,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2197
2199
|
const manifestGenPath = join(installerLibPath, "core", "manifest-generator.js");
|
|
2198
2200
|
const pathUtilsPath = join(installerLibPath, "ide", "shared", "path-utils.js");
|
|
2199
2201
|
if (!existsSync$1(agentGenPath)) {
|
|
2200
|
-
logger$
|
|
2202
|
+
logger$17.info("bmad-method generators not available (requires bmad-method with agent/workflow/task-tool generators)");
|
|
2201
2203
|
return;
|
|
2202
2204
|
}
|
|
2203
2205
|
const agentMod = _require(agentGenPath);
|
|
@@ -2207,11 +2209,11 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2207
2209
|
if (existsSync$1(workflowGenPath)) {
|
|
2208
2210
|
const workflowMod = _require(workflowGenPath);
|
|
2209
2211
|
WorkflowCommandGenerator = resolveExport(workflowMod, "WorkflowCommandGenerator");
|
|
2210
|
-
} else logger$
|
|
2212
|
+
} else logger$17.info("bmad-method workflow-command-generator not available; will try skill-based installation");
|
|
2211
2213
|
if (existsSync$1(taskToolGenPath)) {
|
|
2212
2214
|
const taskToolMod = _require(taskToolGenPath);
|
|
2213
2215
|
TaskToolCommandGenerator = resolveExport(taskToolMod, "TaskToolCommandGenerator");
|
|
2214
|
-
} else logger$
|
|
2216
|
+
} else logger$17.info("bmad-method task-tool-command-generator not available; will try skill-based installation");
|
|
2215
2217
|
let ManifestGenerator = null;
|
|
2216
2218
|
if (existsSync$1(manifestGenPath)) {
|
|
2217
2219
|
const manifestMod = _require(manifestGenPath);
|
|
@@ -2243,7 +2245,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2243
2245
|
const manifestGen = new ManifestGenerator();
|
|
2244
2246
|
await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
|
|
2245
2247
|
} catch (manifestErr) {
|
|
2246
|
-
logger$
|
|
2248
|
+
logger$17.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
|
|
2247
2249
|
}
|
|
2248
2250
|
const commandsDir = join(projectRoot, ".claude", "commands");
|
|
2249
2251
|
mkdirSync$1(commandsDir, { recursive: true });
|
|
@@ -2271,7 +2273,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2271
2273
|
const total = agentCount + workflowCount + taskToolCount + skillCount;
|
|
2272
2274
|
if (outputFormat !== "json") if (skillCount > 0) process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(skillCount)} skills)\n`);
|
|
2273
2275
|
else process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
|
|
2274
|
-
logger$
|
|
2276
|
+
logger$17.info({
|
|
2275
2277
|
agentCount,
|
|
2276
2278
|
workflowCount,
|
|
2277
2279
|
taskToolCount,
|
|
@@ -2282,7 +2284,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2282
2284
|
} catch (err) {
|
|
2283
2285
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2284
2286
|
if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
|
|
2285
|
-
logger$
|
|
2287
|
+
logger$17.warn({ err }, "scaffoldClaudeCommands failed; init continues");
|
|
2286
2288
|
}
|
|
2287
2289
|
}
|
|
2288
2290
|
/**
|
|
@@ -2311,7 +2313,7 @@ function syncCommandsAsPrompts(commandsDir, promptsDir, ownershipPrefixes, nameP
|
|
|
2311
2313
|
unlinkSync$1(join(promptsDir, entry.name));
|
|
2312
2314
|
}
|
|
2313
2315
|
} catch (err) {
|
|
2314
|
-
logger$
|
|
2316
|
+
logger$17.debug({
|
|
2315
2317
|
err,
|
|
2316
2318
|
promptsDir
|
|
2317
2319
|
}, "Failed to prune stale prompts");
|
|
@@ -2353,7 +2355,7 @@ function syncSkillsToTarget(srcSkillsDir, destSkillsDir, ownershipPrefixes, name
|
|
|
2353
2355
|
});
|
|
2354
2356
|
}
|
|
2355
2357
|
} catch (err) {
|
|
2356
|
-
logger$
|
|
2358
|
+
logger$17.debug({
|
|
2357
2359
|
err,
|
|
2358
2360
|
destSkillsDir
|
|
2359
2361
|
}, "Failed to prune stale skills");
|
|
@@ -2396,16 +2398,16 @@ function scaffoldCodexProject(projectRoot, outputFormat) {
|
|
|
2396
2398
|
const skillCount = syncSkillsToTarget(claudeSkillsDir, codexSkillsDir, PROJECT_OWNERSHIP_PREFIXES, "");
|
|
2397
2399
|
const total = promptCount + skillCount;
|
|
2398
2400
|
if (outputFormat !== "json" && total > 0) process.stdout.write(`Generated ${String(total)} Codex artifacts (${String(promptCount)} prompts, ${String(skillCount)} skills)\n`);
|
|
2399
|
-
if (total > 0) logger$
|
|
2401
|
+
if (total > 0) logger$17.info({
|
|
2400
2402
|
promptCount,
|
|
2401
2403
|
skillCount,
|
|
2402
2404
|
codexDir
|
|
2403
2405
|
}, "Generated .codex/");
|
|
2404
|
-
else logger$
|
|
2406
|
+
else logger$17.debug({ codexDir }, "No Codex artifacts generated; source Claude content not found");
|
|
2405
2407
|
} catch (err) {
|
|
2406
2408
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2407
2409
|
if (outputFormat !== "json") process.stderr.write(`Warning: .codex/ generation failed: ${msg}\n`);
|
|
2408
|
-
logger$
|
|
2410
|
+
logger$17.warn({ err }, "scaffoldCodexProject failed; init continues");
|
|
2409
2411
|
}
|
|
2410
2412
|
}
|
|
2411
2413
|
/**
|
|
@@ -2431,16 +2433,16 @@ function scaffoldCodexUser(projectRoot, homeDir, outputFormat) {
|
|
|
2431
2433
|
const skillCount = syncSkillsToTarget(claudeSkillsDir, userSkillsDir, ["substrate-"], "substrate-");
|
|
2432
2434
|
const total = promptCount + skillCount;
|
|
2433
2435
|
if (outputFormat !== "json" && total > 0) process.stdout.write(`Installed ${String(total)} Codex artifacts to ${userCodexDir} (${String(promptCount)} prompts, ${String(skillCount)} skills)\n`);
|
|
2434
|
-
if (total > 0) logger$
|
|
2436
|
+
if (total > 0) logger$17.info({
|
|
2435
2437
|
promptCount,
|
|
2436
2438
|
skillCount,
|
|
2437
2439
|
userCodexDir
|
|
2438
2440
|
}, "Installed user-scope Codex content");
|
|
2439
|
-
else logger$
|
|
2441
|
+
else logger$17.debug({ userCodexDir }, "No user-scope Codex content installed; source Claude content not found");
|
|
2440
2442
|
} catch (err) {
|
|
2441
2443
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2442
2444
|
if (outputFormat !== "json") process.stderr.write(`Warning: user-scope Codex install failed: ${msg}\n`);
|
|
2443
|
-
logger$
|
|
2445
|
+
logger$17.warn({ err }, "scaffoldCodexUser failed; init continues");
|
|
2444
2446
|
}
|
|
2445
2447
|
}
|
|
2446
2448
|
const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
|
|
@@ -2496,8 +2498,8 @@ function formatProjectProfile(profile) {
|
|
|
2496
2498
|
*/
|
|
2497
2499
|
async function promptProfileConfirmation(nonInteractive) {
|
|
2498
2500
|
if (nonInteractive) return true;
|
|
2499
|
-
const readline = await import("readline");
|
|
2500
|
-
const rl = readline.createInterface({
|
|
2501
|
+
const readline$1 = await import("readline");
|
|
2502
|
+
const rl = readline$1.createInterface({
|
|
2501
2503
|
input: process.stdin,
|
|
2502
2504
|
output: process.stdout
|
|
2503
2505
|
});
|
|
@@ -2512,8 +2514,8 @@ async function promptProfileConfirmation(nonInteractive) {
|
|
|
2512
2514
|
}
|
|
2513
2515
|
async function promptSubscriptionRouting(providerName, nonInteractive) {
|
|
2514
2516
|
if (nonInteractive) return "auto";
|
|
2515
|
-
const readline = await import("readline");
|
|
2516
|
-
const rl = readline.createInterface({
|
|
2517
|
+
const readline$1 = await import("readline");
|
|
2518
|
+
const rl = readline$1.createInterface({
|
|
2517
2519
|
input: process.stdin,
|
|
2518
2520
|
output: process.stdout
|
|
2519
2521
|
});
|
|
@@ -2560,7 +2562,7 @@ async function runInitAction(options) {
|
|
|
2560
2562
|
discoveryReport = await registry.discoverAndRegister();
|
|
2561
2563
|
} catch (err) {
|
|
2562
2564
|
const message = err instanceof Error ? err.message : String(err);
|
|
2563
|
-
logger$
|
|
2565
|
+
logger$17.error({ err }, "Adapter discovery failed");
|
|
2564
2566
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
|
|
2565
2567
|
else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
|
|
2566
2568
|
return INIT_EXIT_ERROR;
|
|
@@ -2603,7 +2605,7 @@ async function runInitAction(options) {
|
|
|
2603
2605
|
try {
|
|
2604
2606
|
detectedProfile = await detectProjectProfile(dbRoot);
|
|
2605
2607
|
} catch (err) {
|
|
2606
|
-
logger$
|
|
2608
|
+
logger$17.warn({ err }, "Project profile detection failed; skipping");
|
|
2607
2609
|
}
|
|
2608
2610
|
if (detectedProfile === null) {
|
|
2609
2611
|
if (outputFormat !== "json") process.stdout.write(" No project stack detected. Create .substrate/project-profile.yaml manually to enable polyglot support.\n");
|
|
@@ -2637,12 +2639,12 @@ async function runInitAction(options) {
|
|
|
2637
2639
|
return INIT_EXIT_ERROR;
|
|
2638
2640
|
}
|
|
2639
2641
|
if (force && existsSync$1(localManifest)) {
|
|
2640
|
-
logger$
|
|
2642
|
+
logger$17.info({ pack: packName }, "Replacing existing pack with bundled version");
|
|
2641
2643
|
process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
|
|
2642
2644
|
}
|
|
2643
2645
|
mkdirSync$1(dirname(packPath), { recursive: true });
|
|
2644
2646
|
cpSync(bundledPackPath, packPath, { recursive: true });
|
|
2645
|
-
logger$
|
|
2647
|
+
logger$17.info({
|
|
2646
2648
|
pack: packName,
|
|
2647
2649
|
dest: packPath
|
|
2648
2650
|
}, "Scaffolded methodology pack");
|
|
@@ -2694,10 +2696,10 @@ async function runInitAction(options) {
|
|
|
2694
2696
|
if (missing.length > 0) {
|
|
2695
2697
|
const block = "\n# Substrate runtime files\n" + missing.join("\n") + "\n";
|
|
2696
2698
|
appendFileSync(gitignorePath, block);
|
|
2697
|
-
logger$
|
|
2699
|
+
logger$17.info({ entries: missing }, "Added substrate runtime files to .gitignore");
|
|
2698
2700
|
}
|
|
2699
2701
|
} catch (err) {
|
|
2700
|
-
logger$
|
|
2702
|
+
logger$17.debug({ err }, "Could not update .gitignore (non-fatal)");
|
|
2701
2703
|
}
|
|
2702
2704
|
const doltMode = options.doltMode ?? "auto";
|
|
2703
2705
|
let doltInitialized = false;
|
|
@@ -2714,7 +2716,7 @@ async function runInitAction(options) {
|
|
|
2714
2716
|
process.stderr.write(`${err.message}\n`);
|
|
2715
2717
|
return INIT_EXIT_ERROR;
|
|
2716
2718
|
}
|
|
2717
|
-
logger$
|
|
2719
|
+
logger$17.debug("Dolt not installed, skipping auto-init");
|
|
2718
2720
|
} else {
|
|
2719
2721
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2720
2722
|
if (doltMode === "force") {
|
|
@@ -2724,7 +2726,7 @@ async function runInitAction(options) {
|
|
|
2724
2726
|
process.stderr.write(`⚠ Dolt state store initialization failed: ${msg}\n Pipeline metrics, cost tracking, and health monitoring will not persist.\n Fix the issue and re-run: substrate init --dolt\n`);
|
|
2725
2727
|
}
|
|
2726
2728
|
}
|
|
2727
|
-
else logger$
|
|
2729
|
+
else logger$17.debug("Dolt step was skipped (--no-dolt)");
|
|
2728
2730
|
const successMsg = `Pack '${packName}' and database initialized successfully at ${dbPath}`;
|
|
2729
2731
|
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
2730
2732
|
pack: packName,
|
|
@@ -2762,7 +2764,7 @@ async function runInitAction(options) {
|
|
|
2762
2764
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2763
2765
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
2764
2766
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
2765
|
-
logger$
|
|
2767
|
+
logger$17.error({ err }, "init failed");
|
|
2766
2768
|
return INIT_EXIT_ERROR;
|
|
2767
2769
|
}
|
|
2768
2770
|
}
|
|
@@ -2786,7 +2788,7 @@ function registerInitCommand(program, _version, registry) {
|
|
|
2786
2788
|
|
|
2787
2789
|
//#endregion
|
|
2788
2790
|
//#region src/cli/commands/config.ts
|
|
2789
|
-
const logger$
|
|
2791
|
+
const logger$16 = createLogger("config-cmd");
|
|
2790
2792
|
const CONFIG_EXIT_SUCCESS = 0;
|
|
2791
2793
|
const CONFIG_EXIT_ERROR = 1;
|
|
2792
2794
|
const CONFIG_EXIT_INVALID = 2;
|
|
@@ -2812,7 +2814,7 @@ async function runConfigShow(opts = {}) {
|
|
|
2812
2814
|
return CONFIG_EXIT_INVALID;
|
|
2813
2815
|
}
|
|
2814
2816
|
const message = err instanceof Error ? err.message : String(err);
|
|
2815
|
-
logger$
|
|
2817
|
+
logger$16.error({ err }, "Failed to load configuration");
|
|
2816
2818
|
process.stderr.write(` Error loading configuration: ${message}\n`);
|
|
2817
2819
|
return CONFIG_EXIT_ERROR;
|
|
2818
2820
|
}
|
|
@@ -2886,7 +2888,7 @@ async function runConfigExport(opts = {}) {
|
|
|
2886
2888
|
return CONFIG_EXIT_INVALID;
|
|
2887
2889
|
}
|
|
2888
2890
|
const message = err instanceof Error ? err.message : String(err);
|
|
2889
|
-
logger$
|
|
2891
|
+
logger$16.error({ err }, "Failed to load configuration");
|
|
2890
2892
|
process.stderr.write(`Error loading configuration: ${message}\n`);
|
|
2891
2893
|
return CONFIG_EXIT_ERROR;
|
|
2892
2894
|
}
|
|
@@ -6806,7 +6808,7 @@ async function detectManifestDriftAgainstWorkingTree(manifest, projectRoot) {
|
|
|
6806
6808
|
|
|
6807
6809
|
//#endregion
|
|
6808
6810
|
//#region src/cli/commands/resume.ts
|
|
6809
|
-
const logger$
|
|
6811
|
+
const logger$15 = createLogger("resume-cmd");
|
|
6810
6812
|
/**
|
|
6811
6813
|
* Format a human-readable duration from an ISO-8601 timestamp to "now".
|
|
6812
6814
|
*/
|
|
@@ -6876,7 +6878,7 @@ async function runResumeAction(options) {
|
|
|
6876
6878
|
return 1;
|
|
6877
6879
|
}
|
|
6878
6880
|
} catch (driftErr) {
|
|
6879
|
-
logger$
|
|
6881
|
+
logger$15.debug({ err: driftErr }, "manifest drift check failed — proceeding with resume");
|
|
6880
6882
|
}
|
|
6881
6883
|
}
|
|
6882
6884
|
const doltDir = join(dbRoot, ".substrate", "state", ".dolt");
|
|
@@ -6946,15 +6948,15 @@ async function runResumeAction(options) {
|
|
|
6946
6948
|
const manifestStories = manifestData.cli_flags["stories"] ?? manifestData.story_scope;
|
|
6947
6949
|
if (Array.isArray(manifestStories) && manifestStories.length > 0) {
|
|
6948
6950
|
scopedStories = manifestStories;
|
|
6949
|
-
logger$
|
|
6951
|
+
logger$15.debug({
|
|
6950
6952
|
runId,
|
|
6951
6953
|
stories: scopedStories
|
|
6952
6954
|
}, "resume scope loaded from manifest");
|
|
6953
6955
|
}
|
|
6954
6956
|
} catch {
|
|
6955
|
-
logger$
|
|
6957
|
+
logger$15.debug({ runId }, "manifest read failed in resume — using legacy config_json scope");
|
|
6956
6958
|
}
|
|
6957
|
-
else logger$
|
|
6959
|
+
else logger$15.debug({ runId }, "Run manifest not found for scope preservation — using legacy config_json scope");
|
|
6958
6960
|
}
|
|
6959
6961
|
return runFullPipelineFromPhase({
|
|
6960
6962
|
packName,
|
|
@@ -6977,7 +6979,7 @@ async function runResumeAction(options) {
|
|
|
6977
6979
|
const msg = err instanceof Error ? err.message : String(err);
|
|
6978
6980
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
6979
6981
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
6980
|
-
logger$
|
|
6982
|
+
logger$15.error({ err }, "auto resume failed");
|
|
6981
6983
|
return 1;
|
|
6982
6984
|
} finally {
|
|
6983
6985
|
try {
|
|
@@ -7227,11 +7229,11 @@ async function runFullPipelineFromPhase(options) {
|
|
|
7227
7229
|
output_tokens: output,
|
|
7228
7230
|
cost_usd: costUsd
|
|
7229
7231
|
}).catch((err) => {
|
|
7230
|
-
logger$
|
|
7232
|
+
logger$15.warn({ err }, "Failed to record token usage");
|
|
7231
7233
|
});
|
|
7232
7234
|
}
|
|
7233
7235
|
} catch (err) {
|
|
7234
|
-
logger$
|
|
7236
|
+
logger$15.warn({ err }, "Failed to record token usage");
|
|
7235
7237
|
}
|
|
7236
7238
|
});
|
|
7237
7239
|
const storyKeys = await resolveStoryKeys(adapter, projectRoot, {
|
|
@@ -7300,7 +7302,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
7300
7302
|
const msg = err instanceof Error ? err.message : String(err);
|
|
7301
7303
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
7302
7304
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
7303
|
-
logger$
|
|
7305
|
+
logger$15.error({ err }, "pipeline from phase failed");
|
|
7304
7306
|
return 1;
|
|
7305
7307
|
} finally {
|
|
7306
7308
|
try {
|
|
@@ -7330,7 +7332,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
|
|
|
7330
7332
|
|
|
7331
7333
|
//#endregion
|
|
7332
7334
|
//#region src/cli/commands/status.ts
|
|
7333
|
-
const logger$
|
|
7335
|
+
const logger$14 = createLogger("status-cmd");
|
|
7334
7336
|
/**
|
|
7335
7337
|
* Map a manifest per-story status string to the appropriate WorkGraphCounts bucket.
|
|
7336
7338
|
* Unknown strings are treated as `inProgress` (safe default).
|
|
@@ -7442,9 +7444,9 @@ async function runStatusAction(options) {
|
|
|
7442
7444
|
const manifestData = await resolvedManifest.read();
|
|
7443
7445
|
manifestPerStoryState = manifestData.per_story_state;
|
|
7444
7446
|
workGraph = buildWorkGraphFromManifest(manifestData.per_story_state);
|
|
7445
|
-
logger$
|
|
7447
|
+
logger$14.debug({ runId: run?.id }, "status: workGraph built from manifest per_story_state");
|
|
7446
7448
|
} catch {
|
|
7447
|
-
logger$
|
|
7449
|
+
logger$14.debug({ runId: run?.id }, "status: manifest read failed — falling back to wg_stories");
|
|
7448
7450
|
}
|
|
7449
7451
|
if (workGraph === void 0) try {
|
|
7450
7452
|
const wgRepo = new WorkGraphRepository(adapter);
|
|
@@ -7481,10 +7483,10 @@ async function runStatusAction(options) {
|
|
|
7481
7483
|
};
|
|
7482
7484
|
}
|
|
7483
7485
|
} catch (err) {
|
|
7484
|
-
logger$
|
|
7486
|
+
logger$14.debug({ err }, "Work graph query failed, continuing without work graph data");
|
|
7485
7487
|
}
|
|
7486
7488
|
if (run === void 0) {
|
|
7487
|
-
const { inspectProcessTree: inspectProcessTree$1 } = await import("../health-
|
|
7489
|
+
const { inspectProcessTree: inspectProcessTree$1 } = await import("../health-FZVOBYND.js");
|
|
7488
7490
|
const substrateDirPath = join(projectRoot, ".substrate");
|
|
7489
7491
|
const processInfo = inspectProcessTree$1({
|
|
7490
7492
|
projectRoot,
|
|
@@ -7514,7 +7516,7 @@ async function runStatusAction(options) {
|
|
|
7514
7516
|
if (stateStore) try {
|
|
7515
7517
|
storeStories = await stateStore.queryStories({});
|
|
7516
7518
|
} catch (err) {
|
|
7517
|
-
logger$
|
|
7519
|
+
logger$14.debug({ err }, "StateStore query failed, continuing without store data");
|
|
7518
7520
|
}
|
|
7519
7521
|
if (outputFormat === "json") {
|
|
7520
7522
|
const statusOutput = buildPipelineStatusOutput(run, tokenSummary, decisionsCount, storiesCount);
|
|
@@ -7657,7 +7659,7 @@ async function runStatusAction(options) {
|
|
|
7657
7659
|
const msg = err instanceof Error ? err.message : String(err);
|
|
7658
7660
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
7659
7661
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
7660
|
-
logger$
|
|
7662
|
+
logger$14.error({ err }, "status action failed");
|
|
7661
7663
|
return 1;
|
|
7662
7664
|
} finally {
|
|
7663
7665
|
try {
|
|
@@ -8004,7 +8006,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
|
|
|
8004
8006
|
|
|
8005
8007
|
//#endregion
|
|
8006
8008
|
//#region src/cli/commands/amend.ts
|
|
8007
|
-
const logger$
|
|
8009
|
+
const logger$13 = createLogger("amend-cmd");
|
|
8008
8010
|
/**
|
|
8009
8011
|
* Detect and apply supersessions after a phase completes in an amendment run.
|
|
8010
8012
|
*
|
|
@@ -8035,7 +8037,7 @@ async function runPostPhaseSupersessionDetection(adapter, amendmentRunId, curren
|
|
|
8035
8037
|
});
|
|
8036
8038
|
} catch (err) {
|
|
8037
8039
|
const msg = err instanceof Error ? err.message : String(err);
|
|
8038
|
-
logger$
|
|
8040
|
+
logger$13.warn({
|
|
8039
8041
|
err,
|
|
8040
8042
|
originalId: parentMatch.id,
|
|
8041
8043
|
supersedingId: newDec.id
|
|
@@ -8172,7 +8174,7 @@ async function runAmendAction(options) {
|
|
|
8172
8174
|
for (let i = startIdx; i < phaseOrder.length; i++) {
|
|
8173
8175
|
const currentPhase = phaseOrder[i];
|
|
8174
8176
|
const amendmentContext = handler.loadContextForPhase(currentPhase);
|
|
8175
|
-
logger$
|
|
8177
|
+
logger$13.info({
|
|
8176
8178
|
phase: currentPhase,
|
|
8177
8179
|
amendmentContextLen: amendmentContext.length
|
|
8178
8180
|
}, "Amendment context loaded for phase");
|
|
@@ -8293,7 +8295,7 @@ async function runAmendAction(options) {
|
|
|
8293
8295
|
} catch (err) {
|
|
8294
8296
|
const msg = err instanceof Error ? err.message : String(err);
|
|
8295
8297
|
process.stderr.write(`Error: ${msg}\n`);
|
|
8296
|
-
logger$
|
|
8298
|
+
logger$13.error({ err }, "amend failed");
|
|
8297
8299
|
return 1;
|
|
8298
8300
|
} finally {
|
|
8299
8301
|
try {
|
|
@@ -9030,7 +9032,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
9030
9032
|
await initSchema(expAdapter);
|
|
9031
9033
|
const { runRunAction: runPipeline } = await import(
|
|
9032
9034
|
/* @vite-ignore */
|
|
9033
|
-
"../run-
|
|
9035
|
+
"../run-BxfeSz6G.js"
|
|
9034
9036
|
);
|
|
9035
9037
|
const runStoryFn = async (opts) => {
|
|
9036
9038
|
const exitCode = await runPipeline({
|
|
@@ -9280,7 +9282,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
9280
9282
|
|
|
9281
9283
|
//#endregion
|
|
9282
9284
|
//#region src/cli/commands/metrics.ts
|
|
9283
|
-
const logger$
|
|
9285
|
+
const logger$12 = createLogger("metrics-cmd");
|
|
9284
9286
|
async function openTelemetryAdapter(basePath) {
|
|
9285
9287
|
try {
|
|
9286
9288
|
const adapter = createDatabaseAdapter({
|
|
@@ -9646,7 +9648,7 @@ async function runMetricsAction(options) {
|
|
|
9646
9648
|
}
|
|
9647
9649
|
}
|
|
9648
9650
|
} catch (err) {
|
|
9649
|
-
logger$
|
|
9651
|
+
logger$12.debug({ err }, "getScenarioResultsForRun failed");
|
|
9650
9652
|
}
|
|
9651
9653
|
if (rows.length === 0) {
|
|
9652
9654
|
const msg = `No factory run found with id: ${run}`;
|
|
@@ -9682,7 +9684,7 @@ async function runMetricsAction(options) {
|
|
|
9682
9684
|
}
|
|
9683
9685
|
}
|
|
9684
9686
|
} catch (err) {
|
|
9685
|
-
logger$
|
|
9687
|
+
logger$12.debug({ err }, "getTwinRunsForRun failed — twin_runs table may not exist yet");
|
|
9686
9688
|
}
|
|
9687
9689
|
}
|
|
9688
9690
|
return 0;
|
|
@@ -9692,7 +9694,7 @@ async function runMetricsAction(options) {
|
|
|
9692
9694
|
try {
|
|
9693
9695
|
factoryRuns$1 = await getFactoryRunSummaries(adapter, limit);
|
|
9694
9696
|
} catch (err) {
|
|
9695
|
-
logger$
|
|
9697
|
+
logger$12.debug({ err }, "getFactoryRunSummaries failed in factory-only mode");
|
|
9696
9698
|
}
|
|
9697
9699
|
if (outputFormat === "json") process.stdout.write(formatOutput({ graph_runs: factoryRuns$1 }, "json", true) + "\n");
|
|
9698
9700
|
else if (factoryRuns$1.length === 0) process.stdout.write("No factory runs recorded yet.\n");
|
|
@@ -9753,7 +9755,7 @@ async function runMetricsAction(options) {
|
|
|
9753
9755
|
doltMetrics = await stateStore.queryMetrics(doltFilter);
|
|
9754
9756
|
await stateStore.close();
|
|
9755
9757
|
} catch (doltErr) {
|
|
9756
|
-
logger$
|
|
9758
|
+
logger$12.warn({ err: doltErr }, "StateStore query failed — falling back to SQLite metrics only");
|
|
9757
9759
|
}
|
|
9758
9760
|
const storyMetricDecisions = await getDecisionsByCategory(adapter, STORY_METRICS);
|
|
9759
9761
|
const storyMetrics = storyMetricDecisions.map((d) => {
|
|
@@ -9815,7 +9817,7 @@ async function runMetricsAction(options) {
|
|
|
9815
9817
|
try {
|
|
9816
9818
|
factoryRuns = await getFactoryRunSummaries(adapter, limit);
|
|
9817
9819
|
} catch (err) {
|
|
9818
|
-
logger$
|
|
9820
|
+
logger$12.debug({ err }, "getFactoryRunSummaries failed — table may not exist in older databases");
|
|
9819
9821
|
}
|
|
9820
9822
|
if (outputFormat === "json") {
|
|
9821
9823
|
const runsWithBreakdown = runs.map((run$1) => ({
|
|
@@ -9953,7 +9955,7 @@ async function runMetricsAction(options) {
|
|
|
9953
9955
|
const msg = err instanceof Error ? err.message : String(err);
|
|
9954
9956
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
9955
9957
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
9956
|
-
logger$
|
|
9958
|
+
logger$12.error({ err }, "metrics action failed");
|
|
9957
9959
|
return 1;
|
|
9958
9960
|
} finally {
|
|
9959
9961
|
try {
|
|
@@ -10155,7 +10157,7 @@ function registerMigrateCommand(program) {
|
|
|
10155
10157
|
function getLatestSessionId(_adapter) {
|
|
10156
10158
|
return null;
|
|
10157
10159
|
}
|
|
10158
|
-
const logger$
|
|
10160
|
+
const logger$11 = createLogger("cost-cmd");
|
|
10159
10161
|
const COST_EXIT_SUCCESS = 0;
|
|
10160
10162
|
const COST_EXIT_ERROR = 1;
|
|
10161
10163
|
/**
|
|
@@ -10399,7 +10401,7 @@ async function runCostAction(options) {
|
|
|
10399
10401
|
} catch (err) {
|
|
10400
10402
|
const message = err instanceof Error ? err.message : String(err);
|
|
10401
10403
|
process.stderr.write(`Error: ${message}\n`);
|
|
10402
|
-
logger$
|
|
10404
|
+
logger$11.error({ err }, "runCostAction failed");
|
|
10403
10405
|
return COST_EXIT_ERROR;
|
|
10404
10406
|
} finally {
|
|
10405
10407
|
if (adapter !== null) try {
|
|
@@ -10433,7 +10435,7 @@ function registerCostCommand(program, version = "0.0.0", projectRoot = process.c
|
|
|
10433
10435
|
|
|
10434
10436
|
//#endregion
|
|
10435
10437
|
//#region src/cli/commands/monitor.ts
|
|
10436
|
-
const logger$
|
|
10438
|
+
const logger$10 = createLogger("monitor-cmd");
|
|
10437
10439
|
const MONITOR_EXIT_SUCCESS = 0;
|
|
10438
10440
|
const MONITOR_EXIT_ERROR = 1;
|
|
10439
10441
|
/**
|
|
@@ -10636,7 +10638,7 @@ async function runMonitorReportAction(options) {
|
|
|
10636
10638
|
} catch (err) {
|
|
10637
10639
|
const message = err instanceof Error ? err.message : String(err);
|
|
10638
10640
|
process.stderr.write(`Error: ${message}\n`);
|
|
10639
|
-
logger$
|
|
10641
|
+
logger$10.error({ err }, "runMonitorReportAction failed");
|
|
10640
10642
|
return MONITOR_EXIT_ERROR;
|
|
10641
10643
|
} finally {
|
|
10642
10644
|
if (monitorDb !== null) try {
|
|
@@ -10698,7 +10700,7 @@ async function runMonitorStatusAction(options) {
|
|
|
10698
10700
|
} catch (err) {
|
|
10699
10701
|
const message = err instanceof Error ? err.message : String(err);
|
|
10700
10702
|
process.stderr.write(`Error: ${message}\n`);
|
|
10701
|
-
logger$
|
|
10703
|
+
logger$10.error({ err }, "runMonitorStatusAction failed");
|
|
10702
10704
|
return MONITOR_EXIT_ERROR;
|
|
10703
10705
|
} finally {
|
|
10704
10706
|
if (monitorDb !== null) try {
|
|
@@ -10733,7 +10735,7 @@ async function runMonitorResetAction(options) {
|
|
|
10733
10735
|
} catch (err) {
|
|
10734
10736
|
const message = err instanceof Error ? err.message : String(err);
|
|
10735
10737
|
process.stderr.write(`Error: ${message}\n`);
|
|
10736
|
-
logger$
|
|
10738
|
+
logger$10.error({ err }, "runMonitorResetAction failed");
|
|
10737
10739
|
return MONITOR_EXIT_ERROR;
|
|
10738
10740
|
} finally {
|
|
10739
10741
|
if (monitorDb !== null) try {
|
|
@@ -10781,7 +10783,7 @@ async function runMonitorRecommendationsAction(options) {
|
|
|
10781
10783
|
} catch (err) {
|
|
10782
10784
|
const message = err instanceof Error ? err.message : String(err);
|
|
10783
10785
|
process.stderr.write(`Error: ${message}\n`);
|
|
10784
|
-
logger$
|
|
10786
|
+
logger$10.error({ err }, "runMonitorRecommendationsAction failed");
|
|
10785
10787
|
return MONITOR_EXIT_ERROR;
|
|
10786
10788
|
} finally {
|
|
10787
10789
|
if (monitorDb !== null) try {
|
|
@@ -10859,7 +10861,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
|
|
|
10859
10861
|
|
|
10860
10862
|
//#endregion
|
|
10861
10863
|
//#region src/cli/commands/merge.ts
|
|
10862
|
-
const logger$
|
|
10864
|
+
const logger$9 = createLogger("merge-cmd");
|
|
10863
10865
|
const MERGE_EXIT_SUCCESS = 0;
|
|
10864
10866
|
const MERGE_EXIT_CONFLICT = 1;
|
|
10865
10867
|
const MERGE_EXIT_ERROR = 2;
|
|
@@ -10897,7 +10899,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
10897
10899
|
projectRoot
|
|
10898
10900
|
});
|
|
10899
10901
|
try {
|
|
10900
|
-
logger$
|
|
10902
|
+
logger$9.info({
|
|
10901
10903
|
taskId,
|
|
10902
10904
|
targetBranch
|
|
10903
10905
|
}, "Running conflict detection...");
|
|
@@ -10919,7 +10921,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
10919
10921
|
} catch (err) {
|
|
10920
10922
|
const message = err instanceof Error ? err.message : String(err);
|
|
10921
10923
|
console.error(`Error merging task "${taskId}": ${message}`);
|
|
10922
|
-
logger$
|
|
10924
|
+
logger$9.error({
|
|
10923
10925
|
taskId,
|
|
10924
10926
|
err
|
|
10925
10927
|
}, "merge --task failed");
|
|
@@ -10973,7 +10975,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
|
|
|
10973
10975
|
error: message
|
|
10974
10976
|
});
|
|
10975
10977
|
console.log(` Error for task "${taskId}": ${message}`);
|
|
10976
|
-
logger$
|
|
10978
|
+
logger$9.error({
|
|
10977
10979
|
taskId,
|
|
10978
10980
|
err
|
|
10979
10981
|
}, "merge --all: task failed");
|
|
@@ -11026,7 +11028,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {
|
|
|
11026
11028
|
|
|
11027
11029
|
//#endregion
|
|
11028
11030
|
//#region src/cli/commands/worktrees.ts
|
|
11029
|
-
const logger$
|
|
11031
|
+
const logger$8 = createLogger("worktrees-cmd");
|
|
11030
11032
|
const WORKTREES_EXIT_SUCCESS = 0;
|
|
11031
11033
|
const WORKTREES_EXIT_ERROR = 1;
|
|
11032
11034
|
/** Valid task statuses for filtering */
|
|
@@ -11153,7 +11155,7 @@ async function listWorktreesAction(options) {
|
|
|
11153
11155
|
try {
|
|
11154
11156
|
worktreeInfos = await manager.listWorktrees();
|
|
11155
11157
|
} catch (err) {
|
|
11156
|
-
logger$
|
|
11158
|
+
logger$8.error({ err }, "Failed to list worktrees");
|
|
11157
11159
|
const message = err instanceof Error ? err.message : String(err);
|
|
11158
11160
|
process.stderr.write(`Error listing worktrees: ${message}\n`);
|
|
11159
11161
|
return WORKTREES_EXIT_ERROR;
|
|
@@ -11180,7 +11182,7 @@ async function listWorktreesAction(options) {
|
|
|
11180
11182
|
} catch (err) {
|
|
11181
11183
|
const message = err instanceof Error ? err.message : String(err);
|
|
11182
11184
|
process.stderr.write(`Error: ${message}\n`);
|
|
11183
|
-
logger$
|
|
11185
|
+
logger$8.error({ err }, "listWorktreesAction failed");
|
|
11184
11186
|
return WORKTREES_EXIT_ERROR;
|
|
11185
11187
|
}
|
|
11186
11188
|
}
|
|
@@ -11221,7 +11223,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc
|
|
|
11221
11223
|
|
|
11222
11224
|
//#endregion
|
|
11223
11225
|
//#region src/cli/commands/brainstorm.ts
|
|
11224
|
-
const logger$
|
|
11226
|
+
const logger$7 = createLogger("brainstorm-cmd");
|
|
11225
11227
|
/**
|
|
11226
11228
|
* Detect whether the project has existing planning artifacts that indicate
|
|
11227
11229
|
* this is an amendment session (vs. a brand-new project brainstorm).
|
|
@@ -11267,13 +11269,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
|
|
|
11267
11269
|
try {
|
|
11268
11270
|
brief = await readFile(briefPath, "utf-8");
|
|
11269
11271
|
} catch {
|
|
11270
|
-
logger$
|
|
11272
|
+
logger$7.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
|
|
11271
11273
|
process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
|
|
11272
11274
|
}
|
|
11273
11275
|
try {
|
|
11274
11276
|
prd = await readFile(prdPath, "utf-8");
|
|
11275
11277
|
} catch {
|
|
11276
|
-
logger$
|
|
11278
|
+
logger$7.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
|
|
11277
11279
|
process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
|
|
11278
11280
|
}
|
|
11279
11281
|
return {
|
|
@@ -11482,7 +11484,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
11482
11484
|
}
|
|
11483
11485
|
];
|
|
11484
11486
|
const defaultDispatch = async (prompt, personaName) => {
|
|
11485
|
-
logger$
|
|
11487
|
+
logger$7.debug({
|
|
11486
11488
|
personaName,
|
|
11487
11489
|
promptLength: prompt.length
|
|
11488
11490
|
}, "Dispatching to persona (stub mode)");
|
|
@@ -11499,7 +11501,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
11499
11501
|
};
|
|
11500
11502
|
} catch (err) {
|
|
11501
11503
|
const msg = err instanceof Error ? err.message : String(err);
|
|
11502
|
-
logger$
|
|
11504
|
+
logger$7.error({
|
|
11503
11505
|
err,
|
|
11504
11506
|
personaName: persona.name
|
|
11505
11507
|
}, "Persona dispatch failed");
|
|
@@ -11651,7 +11653,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
|
|
|
11651
11653
|
}
|
|
11652
11654
|
});
|
|
11653
11655
|
rl.on("error", (err) => {
|
|
11654
|
-
logger$
|
|
11656
|
+
logger$7.error({ err }, "readline error");
|
|
11655
11657
|
if (!sessionEnded) endSession(false);
|
|
11656
11658
|
});
|
|
11657
11659
|
});
|
|
@@ -11692,7 +11694,7 @@ function registerBrainstormCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
11692
11694
|
|
|
11693
11695
|
//#endregion
|
|
11694
11696
|
//#region src/cli/commands/retry-escalated.ts
|
|
11695
|
-
const logger$
|
|
11697
|
+
const logger$6 = createLogger("retry-escalated-cmd");
|
|
11696
11698
|
async function runRetryEscalatedAction(options) {
|
|
11697
11699
|
const { runId, dryRun, force, outputFormat, projectRoot, concurrency, pack: packName, registry: injectedRegistry, agent: agentId, events: eventsFlag } = options;
|
|
11698
11700
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
@@ -11733,7 +11735,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
11733
11735
|
process.stdout.write(`[INFO] ${storyKey}: Context ceiling set to ${contextCeiling} tokens due to prior context spike pattern.\n`);
|
|
11734
11736
|
}
|
|
11735
11737
|
} catch (err) {
|
|
11736
|
-
logger$
|
|
11738
|
+
logger$6.warn({
|
|
11737
11739
|
err,
|
|
11738
11740
|
storyKey
|
|
11739
11741
|
}, "Failed to read efficiency profile — skipping gate");
|
|
@@ -11887,11 +11889,11 @@ async function runRetryEscalatedAction(options) {
|
|
|
11887
11889
|
output_tokens: output,
|
|
11888
11890
|
cost_usd: costUsd
|
|
11889
11891
|
}).catch((err) => {
|
|
11890
|
-
logger$
|
|
11892
|
+
logger$6.warn({ err }, "Failed to record token usage");
|
|
11891
11893
|
});
|
|
11892
11894
|
}
|
|
11893
11895
|
} catch (err) {
|
|
11894
|
-
logger$
|
|
11896
|
+
logger$6.warn({ err }, "Failed to record token usage");
|
|
11895
11897
|
}
|
|
11896
11898
|
});
|
|
11897
11899
|
if (outputFormat === "human") {
|
|
@@ -11920,7 +11922,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
11920
11922
|
const msg = err instanceof Error ? err.message : String(err);
|
|
11921
11923
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
11922
11924
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
11923
|
-
logger$
|
|
11925
|
+
logger$6.error({ err }, "retry-escalated failed");
|
|
11924
11926
|
return 1;
|
|
11925
11927
|
} finally {
|
|
11926
11928
|
try {
|
|
@@ -11953,7 +11955,7 @@ function registerRetryEscalatedCommand(program, _version = "0.0.0", projectRoot
|
|
|
11953
11955
|
|
|
11954
11956
|
//#endregion
|
|
11955
11957
|
//#region src/cli/commands/cancel.ts
|
|
11956
|
-
const logger$
|
|
11958
|
+
const logger$5 = createLogger("cancel-cmd");
|
|
11957
11959
|
async function runCancelAction(options) {
|
|
11958
11960
|
const { outputFormat, projectRoot, force } = options;
|
|
11959
11961
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
@@ -11992,7 +11994,7 @@ async function runCancelAction(options) {
|
|
|
11992
11994
|
}
|
|
11993
11995
|
} catch (err) {
|
|
11994
11996
|
const msg = err instanceof Error ? err.message : String(err);
|
|
11995
|
-
logger$
|
|
11997
|
+
logger$5.warn({
|
|
11996
11998
|
pid,
|
|
11997
11999
|
err: msg
|
|
11998
12000
|
}, "Failed to kill orchestrator");
|
|
@@ -12022,7 +12024,7 @@ async function runCancelAction(options) {
|
|
|
12022
12024
|
await adapter.close();
|
|
12023
12025
|
}
|
|
12024
12026
|
} catch (err) {
|
|
12025
|
-
logger$
|
|
12027
|
+
logger$5.warn({ err }, "Could not update pipeline run status (non-fatal)");
|
|
12026
12028
|
}
|
|
12027
12029
|
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
12028
12030
|
cancelled: true,
|
|
@@ -12376,7 +12378,7 @@ function emitDiff(diff, ctx) {
|
|
|
12376
12378
|
|
|
12377
12379
|
//#endregion
|
|
12378
12380
|
//#region src/cli/commands/probe-author.ts
|
|
12379
|
-
const logger$
|
|
12381
|
+
const logger$4 = createLogger("cli:probe-author");
|
|
12380
12382
|
/**
|
|
12381
12383
|
* A minimum-viable logger that routes everything to stderr. Used by the
|
|
12382
12384
|
* subcommand to keep stdout reserved for the JSON result payload.
|
|
@@ -12445,7 +12447,7 @@ async function runProbeAuthorDispatch(opts, projectRoot, registry) {
|
|
|
12445
12447
|
logger: stderrLogger
|
|
12446
12448
|
});
|
|
12447
12449
|
const workingDir = opts.workingDir !== void 0 ? resolve$1(opts.workingDir) : resolve$1(storyFilePath, "..");
|
|
12448
|
-
logger$
|
|
12450
|
+
logger$4.info({
|
|
12449
12451
|
storyKey: opts.storyKey,
|
|
12450
12452
|
storyFile: storyFilePath,
|
|
12451
12453
|
epicFile: epicFilePath,
|
|
@@ -12626,7 +12628,7 @@ function registerHistoryCommand(program) {
|
|
|
12626
12628
|
|
|
12627
12629
|
//#endregion
|
|
12628
12630
|
//#region src/cli/commands/repo-map.ts
|
|
12629
|
-
const logger$
|
|
12631
|
+
const logger$3 = createLogger("cli:repo-map");
|
|
12630
12632
|
/** Validate that a symbol name contains only safe identifier characters. */
|
|
12631
12633
|
function isValidSymbolName(name) {
|
|
12632
12634
|
return /^[a-zA-Z0-9_]+$/.test(name);
|
|
@@ -12661,15 +12663,15 @@ function registerRepoMapCommand(program) {
|
|
|
12661
12663
|
const colRows = await doltClient.query(`SHOW COLUMNS FROM repo_map_symbols LIKE 'dependencies'`);
|
|
12662
12664
|
if (colRows.length === 0) {
|
|
12663
12665
|
await doltClient.query(`ALTER TABLE repo_map_symbols ADD COLUMN dependencies JSON`);
|
|
12664
|
-
logger$
|
|
12666
|
+
logger$3.info("Applied migration: added dependencies column to repo_map_symbols");
|
|
12665
12667
|
}
|
|
12666
12668
|
} catch {
|
|
12667
|
-
logger$
|
|
12669
|
+
logger$3.debug("Skipping repo_map_symbols migration: table not yet created");
|
|
12668
12670
|
}
|
|
12669
|
-
const symbolRepo = new DoltSymbolRepository(doltClient, logger$
|
|
12671
|
+
const symbolRepo = new DoltSymbolRepository(doltClient, logger$3);
|
|
12670
12672
|
const metaRepo = new DoltRepoMapMetaRepository(doltClient);
|
|
12671
|
-
const repoMapModule = new RepoMapModule(metaRepo, logger$
|
|
12672
|
-
const queryEngine = new RepoMapQueryEngine(symbolRepo, logger$
|
|
12673
|
+
const repoMapModule = new RepoMapModule(metaRepo, logger$3);
|
|
12674
|
+
const queryEngine = new RepoMapQueryEngine(symbolRepo, logger$3);
|
|
12673
12675
|
if (options.show === true || !options.update && !options.query && !options.dryRun) {
|
|
12674
12676
|
const meta = await metaRepo.getMeta();
|
|
12675
12677
|
const staleResult = await repoMapModule.checkStaleness();
|
|
@@ -12695,9 +12697,9 @@ function registerRepoMapCommand(program) {
|
|
|
12695
12697
|
return;
|
|
12696
12698
|
}
|
|
12697
12699
|
if (options.update === true) {
|
|
12698
|
-
logger$
|
|
12699
|
-
const gitClient = new GitClient(logger$
|
|
12700
|
-
const grammarLoader = new GrammarLoader(logger$
|
|
12700
|
+
logger$3.info("repo-map --update: triggering incremental update");
|
|
12701
|
+
const gitClient = new GitClient(logger$3);
|
|
12702
|
+
const grammarLoader = new GrammarLoader(logger$3);
|
|
12701
12703
|
if (grammarLoader.getGrammar(".ts") === null) {
|
|
12702
12704
|
const msg = "tree-sitter grammars not installed. Run `npm install tree-sitter tree-sitter-typescript tree-sitter-javascript tree-sitter-python` in the substrate installation directory.";
|
|
12703
12705
|
if (options.outputFormat === "json") console.log(JSON.stringify({
|
|
@@ -12708,15 +12710,15 @@ function registerRepoMapCommand(program) {
|
|
|
12708
12710
|
process.exitCode = 1;
|
|
12709
12711
|
return;
|
|
12710
12712
|
}
|
|
12711
|
-
const parser = new SymbolParser(grammarLoader, logger$
|
|
12712
|
-
const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$
|
|
12713
|
+
const parser = new SymbolParser(grammarLoader, logger$3);
|
|
12714
|
+
const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$3);
|
|
12713
12715
|
let updateWarning;
|
|
12714
12716
|
try {
|
|
12715
12717
|
await storage.incrementalUpdate(dbRoot, parser);
|
|
12716
12718
|
} catch (err) {
|
|
12717
12719
|
if (err instanceof AppError && err.code === ERR_REPO_MAP_STORAGE_WRITE) {
|
|
12718
12720
|
updateWarning = err.message;
|
|
12719
|
-
logger$
|
|
12721
|
+
logger$3.warn({ err }, "repo-map --update: storage write error (partial update)");
|
|
12720
12722
|
} else throw err;
|
|
12721
12723
|
}
|
|
12722
12724
|
const meta = await metaRepo.getMeta();
|
|
@@ -12736,7 +12738,7 @@ function registerRepoMapCommand(program) {
|
|
|
12736
12738
|
return;
|
|
12737
12739
|
}
|
|
12738
12740
|
if (options.query !== void 0) {
|
|
12739
|
-
logger$
|
|
12741
|
+
logger$3.debug({ symbol: options.query }, "repo-map --query");
|
|
12740
12742
|
const result = await queryEngine.query({
|
|
12741
12743
|
symbols: [options.query],
|
|
12742
12744
|
maxTokens: 4e3
|
|
@@ -12758,7 +12760,7 @@ function registerRepoMapCommand(program) {
|
|
|
12758
12760
|
process.exitCode = 1;
|
|
12759
12761
|
return;
|
|
12760
12762
|
}
|
|
12761
|
-
const injector = new RepoMapInjector(queryEngine, logger$
|
|
12763
|
+
const injector = new RepoMapInjector(queryEngine, logger$3);
|
|
12762
12764
|
const injectionResult = await injector.buildContext(storyContent, 2e3);
|
|
12763
12765
|
console.log(JSON.stringify({
|
|
12764
12766
|
text: injectionResult.text,
|
|
@@ -12772,7 +12774,7 @@ function registerRepoMapCommand(program) {
|
|
|
12772
12774
|
|
|
12773
12775
|
//#endregion
|
|
12774
12776
|
//#region src/cli/commands/routing.ts
|
|
12775
|
-
const logger$
|
|
12777
|
+
const logger$2 = createLogger("cli:routing");
|
|
12776
12778
|
function registerRoutingCommand(program) {
|
|
12777
12779
|
program.command("routing").description("Show routing configuration and auto-tune history").option("--history", "Show the routing auto-tune log (model changes applied)").option("--output-format <format>", "Output format: text or json", "text").action(async (options) => {
|
|
12778
12780
|
const dbRoot = await resolveMainRepoRoot(process.cwd());
|
|
@@ -12789,7 +12791,7 @@ function registerRoutingCommand(program) {
|
|
|
12789
12791
|
try {
|
|
12790
12792
|
await store.initialize();
|
|
12791
12793
|
if (options.history === true) {
|
|
12792
|
-
logger$
|
|
12794
|
+
logger$2.debug("routing --history: fetching tune log");
|
|
12793
12795
|
const raw$1 = await store.getMetric("global", "routing_tune_log");
|
|
12794
12796
|
let entries = [];
|
|
12795
12797
|
if (Array.isArray(raw$1)) entries = raw$1.sort((a, b) => b.appliedAt.localeCompare(a.appliedAt));
|
|
@@ -13124,6 +13126,405 @@ function registerFactoryCommand$1(program) {
|
|
|
13124
13126
|
}) });
|
|
13125
13127
|
}
|
|
13126
13128
|
|
|
13129
|
+
//#endregion
|
|
13130
|
+
//#region src/cli/commands/reconcile-from-disk.ts
|
|
13131
|
+
const logger$1 = createLogger("reconcile-from-disk");
|
|
13132
|
+
/** 64KB tail window for capturing subprocess stderr/stdout (Story 66-5 pattern). */
|
|
13133
|
+
const MAX_OUTPUT_BYTES = 64 * 1024;
|
|
13134
|
+
/**
 * Gate chain definition — each gate runs in order. On any failure the chain
 * halts and emits pipeline:reconcile-gate-failed before exiting with code 1.
 *
 * Every gate invokes an npm script (`npm run <name>`), so the table below
 * only lists each script name alongside its timeout in milliseconds; the
 * cmd/args fields are derived uniformly.
 */
const GATE_CHAIN = [
	["build", 18e4],
	["check:circular", 6e4],
	["typecheck:gate", 12e4],
	["test:fast", 3e5]
].map(([name, timeoutMs]) => ({
	name,
	cmd: "npm",
	args: ["run", name],
	timeoutMs
}));
|
|
13164
|
+
/**
 * Truncate a string to the last N bytes (tail-window pattern from Story 66-5).
 * Preserves the END of the string, which contains the most recent diagnostic output.
 *
 * @param {string} s - Raw captured output (may be empty/undefined).
 * @param {number} [maxBytes] - Byte budget for the tail window (defaults to MAX_OUTPUT_BYTES).
 * @returns {string} `s` unchanged when it fits, otherwise its trailing `maxBytes` bytes.
 *
 * NOTE: the cut is byte-aligned, so a multi-byte UTF-8 character split at the
 * boundary decodes as U+FFFD in the result (same behavior as before this change).
 */
function tailWindow(s, maxBytes = MAX_OUTPUT_BYTES) {
	if (!s) return "";
	// Encode once: buf.length is the UTF-8 byte length, so a separate
	// Buffer.byteLength() pass (which re-encodes the string) is unnecessary.
	const buf = Buffer.from(s, "utf-8");
	if (buf.length <= maxBytes) return s;
	// subarray() replaces the deprecated Buffer.prototype.slice (DEP0158);
	// it returns the same zero-copy view over the tail of the buffer.
	return buf.subarray(buf.length - maxBytes).toString("utf-8");
}
|
|
13174
|
+
/**
 * Read and parse the reconcile manifest index from .substrate/runs/manifest.json.
 * Returns null if the file doesn't exist, fails to parse, or doesn't contain
 * a `runs` array.
 */
async function readReconcileManifest(dbRoot) {
	const manifestPath = join(dbRoot, ".substrate", "runs", "manifest.json");
	let parsed;
	try {
		parsed = JSON.parse(await readFile(manifestPath, "utf-8"));
	} catch {
		// Missing file or malformed JSON — treated identically as "no manifest".
		return null;
	}
	const hasRunsArray = parsed !== null && typeof parsed === "object" && Array.isArray(parsed.runs);
	return hasRunsArray ? parsed : null;
}
|
|
13189
|
+
/**
 * Find a specific run entry from the manifest by runId.
 * When runId is not provided, returns the last (most recent) entry.
 * Returns null when the manifest has no runs or no entry matches.
 */
function findRunEntry(manifest, runId) {
	const { runs } = manifest;
	if (runs.length === 0) return null;
	if (!runId) return runs.at(-1) ?? null;
	return runs.find((entry) => entry.runId === runId) ?? null;
}
|
|
13198
|
+
/**
 * Detect an auto-committed SHA for a story using git log.
 *
 * Searches commits matching `feat(story-<storyKey>)` since started_at using
 * git's --grep filter. Returns the first matching SHA, or undefined if none found.
 *
 * Git operations use `cwd: projectRoot` (not process.cwd()) to avoid the
 * bash-session-drift footgun documented in obs_025.
 */
function detectAutoCommit(storyKey, startedAt, projectRoot) {
	const gitArgs = [
		"log",
		"--oneline",
		`--since=${startedAt}`,
		`--grep=feat(story-${storyKey})`
	];
	const proc = spawnSync("git", gitArgs, {
		cwd: projectRoot,
		encoding: "utf-8",
		timeout: 1e4
	});
	// Any git failure (non-zero exit, missing binary, empty log) means "no commit".
	const output = proc.status === 0 ? proc.stdout?.trim() : "";
	if (!output) return undefined;
	for (const entry of output.split("\n")) {
		if (entry === "") continue;
		// --oneline prints "<sha> <subject>"; the first token is the SHA.
		const [sha] = entry.split(" ");
		if (sha) return sha;
	}
	return undefined;
}
|
|
13227
|
+
/**
 * Detect working-tree changes for files declared in targetFiles.
 *
 * Runs `git status --porcelain` and cross-references with targetFiles.
 * Returns the list of matching modified/added/deleted paths (porcelain order).
 * Any git failure (non-zero exit, missing binary, clean tree) yields [].
 */
function detectWorkingTreeChanges(targetFiles, projectRoot) {
	if (targetFiles.length === 0) return [];
	const proc = spawnSync("git", ["status", "--porcelain"], {
		cwd: projectRoot,
		encoding: "utf-8",
		timeout: 1e4
	});
	if (proc.status !== 0 || !proc.stdout?.trim()) return [];
	// Deliberately loose suffix matching so relative targets match repo-relative
	// porcelain paths in either direction (unchanged from the original logic).
	const pathMatches = (candidate, target) =>
		candidate === target ||
		candidate.endsWith(`/${target}`) ||
		target.endsWith(`/${candidate}`) ||
		candidate.endsWith(target) ||
		target.endsWith(candidate);
	return proc.stdout
		.split("\n")
		.filter((line) => line.trim().length > 0)
		// Porcelain lines are "XY <path>"; strip the 2-char status + space.
		.map((line) => (line.length > 3 ? line.slice(3).trim() : line.trim()))
		.filter((candidate) => targetFiles.some((target) => pathMatches(candidate, target)));
}
|
|
13252
|
+
/**
 * Run the validation gate chain: build → check:circular → typecheck:gate → test:fast.
 *
 * Each gate runs via spawnSync with an explicit timeout. Stderr/stdout are
 * captured through tailWindow (64KB tail-window, Story 66-5 pattern) and the
 * chain halts at the first failing gate.
 */
function runGateChain(projectRoot) {
	const gateResults = [];
	for (const gate of GATE_CHAIN) {
		const begunAt = Date.now();
		const proc = spawnSync(gate.cmd, gate.args, {
			cwd: projectRoot,
			encoding: "utf-8",
			timeout: gate.timeoutMs
		});
		const durationMs = Date.now() - begunAt;
		const stderrTail = tailWindow(proc.stderr ?? "");
		const stdoutTail = tailWindow(proc.stdout ?? "");
		// A signal (e.g. timeout kill) counts as failure even though status
		// may be null; map it to the conventional 128 exit code.
		const exitCode = proc.status ?? (proc.signal ? 128 : -1);
		const passed = proc.status === 0 && !proc.signal;
		const record = { gate: gate.name, passed, exitCode, durationMs };
		if (stderrTail) record.stderrTail = stderrTail;
		if (stdoutTail) record.stdoutTail = stdoutTail;
		gateResults.push(record);
		if (!passed) return { passed: false, gateResults };
	}
	return { passed: true, gateResults };
}
|
|
13291
|
+
/**
 * Prompt the operator for confirmation before making Dolt writes.
 * Resolves true when the operator answers 'y' or 'Y', false otherwise.
 */
async function promptOperator(storyCount) {
	const rl = readline.createInterface({
		input: process.stdin,
		output: process.stdout
	});
	const answer = await new Promise((resolve$2) => {
		rl.question(`Reconcile ${storyCount} stories to status='complete'? [y/N] `, resolve$2);
	});
	rl.close();
	return answer.toLowerCase() === "y";
}
|
|
13307
|
+
/**
 * Run the reconcile-from-disk action.
 * Returns exit code: 0 = success/no-op, 1 = error/gate-failure.
 */
async function runReconcileFromDiskAction(options) {
	// _dbRoot and _skipGates are internal/test hooks; operators use the rest.
	const { runId, dryRun = false, yes = false, outputFormat, projectRoot, _dbRoot, _skipGates = false } = options;
	const startMs = Date.now();
	// The state/Dolt root can differ from the git project root.
	const dbRoot = _dbRoot ?? await resolveMainRepoRoot(projectRoot);
	const indexManifest = await readReconcileManifest(dbRoot);
	// Resolve which run to reconcile: explicit --run-id, else the most recent
	// entry in the index manifest, else the current-run pointer on disk.
	let resolvedRunId = null;
	let runEntry = null;
	if (runId) {
		resolvedRunId = runId;
		if (indexManifest) runEntry = findRunEntry(indexManifest, runId);
	} else if (indexManifest && indexManifest.runs.length > 0) {
		runEntry = findRunEntry(indexManifest);
		resolvedRunId = runEntry?.runId ?? null;
	} else resolvedRunId = await readCurrentRunId(dbRoot);
	if (!resolvedRunId) {
		const errorMsg = "No runs found. Use `substrate metrics --output-format json` for run history.";
		if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: errorMsg }) + "\n");
		else process.stderr.write(`Error: ${errorMsg}\n`);
		return 1;
	}
	// Fallback: build a run entry from the individual run manifest when the
	// index manifest had no matching entry. Read failures are non-fatal here.
	if (!runEntry) {
		const { manifest: fullManifest } = await resolveRunManifest(dbRoot, resolvedRunId);
		if (fullManifest) try {
			const data = await fullManifest.read();
			const stories = Object.entries(data.per_story_state).map(([key, state]) => ({
				storyKey: key,
				status: state.status
			}));
			runEntry = {
				runId: resolvedRunId,
				started_at: data.created_at,
				stories
			};
		} catch {
			logger$1.debug({ runId: resolvedRunId }, "failed to read individual run manifest");
		}
	}
	if (!runEntry) {
		const errorMsg = runId ? `Run '${runId}' not found. Use \`substrate metrics --output-format json\` for run history.` : "No runs found. Use `substrate metrics --output-format json` for run history.";
		if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: errorMsg }) + "\n");
		else process.stderr.write(`Error: ${errorMsg}\n`);
		return 1;
	}
	// Only stories that are neither complete nor cancelled are candidates.
	const candidateStories = runEntry.stories.filter((s) => s.status !== "complete" && s.status !== "cancelled");
	if (candidateStories.length === 0) {
		const output$1 = {
			runId: resolvedRunId,
			candidates: [],
			gateResults: [],
			reconciled: false,
			affectedStoryKeys: []
		};
		if (outputFormat === "json") process.stdout.write(JSON.stringify(output$1) + "\n");
		else process.stdout.write(`All stories already complete or cancelled for run ${resolvedRunId}.\n`);
		return 0;
	}
	// Discovery phase: a story is reconcilable if git history shows an
	// auto-commit for it, or its declared target files have working-tree changes.
	const diffRecords = [];
	for (const story of candidateStories) {
		const autoCommittedSha = detectAutoCommit(story.storyKey, runEntry.started_at, projectRoot);
		const modifiedFiles = detectWorkingTreeChanges(story.targetFiles ?? [], projectRoot);
		const reconcilable = !!(autoCommittedSha || modifiedFiles.length > 0);
		diffRecords.push({
			storyKey: story.storyKey,
			autoCommittedSha,
			modifiedFiles,
			reconcilable
		});
	}
	// --dry-run: report the discovery output without running gates or writing.
	if (dryRun) {
		const output$1 = {
			runId: resolvedRunId,
			candidates: diffRecords,
			gateResults: [],
			reconciled: false,
			affectedStoryKeys: []
		};
		if (outputFormat === "json") process.stdout.write(JSON.stringify(output$1) + "\n");
		else {
			process.stdout.write(`[DRY RUN] Run: ${resolvedRunId}\n`);
			process.stdout.write(`Would run gates: ${GATE_CHAIN.map((g) => g.name).join(" → ")}\n`);
			const reconcilable = diffRecords.filter((r) => r.reconcilable);
			process.stdout.write(`Would reconcile ${reconcilable.length} of ${diffRecords.length} candidate stories\n`);
			for (const r of diffRecords) {
				const status = r.reconcilable ? "✓ reconcilable" : "✗ not reconcilable";
				const detail = r.autoCommittedSha ? ` (commit: ${r.autoCommittedSha})` : "";
				process.stdout.write(` ${r.storyKey}: ${status}${detail}\n`);
			}
		}
		return 0;
	}
	// Validation gates must pass before any Dolt write (unless test hook skips).
	const { passed, gateResults } = _skipGates ? {
		passed: true,
		gateResults: []
	} : runGateChain(projectRoot);
	if (!passed) {
		const failedGateResult = gateResults.find((g) => !g.passed);
		const failedGateName = failedGateResult?.gate ?? "unknown";
		const durationMs$1 = Date.now() - startMs;
		// NOTE(review): this EventEmitter is created locally, so the emit has
		// no listeners here — presumably a telemetry-shape placeholder; verify.
		const localBus$1 = new EventEmitter();
		localBus$1.emit("pipeline:reconcile-gate-failed", {
			runId: resolvedRunId,
			failedGate: failedGateName,
			stderrTail: failedGateResult?.stderrTail,
			stdoutTail: failedGateResult?.stdoutTail,
			durationMs: durationMs$1
		});
		logger$1.info({
			runId: resolvedRunId,
			failedGate: failedGateName,
			exitCode: failedGateResult?.exitCode
		}, "reconcile-from-disk gate failed");
		const output$1 = {
			runId: resolvedRunId,
			candidates: diffRecords,
			gateResults,
			reconciled: false,
			affectedStoryKeys: []
		};
		if (outputFormat === "json") process.stdout.write(JSON.stringify(output$1) + "\n");
		else {
			process.stderr.write(`Gate '${failedGateName}' failed (exit ${failedGateResult?.exitCode ?? -1}). No Dolt changes made.\n`);
			if (failedGateResult?.stderrTail) process.stderr.write(`--- stderr ---\n${failedGateResult.stderrTail}\n`);
		}
		return 1;
	}
	const reconcilableRecords = diffRecords.filter((r) => r.reconcilable);
	// Operator confirmation unless --yes was given; declining is a clean exit.
	if (!yes) {
		if (outputFormat === "human") {
			process.stdout.write(`\nRun: ${resolvedRunId}\n`);
			process.stdout.write(`Stories to reconcile (${reconcilableRecords.length}):\n`);
			for (const r of reconcilableRecords) {
				const detail = r.autoCommittedSha ? ` (commit: ${r.autoCommittedSha})` : "";
				process.stdout.write(` ${r.storyKey}${detail}\n`);
			}
			process.stdout.write("\n");
		}
		const confirmed = await promptOperator(reconcilableRecords.length);
		if (!confirmed) {
			const output$1 = {
				runId: resolvedRunId,
				candidates: diffRecords,
				gateResults,
				reconciled: false,
				affectedStoryKeys: []
			};
			if (outputFormat === "json") process.stdout.write(JSON.stringify(output$1) + "\n");
			else process.stdout.write("Reconciliation declined.\n");
			return 0;
		}
	}
	// Write phase: flip each reconcilable story to 'complete' in one transaction.
	const adapter = createDatabaseAdapter({
		backend: "auto",
		basePath: dbRoot
	});
	try {
		await initSchema(adapter);
		const now = new Date().toISOString();
		await adapter.transaction(async (tx) => {
			for (const record of reconcilableRecords) await tx.query("UPDATE wg_stories SET status='complete', updated_at=? WHERE story_key=? AND run_id=?", [
				now,
				record.storyKey,
				resolvedRunId
			]);
		});
		logger$1.info({
			runId: resolvedRunId,
			affectedStories: reconcilableRecords.map((r) => r.storyKey)
		}, "reconcile-from-disk: Dolt update complete");
	} finally {
		// Best-effort close; a close failure must not mask the outcome.
		await adapter.close().catch(() => {});
	}
	const durationMs = Date.now() - startMs;
	const affectedStoryKeys = reconcilableRecords.map((r) => r.storyKey);
	// NOTE(review): same listener-less local emitter pattern as above — confirm
	// whether a shared bus was intended.
	const localBus = new EventEmitter();
	localBus.emit("pipeline:reconcile-from-disk", {
		runId: resolvedRunId,
		affectedStories: affectedStoryKeys,
		gatesPassed: true,
		operatorConfirmed: !yes,
		durationMs
	});
	const output = {
		runId: resolvedRunId,
		candidates: diffRecords,
		gateResults,
		reconciled: true,
		affectedStoryKeys
	};
	if (outputFormat === "json") process.stdout.write(JSON.stringify(output) + "\n");
	else {
		process.stdout.write(`Reconciled ${affectedStoryKeys.length} stories to 'complete' in ${durationMs}ms.\n`);
		for (const key of affectedStoryKeys) process.stdout.write(` ✓ ${key}\n`);
	}
	return 0;
}
|
|
13506
|
+
/**
 * Register the `reconcile-from-disk` subcommand with the CLI program.
 *
 * Command shape:
 *   substrate reconcile-from-disk [--run-id <id>] [--dry-run] [--yes]
 *                                 [--output-format <human|json>]
 *                                 [--project-root <path>]
 */
function registerReconcileFromDiskCommand(program, _version = "0.0.0", projectRoot = process.cwd(), _registry) {
	const cmd = program.command("reconcile-from-disk");
	cmd.description("Reconcile wg_stories.status against working-tree and git history (Path A recovery)");
	cmd.option("--run-id <id>", "Pipeline run ID to reconcile (defaults to most recent run in .substrate/runs/manifest.json)");
	cmd.option("--dry-run", "Print discovery output and would-update list without running gates or writing Dolt");
	cmd.option("--yes", "Skip operator confirmation prompt (gates still run; gate failure still aborts)");
	cmd.option("--output-format <format>", "Output format: human (default) or json", "human");
	cmd.option("--project-root <path>", "Project root directory", projectRoot);
	cmd.action(async (opts) => {
		// Anything other than an explicit "json" falls back to human output.
		const outputFormat = opts.outputFormat === "json" ? "json" : "human";
		process.exitCode = await runReconcileFromDiskAction({
			runId: opts.runId,
			dryRun: opts.dryRun,
			yes: opts.yes,
			outputFormat,
			projectRoot: opts.projectRoot
		});
	});
}
|
|
13527
|
+
|
|
13127
13528
|
//#endregion
|
|
13128
13529
|
//#region src/cli/index.ts
|
|
13129
13530
|
process.setMaxListeners(20);
|
|
@@ -13192,6 +13593,7 @@ async function createProgram() {
|
|
|
13192
13593
|
registerEpicStatusCommand(program);
|
|
13193
13594
|
registerScenariosCommand(program);
|
|
13194
13595
|
registerFactoryCommand$1(program);
|
|
13596
|
+
registerReconcileFromDiskCommand(program, version, process.cwd(), registry);
|
|
13195
13597
|
registerUpgradeCommand(program);
|
|
13196
13598
|
return program;
|
|
13197
13599
|
}
|