substrate-ai 0.20.61 → 0.20.63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +575 -131
- package/dist/{health-BoXxsFSF.js → health-C-KOZrFJ.js} +227 -4
- package/dist/{health-FZVOBYND.js → health-CJqd1FzY.js} +1 -1
- package/dist/index.d.ts +28 -0
- package/dist/{run-CRz08RrU.js → run-CHUFlRbH.js} +31 -2
- package/dist/{run-BxfeSz6G.js → run-Z_-caE_i.js} +2 -2
- package/package.json +2 -2
package/dist/cli/index.js
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { FileStateStore, RunManifest, SUBSTRATE_OWNED_SETTINGS_KEYS, SupervisorLock, VALID_PHASES, WorkGraphRepository, ZERO_FINDINGS_BY_AUTHOR, ZERO_FINDING_COUNTS, ZERO_PROBE_AUTHOR_METRICS, aggregateProbeAuthorMetrics, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, parseDbTimestampAsUtc, parseRuntimeProbes, readCurrentRunId, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveRunManifest, rollupFindingCounts, rollupFindingsByAuthor, rollupProbeAuthorByClass, rollupProbeAuthorMetrics } from "../health-
|
|
2
|
+
import { FileStateStore, RunManifest, SUBSTRATE_OWNED_SETTINGS_KEYS, SupervisorLock, VALID_PHASES, WorkGraphRepository, ZERO_FINDINGS_BY_AUTHOR, ZERO_FINDING_COUNTS, ZERO_PROBE_AUTHOR_METRICS, aggregateProbeAuthorMetrics, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, parseDbTimestampAsUtc, parseRuntimeProbes, readCurrentRunId, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveRunManifest, rollupFindingCounts, rollupFindingsByAuthor, rollupProbeAuthorByClass, rollupProbeAuthorMetrics } from "../health-C-KOZrFJ.js";
|
|
3
3
|
import { createLogger } from "../logger-KeHncl-f.js";
|
|
4
4
|
import { createEventBus } from "../helpers-CElYrONe.js";
|
|
5
5
|
import { AdapterRegistry, BudgetConfigSchema, CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, ConfigError, CostTrackerConfigSchema, DEFAULT_CONFIG, DoltClient, DoltNotInstalled, GlobalSettingsSchema, InMemoryDatabaseAdapter, IngestionServer, MonitorDatabaseImpl, OPERATIONAL_FINDING, PartialGlobalSettingsSchema, PartialProviderConfigSchema, ProvidersSchema, RoutingRecommender, STORY_METRICS, TelemetryConfigSchema, addTokenUsage, aggregateTokenUsageForRun, checkDoltInstalled, compareRunMetrics, createAmendmentRun, createConfigSystem, createDecision, createDoltClient, createPipelineRun, getActiveDecisions, getAllCostEntriesFiltered, getBaselineRunMetrics, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestCompletedRun, getLatestRun, getPipelineRunById, getPlanningCostTotal, getRetryableEscalations, getRunMetrics, getRunningPipelineRuns, getSessionCostSummary, getSessionCostSummaryFiltered, getStoryMetricsForRun, getTokenUsageSummary, incrementRunRestarts, initSchema, initializeDolt, listRunMetrics, loadParentRunDecisions, supersedeDecision, tagRunAsBaseline, updatePipelineRun } from "../dist-W2emvN3F.js";
|
|
6
6
|
import "../adapter-registry-DXLMTmfD.js";
|
|
7
|
-
import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GLOBSTAR, GitClient, GrammarLoader, Minimatch, Minipass, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, escape, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerExportCommand, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runProbeAuthor, runSolutioningPhase, unescape, validateStopAfterFromConflict } from "../run-
|
|
7
|
+
import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GLOBSTAR, GitClient, GrammarLoader, Minimatch, Minipass, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, escape, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerExportCommand, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runProbeAuthor, runSolutioningPhase, unescape, validateStopAfterFromConflict } from "../run-CHUFlRbH.js";
|
|
8
8
|
import "../errors-CKFu8YI9.js";
|
|
9
9
|
import "../routing-CcBOCuC9.js";
|
|
10
10
|
import "../decisions-C0pz9Clx.js";
|
|
@@ -13,7 +13,7 @@ import { registerUpgradeCommand } from "../upgrade-CAqLkNUP.js";
|
|
|
13
13
|
import { Command } from "commander";
|
|
14
14
|
import { fileURLToPath } from "url";
|
|
15
15
|
import { dirname, join, resolve } from "path";
|
|
16
|
-
import { access, mkdir, readFile, writeFile } from "fs/promises";
|
|
16
|
+
import { access, mkdir, readFile, readdir, writeFile } from "fs/promises";
|
|
17
17
|
import { EventEmitter } from "node:events";
|
|
18
18
|
import yaml from "js-yaml";
|
|
19
19
|
import * as actualFS from "node:fs";
|
|
@@ -336,12 +336,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
336
336
|
_onTaskReady;
|
|
337
337
|
_onTaskComplete;
|
|
338
338
|
_onTaskFailed;
|
|
339
|
-
constructor(eventBus, projectRoot, baseDirectory = DEFAULT_WORKTREE_BASE, db = null, logger$
|
|
339
|
+
constructor(eventBus, projectRoot, baseDirectory = DEFAULT_WORKTREE_BASE, db = null, logger$19) {
|
|
340
340
|
this._eventBus = eventBus;
|
|
341
341
|
this._projectRoot = projectRoot;
|
|
342
342
|
this._baseDirectory = baseDirectory;
|
|
343
343
|
this._db = db;
|
|
344
|
-
this._logger = logger$
|
|
344
|
+
this._logger = logger$19 ?? console;
|
|
345
345
|
this._onTaskReady = ({ taskId }) => {
|
|
346
346
|
this._handleTaskReady(taskId).catch((err) => {
|
|
347
347
|
this._logger.error({
|
|
@@ -645,14 +645,14 @@ var RecommendationEngine = class {
|
|
|
645
645
|
_filters;
|
|
646
646
|
_historyDays;
|
|
647
647
|
_logger;
|
|
648
|
-
constructor(monitorDb, config = {}, logger$
|
|
648
|
+
constructor(monitorDb, config = {}, logger$19) {
|
|
649
649
|
this._monitorDb = monitorDb;
|
|
650
650
|
this._filters = {
|
|
651
651
|
threshold_percentage: config.recommendation_threshold_percentage ?? 5,
|
|
652
652
|
min_sample_size: config.min_sample_size ?? 10
|
|
653
653
|
};
|
|
654
654
|
this._historyDays = config.recommendation_history_days ?? 90;
|
|
655
|
-
this._logger = logger$
|
|
655
|
+
this._logger = logger$19 ?? console;
|
|
656
656
|
}
|
|
657
657
|
generateRecommendations() {
|
|
658
658
|
const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
@@ -1742,7 +1742,7 @@ function buildStackAwareDevNotes(profile) {
|
|
|
1742
1742
|
|
|
1743
1743
|
//#endregion
|
|
1744
1744
|
//#region src/cli/commands/init.ts
|
|
1745
|
-
const logger$
|
|
1745
|
+
const logger$18 = createLogger("init");
|
|
1746
1746
|
const __dirname = dirname(new URL(import.meta.url).pathname);
|
|
1747
1747
|
const SCAFFOLD_VERSION_REGEX = /<!-- substrate:version=([\d.]+) -->/;
|
|
1748
1748
|
/**
|
|
@@ -1783,7 +1783,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
1783
1783
|
const version = resolveBmadMethodVersion();
|
|
1784
1784
|
if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
|
|
1785
1785
|
process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
|
|
1786
|
-
logger$
|
|
1786
|
+
logger$18.info({
|
|
1787
1787
|
version,
|
|
1788
1788
|
dest: bmadDest
|
|
1789
1789
|
}, "Scaffolding BMAD framework");
|
|
@@ -1793,7 +1793,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
1793
1793
|
const destDir = join(bmadDest, dir);
|
|
1794
1794
|
mkdirSync$1(destDir, { recursive: true });
|
|
1795
1795
|
cpSync(srcDir, destDir, { recursive: true });
|
|
1796
|
-
logger$
|
|
1796
|
+
logger$18.info({
|
|
1797
1797
|
dir,
|
|
1798
1798
|
dest: destDir
|
|
1799
1799
|
}, "Scaffolded BMAD framework directory");
|
|
@@ -1812,7 +1812,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
1812
1812
|
"document_output_language: English"
|
|
1813
1813
|
].join("\n") + "\n";
|
|
1814
1814
|
await writeFile(configFile, configStub, "utf8");
|
|
1815
|
-
logger$
|
|
1815
|
+
logger$18.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
|
|
1816
1816
|
}
|
|
1817
1817
|
}
|
|
1818
1818
|
const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
|
|
@@ -1827,7 +1827,7 @@ async function scaffoldClaudeMd(projectRoot, profile) {
|
|
|
1827
1827
|
try {
|
|
1828
1828
|
sectionContent = await readFile(templatePath, "utf8");
|
|
1829
1829
|
} catch {
|
|
1830
|
-
logger$
|
|
1830
|
+
logger$18.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
|
|
1831
1831
|
return;
|
|
1832
1832
|
}
|
|
1833
1833
|
const substrateVersion = readSubstrateVersion(pkgRoot);
|
|
@@ -1863,7 +1863,7 @@ async function scaffoldClaudeMd(projectRoot, profile) {
|
|
|
1863
1863
|
else newContent = updatedExisting;
|
|
1864
1864
|
}
|
|
1865
1865
|
await writeFile(claudeMdPath, newContent, "utf8");
|
|
1866
|
-
logger$
|
|
1866
|
+
logger$18.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
|
|
1867
1867
|
}
|
|
1868
1868
|
async function scaffoldAgentsMd(projectRoot, profile) {
|
|
1869
1869
|
const agentsMdPath = join(projectRoot, "AGENTS.md");
|
|
@@ -1875,7 +1875,7 @@ async function scaffoldAgentsMd(projectRoot, profile) {
|
|
|
1875
1875
|
try {
|
|
1876
1876
|
sectionContent = await readFile(templatePath, "utf8");
|
|
1877
1877
|
} catch {
|
|
1878
|
-
logger$
|
|
1878
|
+
logger$18.warn({ templatePath }, "AGENTS.md substrate section template not found; skipping");
|
|
1879
1879
|
return;
|
|
1880
1880
|
}
|
|
1881
1881
|
const substrateVersion = readSubstrateVersion(pkgRoot);
|
|
@@ -1896,7 +1896,7 @@ async function scaffoldAgentsMd(projectRoot, profile) {
|
|
|
1896
1896
|
newContent = existingContent + separator + sectionContent;
|
|
1897
1897
|
}
|
|
1898
1898
|
await writeFile(agentsMdPath, newContent, "utf8");
|
|
1899
|
-
logger$
|
|
1899
|
+
logger$18.info({ agentsMdPath }, "Wrote substrate section to AGENTS.md");
|
|
1900
1900
|
}
|
|
1901
1901
|
async function scaffoldGeminiMd(projectRoot, profile) {
|
|
1902
1902
|
const geminiMdPath = join(projectRoot, "GEMINI.md");
|
|
@@ -1908,7 +1908,7 @@ async function scaffoldGeminiMd(projectRoot, profile) {
|
|
|
1908
1908
|
try {
|
|
1909
1909
|
sectionContent = await readFile(templatePath, "utf8");
|
|
1910
1910
|
} catch {
|
|
1911
|
-
logger$
|
|
1911
|
+
logger$18.warn({ templatePath }, "GEMINI.md substrate section template not found; skipping");
|
|
1912
1912
|
return;
|
|
1913
1913
|
}
|
|
1914
1914
|
const substrateVersion = readSubstrateVersion(pkgRoot);
|
|
@@ -1929,7 +1929,7 @@ async function scaffoldGeminiMd(projectRoot, profile) {
|
|
|
1929
1929
|
newContent = existingContent + separator + sectionContent;
|
|
1930
1930
|
}
|
|
1931
1931
|
await writeFile(geminiMdPath, newContent, "utf8");
|
|
1932
|
-
logger$
|
|
1932
|
+
logger$18.info({ geminiMdPath }, "Wrote substrate section to GEMINI.md");
|
|
1933
1933
|
}
|
|
1934
1934
|
async function scaffoldStatuslineScript(projectRoot) {
|
|
1935
1935
|
const pkgRoot = findPackageRoot(__dirname);
|
|
@@ -1940,7 +1940,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
1940
1940
|
try {
|
|
1941
1941
|
content = await readFile(templatePath, "utf8");
|
|
1942
1942
|
} catch {
|
|
1943
|
-
logger$
|
|
1943
|
+
logger$18.warn({ templatePath }, "statusline.sh template not found; skipping");
|
|
1944
1944
|
return;
|
|
1945
1945
|
}
|
|
1946
1946
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -1948,7 +1948,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
1948
1948
|
mkdirSync$1(claudeDir, { recursive: true });
|
|
1949
1949
|
await writeFile(statuslinePath, content, "utf8");
|
|
1950
1950
|
chmodSync(statuslinePath, 493);
|
|
1951
|
-
logger$
|
|
1951
|
+
logger$18.info({ statuslinePath }, "Wrote .claude/statusline.sh");
|
|
1952
1952
|
}
|
|
1953
1953
|
async function scaffoldClaudeSettings(projectRoot) {
|
|
1954
1954
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -1964,7 +1964,7 @@ async function scaffoldClaudeSettings(projectRoot) {
|
|
|
1964
1964
|
if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
|
|
1965
1965
|
mkdirSync$1(claudeDir, { recursive: true });
|
|
1966
1966
|
await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
|
|
1967
|
-
logger$
|
|
1967
|
+
logger$18.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
|
|
1968
1968
|
}
|
|
1969
1969
|
function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
|
|
1970
1970
|
try {
|
|
@@ -2036,7 +2036,7 @@ async function compileBmadAgents(bmadDir) {
|
|
|
2036
2036
|
writeFileSync$1(mdPath, result.xml, "utf-8");
|
|
2037
2037
|
compiled++;
|
|
2038
2038
|
} catch (compileErr) {
|
|
2039
|
-
logger$
|
|
2039
|
+
logger$18.debug({
|
|
2040
2040
|
err: compileErr,
|
|
2041
2041
|
file
|
|
2042
2042
|
}, "Failed to compile agent YAML");
|
|
@@ -2183,9 +2183,9 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2183
2183
|
const _require = createRequire(join(__dirname, "synthetic.js"));
|
|
2184
2184
|
try {
|
|
2185
2185
|
const compiledCount = await compileBmadAgents(bmadDir);
|
|
2186
|
-
if (compiledCount > 0) logger$
|
|
2186
|
+
if (compiledCount > 0) logger$18.info({ compiledCount }, "Compiled agent YAML files to MD");
|
|
2187
2187
|
} catch (compileErr) {
|
|
2188
|
-
logger$
|
|
2188
|
+
logger$18.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
|
|
2189
2189
|
}
|
|
2190
2190
|
const resolveExport = (mod, name) => {
|
|
2191
2191
|
if (typeof mod[name] === "function") return mod[name];
|
|
@@ -2199,7 +2199,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2199
2199
|
const manifestGenPath = join(installerLibPath, "core", "manifest-generator.js");
|
|
2200
2200
|
const pathUtilsPath = join(installerLibPath, "ide", "shared", "path-utils.js");
|
|
2201
2201
|
if (!existsSync$1(agentGenPath)) {
|
|
2202
|
-
logger$
|
|
2202
|
+
logger$18.info("bmad-method generators not available (requires bmad-method with agent/workflow/task-tool generators)");
|
|
2203
2203
|
return;
|
|
2204
2204
|
}
|
|
2205
2205
|
const agentMod = _require(agentGenPath);
|
|
@@ -2209,11 +2209,11 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2209
2209
|
if (existsSync$1(workflowGenPath)) {
|
|
2210
2210
|
const workflowMod = _require(workflowGenPath);
|
|
2211
2211
|
WorkflowCommandGenerator = resolveExport(workflowMod, "WorkflowCommandGenerator");
|
|
2212
|
-
} else logger$
|
|
2212
|
+
} else logger$18.info("bmad-method workflow-command-generator not available; will try skill-based installation");
|
|
2213
2213
|
if (existsSync$1(taskToolGenPath)) {
|
|
2214
2214
|
const taskToolMod = _require(taskToolGenPath);
|
|
2215
2215
|
TaskToolCommandGenerator = resolveExport(taskToolMod, "TaskToolCommandGenerator");
|
|
2216
|
-
} else logger$
|
|
2216
|
+
} else logger$18.info("bmad-method task-tool-command-generator not available; will try skill-based installation");
|
|
2217
2217
|
let ManifestGenerator = null;
|
|
2218
2218
|
if (existsSync$1(manifestGenPath)) {
|
|
2219
2219
|
const manifestMod = _require(manifestGenPath);
|
|
@@ -2245,7 +2245,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2245
2245
|
const manifestGen = new ManifestGenerator();
|
|
2246
2246
|
await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
|
|
2247
2247
|
} catch (manifestErr) {
|
|
2248
|
-
logger$
|
|
2248
|
+
logger$18.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
|
|
2249
2249
|
}
|
|
2250
2250
|
const commandsDir = join(projectRoot, ".claude", "commands");
|
|
2251
2251
|
mkdirSync$1(commandsDir, { recursive: true });
|
|
@@ -2273,7 +2273,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2273
2273
|
const total = agentCount + workflowCount + taskToolCount + skillCount;
|
|
2274
2274
|
if (outputFormat !== "json") if (skillCount > 0) process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(skillCount)} skills)\n`);
|
|
2275
2275
|
else process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
|
|
2276
|
-
logger$
|
|
2276
|
+
logger$18.info({
|
|
2277
2277
|
agentCount,
|
|
2278
2278
|
workflowCount,
|
|
2279
2279
|
taskToolCount,
|
|
@@ -2284,7 +2284,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
2284
2284
|
} catch (err) {
|
|
2285
2285
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2286
2286
|
if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
|
|
2287
|
-
logger$
|
|
2287
|
+
logger$18.warn({ err }, "scaffoldClaudeCommands failed; init continues");
|
|
2288
2288
|
}
|
|
2289
2289
|
}
|
|
2290
2290
|
/**
|
|
@@ -2313,7 +2313,7 @@ function syncCommandsAsPrompts(commandsDir, promptsDir, ownershipPrefixes, nameP
|
|
|
2313
2313
|
unlinkSync$1(join(promptsDir, entry.name));
|
|
2314
2314
|
}
|
|
2315
2315
|
} catch (err) {
|
|
2316
|
-
logger$
|
|
2316
|
+
logger$18.debug({
|
|
2317
2317
|
err,
|
|
2318
2318
|
promptsDir
|
|
2319
2319
|
}, "Failed to prune stale prompts");
|
|
@@ -2355,7 +2355,7 @@ function syncSkillsToTarget(srcSkillsDir, destSkillsDir, ownershipPrefixes, name
|
|
|
2355
2355
|
});
|
|
2356
2356
|
}
|
|
2357
2357
|
} catch (err) {
|
|
2358
|
-
logger$
|
|
2358
|
+
logger$18.debug({
|
|
2359
2359
|
err,
|
|
2360
2360
|
destSkillsDir
|
|
2361
2361
|
}, "Failed to prune stale skills");
|
|
@@ -2398,16 +2398,16 @@ function scaffoldCodexProject(projectRoot, outputFormat) {
|
|
|
2398
2398
|
const skillCount = syncSkillsToTarget(claudeSkillsDir, codexSkillsDir, PROJECT_OWNERSHIP_PREFIXES, "");
|
|
2399
2399
|
const total = promptCount + skillCount;
|
|
2400
2400
|
if (outputFormat !== "json" && total > 0) process.stdout.write(`Generated ${String(total)} Codex artifacts (${String(promptCount)} prompts, ${String(skillCount)} skills)\n`);
|
|
2401
|
-
if (total > 0) logger$
|
|
2401
|
+
if (total > 0) logger$18.info({
|
|
2402
2402
|
promptCount,
|
|
2403
2403
|
skillCount,
|
|
2404
2404
|
codexDir
|
|
2405
2405
|
}, "Generated .codex/");
|
|
2406
|
-
else logger$
|
|
2406
|
+
else logger$18.debug({ codexDir }, "No Codex artifacts generated; source Claude content not found");
|
|
2407
2407
|
} catch (err) {
|
|
2408
2408
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2409
2409
|
if (outputFormat !== "json") process.stderr.write(`Warning: .codex/ generation failed: ${msg}\n`);
|
|
2410
|
-
logger$
|
|
2410
|
+
logger$18.warn({ err }, "scaffoldCodexProject failed; init continues");
|
|
2411
2411
|
}
|
|
2412
2412
|
}
|
|
2413
2413
|
/**
|
|
@@ -2433,16 +2433,16 @@ function scaffoldCodexUser(projectRoot, homeDir, outputFormat) {
|
|
|
2433
2433
|
const skillCount = syncSkillsToTarget(claudeSkillsDir, userSkillsDir, ["substrate-"], "substrate-");
|
|
2434
2434
|
const total = promptCount + skillCount;
|
|
2435
2435
|
if (outputFormat !== "json" && total > 0) process.stdout.write(`Installed ${String(total)} Codex artifacts to ${userCodexDir} (${String(promptCount)} prompts, ${String(skillCount)} skills)\n`);
|
|
2436
|
-
if (total > 0) logger$
|
|
2436
|
+
if (total > 0) logger$18.info({
|
|
2437
2437
|
promptCount,
|
|
2438
2438
|
skillCount,
|
|
2439
2439
|
userCodexDir
|
|
2440
2440
|
}, "Installed user-scope Codex content");
|
|
2441
|
-
else logger$
|
|
2441
|
+
else logger$18.debug({ userCodexDir }, "No user-scope Codex content installed; source Claude content not found");
|
|
2442
2442
|
} catch (err) {
|
|
2443
2443
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2444
2444
|
if (outputFormat !== "json") process.stderr.write(`Warning: user-scope Codex install failed: ${msg}\n`);
|
|
2445
|
-
logger$
|
|
2445
|
+
logger$18.warn({ err }, "scaffoldCodexUser failed; init continues");
|
|
2446
2446
|
}
|
|
2447
2447
|
}
|
|
2448
2448
|
const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
|
|
@@ -2562,7 +2562,7 @@ async function runInitAction(options) {
|
|
|
2562
2562
|
discoveryReport = await registry.discoverAndRegister();
|
|
2563
2563
|
} catch (err) {
|
|
2564
2564
|
const message = err instanceof Error ? err.message : String(err);
|
|
2565
|
-
logger$
|
|
2565
|
+
logger$18.error({ err }, "Adapter discovery failed");
|
|
2566
2566
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
|
|
2567
2567
|
else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
|
|
2568
2568
|
return INIT_EXIT_ERROR;
|
|
@@ -2605,7 +2605,7 @@ async function runInitAction(options) {
|
|
|
2605
2605
|
try {
|
|
2606
2606
|
detectedProfile = await detectProjectProfile(dbRoot);
|
|
2607
2607
|
} catch (err) {
|
|
2608
|
-
logger$
|
|
2608
|
+
logger$18.warn({ err }, "Project profile detection failed; skipping");
|
|
2609
2609
|
}
|
|
2610
2610
|
if (detectedProfile === null) {
|
|
2611
2611
|
if (outputFormat !== "json") process.stdout.write(" No project stack detected. Create .substrate/project-profile.yaml manually to enable polyglot support.\n");
|
|
@@ -2639,12 +2639,12 @@ async function runInitAction(options) {
|
|
|
2639
2639
|
return INIT_EXIT_ERROR;
|
|
2640
2640
|
}
|
|
2641
2641
|
if (force && existsSync$1(localManifest)) {
|
|
2642
|
-
logger$
|
|
2642
|
+
logger$18.info({ pack: packName }, "Replacing existing pack with bundled version");
|
|
2643
2643
|
process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
|
|
2644
2644
|
}
|
|
2645
2645
|
mkdirSync$1(dirname(packPath), { recursive: true });
|
|
2646
2646
|
cpSync(bundledPackPath, packPath, { recursive: true });
|
|
2647
|
-
logger$
|
|
2647
|
+
logger$18.info({
|
|
2648
2648
|
pack: packName,
|
|
2649
2649
|
dest: packPath
|
|
2650
2650
|
}, "Scaffolded methodology pack");
|
|
@@ -2696,10 +2696,10 @@ async function runInitAction(options) {
|
|
|
2696
2696
|
if (missing.length > 0) {
|
|
2697
2697
|
const block = "\n# Substrate runtime files\n" + missing.join("\n") + "\n";
|
|
2698
2698
|
appendFileSync(gitignorePath, block);
|
|
2699
|
-
logger$
|
|
2699
|
+
logger$18.info({ entries: missing }, "Added substrate runtime files to .gitignore");
|
|
2700
2700
|
}
|
|
2701
2701
|
} catch (err) {
|
|
2702
|
-
logger$
|
|
2702
|
+
logger$18.debug({ err }, "Could not update .gitignore (non-fatal)");
|
|
2703
2703
|
}
|
|
2704
2704
|
const doltMode = options.doltMode ?? "auto";
|
|
2705
2705
|
let doltInitialized = false;
|
|
@@ -2716,7 +2716,7 @@ async function runInitAction(options) {
|
|
|
2716
2716
|
process.stderr.write(`${err.message}\n`);
|
|
2717
2717
|
return INIT_EXIT_ERROR;
|
|
2718
2718
|
}
|
|
2719
|
-
logger$
|
|
2719
|
+
logger$18.debug("Dolt not installed, skipping auto-init");
|
|
2720
2720
|
} else {
|
|
2721
2721
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2722
2722
|
if (doltMode === "force") {
|
|
@@ -2726,7 +2726,7 @@ async function runInitAction(options) {
|
|
|
2726
2726
|
process.stderr.write(`⚠ Dolt state store initialization failed: ${msg}\n Pipeline metrics, cost tracking, and health monitoring will not persist.\n Fix the issue and re-run: substrate init --dolt\n`);
|
|
2727
2727
|
}
|
|
2728
2728
|
}
|
|
2729
|
-
else logger$
|
|
2729
|
+
else logger$18.debug("Dolt step was skipped (--no-dolt)");
|
|
2730
2730
|
const successMsg = `Pack '${packName}' and database initialized successfully at ${dbPath}`;
|
|
2731
2731
|
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
2732
2732
|
pack: packName,
|
|
@@ -2764,7 +2764,7 @@ async function runInitAction(options) {
|
|
|
2764
2764
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2765
2765
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
2766
2766
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
2767
|
-
logger$
|
|
2767
|
+
logger$18.error({ err }, "init failed");
|
|
2768
2768
|
return INIT_EXIT_ERROR;
|
|
2769
2769
|
}
|
|
2770
2770
|
}
|
|
@@ -2788,7 +2788,7 @@ function registerInitCommand(program, _version, registry) {
|
|
|
2788
2788
|
|
|
2789
2789
|
//#endregion
|
|
2790
2790
|
//#region src/cli/commands/config.ts
|
|
2791
|
-
const logger$
|
|
2791
|
+
const logger$17 = createLogger("config-cmd");
|
|
2792
2792
|
const CONFIG_EXIT_SUCCESS = 0;
|
|
2793
2793
|
const CONFIG_EXIT_ERROR = 1;
|
|
2794
2794
|
const CONFIG_EXIT_INVALID = 2;
|
|
@@ -2814,7 +2814,7 @@ async function runConfigShow(opts = {}) {
|
|
|
2814
2814
|
return CONFIG_EXIT_INVALID;
|
|
2815
2815
|
}
|
|
2816
2816
|
const message = err instanceof Error ? err.message : String(err);
|
|
2817
|
-
logger$
|
|
2817
|
+
logger$17.error({ err }, "Failed to load configuration");
|
|
2818
2818
|
process.stderr.write(` Error loading configuration: ${message}\n`);
|
|
2819
2819
|
return CONFIG_EXIT_ERROR;
|
|
2820
2820
|
}
|
|
@@ -2888,7 +2888,7 @@ async function runConfigExport(opts = {}) {
|
|
|
2888
2888
|
return CONFIG_EXIT_INVALID;
|
|
2889
2889
|
}
|
|
2890
2890
|
const message = err instanceof Error ? err.message : String(err);
|
|
2891
|
-
logger$
|
|
2891
|
+
logger$17.error({ err }, "Failed to load configuration");
|
|
2892
2892
|
process.stderr.write(`Error loading configuration: ${message}\n`);
|
|
2893
2893
|
return CONFIG_EXIT_ERROR;
|
|
2894
2894
|
}
|
|
@@ -6808,7 +6808,7 @@ async function detectManifestDriftAgainstWorkingTree(manifest, projectRoot) {
|
|
|
6808
6808
|
|
|
6809
6809
|
//#endregion
|
|
6810
6810
|
//#region src/cli/commands/resume.ts
|
|
6811
|
-
const logger$
|
|
6811
|
+
const logger$16 = createLogger("resume-cmd");
|
|
6812
6812
|
/**
|
|
6813
6813
|
* Format a human-readable duration from an ISO-8601 timestamp to "now".
|
|
6814
6814
|
*/
|
|
@@ -6878,7 +6878,7 @@ async function runResumeAction(options) {
|
|
|
6878
6878
|
return 1;
|
|
6879
6879
|
}
|
|
6880
6880
|
} catch (driftErr) {
|
|
6881
|
-
logger$
|
|
6881
|
+
logger$16.debug({ err: driftErr }, "manifest drift check failed — proceeding with resume");
|
|
6882
6882
|
}
|
|
6883
6883
|
}
|
|
6884
6884
|
const doltDir = join(dbRoot, ".substrate", "state", ".dolt");
|
|
@@ -6948,15 +6948,15 @@ async function runResumeAction(options) {
|
|
|
6948
6948
|
const manifestStories = manifestData.cli_flags["stories"] ?? manifestData.story_scope;
|
|
6949
6949
|
if (Array.isArray(manifestStories) && manifestStories.length > 0) {
|
|
6950
6950
|
scopedStories = manifestStories;
|
|
6951
|
-
logger$
|
|
6951
|
+
logger$16.debug({
|
|
6952
6952
|
runId,
|
|
6953
6953
|
stories: scopedStories
|
|
6954
6954
|
}, "resume scope loaded from manifest");
|
|
6955
6955
|
}
|
|
6956
6956
|
} catch {
|
|
6957
|
-
logger$
|
|
6957
|
+
logger$16.debug({ runId }, "manifest read failed in resume — using legacy config_json scope");
|
|
6958
6958
|
}
|
|
6959
|
-
else logger$
|
|
6959
|
+
else logger$16.debug({ runId }, "Run manifest not found for scope preservation — using legacy config_json scope");
|
|
6960
6960
|
}
|
|
6961
6961
|
return runFullPipelineFromPhase({
|
|
6962
6962
|
packName,
|
|
@@ -6979,7 +6979,7 @@ async function runResumeAction(options) {
|
|
|
6979
6979
|
const msg = err instanceof Error ? err.message : String(err);
|
|
6980
6980
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
6981
6981
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
6982
|
-
logger$
|
|
6982
|
+
logger$16.error({ err }, "auto resume failed");
|
|
6983
6983
|
return 1;
|
|
6984
6984
|
} finally {
|
|
6985
6985
|
try {
|
|
@@ -7229,11 +7229,11 @@ async function runFullPipelineFromPhase(options) {
|
|
|
7229
7229
|
output_tokens: output,
|
|
7230
7230
|
cost_usd: costUsd
|
|
7231
7231
|
}).catch((err) => {
|
|
7232
|
-
logger$
|
|
7232
|
+
logger$16.warn({ err }, "Failed to record token usage");
|
|
7233
7233
|
});
|
|
7234
7234
|
}
|
|
7235
7235
|
} catch (err) {
|
|
7236
|
-
logger$
|
|
7236
|
+
logger$16.warn({ err }, "Failed to record token usage");
|
|
7237
7237
|
}
|
|
7238
7238
|
});
|
|
7239
7239
|
const storyKeys = await resolveStoryKeys(adapter, projectRoot, {
|
|
@@ -7302,7 +7302,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
7302
7302
|
const msg = err instanceof Error ? err.message : String(err);
|
|
7303
7303
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
7304
7304
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
7305
|
-
logger$
|
|
7305
|
+
logger$16.error({ err }, "pipeline from phase failed");
|
|
7306
7306
|
return 1;
|
|
7307
7307
|
} finally {
|
|
7308
7308
|
try {
|
|
@@ -7332,7 +7332,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
|
|
|
7332
7332
|
|
|
7333
7333
|
//#endregion
|
|
7334
7334
|
//#region src/cli/commands/status.ts
|
|
7335
|
-
const logger$
|
|
7335
|
+
const logger$15 = createLogger("status-cmd");
|
|
7336
7336
|
/**
|
|
7337
7337
|
* Map a manifest per-story status string to the appropriate WorkGraphCounts bucket.
|
|
7338
7338
|
* Unknown strings are treated as `inProgress` (safe default).
|
|
@@ -7444,9 +7444,9 @@ async function runStatusAction(options) {
|
|
|
7444
7444
|
const manifestData = await resolvedManifest.read();
|
|
7445
7445
|
manifestPerStoryState = manifestData.per_story_state;
|
|
7446
7446
|
workGraph = buildWorkGraphFromManifest(manifestData.per_story_state);
|
|
7447
|
-
logger$
|
|
7447
|
+
logger$15.debug({ runId: run?.id }, "status: workGraph built from manifest per_story_state");
|
|
7448
7448
|
} catch {
|
|
7449
|
-
logger$
|
|
7449
|
+
logger$15.debug({ runId: run?.id }, "status: manifest read failed — falling back to wg_stories");
|
|
7450
7450
|
}
|
|
7451
7451
|
if (workGraph === void 0) try {
|
|
7452
7452
|
const wgRepo = new WorkGraphRepository(adapter);
|
|
@@ -7483,10 +7483,10 @@ async function runStatusAction(options) {
|
|
|
7483
7483
|
};
|
|
7484
7484
|
}
|
|
7485
7485
|
} catch (err) {
|
|
7486
|
-
logger$
|
|
7486
|
+
logger$15.debug({ err }, "Work graph query failed, continuing without work graph data");
|
|
7487
7487
|
}
|
|
7488
7488
|
if (run === void 0) {
|
|
7489
|
-
const { inspectProcessTree: inspectProcessTree$1 } = await import("../health-
|
|
7489
|
+
const { inspectProcessTree: inspectProcessTree$1 } = await import("../health-CJqd1FzY.js");
|
|
7490
7490
|
const substrateDirPath = join(projectRoot, ".substrate");
|
|
7491
7491
|
const processInfo = inspectProcessTree$1({
|
|
7492
7492
|
projectRoot,
|
|
@@ -7516,7 +7516,7 @@ async function runStatusAction(options) {
|
|
|
7516
7516
|
if (stateStore) try {
|
|
7517
7517
|
storeStories = await stateStore.queryStories({});
|
|
7518
7518
|
} catch (err) {
|
|
7519
|
-
logger$
|
|
7519
|
+
logger$15.debug({ err }, "StateStore query failed, continuing without store data");
|
|
7520
7520
|
}
|
|
7521
7521
|
if (outputFormat === "json") {
|
|
7522
7522
|
const statusOutput = buildPipelineStatusOutput(run, tokenSummary, decisionsCount, storiesCount);
|
|
@@ -7659,7 +7659,7 @@ async function runStatusAction(options) {
|
|
|
7659
7659
|
const msg = err instanceof Error ? err.message : String(err);
|
|
7660
7660
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
7661
7661
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
7662
|
-
logger$
|
|
7662
|
+
logger$15.error({ err }, "status action failed");
|
|
7663
7663
|
return 1;
|
|
7664
7664
|
} finally {
|
|
7665
7665
|
try {
|
|
@@ -8006,7 +8006,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
|
|
|
8006
8006
|
|
|
8007
8007
|
//#endregion
|
|
8008
8008
|
//#region src/cli/commands/amend.ts
|
|
8009
|
-
const logger$
|
|
8009
|
+
const logger$14 = createLogger("amend-cmd");
|
|
8010
8010
|
/**
|
|
8011
8011
|
* Detect and apply supersessions after a phase completes in an amendment run.
|
|
8012
8012
|
*
|
|
@@ -8037,7 +8037,7 @@ async function runPostPhaseSupersessionDetection(adapter, amendmentRunId, curren
|
|
|
8037
8037
|
});
|
|
8038
8038
|
} catch (err) {
|
|
8039
8039
|
const msg = err instanceof Error ? err.message : String(err);
|
|
8040
|
-
logger$
|
|
8040
|
+
logger$14.warn({
|
|
8041
8041
|
err,
|
|
8042
8042
|
originalId: parentMatch.id,
|
|
8043
8043
|
supersedingId: newDec.id
|
|
@@ -8174,7 +8174,7 @@ async function runAmendAction(options) {
|
|
|
8174
8174
|
for (let i = startIdx; i < phaseOrder.length; i++) {
|
|
8175
8175
|
const currentPhase = phaseOrder[i];
|
|
8176
8176
|
const amendmentContext = handler.loadContextForPhase(currentPhase);
|
|
8177
|
-
logger$
|
|
8177
|
+
logger$14.info({
|
|
8178
8178
|
phase: currentPhase,
|
|
8179
8179
|
amendmentContextLen: amendmentContext.length
|
|
8180
8180
|
}, "Amendment context loaded for phase");
|
|
@@ -8295,7 +8295,7 @@ async function runAmendAction(options) {
|
|
|
8295
8295
|
} catch (err) {
|
|
8296
8296
|
const msg = err instanceof Error ? err.message : String(err);
|
|
8297
8297
|
process.stderr.write(`Error: ${msg}\n`);
|
|
8298
|
-
logger$
|
|
8298
|
+
logger$14.error({ err }, "amend failed");
|
|
8299
8299
|
return 1;
|
|
8300
8300
|
} finally {
|
|
8301
8301
|
try {
|
|
@@ -9032,7 +9032,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
9032
9032
|
await initSchema(expAdapter);
|
|
9033
9033
|
const { runRunAction: runPipeline } = await import(
|
|
9034
9034
|
/* @vite-ignore */
|
|
9035
|
-
"../run-
|
|
9035
|
+
"../run-Z_-caE_i.js"
|
|
9036
9036
|
);
|
|
9037
9037
|
const runStoryFn = async (opts) => {
|
|
9038
9038
|
const exitCode = await runPipeline({
|
|
@@ -9282,7 +9282,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
9282
9282
|
|
|
9283
9283
|
//#endregion
|
|
9284
9284
|
//#region src/cli/commands/metrics.ts
|
|
9285
|
-
const logger$
|
|
9285
|
+
const logger$13 = createLogger("metrics-cmd");
|
|
9286
9286
|
async function openTelemetryAdapter(basePath) {
|
|
9287
9287
|
try {
|
|
9288
9288
|
const adapter = createDatabaseAdapter({
|
|
@@ -9648,7 +9648,7 @@ async function runMetricsAction(options) {
|
|
|
9648
9648
|
}
|
|
9649
9649
|
}
|
|
9650
9650
|
} catch (err) {
|
|
9651
|
-
logger$
|
|
9651
|
+
logger$13.debug({ err }, "getScenarioResultsForRun failed");
|
|
9652
9652
|
}
|
|
9653
9653
|
if (rows.length === 0) {
|
|
9654
9654
|
const msg = `No factory run found with id: ${run}`;
|
|
@@ -9684,7 +9684,7 @@ async function runMetricsAction(options) {
|
|
|
9684
9684
|
}
|
|
9685
9685
|
}
|
|
9686
9686
|
} catch (err) {
|
|
9687
|
-
logger$
|
|
9687
|
+
logger$13.debug({ err }, "getTwinRunsForRun failed — twin_runs table may not exist yet");
|
|
9688
9688
|
}
|
|
9689
9689
|
}
|
|
9690
9690
|
return 0;
|
|
@@ -9694,7 +9694,7 @@ async function runMetricsAction(options) {
|
|
|
9694
9694
|
try {
|
|
9695
9695
|
factoryRuns$1 = await getFactoryRunSummaries(adapter, limit);
|
|
9696
9696
|
} catch (err) {
|
|
9697
|
-
logger$
|
|
9697
|
+
logger$13.debug({ err }, "getFactoryRunSummaries failed in factory-only mode");
|
|
9698
9698
|
}
|
|
9699
9699
|
if (outputFormat === "json") process.stdout.write(formatOutput({ graph_runs: factoryRuns$1 }, "json", true) + "\n");
|
|
9700
9700
|
else if (factoryRuns$1.length === 0) process.stdout.write("No factory runs recorded yet.\n");
|
|
@@ -9755,7 +9755,7 @@ async function runMetricsAction(options) {
|
|
|
9755
9755
|
doltMetrics = await stateStore.queryMetrics(doltFilter);
|
|
9756
9756
|
await stateStore.close();
|
|
9757
9757
|
} catch (doltErr) {
|
|
9758
|
-
logger$
|
|
9758
|
+
logger$13.warn({ err: doltErr }, "StateStore query failed — falling back to SQLite metrics only");
|
|
9759
9759
|
}
|
|
9760
9760
|
const storyMetricDecisions = await getDecisionsByCategory(adapter, STORY_METRICS);
|
|
9761
9761
|
const storyMetrics = storyMetricDecisions.map((d) => {
|
|
@@ -9817,7 +9817,7 @@ async function runMetricsAction(options) {
|
|
|
9817
9817
|
try {
|
|
9818
9818
|
factoryRuns = await getFactoryRunSummaries(adapter, limit);
|
|
9819
9819
|
} catch (err) {
|
|
9820
|
-
logger$
|
|
9820
|
+
logger$13.debug({ err }, "getFactoryRunSummaries failed — table may not exist in older databases");
|
|
9821
9821
|
}
|
|
9822
9822
|
if (outputFormat === "json") {
|
|
9823
9823
|
const runsWithBreakdown = runs.map((run$1) => ({
|
|
@@ -9955,7 +9955,7 @@ async function runMetricsAction(options) {
|
|
|
9955
9955
|
const msg = err instanceof Error ? err.message : String(err);
|
|
9956
9956
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
9957
9957
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
9958
|
-
logger$
|
|
9958
|
+
logger$13.error({ err }, "metrics action failed");
|
|
9959
9959
|
return 1;
|
|
9960
9960
|
} finally {
|
|
9961
9961
|
try {
|
|
@@ -10157,7 +10157,7 @@ function registerMigrateCommand(program) {
|
|
|
10157
10157
|
function getLatestSessionId(_adapter) {
|
|
10158
10158
|
return null;
|
|
10159
10159
|
}
|
|
10160
|
-
const logger$
|
|
10160
|
+
const logger$12 = createLogger("cost-cmd");
|
|
10161
10161
|
const COST_EXIT_SUCCESS = 0;
|
|
10162
10162
|
const COST_EXIT_ERROR = 1;
|
|
10163
10163
|
/**
|
|
@@ -10401,7 +10401,7 @@ async function runCostAction(options) {
|
|
|
10401
10401
|
} catch (err) {
|
|
10402
10402
|
const message = err instanceof Error ? err.message : String(err);
|
|
10403
10403
|
process.stderr.write(`Error: ${message}\n`);
|
|
10404
|
-
logger$
|
|
10404
|
+
logger$12.error({ err }, "runCostAction failed");
|
|
10405
10405
|
return COST_EXIT_ERROR;
|
|
10406
10406
|
} finally {
|
|
10407
10407
|
if (adapter !== null) try {
|
|
@@ -10435,7 +10435,7 @@ function registerCostCommand(program, version = "0.0.0", projectRoot = process.c
|
|
|
10435
10435
|
|
|
10436
10436
|
//#endregion
|
|
10437
10437
|
//#region src/cli/commands/monitor.ts
|
|
10438
|
-
const logger$
|
|
10438
|
+
const logger$11 = createLogger("monitor-cmd");
|
|
10439
10439
|
const MONITOR_EXIT_SUCCESS = 0;
|
|
10440
10440
|
const MONITOR_EXIT_ERROR = 1;
|
|
10441
10441
|
/**
|
|
@@ -10638,7 +10638,7 @@ async function runMonitorReportAction(options) {
|
|
|
10638
10638
|
} catch (err) {
|
|
10639
10639
|
const message = err instanceof Error ? err.message : String(err);
|
|
10640
10640
|
process.stderr.write(`Error: ${message}\n`);
|
|
10641
|
-
logger$
|
|
10641
|
+
logger$11.error({ err }, "runMonitorReportAction failed");
|
|
10642
10642
|
return MONITOR_EXIT_ERROR;
|
|
10643
10643
|
} finally {
|
|
10644
10644
|
if (monitorDb !== null) try {
|
|
@@ -10700,7 +10700,7 @@ async function runMonitorStatusAction(options) {
|
|
|
10700
10700
|
} catch (err) {
|
|
10701
10701
|
const message = err instanceof Error ? err.message : String(err);
|
|
10702
10702
|
process.stderr.write(`Error: ${message}\n`);
|
|
10703
|
-
logger$
|
|
10703
|
+
logger$11.error({ err }, "runMonitorStatusAction failed");
|
|
10704
10704
|
return MONITOR_EXIT_ERROR;
|
|
10705
10705
|
} finally {
|
|
10706
10706
|
if (monitorDb !== null) try {
|
|
@@ -10735,7 +10735,7 @@ async function runMonitorResetAction(options) {
|
|
|
10735
10735
|
} catch (err) {
|
|
10736
10736
|
const message = err instanceof Error ? err.message : String(err);
|
|
10737
10737
|
process.stderr.write(`Error: ${message}\n`);
|
|
10738
|
-
logger$
|
|
10738
|
+
logger$11.error({ err }, "runMonitorResetAction failed");
|
|
10739
10739
|
return MONITOR_EXIT_ERROR;
|
|
10740
10740
|
} finally {
|
|
10741
10741
|
if (monitorDb !== null) try {
|
|
@@ -10783,7 +10783,7 @@ async function runMonitorRecommendationsAction(options) {
|
|
|
10783
10783
|
} catch (err) {
|
|
10784
10784
|
const message = err instanceof Error ? err.message : String(err);
|
|
10785
10785
|
process.stderr.write(`Error: ${message}\n`);
|
|
10786
|
-
logger$
|
|
10786
|
+
logger$11.error({ err }, "runMonitorRecommendationsAction failed");
|
|
10787
10787
|
return MONITOR_EXIT_ERROR;
|
|
10788
10788
|
} finally {
|
|
10789
10789
|
if (monitorDb !== null) try {
|
|
@@ -10861,7 +10861,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
|
|
|
10861
10861
|
|
|
10862
10862
|
//#endregion
|
|
10863
10863
|
//#region src/cli/commands/merge.ts
|
|
10864
|
-
const logger$
|
|
10864
|
+
const logger$10 = createLogger("merge-cmd");
|
|
10865
10865
|
const MERGE_EXIT_SUCCESS = 0;
|
|
10866
10866
|
const MERGE_EXIT_CONFLICT = 1;
|
|
10867
10867
|
const MERGE_EXIT_ERROR = 2;
|
|
@@ -10899,7 +10899,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
10899
10899
|
projectRoot
|
|
10900
10900
|
});
|
|
10901
10901
|
try {
|
|
10902
|
-
logger$
|
|
10902
|
+
logger$10.info({
|
|
10903
10903
|
taskId,
|
|
10904
10904
|
targetBranch
|
|
10905
10905
|
}, "Running conflict detection...");
|
|
@@ -10921,7 +10921,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
10921
10921
|
} catch (err) {
|
|
10922
10922
|
const message = err instanceof Error ? err.message : String(err);
|
|
10923
10923
|
console.error(`Error merging task "${taskId}": ${message}`);
|
|
10924
|
-
logger$
|
|
10924
|
+
logger$10.error({
|
|
10925
10925
|
taskId,
|
|
10926
10926
|
err
|
|
10927
10927
|
}, "merge --task failed");
|
|
@@ -10975,7 +10975,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
|
|
|
10975
10975
|
error: message
|
|
10976
10976
|
});
|
|
10977
10977
|
console.log(` Error for task "${taskId}": ${message}`);
|
|
10978
|
-
logger$
|
|
10978
|
+
logger$10.error({
|
|
10979
10979
|
taskId,
|
|
10980
10980
|
err
|
|
10981
10981
|
}, "merge --all: task failed");
|
|
@@ -11028,7 +11028,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {
|
|
|
11028
11028
|
|
|
11029
11029
|
//#endregion
|
|
11030
11030
|
//#region src/cli/commands/worktrees.ts
|
|
11031
|
-
const logger$
|
|
11031
|
+
const logger$9 = createLogger("worktrees-cmd");
|
|
11032
11032
|
const WORKTREES_EXIT_SUCCESS = 0;
|
|
11033
11033
|
const WORKTREES_EXIT_ERROR = 1;
|
|
11034
11034
|
/** Valid task statuses for filtering */
|
|
@@ -11155,7 +11155,7 @@ async function listWorktreesAction(options) {
|
|
|
11155
11155
|
try {
|
|
11156
11156
|
worktreeInfos = await manager.listWorktrees();
|
|
11157
11157
|
} catch (err) {
|
|
11158
|
-
logger$
|
|
11158
|
+
logger$9.error({ err }, "Failed to list worktrees");
|
|
11159
11159
|
const message = err instanceof Error ? err.message : String(err);
|
|
11160
11160
|
process.stderr.write(`Error listing worktrees: ${message}\n`);
|
|
11161
11161
|
return WORKTREES_EXIT_ERROR;
|
|
@@ -11182,7 +11182,7 @@ async function listWorktreesAction(options) {
|
|
|
11182
11182
|
} catch (err) {
|
|
11183
11183
|
const message = err instanceof Error ? err.message : String(err);
|
|
11184
11184
|
process.stderr.write(`Error: ${message}\n`);
|
|
11185
|
-
logger$
|
|
11185
|
+
logger$9.error({ err }, "listWorktreesAction failed");
|
|
11186
11186
|
return WORKTREES_EXIT_ERROR;
|
|
11187
11187
|
}
|
|
11188
11188
|
}
|
|
@@ -11223,7 +11223,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc
|
|
|
11223
11223
|
|
|
11224
11224
|
//#endregion
|
|
11225
11225
|
//#region src/cli/commands/brainstorm.ts
|
|
11226
|
-
const logger$
|
|
11226
|
+
const logger$8 = createLogger("brainstorm-cmd");
|
|
11227
11227
|
/**
|
|
11228
11228
|
* Detect whether the project has existing planning artifacts that indicate
|
|
11229
11229
|
* this is an amendment session (vs. a brand-new project brainstorm).
|
|
@@ -11269,13 +11269,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
|
|
|
11269
11269
|
try {
|
|
11270
11270
|
brief = await readFile(briefPath, "utf-8");
|
|
11271
11271
|
} catch {
|
|
11272
|
-
logger$
|
|
11272
|
+
logger$8.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
|
|
11273
11273
|
process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
|
|
11274
11274
|
}
|
|
11275
11275
|
try {
|
|
11276
11276
|
prd = await readFile(prdPath, "utf-8");
|
|
11277
11277
|
} catch {
|
|
11278
|
-
logger$
|
|
11278
|
+
logger$8.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
|
|
11279
11279
|
process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
|
|
11280
11280
|
}
|
|
11281
11281
|
return {
|
|
@@ -11484,7 +11484,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
11484
11484
|
}
|
|
11485
11485
|
];
|
|
11486
11486
|
const defaultDispatch = async (prompt, personaName) => {
|
|
11487
|
-
logger$
|
|
11487
|
+
logger$8.debug({
|
|
11488
11488
|
personaName,
|
|
11489
11489
|
promptLength: prompt.length
|
|
11490
11490
|
}, "Dispatching to persona (stub mode)");
|
|
@@ -11501,7 +11501,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
11501
11501
|
};
|
|
11502
11502
|
} catch (err) {
|
|
11503
11503
|
const msg = err instanceof Error ? err.message : String(err);
|
|
11504
|
-
logger$
|
|
11504
|
+
logger$8.error({
|
|
11505
11505
|
err,
|
|
11506
11506
|
personaName: persona.name
|
|
11507
11507
|
}, "Persona dispatch failed");
|
|
@@ -11653,7 +11653,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
|
|
|
11653
11653
|
}
|
|
11654
11654
|
});
|
|
11655
11655
|
rl.on("error", (err) => {
|
|
11656
|
-
logger$
|
|
11656
|
+
logger$8.error({ err }, "readline error");
|
|
11657
11657
|
if (!sessionEnded) endSession(false);
|
|
11658
11658
|
});
|
|
11659
11659
|
});
|
|
@@ -11694,7 +11694,7 @@ function registerBrainstormCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
11694
11694
|
|
|
11695
11695
|
//#endregion
|
|
11696
11696
|
//#region src/cli/commands/retry-escalated.ts
|
|
11697
|
-
const logger$
|
|
11697
|
+
const logger$7 = createLogger("retry-escalated-cmd");
|
|
11698
11698
|
async function runRetryEscalatedAction(options) {
|
|
11699
11699
|
const { runId, dryRun, force, outputFormat, projectRoot, concurrency, pack: packName, registry: injectedRegistry, agent: agentId, events: eventsFlag } = options;
|
|
11700
11700
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
@@ -11735,7 +11735,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
11735
11735
|
process.stdout.write(`[INFO] ${storyKey}: Context ceiling set to ${contextCeiling} tokens due to prior context spike pattern.\n`);
|
|
11736
11736
|
}
|
|
11737
11737
|
} catch (err) {
|
|
11738
|
-
logger$
|
|
11738
|
+
logger$7.warn({
|
|
11739
11739
|
err,
|
|
11740
11740
|
storyKey
|
|
11741
11741
|
}, "Failed to read efficiency profile — skipping gate");
|
|
@@ -11889,11 +11889,11 @@ async function runRetryEscalatedAction(options) {
|
|
|
11889
11889
|
output_tokens: output,
|
|
11890
11890
|
cost_usd: costUsd
|
|
11891
11891
|
}).catch((err) => {
|
|
11892
|
-
logger$
|
|
11892
|
+
logger$7.warn({ err }, "Failed to record token usage");
|
|
11893
11893
|
});
|
|
11894
11894
|
}
|
|
11895
11895
|
} catch (err) {
|
|
11896
|
-
logger$
|
|
11896
|
+
logger$7.warn({ err }, "Failed to record token usage");
|
|
11897
11897
|
}
|
|
11898
11898
|
});
|
|
11899
11899
|
if (outputFormat === "human") {
|
|
@@ -11922,7 +11922,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
11922
11922
|
const msg = err instanceof Error ? err.message : String(err);
|
|
11923
11923
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
11924
11924
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
11925
|
-
logger$
|
|
11925
|
+
logger$7.error({ err }, "retry-escalated failed");
|
|
11926
11926
|
return 1;
|
|
11927
11927
|
} finally {
|
|
11928
11928
|
try {
|
|
@@ -11955,7 +11955,7 @@ function registerRetryEscalatedCommand(program, _version = "0.0.0", projectRoot
|
|
|
11955
11955
|
|
|
11956
11956
|
//#endregion
|
|
11957
11957
|
//#region src/cli/commands/cancel.ts
|
|
11958
|
-
const logger$
|
|
11958
|
+
const logger$6 = createLogger("cancel-cmd");
|
|
11959
11959
|
async function runCancelAction(options) {
|
|
11960
11960
|
const { outputFormat, projectRoot, force } = options;
|
|
11961
11961
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
@@ -11994,7 +11994,7 @@ async function runCancelAction(options) {
|
|
|
11994
11994
|
}
|
|
11995
11995
|
} catch (err) {
|
|
11996
11996
|
const msg = err instanceof Error ? err.message : String(err);
|
|
11997
|
-
logger$
|
|
11997
|
+
logger$6.warn({
|
|
11998
11998
|
pid,
|
|
11999
11999
|
err: msg
|
|
12000
12000
|
}, "Failed to kill orchestrator");
|
|
@@ -12024,7 +12024,7 @@ async function runCancelAction(options) {
|
|
|
12024
12024
|
await adapter.close();
|
|
12025
12025
|
}
|
|
12026
12026
|
} catch (err) {
|
|
12027
|
-
logger$
|
|
12027
|
+
logger$6.warn({ err }, "Could not update pipeline run status (non-fatal)");
|
|
12028
12028
|
}
|
|
12029
12029
|
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
12030
12030
|
cancelled: true,
|
|
@@ -12108,10 +12108,10 @@ function registerContractsCommand(program) {
|
|
|
12108
12108
|
r.verificationStatus
|
|
12109
12109
|
]);
|
|
12110
12110
|
const colWidths = headers.map((h, i) => Math.max(h.length, ...rows.map((r) => (r[i] ?? "").length)));
|
|
12111
|
-
const formatRow = (cells) => cells.map((c, i) => c.padEnd(colWidths[i])).join(" ");
|
|
12112
|
-
console.log(formatRow(headers));
|
|
12111
|
+
const formatRow$1 = (cells) => cells.map((c, i) => c.padEnd(colWidths[i])).join(" ");
|
|
12112
|
+
console.log(formatRow$1(headers));
|
|
12113
12113
|
console.log(colWidths.map((w) => "-".repeat(w)).join(" "));
|
|
12114
|
-
for (const row of rows) console.log(formatRow(row));
|
|
12114
|
+
for (const row of rows) console.log(formatRow$1(row));
|
|
12115
12115
|
} finally {
|
|
12116
12116
|
await store.close();
|
|
12117
12117
|
}
|
|
@@ -12378,7 +12378,7 @@ function emitDiff(diff, ctx) {
|
|
|
12378
12378
|
|
|
12379
12379
|
//#endregion
|
|
12380
12380
|
//#region src/cli/commands/probe-author.ts
|
|
12381
|
-
const logger$
|
|
12381
|
+
const logger$5 = createLogger("cli:probe-author");
|
|
12382
12382
|
/**
|
|
12383
12383
|
* A minimum-viable logger that routes everything to stderr. Used by the
|
|
12384
12384
|
* subcommand to keep stdout reserved for the JSON result payload.
|
|
@@ -12447,7 +12447,7 @@ async function runProbeAuthorDispatch(opts, projectRoot, registry) {
|
|
|
12447
12447
|
logger: stderrLogger
|
|
12448
12448
|
});
|
|
12449
12449
|
const workingDir = opts.workingDir !== void 0 ? resolve$1(opts.workingDir) : resolve$1(storyFilePath, "..");
|
|
12450
|
-
logger$
|
|
12450
|
+
logger$5.info({
|
|
12451
12451
|
storyKey: opts.storyKey,
|
|
12452
12452
|
storyFile: storyFilePath,
|
|
12453
12453
|
epicFile: epicFilePath,
|
|
@@ -12628,7 +12628,7 @@ function registerHistoryCommand(program) {
|
|
|
12628
12628
|
|
|
12629
12629
|
//#endregion
|
|
12630
12630
|
//#region src/cli/commands/repo-map.ts
|
|
12631
|
-
const logger$
|
|
12631
|
+
const logger$4 = createLogger("cli:repo-map");
|
|
12632
12632
|
/** Validate that a symbol name contains only safe identifier characters. */
|
|
12633
12633
|
function isValidSymbolName(name) {
|
|
12634
12634
|
return /^[a-zA-Z0-9_]+$/.test(name);
|
|
@@ -12663,15 +12663,15 @@ function registerRepoMapCommand(program) {
|
|
|
12663
12663
|
const colRows = await doltClient.query(`SHOW COLUMNS FROM repo_map_symbols LIKE 'dependencies'`);
|
|
12664
12664
|
if (colRows.length === 0) {
|
|
12665
12665
|
await doltClient.query(`ALTER TABLE repo_map_symbols ADD COLUMN dependencies JSON`);
|
|
12666
|
-
logger$
|
|
12666
|
+
logger$4.info("Applied migration: added dependencies column to repo_map_symbols");
|
|
12667
12667
|
}
|
|
12668
12668
|
} catch {
|
|
12669
|
-
logger$
|
|
12669
|
+
logger$4.debug("Skipping repo_map_symbols migration: table not yet created");
|
|
12670
12670
|
}
|
|
12671
|
-
const symbolRepo = new DoltSymbolRepository(doltClient, logger$
|
|
12671
|
+
const symbolRepo = new DoltSymbolRepository(doltClient, logger$4);
|
|
12672
12672
|
const metaRepo = new DoltRepoMapMetaRepository(doltClient);
|
|
12673
|
-
const repoMapModule = new RepoMapModule(metaRepo, logger$
|
|
12674
|
-
const queryEngine = new RepoMapQueryEngine(symbolRepo, logger$
|
|
12673
|
+
const repoMapModule = new RepoMapModule(metaRepo, logger$4);
|
|
12674
|
+
const queryEngine = new RepoMapQueryEngine(symbolRepo, logger$4);
|
|
12675
12675
|
if (options.show === true || !options.update && !options.query && !options.dryRun) {
|
|
12676
12676
|
const meta = await metaRepo.getMeta();
|
|
12677
12677
|
const staleResult = await repoMapModule.checkStaleness();
|
|
@@ -12697,9 +12697,9 @@ function registerRepoMapCommand(program) {
|
|
|
12697
12697
|
return;
|
|
12698
12698
|
}
|
|
12699
12699
|
if (options.update === true) {
|
|
12700
|
-
logger$
|
|
12701
|
-
const gitClient = new GitClient(logger$
|
|
12702
|
-
const grammarLoader = new GrammarLoader(logger$
|
|
12700
|
+
logger$4.info("repo-map --update: triggering incremental update");
|
|
12701
|
+
const gitClient = new GitClient(logger$4);
|
|
12702
|
+
const grammarLoader = new GrammarLoader(logger$4);
|
|
12703
12703
|
if (grammarLoader.getGrammar(".ts") === null) {
|
|
12704
12704
|
const msg = "tree-sitter grammars not installed. Run `npm install tree-sitter tree-sitter-typescript tree-sitter-javascript tree-sitter-python` in the substrate installation directory.";
|
|
12705
12705
|
if (options.outputFormat === "json") console.log(JSON.stringify({
|
|
@@ -12710,15 +12710,15 @@ function registerRepoMapCommand(program) {
|
|
|
12710
12710
|
process.exitCode = 1;
|
|
12711
12711
|
return;
|
|
12712
12712
|
}
|
|
12713
|
-
const parser = new SymbolParser(grammarLoader, logger$
|
|
12714
|
-
const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$
|
|
12713
|
+
const parser = new SymbolParser(grammarLoader, logger$4);
|
|
12714
|
+
const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$4);
|
|
12715
12715
|
let updateWarning;
|
|
12716
12716
|
try {
|
|
12717
12717
|
await storage.incrementalUpdate(dbRoot, parser);
|
|
12718
12718
|
} catch (err) {
|
|
12719
12719
|
if (err instanceof AppError && err.code === ERR_REPO_MAP_STORAGE_WRITE) {
|
|
12720
12720
|
updateWarning = err.message;
|
|
12721
|
-
logger$
|
|
12721
|
+
logger$4.warn({ err }, "repo-map --update: storage write error (partial update)");
|
|
12722
12722
|
} else throw err;
|
|
12723
12723
|
}
|
|
12724
12724
|
const meta = await metaRepo.getMeta();
|
|
@@ -12738,7 +12738,7 @@ function registerRepoMapCommand(program) {
|
|
|
12738
12738
|
return;
|
|
12739
12739
|
}
|
|
12740
12740
|
if (options.query !== void 0) {
|
|
12741
|
-
logger$
|
|
12741
|
+
logger$4.debug({ symbol: options.query }, "repo-map --query");
|
|
12742
12742
|
const result = await queryEngine.query({
|
|
12743
12743
|
symbols: [options.query],
|
|
12744
12744
|
maxTokens: 4e3
|
|
@@ -12760,7 +12760,7 @@ function registerRepoMapCommand(program) {
|
|
|
12760
12760
|
process.exitCode = 1;
|
|
12761
12761
|
return;
|
|
12762
12762
|
}
|
|
12763
|
-
const injector = new RepoMapInjector(queryEngine, logger$
|
|
12763
|
+
const injector = new RepoMapInjector(queryEngine, logger$4);
|
|
12764
12764
|
const injectionResult = await injector.buildContext(storyContent, 2e3);
|
|
12765
12765
|
console.log(JSON.stringify({
|
|
12766
12766
|
text: injectionResult.text,
|
|
@@ -12774,7 +12774,7 @@ function registerRepoMapCommand(program) {
|
|
|
12774
12774
|
|
|
12775
12775
|
//#endregion
|
|
12776
12776
|
//#region src/cli/commands/routing.ts
|
|
12777
|
-
const logger$
|
|
12777
|
+
const logger$3 = createLogger("cli:routing");
|
|
12778
12778
|
function registerRoutingCommand(program) {
|
|
12779
12779
|
program.command("routing").description("Show routing configuration and auto-tune history").option("--history", "Show the routing auto-tune log (model changes applied)").option("--output-format <format>", "Output format: text or json", "text").action(async (options) => {
|
|
12780
12780
|
const dbRoot = await resolveMainRepoRoot(process.cwd());
|
|
@@ -12791,7 +12791,7 @@ function registerRoutingCommand(program) {
|
|
|
12791
12791
|
try {
|
|
12792
12792
|
await store.initialize();
|
|
12793
12793
|
if (options.history === true) {
|
|
12794
|
-
logger$
|
|
12794
|
+
logger$3.debug("routing --history: fetching tune log");
|
|
12795
12795
|
const raw$1 = await store.getMetric("global", "routing_tune_log");
|
|
12796
12796
|
let entries = [];
|
|
12797
12797
|
if (Array.isArray(raw$1)) entries = raw$1.sort((a, b) => b.appliedAt.localeCompare(a.appliedAt));
|
|
@@ -13128,7 +13128,7 @@ function registerFactoryCommand$1(program) {
|
|
|
13128
13128
|
|
|
13129
13129
|
//#endregion
|
|
13130
13130
|
//#region src/cli/commands/reconcile-from-disk.ts
|
|
13131
|
-
const logger$
|
|
13131
|
+
const logger$2 = createLogger("reconcile-from-disk");
|
|
13132
13132
|
/** 64KB tail window for capturing subprocess stderr/stdout (Story 66-5 pattern). */
|
|
13133
13133
|
const MAX_OUTPUT_BYTES = 64 * 1024;
|
|
13134
13134
|
/**
|
|
@@ -13191,7 +13191,7 @@ async function readRunEntry(dbRoot, resolvedRunId) {
|
|
|
13191
13191
|
stories
|
|
13192
13192
|
};
|
|
13193
13193
|
} catch {
|
|
13194
|
-
logger$
|
|
13194
|
+
logger$2.debug({ runId: resolvedRunId }, "failed to read individual run manifest");
|
|
13195
13195
|
return null;
|
|
13196
13196
|
}
|
|
13197
13197
|
}
|
|
@@ -13324,7 +13324,7 @@ async function runReconcileFromDiskAction(options) {
|
|
|
13324
13324
|
const latestRun = await getLatestRun(probeAdapter);
|
|
13325
13325
|
if (latestRun?.id) resolvedRunId = latestRun.id;
|
|
13326
13326
|
} catch {
|
|
13327
|
-
logger$
|
|
13327
|
+
logger$2.debug("Dolt fallback failed during run-id resolution");
|
|
13328
13328
|
} finally {
|
|
13329
13329
|
await probeAdapter.close().catch(() => {});
|
|
13330
13330
|
}
|
|
@@ -13405,7 +13405,7 @@ async function runReconcileFromDiskAction(options) {
|
|
|
13405
13405
|
stdoutTail: failedGateResult?.stdoutTail,
|
|
13406
13406
|
durationMs: durationMs$1
|
|
13407
13407
|
});
|
|
13408
|
-
logger$
|
|
13408
|
+
logger$2.info({
|
|
13409
13409
|
runId: resolvedRunId,
|
|
13410
13410
|
failedGate: failedGateName,
|
|
13411
13411
|
exitCode: failedGateResult?.exitCode
|
|
@@ -13463,7 +13463,7 @@ async function runReconcileFromDiskAction(options) {
|
|
|
13463
13463
|
resolvedRunId
|
|
13464
13464
|
]);
|
|
13465
13465
|
});
|
|
13466
|
-
logger$
|
|
13466
|
+
logger$2.info({
|
|
13467
13467
|
runId: resolvedRunId,
|
|
13468
13468
|
affectedStories: reconcilableRecords.map((r) => r.storyKey)
|
|
13469
13469
|
}, "reconcile-from-disk: Dolt update complete");
|
|
@@ -13516,6 +13516,449 @@ function registerReconcileFromDiskCommand(program, _version = "0.0.0", projectRo
|
|
|
13516
13516
|
});
|
|
13517
13517
|
}
|
|
13518
13518
|
|
|
13519
|
+
//#endregion
|
|
13520
|
+
//#region src/cli/commands/report.ts
|
|
13521
|
+
const logger$1 = createLogger("report");
|
|
13522
|
+
/**
|
|
13523
|
+
* Determine whether verification ran for a story.
|
|
13524
|
+
*
|
|
13525
|
+
* Handles both real manifest format (checks array) and probe fixture format
|
|
13526
|
+
* (explicit verification_ran boolean).
|
|
13527
|
+
*/
|
|
13528
|
+
function didVerificationRun(state) {
|
|
13529
|
+
const vr = state.verification_result;
|
|
13530
|
+
if (!vr) return false;
|
|
13531
|
+
if (typeof vr.verification_ran === "boolean") return vr.verification_ran;
|
|
13532
|
+
if (Array.isArray(vr.checks)) return vr.checks.length > 0;
|
|
13533
|
+
return false;
|
|
13534
|
+
}
|
|
13535
|
+
/**
|
|
13536
|
+
* Extract aggregated finding counts from a story's verification result.
|
|
13537
|
+
*
|
|
13538
|
+
* Handles both real manifest format (checks[].findings[]) and probe fixture
|
|
13539
|
+
* format (error_count / warn_count / info_count + flat findings[]).
|
|
13540
|
+
*/
|
|
13541
|
+
function extractVerificationFindings(state) {
|
|
13542
|
+
const vr = state.verification_result;
|
|
13543
|
+
if (!vr) return {
|
|
13544
|
+
error: 0,
|
|
13545
|
+
warn: 0,
|
|
13546
|
+
info: 0,
|
|
13547
|
+
byAuthor: {}
|
|
13548
|
+
};
|
|
13549
|
+
const byAuthor = {};
|
|
13550
|
+
if (typeof vr.error_count === "number" || typeof vr.warn_count === "number") {
|
|
13551
|
+
const error$1 = vr.error_count ?? 0;
|
|
13552
|
+
const warn$1 = vr.warn_count ?? 0;
|
|
13553
|
+
const info$1 = vr.info_count ?? 0;
|
|
13554
|
+
const findings = vr.findings ?? [];
|
|
13555
|
+
for (const f of findings) {
|
|
13556
|
+
const author = f._authoredBy ?? "unknown";
|
|
13557
|
+
byAuthor[author] = (byAuthor[author] ?? 0) + 1;
|
|
13558
|
+
}
|
|
13559
|
+
return {
|
|
13560
|
+
error: error$1,
|
|
13561
|
+
warn: warn$1,
|
|
13562
|
+
info: info$1,
|
|
13563
|
+
byAuthor
|
|
13564
|
+
};
|
|
13565
|
+
}
|
|
13566
|
+
let error = 0;
|
|
13567
|
+
let warn = 0;
|
|
13568
|
+
let info = 0;
|
|
13569
|
+
const allChecks = vr.checks ?? [];
|
|
13570
|
+
for (const check of allChecks) for (const f of check.findings ?? []) {
|
|
13571
|
+
if (f.severity === "error") error++;
|
|
13572
|
+
else if (f.severity === "warn") warn++;
|
|
13573
|
+
else if (f.severity === "info") info++;
|
|
13574
|
+
const author = f._authoredBy ?? "unknown";
|
|
13575
|
+
byAuthor[author] = (byAuthor[author] ?? 0) + 1;
|
|
13576
|
+
}
|
|
13577
|
+
return {
|
|
13578
|
+
error,
|
|
13579
|
+
warn,
|
|
13580
|
+
info,
|
|
13581
|
+
byAuthor
|
|
13582
|
+
};
|
|
13583
|
+
}
|
|
13584
|
+
/**
 * Classify a story outcome based on its state in the manifest.
 *
 * Rules (AC3):
 * - `verified` — status='complete' AND verification ran AND no error findings AND review_cycles=0
 * - `recovered` — status='complete' AND (verification_ran=false OR review_cycles>0)
 * - `escalated` — status='escalated'
 * - `failed` — status='failed' (or any other non-complete, non-escalated status)
 *
 * This function is pure (no filesystem / Dolt access) and exported for unit testing.
 */
function classifyStoryOutcome(state, _manifest) {
  // Terminal statuses map straight through; any unrecognized status counts as failed.
  switch (state.status) {
    case "escalated": return "escalated";
    case "failed": return "failed";
    case "complete": break;
    default: return "failed";
  }
  // Completed without a verification pass only earns "recovered".
  if (!didVerificationRun(state)) return "recovered";
  const counts = extractVerificationFindings(state);
  const hasErrors = counts.error > 0;
  const cycles = state.review_cycles ?? 0;
  // A clean verification with zero review cycles is the only path to "verified".
  return !hasErrors && cycles === 0 ? "verified" : "recovered";
}
|
|
13609
|
+
/**
 * Build escalation diagnostic enrichment for a story (AC4).
 *
 * Maps escalation_reason to operator-actionable suggestions.
 */
function enrichEscalation(storyKey, state, runId, manifest) {
  const root_cause = state.escalation_reason ?? "unknown";
  // Prefer the explicit review-cycle counter; otherwise count this story's
  // entries in the run-level recovery history.
  const historyEntries = (manifest.recovery_history ?? []).filter((entry) => entry.story_key === storyKey);
  const recovery_attempts = state.review_cycles ?? historyEntries.length;
  const blast_radius = `Story ${storyKey} in run ${runId} — ${recovery_attempts} recovery attempt(s)`;
  // Known root causes map to concrete next steps; Map lookup avoids
  // prototype-key surprises a plain-object table would have.
  const actionByCause = new Map([
    ["checkpoint-retry-timeout", `Run \`substrate reconcile-from-disk --run ${runId}\` (Epic 69) — implementation may have shipped before timeout; gates will validate.`],
    ["verification-fail-after-cycles", `Read findings via \`substrate metrics --run ${runId} --findings\`; consider --max-review-cycles 3 retry.`],
    ["dispatch:spawnsync-timeout", `Agent dispatch timed out. Check system load and retry with \`substrate run --events --stories ${storyKey}\`.`],
    ["cost-ceiling-exceeded", `Cost ceiling was exceeded. Raise --cost-ceiling or break the story into smaller units before retrying.`]
  ]);
  const suggested_operator_action = actionByCause.get(root_cause) ?? `Inspect escalation details with \`substrate metrics --run ${runId}\` and manually review the story work before retrying.`;
  return {
    story_key: storyKey,
    root_cause,
    recovery_attempts,
    suggested_operator_action,
    blast_radius
  };
}
|
|
13642
|
+
/** Pad a string to a fixed width, truncating with '…' if necessary. */
function padCell(value, width) {
  const overflows = value.length > width;
  // Reserve one column for the ellipsis when truncating.
  return overflows ? `${value.slice(0, width - 1)}…` : value.padEnd(width);
}
|
|
13647
|
+
/** Format a row of columns with fixed widths, separated by ' | '. */
function formatRow(cells, widths) {
  const rendered = [];
  for (let i = 0; i < cells.length; i += 1) {
    const cell = cells[i];
    // A missing width entry leaves the cell at its own length (no padding).
    rendered.push(padCell(cell, widths[i] ?? cell.length));
  }
  return rendered.join(" | ");
}
|
|
13651
|
+
/** Render a millisecond duration compactly: "500ms", "1.5s", or "1m5s". */
function formatDurationMs(ms) {
  if (ms >= 6e4) {
    const wholeMinutes = Math.floor(ms / 6e4);
    const leftoverSeconds = Math.floor(ms % 6e4 / 1e3);
    return `${wholeMinutes}m${leftoverSeconds}s`;
  }
  if (ms >= 1e3) return `${(ms / 1e3).toFixed(1)}s`;
  return `${ms}ms`;
}
|
|
13658
|
+
/**
 * Wall-clock duration of a story in milliseconds, derived from its
 * started_at / completed_at timestamps.
 *
 * Returns undefined when either timestamp is absent or unparseable, so
 * callers can distinguish "unknown" from a genuine zero-length duration.
 */
function wallClockMs(state) {
  if (!state.started_at || !state.completed_at) return void 0;
  const start = new Date(state.started_at).getTime();
  const end = new Date(state.completed_at).getTime();
  // Number.isNaN instead of the coercing global isNaN; getTime() already
  // yields a number, so behavior is identical but the intent is explicit.
  if (Number.isNaN(start) || Number.isNaN(end)) return void 0;
  return end - start;
}
|
|
13664
|
+
/**
 * Render the report as a human-readable text banner + fixed-width table.
 *
 * @param output   assembled report (runId, summary, stories, escalations, cost, duration)
 * @param manifest raw run manifest — currently unused in this body; kept for
 *                 signature stability (confirm before removing)
 * @returns multi-line string (no trailing newline appended here)
 */
function renderHuman(output, manifest) {
  const lines = [];
  const { runId, summary, stories, escalations, cost, duration } = output;
  // Header values: unknown duration and absent ceiling degrade gracefully.
  const durationStr = duration.wall_clock_ms != null ? formatDurationMs(duration.wall_clock_ms) : "unknown";
  const costStr = `$${cost.spent.toFixed(4)}`;
  const ceilingStr = cost.ceiling != null ? ` / $${cost.ceiling.toFixed(4)} ceiling (${cost.utilization ?? "?"}) ${cost.overCeiling ? "[OVER CEILING]" : ""}` : "";
  // Any escalated or failed story flips the overall verdict.
  const verdict = summary.escalated > 0 || summary.failed > 0 ? "NEEDS ATTENTION" : "ALL PASSED";
  lines.push(`══════════════════════════════════════════════════════════`);
  lines.push(` Run: ${runId}`);
  lines.push(` Duration: ${durationStr}`);
  lines.push(` Cost: ${costStr}${ceilingStr}`);
  lines.push(` Verdict: ${verdict}`);
  lines.push(`══════════════════════════════════════════════════════════`);
  lines.push("");
  lines.push(`${summary.verified} verified, ${summary.recovered} recovered, ${summary.escalated} escalated, ${summary.failed} failed of ${summary.total} total`);
  lines.push("");
  // Column widths correspond 1:1 to HEADERS below.
  const COL_WIDTHS = [
    50,
    10,
    10,
    8,
    10,
    14,
    10
  ];
  const HEADERS = [
    "story_key",
    "outcome",
    "wall-clock",
    "cycles",
    "cost",
    "findings",
    "verified"
  ];
  lines.push(formatRow(HEADERS, COL_WIDTHS));
  // Separator row: dashes per column joined with '-+-'.
  lines.push(COL_WIDTHS.map((w) => "-".repeat(w)).join("-+-"));
  for (const s of stories) {
    // '-' marks cells whose source value is absent.
    const wallClock = s.wall_clock_ms != null ? formatDurationMs(s.wall_clock_ms) : "-";
    const costCell = s.cost_usd != null ? `$${s.cost_usd.toFixed(4)}` : "-";
    const f = s.verification_findings;
    const findingsCell = `E:${f.error} W:${f.warn} I:${f.info}`;
    const verifiedTag = s.outcome === "verified" ? "✓" : "";
    // Pre-truncate the key to the 50-char column (ellipsis at char 50).
    const key = s.story_key.length > 50 ? s.story_key.slice(0, 49) + "…" : s.story_key;
    lines.push(formatRow([
      key,
      s.outcome,
      wallClock,
      String(s.review_cycles),
      costCell,
      findingsCell,
      verifiedTag
    ], COL_WIDTHS));
  }
  lines.push("");
  // Escalation section is omitted entirely when there are none.
  if (escalations.length > 0) {
    lines.push("──── Escalation Details ────");
    for (const esc of escalations) {
      lines.push("");
      lines.push(` Story: ${esc.story_key}`);
      lines.push(` Root cause: ${esc.root_cause}`);
      lines.push(` Recovery attempts: ${esc.recovery_attempts}`);
      lines.push(` Blast radius: ${esc.blast_radius}`);
      lines.push(` Suggested action: ${esc.suggested_operator_action}`);
    }
    lines.push("");
  }
  return lines.join("\n");
}
|
|
13732
|
+
/** Serialize the assembled report as pretty-printed (2-space) JSON. */
function renderJson(output) {
  const JSON_INDENT = 2;
  return JSON.stringify(output, null, JSON_INDENT);
}
|
|
13735
|
+
/**
 * Build ReportOutput from a raw manifest.
 *
 * Pure aggregation over manifest.per_story_state: computes run duration,
 * cost/ceiling utilization, per-story summaries, outcome tallies, and
 * escalation enrichments. No filesystem or Dolt access.
 *
 * @param runId    resolved run identifier echoed into the output
 * @param manifest raw run manifest (per_story_state, created_at/updated_at,
 *                 cost_accumulation, cli_flags, recovery_history)
 */
function assembleReport(runId, manifest) {
  const perStoryState = manifest.per_story_state ?? {};
  const storyKeys = Object.keys(perStoryState);
  // Run-level wall clock: created_at → updated_at, undefined if either is
  // missing or unparseable. NOTE(review): updated_at presumably tracks the
  // last manifest write, not necessarily run completion — confirm.
  const startedAt = manifest.created_at ?? "";
  const completedAt = manifest.updated_at;
  let totalWallMs;
  if (startedAt && completedAt) {
    const s = new Date(startedAt).getTime();
    const e = new Date(completedAt).getTime();
    if (!isNaN(s) && !isNaN(e)) totalWallMs = e - s;
  }
  const duration = {
    started_at: startedAt,
    completed_at: completedAt,
    wall_clock_ms: totalWallMs
  };
  const spent = manifest.cost_accumulation?.run_total ?? 0;
  // Ceiling is only reported when cost accumulation exists at all; it comes
  // from the CLI flags, not the accumulation record itself.
  const ceiling = manifest.cost_accumulation != null ? manifest.cli_flags?.cost_ceiling : void 0;
  const utilization = ceiling != null && ceiling > 0 ? `${(spent / ceiling * 100).toFixed(1)}%` : void 0;
  const overCeiling = ceiling != null ? spent > ceiling : false;
  const cost = {
    spent,
    ceiling,
    utilization,
    overCeiling
  };
  const stories = [];
  const escalations = [];
  // Tally buckets keyed by the outcome strings classifyStoryOutcome returns.
  const summary = {
    verified: 0,
    recovered: 0,
    escalated: 0,
    failed: 0,
    total: 0
  };
  for (const key of storyKeys) {
    const state = perStoryState[key];
    const outcome = classifyStoryOutcome(state, manifest);
    const findings = extractVerificationFindings(state);
    const verificationRan = didVerificationRun(state);
    const wc = wallClockMs(state);
    const storySummary = {
      story_key: key,
      outcome,
      wall_clock_ms: wc,
      review_cycles: state.review_cycles ?? 0,
      cost_usd: state.cost_usd,
      verification_findings: findings,
      verification_ran: verificationRan
    };
    stories.push(storySummary);
    // outcome is one of the summary keys above, so this indexing is safe.
    summary[outcome]++;
    summary.total++;
    if (outcome === "escalated") escalations.push(enrichEscalation(key, state, runId, manifest));
  }
  return {
    runId,
    summary,
    stories,
    escalations,
    cost,
    duration
  };
}
|
|
13802
|
+
/**
 * Read a single run manifest file directly (without strict Zod schema).
 * Handles both real manifests and probe fixture simplified formats.
 *
 * Resolution order:
 *  1. `<runsDir>/<runId>.json` — fast path when the file is named by run ID.
 *  2. Scan every other `*.json` in runsDir (skipping `manifest.json`) for a
 *     file whose `run_id` field matches.
 *
 * All I/O and parse failures are deliberately swallowed (best-effort read);
 * returns null when no acceptable manifest is found.
 *
 * @param runsDir directory containing per-run JSON manifests
 * @param runId   run identifier to locate
 * @returns parsed manifest object, or null
 */
async function readRawManifest(runsDir, runId) {
  const directPath = join(runsDir, `${runId}.json`);
  try {
    const raw = await readFile(directPath, "utf-8");
    const parsed = JSON.parse(raw);
    // Accept only plain objects with the minimal manifest shape.
    if (parsed != null && typeof parsed === "object" && !Array.isArray(parsed)) {
      const m = parsed;
      if (typeof m.run_id === "string" && typeof m.per_story_state === "object") return m;
    }
  } catch {}
  // Fallback: scan the directory for a manifest whose run_id matches.
  try {
    const entries = await readdir(runsDir);
    for (const entry of entries) {
      if (!entry.endsWith(".json") || entry === "manifest.json") continue;
      const filePath = join(runsDir, entry);
      try {
        const raw = await readFile(filePath, "utf-8");
        const parsed = JSON.parse(raw);
        if (parsed != null && typeof parsed === "object" && !Array.isArray(parsed)) {
          const m = parsed;
          if (m.run_id === runId && typeof m.per_story_state === "object") return m;
        }
      } catch {}
    }
  } catch {}
  return null;
}
|
|
13833
|
+
/**
 * Resolve the latest run ID via canonical chain (Story 71-2 hot-fix).
 *
 * Replaces Story 71-1's invented `.substrate/runs/manifest.json` aggregate
 * format with the canonical chain used by status.ts / health.ts /
 * reconcile-from-disk:
 * 1. `.substrate/current-run-id` is consulted by resolveRunManifest at the
 *    caller's site (not here); this helper handles the post-current-run-id
 *    fallback.
 * 2. `getLatestRun(adapter)` Dolt fallback — the canonical persistence
 *    source. Opens a temporary adapter and closes it before returning.
 *
 * Returns null when neither chain link yields a run ID.
 *
 * @param dbRoot repository root used as the adapter's basePath
 * @returns latest run ID string, or null on any failure
 */
async function resolveLatestRunId(dbRoot) {
  // Short-lived adapter just for this probe; closed in the finally below.
  const probeAdapter = createDatabaseAdapter({
    backend: "auto",
    basePath: dbRoot
  });
  try {
    await initSchema(probeAdapter);
    const latest = await getLatestRun(probeAdapter);
    return latest?.id ?? null;
  } catch {
    // Best-effort: any adapter/schema/query failure degrades to "no run".
    logger$1.debug("Dolt fallback failed during run-id resolution");
    return null;
  } finally {
    // Close errors are ignored — the probe result has already been decided.
    await probeAdapter.close().catch(() => {});
  }
}
|
|
13863
|
+
/**
 * Execute the `substrate report` action.
 *
 * Resolution chain:
 *  - explicit `--run <id>`: canonical resolveRunManifest, then raw-file shim;
 *  - otherwise ("latest"/omitted): canonical resolver first, then the Dolt
 *    getLatestRun fallback (resolveLatestRunId), then the raw-file shim.
 * After a manifest is found, per-story `completed_at` is best-effort enriched
 * from the Dolt `wg_stories` table when a `.dolt` state repo exists.
 *
 * @param options { run, outputFormat ('human'|'json'), projectRoot, _dbRoot? (test seam) }
 * @returns process exit code: 0 on success, 1 when no run/manifest is found
 */
async function runReportAction(options) {
  const { run: runArg, outputFormat, projectRoot, _dbRoot } = options;
  // Env var wins over the injected project root.
  const effectiveProjectRoot = process.env["SUBSTRATE_PROJECT_ROOT"] ?? projectRoot;
  const dbRoot = _dbRoot ?? await resolveMainRepoRoot(effectiveProjectRoot);
  const runsDir = join(dbRoot, ".substrate", "runs");
  logger$1.debug({
    runArg,
    effectiveProjectRoot,
    dbRoot
  }, "report action start");
  let resolvedRunId = null;
  let manifest = null;
  // "latest" is treated the same as omitting --run entirely.
  const isExplicitId = runArg != null && runArg !== "latest";
  if (isExplicitId) {
    resolvedRunId = runArg;
    const canonical = await resolveRunManifest(dbRoot, resolvedRunId);
    if (canonical.manifest) try {
      const data = await canonical.manifest.read();
      manifest = data;
    } catch {
      logger$1.debug({ runId: resolvedRunId }, "canonical manifest read failed — using raw shim");
    }
    // Raw-file shim covers fixture/simplified manifest formats.
    if (!manifest) manifest = await readRawManifest(runsDir, resolvedRunId);
  } else {
    // Step 1: canonical resolver (consults .substrate/current-run-id).
    const canonical = await resolveRunManifest(dbRoot, void 0);
    if (canonical.runId && canonical.manifest) {
      resolvedRunId = canonical.runId;
      try {
        const data = await canonical.manifest.read();
        manifest = data;
      } catch {
        logger$1.debug({ runId: canonical.runId }, "canonical manifest read failed — using raw shim");
        manifest = null;
      }
    }
    // Step 2: Dolt getLatestRun fallback, then re-resolve the manifest.
    if (!resolvedRunId) {
      resolvedRunId = await resolveLatestRunId(dbRoot);
      if (resolvedRunId) {
        const reread = await resolveRunManifest(dbRoot, resolvedRunId);
        if (reread.manifest) try {
          const data = await reread.manifest.read();
          manifest = data;
        } catch {
          logger$1.debug({ runId: resolvedRunId }, "manifest read failed after Dolt resolution");
        }
        if (!manifest) manifest = await readRawManifest(runsDir, resolvedRunId);
      }
    }
    if (!resolvedRunId) {
      process.stderr.write("No runs found. Run `substrate run` to start a pipeline.\n");
      return 1;
    }
    if (!manifest) manifest = await readRawManifest(runsDir, resolvedRunId);
  }
  if (!manifest) {
    process.stderr.write(`No runs found. Run ID "${resolvedRunId}" not found in ${runsDir}.\nRun \`substrate run\` to start a pipeline.
`);
    return 1;
  }
  // Best-effort Dolt enrichment: fill missing per-story completed_at from
  // wg_stories. Any failure degrades to manifest-only data.
  const statePath = join(dbRoot, ".substrate", "state");
  if (existsSync$1(join(statePath, ".dolt"))) try {
    const doltClient = new DoltClient({ repoPath: statePath });
    const storyKeys = Object.keys(manifest.per_story_state);
    if (storyKeys.length > 0) {
      // Parameterized IN-list; story keys are bound, not interpolated.
      const placeholders = storyKeys.map(() => "?").join(", ");
      const rows = await doltClient.query(`SELECT story_key, completed_at FROM wg_stories WHERE story_key IN (${placeholders})`, storyKeys);
      for (const row of rows) {
        const state = manifest.per_story_state[row.story_key];
        // Only fill gaps — never overwrite a manifest-supplied timestamp.
        if (state && !state.completed_at && row.completed_at) state.completed_at = row.completed_at;
      }
      logger$1.debug({ storyCount: rows.length }, "Dolt wg_stories enrichment applied");
    }
  } catch (err) {
    logger$1.debug({ err }, "Dolt enrichment unavailable — using manifest-only data (degraded mode)");
  }
  const output = assembleReport(resolvedRunId, manifest);
  // NOTE(review): JSON output gets a trailing newline, human output does not
  // — confirm intentional.
  if (outputFormat === "json") process.stdout.write(renderJson(output) + "\n");
  else process.stdout.write(renderHuman(output, manifest));
  return 0;
}
|
|
13943
|
+
/**
 * Register the `substrate report` command.
 *
 * Signature mirrors `registerReconcileFromDiskCommand` (Epic 69 Story 69-1)
 * for uniform CLI registration shape. `registry` is present for signature
 * uniformity even though this command does not use it.
 */
function registerReportCommand(program, _version = "0.0.0", projectRoot = process.cwd(), _registry) {
  const reportCmd = program.command("report");
  reportCmd.description("Read run manifest and produce a structured completion report");
  reportCmd.option("--run <id|latest>", "Run ID to report on, or \"latest\" (default: current-run-id file, then Dolt getLatestRun fallback)");
  reportCmd.option("--output-format <format>", "Output format: human (default) or json", "human");
  reportCmd.action(async (opts) => {
    // Anything other than an explicit "json" falls back to human output.
    const outputFormat = opts.outputFormat === "json" ? "json" : "human";
    process.exitCode = await runReportAction({
      run: opts.run,
      outputFormat,
      projectRoot
    });
  });
}
|
|
13961
|
+
|
|
13519
13962
|
//#endregion
|
|
13520
13963
|
//#region src/cli/index.ts
|
|
13521
13964
|
process.setMaxListeners(20);
|
|
@@ -13585,6 +14028,7 @@ async function createProgram() {
|
|
|
13585
14028
|
registerScenariosCommand(program);
|
|
13586
14029
|
registerFactoryCommand$1(program);
|
|
13587
14030
|
registerReconcileFromDiskCommand(program, version, process.cwd(), registry);
|
|
14031
|
+
registerReportCommand(program, version, process.cwd(), registry);
|
|
13588
14032
|
registerUpgradeCommand(program);
|
|
13589
14033
|
return program;
|
|
13590
14034
|
}
|