substrate-ai 0.3.7 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +375 -126
- package/dist/cli/templates/claude-md-substrate-section.md +6 -0
- package/dist/{decisions-CbysnTi5.js → decisions-D7Ao_KcL.js} +1 -1
- package/dist/{decisions-CdpiJIm5.js → decisions-Db8GTbH2.js} +1 -1
- package/dist/{experimenter-jto3orYl.js → experimenter-CvxtqzXz.js} +4 -4
- package/dist/{git-utils-UbKLSGsD.js → git-utils-C-fdrHF_.js} +1 -1
- package/dist/index.d.ts +39 -0
- package/dist/{operational-DisxqtjC.js → operational-C0_y8DAs.js} +1 -1
- package/dist/routing-CZfJB3y9.js +477 -0
- package/dist/routing-DWCBjrt7.js +4 -0
- package/dist/run-CQJP37ZC.js +9 -0
- package/dist/{run-N7voPS_k.js → run-cqQmW8wL.js} +6674 -4243
- package/dist/{upgrade-DTzeenA-.js → upgrade-C8_VcI8B.js} +2 -2
- package/dist/{upgrade-BlJKjr6I.js → upgrade-njy4XENS.js} +2 -2
- package/dist/{version-manager-impl-zsJjBhak.js → version-manager-impl-DTlmGvHb.js} +1 -1
- package/dist/{version-manager-impl-BsHqAeGT.js → version-manager-impl-QwroczYS.js} +1 -1
- package/package.json +1 -1
- package/packs/bmad/prompts/code-review.md +2 -0
- package/packs/bmad/prompts/dev-story.md +2 -0
- package/dist/run-C_hKt2wY.js +0 -8
package/dist/cli/index.js
CHANGED
|
@@ -1,14 +1,15 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltNotInstalled, FileStateStore, IngestionServer, SUBSTRATE_OWNED_SETTINGS_KEYS, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-
|
|
2
|
+
import { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-cqQmW8wL.js";
|
|
3
3
|
import { createLogger } from "../logger-D2fS2ccL.js";
|
|
4
4
|
import { AdapterRegistry } from "../adapter-registry-rSOJ9Kvz.js";
|
|
5
5
|
import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-DtZW1maj.js";
|
|
6
6
|
import { ConfigError, createEventBus } from "../helpers-BihqWgVe.js";
|
|
7
|
-
import {
|
|
8
|
-
import {
|
|
9
|
-
import {
|
|
10
|
-
import "../
|
|
11
|
-
import
|
|
7
|
+
import { RoutingRecommender } from "../routing-CZfJB3y9.js";
|
|
8
|
+
import { addTokenUsage, createDecision, createPipelineRun, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestRun, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-Db8GTbH2.js";
|
|
9
|
+
import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-C0_y8DAs.js";
|
|
10
|
+
import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-C-fdrHF_.js";
|
|
11
|
+
import "../version-manager-impl-DTlmGvHb.js";
|
|
12
|
+
import { registerUpgradeCommand } from "../upgrade-C8_VcI8B.js";
|
|
12
13
|
import { Command } from "commander";
|
|
13
14
|
import { fileURLToPath } from "url";
|
|
14
15
|
import { dirname, join, resolve } from "path";
|
|
@@ -19,7 +20,7 @@ import { createRequire } from "node:module";
|
|
|
19
20
|
import * as path$1 from "node:path";
|
|
20
21
|
import { isAbsolute, join as join$1 } from "node:path";
|
|
21
22
|
import Database from "better-sqlite3";
|
|
22
|
-
import { access as access$1 } from "node:fs/promises";
|
|
23
|
+
import { access as access$1, readFile as readFile$1 } from "node:fs/promises";
|
|
23
24
|
import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
|
|
24
25
|
import { createInterface } from "node:readline";
|
|
25
26
|
import { homedir } from "os";
|
|
@@ -259,7 +260,7 @@ function registerAdaptersCommand(program, version, registry) {
|
|
|
259
260
|
|
|
260
261
|
//#endregion
|
|
261
262
|
//#region src/cli/commands/init.ts
|
|
262
|
-
const logger$
|
|
263
|
+
const logger$18 = createLogger("init");
|
|
263
264
|
const __dirname = dirname(new URL(import.meta.url).pathname);
|
|
264
265
|
const INIT_EXIT_SUCCESS = 0;
|
|
265
266
|
const INIT_EXIT_ERROR = 1;
|
|
@@ -280,7 +281,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
280
281
|
const version = resolveBmadMethodVersion();
|
|
281
282
|
if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
|
|
282
283
|
process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
|
|
283
|
-
logger$
|
|
284
|
+
logger$18.info({
|
|
284
285
|
version,
|
|
285
286
|
dest: bmadDest
|
|
286
287
|
}, "Scaffolding BMAD framework");
|
|
@@ -290,7 +291,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
290
291
|
const destDir = join(bmadDest, dir);
|
|
291
292
|
mkdirSync(destDir, { recursive: true });
|
|
292
293
|
cpSync(srcDir, destDir, { recursive: true });
|
|
293
|
-
logger$
|
|
294
|
+
logger$18.info({
|
|
294
295
|
dir,
|
|
295
296
|
dest: destDir
|
|
296
297
|
}, "Scaffolded BMAD framework directory");
|
|
@@ -309,7 +310,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
309
310
|
"document_output_language: English"
|
|
310
311
|
].join("\n") + "\n";
|
|
311
312
|
await writeFile(configFile, configStub, "utf8");
|
|
312
|
-
logger$
|
|
313
|
+
logger$18.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
|
|
313
314
|
}
|
|
314
315
|
}
|
|
315
316
|
const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
|
|
@@ -324,7 +325,7 @@ async function scaffoldClaudeMd(projectRoot) {
|
|
|
324
325
|
try {
|
|
325
326
|
sectionContent = await readFile(templatePath, "utf8");
|
|
326
327
|
} catch {
|
|
327
|
-
logger$
|
|
328
|
+
logger$18.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
|
|
328
329
|
return;
|
|
329
330
|
}
|
|
330
331
|
if (!sectionContent.endsWith("\n")) sectionContent += "\n";
|
|
@@ -342,7 +343,7 @@ async function scaffoldClaudeMd(projectRoot) {
|
|
|
342
343
|
newContent = existingContent + separator + sectionContent;
|
|
343
344
|
}
|
|
344
345
|
await writeFile(claudeMdPath, newContent, "utf8");
|
|
345
|
-
logger$
|
|
346
|
+
logger$18.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
|
|
346
347
|
}
|
|
347
348
|
async function scaffoldStatuslineScript(projectRoot) {
|
|
348
349
|
const pkgRoot = findPackageRoot(__dirname);
|
|
@@ -353,7 +354,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
353
354
|
try {
|
|
354
355
|
content = await readFile(templatePath, "utf8");
|
|
355
356
|
} catch {
|
|
356
|
-
logger$
|
|
357
|
+
logger$18.warn({ templatePath }, "statusline.sh template not found; skipping");
|
|
357
358
|
return;
|
|
358
359
|
}
|
|
359
360
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -361,7 +362,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
361
362
|
mkdirSync(claudeDir, { recursive: true });
|
|
362
363
|
await writeFile(statuslinePath, content, "utf8");
|
|
363
364
|
chmodSync(statuslinePath, 493);
|
|
364
|
-
logger$
|
|
365
|
+
logger$18.info({ statuslinePath }, "Wrote .claude/statusline.sh");
|
|
365
366
|
}
|
|
366
367
|
async function scaffoldClaudeSettings(projectRoot) {
|
|
367
368
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -377,7 +378,7 @@ async function scaffoldClaudeSettings(projectRoot) {
|
|
|
377
378
|
if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
|
|
378
379
|
mkdirSync(claudeDir, { recursive: true });
|
|
379
380
|
await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
|
|
380
|
-
logger$
|
|
381
|
+
logger$18.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
|
|
381
382
|
}
|
|
382
383
|
function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
|
|
383
384
|
try {
|
|
@@ -447,7 +448,7 @@ async function compileBmadAgents(bmadDir) {
|
|
|
447
448
|
writeFileSync(mdPath, result.xml, "utf-8");
|
|
448
449
|
compiled++;
|
|
449
450
|
} catch (compileErr) {
|
|
450
|
-
logger$
|
|
451
|
+
logger$18.debug({
|
|
451
452
|
err: compileErr,
|
|
452
453
|
file
|
|
453
454
|
}, "Failed to compile agent YAML");
|
|
@@ -468,21 +469,31 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
468
469
|
const _require = createRequire(join(__dirname, "synthetic.js"));
|
|
469
470
|
try {
|
|
470
471
|
const compiledCount = await compileBmadAgents(bmadDir);
|
|
471
|
-
if (compiledCount > 0) logger$
|
|
472
|
+
if (compiledCount > 0) logger$18.info({ compiledCount }, "Compiled agent YAML files to MD");
|
|
472
473
|
} catch (compileErr) {
|
|
473
|
-
logger$
|
|
474
|
+
logger$18.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
|
|
474
475
|
}
|
|
475
|
-
const
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
476
|
+
const resolveExport = (mod, name) => {
|
|
477
|
+
if (typeof mod[name] === "function") return mod[name];
|
|
478
|
+
const def = mod.default;
|
|
479
|
+
if (def && typeof def[name] === "function") return def[name];
|
|
480
|
+
throw new Error(`${name} is not a constructor`);
|
|
481
|
+
};
|
|
482
|
+
const agentMod = _require(join(installerLibPath, "ide", "shared", "agent-command-generator.js"));
|
|
483
|
+
const AgentCommandGenerator = resolveExport(agentMod, "AgentCommandGenerator");
|
|
484
|
+
const workflowMod = _require(join(installerLibPath, "ide", "shared", "workflow-command-generator.js"));
|
|
485
|
+
const WorkflowCommandGenerator = resolveExport(workflowMod, "WorkflowCommandGenerator");
|
|
486
|
+
const taskToolMod = _require(join(installerLibPath, "ide", "shared", "task-tool-command-generator.js"));
|
|
487
|
+
const TaskToolCommandGenerator = resolveExport(taskToolMod, "TaskToolCommandGenerator");
|
|
488
|
+
const manifestMod = _require(join(installerLibPath, "core", "manifest-generator.js"));
|
|
489
|
+
const ManifestGenerator = resolveExport(manifestMod, "ManifestGenerator");
|
|
479
490
|
const nonCoreModules = scanBmadModules(bmadDir);
|
|
480
491
|
const allModules = ["core", ...nonCoreModules];
|
|
481
492
|
try {
|
|
482
493
|
const manifestGen = new ManifestGenerator();
|
|
483
494
|
await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
|
|
484
495
|
} catch (manifestErr) {
|
|
485
|
-
logger$
|
|
496
|
+
logger$18.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
|
|
486
497
|
}
|
|
487
498
|
const commandsDir = join(projectRoot, ".claude", "commands");
|
|
488
499
|
mkdirSync(commandsDir, { recursive: true });
|
|
@@ -498,7 +509,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
498
509
|
const taskToolCount = await taskToolGen.writeDashArtifacts(commandsDir, taskToolArtifacts);
|
|
499
510
|
const total = agentCount + workflowCount + taskToolCount;
|
|
500
511
|
if (outputFormat !== "json") process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
|
|
501
|
-
logger$
|
|
512
|
+
logger$18.info({
|
|
502
513
|
agentCount,
|
|
503
514
|
workflowCount,
|
|
504
515
|
taskToolCount,
|
|
@@ -508,7 +519,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
508
519
|
} catch (err) {
|
|
509
520
|
const msg = err instanceof Error ? err.message : String(err);
|
|
510
521
|
if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
|
|
511
|
-
logger$
|
|
522
|
+
logger$18.warn({ err }, "scaffoldClaudeCommands failed; init continues");
|
|
512
523
|
}
|
|
513
524
|
}
|
|
514
525
|
const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
|
|
@@ -583,7 +594,7 @@ async function runInitAction(options) {
|
|
|
583
594
|
discoveryReport = await registry.discoverAndRegister();
|
|
584
595
|
} catch (err) {
|
|
585
596
|
const message = err instanceof Error ? err.message : String(err);
|
|
586
|
-
logger$
|
|
597
|
+
logger$18.error({ err }, "Adapter discovery failed");
|
|
587
598
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
|
|
588
599
|
else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
|
|
589
600
|
return INIT_EXIT_ERROR;
|
|
@@ -632,12 +643,12 @@ async function runInitAction(options) {
|
|
|
632
643
|
return INIT_EXIT_ERROR;
|
|
633
644
|
}
|
|
634
645
|
if (force && existsSync(localManifest)) {
|
|
635
|
-
logger$
|
|
646
|
+
logger$18.info({ pack: packName }, "Replacing existing pack with bundled version");
|
|
636
647
|
process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
|
|
637
648
|
}
|
|
638
649
|
mkdirSync(dirname(packPath), { recursive: true });
|
|
639
650
|
cpSync(bundledPackPath, packPath, { recursive: true });
|
|
640
|
-
logger$
|
|
651
|
+
logger$18.info({
|
|
641
652
|
pack: packName,
|
|
642
653
|
dest: packPath
|
|
643
654
|
}, "Scaffolded methodology pack");
|
|
@@ -674,17 +685,17 @@ async function runInitAction(options) {
|
|
|
674
685
|
process.stderr.write(`${err.message}\n`);
|
|
675
686
|
return INIT_EXIT_ERROR;
|
|
676
687
|
}
|
|
677
|
-
logger$
|
|
688
|
+
logger$18.debug("Dolt not installed, skipping auto-init");
|
|
678
689
|
} else {
|
|
679
690
|
const msg = err instanceof Error ? err.message : String(err);
|
|
680
691
|
if (doltMode === "force") {
|
|
681
692
|
process.stderr.write(`✗ Dolt initialization failed: ${msg}\n`);
|
|
682
693
|
return INIT_EXIT_ERROR;
|
|
683
694
|
}
|
|
684
|
-
logger$
|
|
695
|
+
logger$18.warn({ error: msg }, "Dolt auto-init failed (non-blocking)");
|
|
685
696
|
}
|
|
686
697
|
}
|
|
687
|
-
else logger$
|
|
698
|
+
else logger$18.debug("Dolt step was skipped (--no-dolt)");
|
|
688
699
|
const successMsg = `Pack '${packName}' and database initialized successfully at ${dbPath}`;
|
|
689
700
|
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
690
701
|
pack: packName,
|
|
@@ -708,6 +719,7 @@ async function runInitAction(options) {
|
|
|
708
719
|
process.stdout.write(` .claude/commands/ /substrate-run, /substrate-supervisor, /substrate-metrics\n`);
|
|
709
720
|
process.stdout.write(` .substrate/ config, database, routing policy\n`);
|
|
710
721
|
if (doltInitialized) process.stdout.write(`✓ Dolt state store initialized at .substrate/state/\n`);
|
|
722
|
+
else if (doltMode !== "skip") process.stdout.write(`ℹ Dolt not detected — install Dolt for versioned state, \`substrate diff\`, and observability persistence. See: https://docs.dolthub.com/introduction/installation\n`);
|
|
711
723
|
process.stdout.write("\n Next steps:\n 1. Start a Claude Code session in this project\n 2. Tell Claude: \"Run the substrate pipeline\"\n 3. Or use the /substrate-run slash command for a guided run\n");
|
|
712
724
|
}
|
|
713
725
|
return INIT_EXIT_SUCCESS;
|
|
@@ -715,7 +727,7 @@ async function runInitAction(options) {
|
|
|
715
727
|
const msg = err instanceof Error ? err.message : String(err);
|
|
716
728
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
717
729
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
718
|
-
logger$
|
|
730
|
+
logger$18.error({ err }, "init failed");
|
|
719
731
|
return INIT_EXIT_ERROR;
|
|
720
732
|
}
|
|
721
733
|
}
|
|
@@ -738,7 +750,7 @@ function registerInitCommand(program, _version, registry) {
|
|
|
738
750
|
|
|
739
751
|
//#endregion
|
|
740
752
|
//#region src/cli/commands/config.ts
|
|
741
|
-
const logger$
|
|
753
|
+
const logger$17 = createLogger("config-cmd");
|
|
742
754
|
const CONFIG_EXIT_SUCCESS = 0;
|
|
743
755
|
const CONFIG_EXIT_ERROR = 1;
|
|
744
756
|
const CONFIG_EXIT_INVALID = 2;
|
|
@@ -764,7 +776,7 @@ async function runConfigShow(opts = {}) {
|
|
|
764
776
|
return CONFIG_EXIT_INVALID;
|
|
765
777
|
}
|
|
766
778
|
const message = err instanceof Error ? err.message : String(err);
|
|
767
|
-
logger$
|
|
779
|
+
logger$17.error({ err }, "Failed to load configuration");
|
|
768
780
|
process.stderr.write(` Error loading configuration: ${message}\n`);
|
|
769
781
|
return CONFIG_EXIT_ERROR;
|
|
770
782
|
}
|
|
@@ -838,7 +850,7 @@ async function runConfigExport(opts = {}) {
|
|
|
838
850
|
return CONFIG_EXIT_INVALID;
|
|
839
851
|
}
|
|
840
852
|
const message = err instanceof Error ? err.message : String(err);
|
|
841
|
-
logger$
|
|
853
|
+
logger$17.error({ err }, "Failed to load configuration");
|
|
842
854
|
process.stderr.write(`Error loading configuration: ${message}\n`);
|
|
843
855
|
return CONFIG_EXIT_ERROR;
|
|
844
856
|
}
|
|
@@ -992,7 +1004,7 @@ function registerConfigCommand(program, _version) {
|
|
|
992
1004
|
|
|
993
1005
|
//#endregion
|
|
994
1006
|
//#region src/cli/commands/resume.ts
|
|
995
|
-
const logger$
|
|
1007
|
+
const logger$16 = createLogger("resume-cmd");
|
|
996
1008
|
async function runResumeAction(options) {
|
|
997
1009
|
const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName, registry } = options;
|
|
998
1010
|
if (stopAfter !== void 0 && !VALID_PHASES.includes(stopAfter)) {
|
|
@@ -1075,7 +1087,7 @@ async function runResumeAction(options) {
|
|
|
1075
1087
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1076
1088
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1077
1089
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1078
|
-
logger$
|
|
1090
|
+
logger$16.error({ err }, "auto resume failed");
|
|
1079
1091
|
return 1;
|
|
1080
1092
|
} finally {
|
|
1081
1093
|
try {
|
|
@@ -1240,7 +1252,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1240
1252
|
});
|
|
1241
1253
|
}
|
|
1242
1254
|
} catch (err) {
|
|
1243
|
-
logger$
|
|
1255
|
+
logger$16.warn({ err }, "Failed to record token usage");
|
|
1244
1256
|
}
|
|
1245
1257
|
});
|
|
1246
1258
|
const storyKeys = resolveStoryKeys(db, projectRoot, { pipelineRunId: runId });
|
|
@@ -1295,7 +1307,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1295
1307
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1296
1308
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1297
1309
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1298
|
-
logger$
|
|
1310
|
+
logger$16.error({ err }, "pipeline from phase failed");
|
|
1299
1311
|
return 1;
|
|
1300
1312
|
} finally {
|
|
1301
1313
|
try {
|
|
@@ -1321,7 +1333,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
|
|
|
1321
1333
|
|
|
1322
1334
|
//#endregion
|
|
1323
1335
|
//#region src/cli/commands/status.ts
|
|
1324
|
-
const logger$
|
|
1336
|
+
const logger$15 = createLogger("status-cmd");
|
|
1325
1337
|
async function runStatusAction(options) {
|
|
1326
1338
|
const { outputFormat, runId, projectRoot, stateStore, history } = options;
|
|
1327
1339
|
if (history === true) {
|
|
@@ -1377,7 +1389,7 @@ async function runStatusAction(options) {
|
|
|
1377
1389
|
if (stateStore) try {
|
|
1378
1390
|
storeStories = await stateStore.queryStories({});
|
|
1379
1391
|
} catch (err) {
|
|
1380
|
-
logger$
|
|
1392
|
+
logger$15.debug({ err }, "StateStore query failed, continuing without store data");
|
|
1381
1393
|
}
|
|
1382
1394
|
if (outputFormat === "json") {
|
|
1383
1395
|
const statusOutput = buildPipelineStatusOutput(run, tokenSummary, decisionsCount, storiesCount);
|
|
@@ -1480,7 +1492,7 @@ async function runStatusAction(options) {
|
|
|
1480
1492
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1481
1493
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1482
1494
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1483
|
-
logger$
|
|
1495
|
+
logger$15.error({ err }, "status action failed");
|
|
1484
1496
|
return 1;
|
|
1485
1497
|
} finally {
|
|
1486
1498
|
try {
|
|
@@ -1924,7 +1936,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
|
|
|
1924
1936
|
|
|
1925
1937
|
//#endregion
|
|
1926
1938
|
//#region src/cli/commands/amend.ts
|
|
1927
|
-
const logger$
|
|
1939
|
+
const logger$14 = createLogger("amend-cmd");
|
|
1928
1940
|
/**
|
|
1929
1941
|
* Detect and apply supersessions after a phase completes in an amendment run.
|
|
1930
1942
|
*
|
|
@@ -1955,7 +1967,7 @@ function runPostPhaseSupersessionDetection(db, amendmentRunId, currentPhase, han
|
|
|
1955
1967
|
});
|
|
1956
1968
|
} catch (err) {
|
|
1957
1969
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1958
|
-
logger$
|
|
1970
|
+
logger$14.warn({
|
|
1959
1971
|
err,
|
|
1960
1972
|
originalId: parentMatch.id,
|
|
1961
1973
|
supersedingId: newDec.id
|
|
@@ -2089,7 +2101,7 @@ async function runAmendAction(options) {
|
|
|
2089
2101
|
for (let i = startIdx; i < phaseOrder.length; i++) {
|
|
2090
2102
|
const currentPhase = phaseOrder[i];
|
|
2091
2103
|
const amendmentContext = handler.loadContextForPhase(currentPhase);
|
|
2092
|
-
logger$
|
|
2104
|
+
logger$14.info({
|
|
2093
2105
|
phase: currentPhase,
|
|
2094
2106
|
amendmentContextLen: amendmentContext.length
|
|
2095
2107
|
}, "Amendment context loaded for phase");
|
|
@@ -2209,7 +2221,7 @@ async function runAmendAction(options) {
|
|
|
2209
2221
|
} catch (err) {
|
|
2210
2222
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2211
2223
|
process.stderr.write(`Error: ${msg}\n`);
|
|
2212
|
-
logger$
|
|
2224
|
+
logger$14.error({ err }, "amend failed");
|
|
2213
2225
|
return 1;
|
|
2214
2226
|
} finally {
|
|
2215
2227
|
try {
|
|
@@ -2682,11 +2694,11 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
2682
2694
|
try {
|
|
2683
2695
|
const { createExperimenter } = await import(
|
|
2684
2696
|
/* @vite-ignore */
|
|
2685
|
-
"../experimenter-
|
|
2697
|
+
"../experimenter-CvxtqzXz.js"
|
|
2686
2698
|
);
|
|
2687
2699
|
const { getLatestRun: getLatest } = await import(
|
|
2688
2700
|
/* @vite-ignore */
|
|
2689
|
-
"../decisions-
|
|
2701
|
+
"../decisions-D7Ao_KcL.js"
|
|
2690
2702
|
);
|
|
2691
2703
|
const dbPath = join(projectRoot, ".substrate", "substrate.db");
|
|
2692
2704
|
const expDbWrapper = new DatabaseWrapper(dbPath);
|
|
@@ -2696,7 +2708,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
2696
2708
|
const expDb = expDbWrapper.db;
|
|
2697
2709
|
const { runRunAction: runPipeline } = await import(
|
|
2698
2710
|
/* @vite-ignore */
|
|
2699
|
-
"../run-
|
|
2711
|
+
"../run-CQJP37ZC.js"
|
|
2700
2712
|
);
|
|
2701
2713
|
const runStoryFn = async (opts) => {
|
|
2702
2714
|
const exitCode = await runPipeline({
|
|
@@ -2943,7 +2955,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
2943
2955
|
|
|
2944
2956
|
//#endregion
|
|
2945
2957
|
//#region src/cli/commands/metrics.ts
|
|
2946
|
-
const logger$
|
|
2958
|
+
const logger$13 = createLogger("metrics-cmd");
|
|
2947
2959
|
async function openTelemetryDb(dbPath) {
|
|
2948
2960
|
if (!existsSync(dbPath)) return null;
|
|
2949
2961
|
try {
|
|
@@ -3015,7 +3027,7 @@ function printCategoryTable(stats, label) {
|
|
|
3015
3027
|
}
|
|
3016
3028
|
}
|
|
3017
3029
|
async function runMetricsAction(options) {
|
|
3018
|
-
const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis, sprint, story, taskType, since, aggregate, efficiency, recommendations, turns, consumers, categories, compareStories } = options;
|
|
3030
|
+
const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis, sprint, story, taskType, since, aggregate, efficiency, recommendations, turns, consumers, categories, compareStories, routingRecommendations } = options;
|
|
3019
3031
|
const telemetryModes = [
|
|
3020
3032
|
efficiency,
|
|
3021
3033
|
recommendations,
|
|
@@ -3154,6 +3166,59 @@ async function runMetricsAction(options) {
|
|
|
3154
3166
|
} catch {}
|
|
3155
3167
|
}
|
|
3156
3168
|
}
|
|
3169
|
+
if (routingRecommendations === true) {
|
|
3170
|
+
const dbRoot$1 = await resolveMainRepoRoot(projectRoot);
|
|
3171
|
+
const dbDir = join(dbRoot$1, ".substrate");
|
|
3172
|
+
const doltStatePath = join(dbDir, "state", ".dolt");
|
|
3173
|
+
const doltExists = existsSync(doltStatePath);
|
|
3174
|
+
const stateBackend = doltExists ? "dolt" : "file";
|
|
3175
|
+
const stateBasePath = join(dbDir, "state");
|
|
3176
|
+
const stateStore = createStateStore({
|
|
3177
|
+
backend: stateBackend,
|
|
3178
|
+
basePath: stateBasePath
|
|
3179
|
+
});
|
|
3180
|
+
await stateStore.initialize();
|
|
3181
|
+
try {
|
|
3182
|
+
const runIndexRaw = await stateStore.getMetric("__global__", "phase_token_breakdown_runs");
|
|
3183
|
+
const runIds = Array.isArray(runIndexRaw) ? runIndexRaw : [];
|
|
3184
|
+
const recentRunIds = runIds.slice(-20);
|
|
3185
|
+
const breakdowns = [];
|
|
3186
|
+
for (const runId of recentRunIds) try {
|
|
3187
|
+
const raw = await stateStore.getMetric(runId, "phase_token_breakdown");
|
|
3188
|
+
if (raw !== void 0 && raw !== null) {
|
|
3189
|
+
const parsed = typeof raw === "string" ? JSON.parse(raw) : raw;
|
|
3190
|
+
breakdowns.push(parsed);
|
|
3191
|
+
}
|
|
3192
|
+
} catch {}
|
|
3193
|
+
const routingConfigPath = join(dbDir, "routing.yml");
|
|
3194
|
+
let routingConfig = null;
|
|
3195
|
+
if (existsSync(routingConfigPath)) try {
|
|
3196
|
+
const { loadModelRoutingConfig } = await import("../routing-DWCBjrt7.js");
|
|
3197
|
+
routingConfig = loadModelRoutingConfig(routingConfigPath);
|
|
3198
|
+
} catch {}
|
|
3199
|
+
if (routingConfig === null) routingConfig = {
|
|
3200
|
+
version: 1,
|
|
3201
|
+
phases: {},
|
|
3202
|
+
baseline_model: "claude-sonnet"
|
|
3203
|
+
};
|
|
3204
|
+
const recommender = new RoutingRecommender(createLogger("routing:recommender"));
|
|
3205
|
+
const analysis$1 = recommender.analyze(breakdowns, routingConfig);
|
|
3206
|
+
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
3207
|
+
recommendations: analysis$1.recommendations,
|
|
3208
|
+
analysisRuns: analysis$1.analysisRuns,
|
|
3209
|
+
insufficientData: analysis$1.insufficientData
|
|
3210
|
+
}, "json", true) + "\n");
|
|
3211
|
+
else {
|
|
3212
|
+
process.stdout.write(`Routing Recommendations:\n`);
|
|
3213
|
+
if (analysis$1.insufficientData) process.stdout.write(`No recommendations yet — need at least 3 pipeline runs\n`);
|
|
3214
|
+
else if (analysis$1.recommendations.length === 0) process.stdout.write(` No recommendations — all phases are in the neutral zone\n`);
|
|
3215
|
+
else for (const rec of analysis$1.recommendations) process.stdout.write(` ${rec.phase} | ${rec.currentModel} → ${rec.suggestedModel} | est. savings: ${Math.round(rec.estimatedSavingsPct)}%\n`);
|
|
3216
|
+
}
|
|
3217
|
+
} finally {
|
|
3218
|
+
await stateStore.close().catch(() => {});
|
|
3219
|
+
}
|
|
3220
|
+
return 0;
|
|
3221
|
+
}
|
|
3157
3222
|
if (analysis !== void 0) {
|
|
3158
3223
|
const dbRoot$1 = await resolveMainRepoRoot(projectRoot);
|
|
3159
3224
|
const reportBase = join(dbRoot$1, "_bmad-output", "supervisor-reports", `${analysis}-analysis`);
|
|
@@ -3251,7 +3316,7 @@ async function runMetricsAction(options) {
|
|
|
3251
3316
|
doltMetrics = await stateStore.queryMetrics(doltFilter);
|
|
3252
3317
|
await stateStore.close();
|
|
3253
3318
|
} catch (doltErr) {
|
|
3254
|
-
logger$
|
|
3319
|
+
logger$13.warn({ err: doltErr }, "StateStore query failed — falling back to SQLite metrics only");
|
|
3255
3320
|
}
|
|
3256
3321
|
const storyMetricDecisions = getDecisionsByCategory(db, STORY_METRICS);
|
|
3257
3322
|
const storyMetrics = storyMetricDecisions.map((d) => {
|
|
@@ -3282,9 +3347,21 @@ async function runMetricsAction(options) {
|
|
|
3282
3347
|
};
|
|
3283
3348
|
}
|
|
3284
3349
|
});
|
|
3350
|
+
const phaseBreakdownMap = {};
|
|
3351
|
+
try {
|
|
3352
|
+
const kvStore = new FileStateStore({ basePath: join(dbRoot, ".substrate") });
|
|
3353
|
+
for (const run of runs) {
|
|
3354
|
+
const raw = await kvStore.getMetric(run.run_id, "phase_token_breakdown");
|
|
3355
|
+
phaseBreakdownMap[run.run_id] = raw !== void 0 ? raw : null;
|
|
3356
|
+
}
|
|
3357
|
+
} catch {}
|
|
3285
3358
|
if (outputFormat === "json") {
|
|
3359
|
+
const runsWithBreakdown = runs.map((run) => ({
|
|
3360
|
+
...run,
|
|
3361
|
+
phase_token_breakdown: phaseBreakdownMap[run.run_id] ?? null
|
|
3362
|
+
}));
|
|
3286
3363
|
const jsonPayload = {
|
|
3287
|
-
runs,
|
|
3364
|
+
runs: runsWithBreakdown,
|
|
3288
3365
|
story_metrics: storyMetrics
|
|
3289
3366
|
};
|
|
3290
3367
|
if (doltMetrics !== void 0) if (aggregate) {
|
|
@@ -3322,6 +3399,11 @@ async function runMetricsAction(options) {
|
|
|
3322
3399
|
process.stdout.write(` Stories: attempted=${run.stories_attempted} succeeded=${run.stories_succeeded} failed=${run.stories_failed} escalated=${run.stories_escalated}\n`);
|
|
3323
3400
|
process.stdout.write(` Tokens: ${(run.total_input_tokens ?? 0).toLocaleString()} in / ${(run.total_output_tokens ?? 0).toLocaleString()} out $${(run.total_cost_usd ?? 0).toFixed(4)}\n`);
|
|
3324
3401
|
process.stdout.write(` Cycles: ${run.total_review_cycles} | Dispatches: ${run.total_dispatches} | Concurrency: ${run.concurrency_setting}\n`);
|
|
3402
|
+
const breakdown = phaseBreakdownMap[run.run_id];
|
|
3403
|
+
if (breakdown !== null && breakdown !== void 0 && breakdown.entries.length > 0) {
|
|
3404
|
+
process.stdout.write(" Phase Token Breakdown:\n");
|
|
3405
|
+
for (const entry of breakdown.entries) process.stdout.write(` ${entry.phase.padEnd(10)} | ${entry.model.padEnd(30)} | in: ${entry.inputTokens} | out: ${entry.outputTokens} | dispatches: ${entry.dispatchCount}\n`);
|
|
3406
|
+
}
|
|
3325
3407
|
}
|
|
3326
3408
|
}
|
|
3327
3409
|
if (storyMetrics.length > 0) {
|
|
@@ -3377,7 +3459,7 @@ async function runMetricsAction(options) {
|
|
|
3377
3459
|
const msg = err instanceof Error ? err.message : String(err);
|
|
3378
3460
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
3379
3461
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
3380
|
-
logger$
|
|
3462
|
+
logger$13.error({ err }, "metrics action failed");
|
|
3381
3463
|
return 1;
|
|
3382
3464
|
} finally {
|
|
3383
3465
|
try {
|
|
@@ -3386,7 +3468,7 @@ async function runMetricsAction(options) {
|
|
|
3386
3468
|
}
|
|
3387
3469
|
}
|
|
3388
3470
|
function registerMetricsCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
|
|
3389
|
-
program.command("metrics").description("Show historical pipeline run metrics and cross-run comparison").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").option("--limit <n>", "Number of runs to show (default: 10)", (v) => parseInt(v, 10), 10).option("--compare <run-id-a,run-id-b>", "Compare two runs side-by-side (comma-separated IDs, e.g. abc123,def456)").option("--tag-baseline <run-id>", "Mark a run as the performance baseline").option("--analysis <run-id>", "Read and output the analysis report for the specified run (AC5 of Story 17-3)").option("--sprint <sprint>", "Filter StateStore metrics by sprint (e.g. sprint-1)").option("--story <story-key>", "Filter StateStore metrics by story key (e.g. 26-1)").option("--task-type <type>", "Filter StateStore metrics by task type (e.g. dev-story)").option("--since <iso-date>", "Filter StateStore metrics at or after this ISO timestamp").option("--aggregate", "Aggregate StateStore metrics grouped by task_type").option("--efficiency", "Show telemetry efficiency scores for recent stories").option("--recommendations", "Show all telemetry recommendations across stories").option("--turns <storyKey>", "Show per-turn analysis for a specific story").option("--consumers <storyKey>", "Show consumer stats for a specific story").option("--categories", "Show category stats (optionally scoped by --story <storyKey>)").option("--compare-stories <storyA,storyB>", "Compare efficiency scores of two stories side-by-side (comma-separated keys)").action(async (opts) => {
|
|
3471
|
+
program.command("metrics").description("Show historical pipeline run metrics and cross-run comparison").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").option("--limit <n>", "Number of runs to show (default: 10)", (v) => parseInt(v, 10), 10).option("--compare <run-id-a,run-id-b>", "Compare two runs side-by-side (comma-separated IDs, e.g. abc123,def456)").option("--tag-baseline <run-id>", "Mark a run as the performance baseline").option("--analysis <run-id>", "Read and output the analysis report for the specified run (AC5 of Story 17-3)").option("--sprint <sprint>", "Filter StateStore metrics by sprint (e.g. sprint-1)").option("--story <story-key>", "Filter StateStore metrics by story key (e.g. 26-1)").option("--task-type <type>", "Filter StateStore metrics by task type (e.g. dev-story)").option("--since <iso-date>", "Filter StateStore metrics at or after this ISO timestamp").option("--aggregate", "Aggregate StateStore metrics grouped by task_type").option("--efficiency", "Show telemetry efficiency scores for recent stories").option("--recommendations", "Show all telemetry recommendations across stories").option("--turns <storyKey>", "Show per-turn analysis for a specific story").option("--consumers <storyKey>", "Show consumer stats for a specific story").option("--categories", "Show category stats (optionally scoped by --story <storyKey>)").option("--compare-stories <storyA,storyB>", "Compare efficiency scores of two stories side-by-side (comma-separated keys)").option("--routing-recommendations", "Show routing recommendations derived from phase token breakdown history").action(async (opts) => {
|
|
3390
3472
|
const outputFormat = opts.outputFormat === "json" ? "json" : "human";
|
|
3391
3473
|
let compareIds;
|
|
3392
3474
|
if (opts.compare !== void 0) {
|
|
@@ -3420,7 +3502,8 @@ function registerMetricsCommand(program, _version = "0.0.0", projectRoot = proce
|
|
|
3420
3502
|
...opts.turns !== void 0 && { turns: opts.turns },
|
|
3421
3503
|
...opts.consumers !== void 0 && { consumers: opts.consumers },
|
|
3422
3504
|
...opts.categories !== void 0 && { categories: opts.categories },
|
|
3423
|
-
...compareStoriesIds !== void 0 && { compareStories: compareStoriesIds }
|
|
3505
|
+
...compareStoriesIds !== void 0 && { compareStories: compareStoriesIds },
|
|
3506
|
+
...opts.routingRecommendations !== void 0 && { routingRecommendations: opts.routingRecommendations }
|
|
3424
3507
|
};
|
|
3425
3508
|
const exitCode = await runMetricsAction(metricsOpts);
|
|
3426
3509
|
process.exitCode = exitCode;
|
|
@@ -3805,7 +3888,7 @@ function getPlanningCostTotal(db, sessionId) {
|
|
|
3805
3888
|
function getLatestSessionId(_db) {
|
|
3806
3889
|
return null;
|
|
3807
3890
|
}
|
|
3808
|
-
const logger$
|
|
3891
|
+
const logger$12 = createLogger("cost-cmd");
|
|
3809
3892
|
const COST_EXIT_SUCCESS = 0;
|
|
3810
3893
|
const COST_EXIT_ERROR = 1;
|
|
3811
3894
|
/**
|
|
@@ -4051,7 +4134,7 @@ async function runCostAction(options) {
|
|
|
4051
4134
|
} catch (err) {
|
|
4052
4135
|
const message = err instanceof Error ? err.message : String(err);
|
|
4053
4136
|
process.stderr.write(`Error: ${message}\n`);
|
|
4054
|
-
logger$
|
|
4137
|
+
logger$12.error({ err }, "runCostAction failed");
|
|
4055
4138
|
return COST_EXIT_ERROR;
|
|
4056
4139
|
} finally {
|
|
4057
4140
|
if (wrapper !== null) try {
|
|
@@ -4153,7 +4236,7 @@ function applyMonitorSchema(db) {
|
|
|
4153
4236
|
|
|
4154
4237
|
//#endregion
|
|
4155
4238
|
//#region src/persistence/monitor-database.ts
|
|
4156
|
-
const logger$
|
|
4239
|
+
const logger$11 = createLogger("persistence:monitor-db");
|
|
4157
4240
|
var MonitorDatabaseImpl = class {
|
|
4158
4241
|
_db = null;
|
|
4159
4242
|
_path;
|
|
@@ -4164,10 +4247,10 @@ var MonitorDatabaseImpl = class {
|
|
|
4164
4247
|
this._open();
|
|
4165
4248
|
}
|
|
4166
4249
|
_open() {
|
|
4167
|
-
logger$
|
|
4250
|
+
logger$11.info({ path: this._path }, "Opening monitor database");
|
|
4168
4251
|
this._db = new Database(this._path);
|
|
4169
4252
|
const walResult = this._db.pragma("journal_mode = WAL");
|
|
4170
|
-
if (walResult?.[0]?.journal_mode !== "wal") logger$
|
|
4253
|
+
if (walResult?.[0]?.journal_mode !== "wal") logger$11.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
|
|
4171
4254
|
this._db.pragma("synchronous = NORMAL");
|
|
4172
4255
|
this._db.pragma("busy_timeout = 5000");
|
|
4173
4256
|
this._db.pragma("foreign_keys = ON");
|
|
@@ -4202,7 +4285,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4202
4285
|
total_retries = total_retries + @retries,
|
|
4203
4286
|
last_updated = @lastUpdated
|
|
4204
4287
|
`);
|
|
4205
|
-
logger$
|
|
4288
|
+
logger$11.info({ path: this._path }, "Monitor database ready");
|
|
4206
4289
|
}
|
|
4207
4290
|
_assertOpen() {
|
|
4208
4291
|
if (this._db === null) throw new Error("MonitorDatabase: connection is closed");
|
|
@@ -4351,7 +4434,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4351
4434
|
const db = this._assertOpen();
|
|
4352
4435
|
const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
4353
4436
|
const result = db.prepare("DELETE FROM task_metrics WHERE recorded_at < @cutoff").run({ cutoff });
|
|
4354
|
-
logger$
|
|
4437
|
+
logger$11.info({
|
|
4355
4438
|
cutoff,
|
|
4356
4439
|
deleted: result.changes
|
|
4357
4440
|
}, "Pruned old task_metrics rows");
|
|
@@ -4390,13 +4473,13 @@ var MonitorDatabaseImpl = class {
|
|
|
4390
4473
|
db.exec("ROLLBACK");
|
|
4391
4474
|
throw err;
|
|
4392
4475
|
}
|
|
4393
|
-
logger$
|
|
4476
|
+
logger$11.info("Rebuilt performance_aggregates from task_metrics");
|
|
4394
4477
|
}
|
|
4395
4478
|
resetAllData() {
|
|
4396
4479
|
const db = this._assertOpen();
|
|
4397
4480
|
db.exec("DELETE FROM task_metrics");
|
|
4398
4481
|
db.exec("DELETE FROM performance_aggregates");
|
|
4399
|
-
logger$
|
|
4482
|
+
logger$11.info({ path: this._path }, "Monitor data reset — all rows deleted");
|
|
4400
4483
|
}
|
|
4401
4484
|
getTaskMetricsDateRange() {
|
|
4402
4485
|
const db = this._assertOpen();
|
|
@@ -4413,7 +4496,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4413
4496
|
if (this._db === null) return;
|
|
4414
4497
|
this._db.close();
|
|
4415
4498
|
this._db = null;
|
|
4416
|
-
logger$
|
|
4499
|
+
logger$11.info({ path: this._path }, "Monitor database closed");
|
|
4417
4500
|
}
|
|
4418
4501
|
/**
|
|
4419
4502
|
* Access the raw underlying database for testing purposes only.
|
|
@@ -4426,7 +4509,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4426
4509
|
|
|
4427
4510
|
//#endregion
|
|
4428
4511
|
//#region src/modules/monitor/recommendation-engine.ts
|
|
4429
|
-
const logger$
|
|
4512
|
+
const logger$10 = createLogger("monitor:recommendations");
|
|
4430
4513
|
var RecommendationEngine = class {
|
|
4431
4514
|
_monitorDb;
|
|
4432
4515
|
_filters;
|
|
@@ -4459,7 +4542,7 @@ var RecommendationEngine = class {
|
|
|
4459
4542
|
const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
4460
4543
|
const aggregates = this._monitorDb.getAggregates({ sinceDate });
|
|
4461
4544
|
if (aggregates.length === 0) {
|
|
4462
|
-
logger$
|
|
4545
|
+
logger$10.debug("No performance aggregates found — no recommendations to generate");
|
|
4463
4546
|
return [];
|
|
4464
4547
|
}
|
|
4465
4548
|
const byTaskType = new Map();
|
|
@@ -4524,7 +4607,7 @@ var RecommendationEngine = class {
|
|
|
4524
4607
|
if (confDiff !== 0) return confDiff;
|
|
4525
4608
|
return b.improvement_percentage - a.improvement_percentage;
|
|
4526
4609
|
});
|
|
4527
|
-
logger$
|
|
4610
|
+
logger$10.debug({ count: recommendations.length }, "Generated routing recommendations");
|
|
4528
4611
|
return recommendations;
|
|
4529
4612
|
}
|
|
4530
4613
|
/**
|
|
@@ -4690,7 +4773,7 @@ function generateMonitorReport(monitorDb, options = {}) {
|
|
|
4690
4773
|
|
|
4691
4774
|
//#endregion
|
|
4692
4775
|
//#region src/cli/commands/monitor.ts
|
|
4693
|
-
const logger$
|
|
4776
|
+
const logger$9 = createLogger("monitor-cmd");
|
|
4694
4777
|
const MONITOR_EXIT_SUCCESS = 0;
|
|
4695
4778
|
const MONITOR_EXIT_ERROR = 1;
|
|
4696
4779
|
/**
|
|
@@ -4893,7 +4976,7 @@ async function runMonitorReportAction(options) {
|
|
|
4893
4976
|
} catch (err) {
|
|
4894
4977
|
const message = err instanceof Error ? err.message : String(err);
|
|
4895
4978
|
process.stderr.write(`Error: ${message}\n`);
|
|
4896
|
-
logger$
|
|
4979
|
+
logger$9.error({ err }, "runMonitorReportAction failed");
|
|
4897
4980
|
return MONITOR_EXIT_ERROR;
|
|
4898
4981
|
} finally {
|
|
4899
4982
|
if (monitorDb !== null) try {
|
|
@@ -4955,7 +5038,7 @@ async function runMonitorStatusAction(options) {
|
|
|
4955
5038
|
} catch (err) {
|
|
4956
5039
|
const message = err instanceof Error ? err.message : String(err);
|
|
4957
5040
|
process.stderr.write(`Error: ${message}\n`);
|
|
4958
|
-
logger$
|
|
5041
|
+
logger$9.error({ err }, "runMonitorStatusAction failed");
|
|
4959
5042
|
return MONITOR_EXIT_ERROR;
|
|
4960
5043
|
} finally {
|
|
4961
5044
|
if (monitorDb !== null) try {
|
|
@@ -4990,7 +5073,7 @@ async function runMonitorResetAction(options) {
|
|
|
4990
5073
|
} catch (err) {
|
|
4991
5074
|
const message = err instanceof Error ? err.message : String(err);
|
|
4992
5075
|
process.stderr.write(`Error: ${message}\n`);
|
|
4993
|
-
logger$
|
|
5076
|
+
logger$9.error({ err }, "runMonitorResetAction failed");
|
|
4994
5077
|
return MONITOR_EXIT_ERROR;
|
|
4995
5078
|
} finally {
|
|
4996
5079
|
if (monitorDb !== null) try {
|
|
@@ -5038,7 +5121,7 @@ async function runMonitorRecommendationsAction(options) {
|
|
|
5038
5121
|
} catch (err) {
|
|
5039
5122
|
const message = err instanceof Error ? err.message : String(err);
|
|
5040
5123
|
process.stderr.write(`Error: ${message}\n`);
|
|
5041
|
-
logger$
|
|
5124
|
+
logger$9.error({ err }, "runMonitorRecommendationsAction failed");
|
|
5042
5125
|
return MONITOR_EXIT_ERROR;
|
|
5043
5126
|
} finally {
|
|
5044
5127
|
if (monitorDb !== null) try {
|
|
@@ -5116,7 +5199,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
|
|
|
5116
5199
|
|
|
5117
5200
|
//#endregion
|
|
5118
5201
|
//#region src/modules/git-worktree/git-worktree-manager-impl.ts
|
|
5119
|
-
const logger$
|
|
5202
|
+
const logger$8 = createLogger("git-worktree");
|
|
5120
5203
|
const BRANCH_PREFIX = "substrate/task-";
|
|
5121
5204
|
const DEFAULT_WORKTREE_BASE = ".substrate-worktrees";
|
|
5122
5205
|
var GitWorktreeManagerImpl = class {
|
|
@@ -5135,7 +5218,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5135
5218
|
this._db = db;
|
|
5136
5219
|
this._onTaskReady = ({ taskId }) => {
|
|
5137
5220
|
this._handleTaskReady(taskId).catch((err) => {
|
|
5138
|
-
logger$
|
|
5221
|
+
logger$8.error({
|
|
5139
5222
|
taskId,
|
|
5140
5223
|
err
|
|
5141
5224
|
}, "Unhandled error in _handleTaskReady");
|
|
@@ -5149,40 +5232,40 @@ var GitWorktreeManagerImpl = class {
|
|
|
5149
5232
|
};
|
|
5150
5233
|
}
|
|
5151
5234
|
async initialize() {
|
|
5152
|
-
logger$
|
|
5235
|
+
logger$8.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
|
|
5153
5236
|
await this.verifyGitVersion();
|
|
5154
5237
|
const cleaned = await this.cleanupAllWorktrees();
|
|
5155
|
-
if (cleaned > 0) logger$
|
|
5238
|
+
if (cleaned > 0) logger$8.info({ cleaned }, "Recovered orphaned worktrees on startup");
|
|
5156
5239
|
this._eventBus.on("task:ready", this._onTaskReady);
|
|
5157
5240
|
this._eventBus.on("task:complete", this._onTaskComplete);
|
|
5158
5241
|
this._eventBus.on("task:failed", this._onTaskFailed);
|
|
5159
|
-
logger$
|
|
5242
|
+
logger$8.info("GitWorktreeManager initialized");
|
|
5160
5243
|
}
|
|
5161
5244
|
async shutdown() {
|
|
5162
|
-
logger$
|
|
5245
|
+
logger$8.info("GitWorktreeManager.shutdown()");
|
|
5163
5246
|
this._eventBus.off("task:ready", this._onTaskReady);
|
|
5164
5247
|
this._eventBus.off("task:complete", this._onTaskComplete);
|
|
5165
5248
|
this._eventBus.off("task:failed", this._onTaskFailed);
|
|
5166
5249
|
await this.cleanupAllWorktrees();
|
|
5167
|
-
logger$
|
|
5250
|
+
logger$8.info("GitWorktreeManager shutdown complete");
|
|
5168
5251
|
}
|
|
5169
5252
|
async _handleTaskReady(taskId) {
|
|
5170
|
-
logger$
|
|
5253
|
+
logger$8.debug({ taskId }, "task:ready — creating worktree");
|
|
5171
5254
|
try {
|
|
5172
5255
|
await this.createWorktree(taskId);
|
|
5173
5256
|
} catch (err) {
|
|
5174
|
-
logger$
|
|
5257
|
+
logger$8.error({
|
|
5175
5258
|
taskId,
|
|
5176
5259
|
err
|
|
5177
5260
|
}, "Failed to create worktree for task");
|
|
5178
5261
|
}
|
|
5179
5262
|
}
|
|
5180
5263
|
async _handleTaskDone(taskId) {
|
|
5181
|
-
logger$
|
|
5264
|
+
logger$8.debug({ taskId }, "task done — cleaning up worktree");
|
|
5182
5265
|
try {
|
|
5183
5266
|
await this.cleanupWorktree(taskId);
|
|
5184
5267
|
} catch (err) {
|
|
5185
|
-
logger$
|
|
5268
|
+
logger$8.warn({
|
|
5186
5269
|
taskId,
|
|
5187
5270
|
err
|
|
5188
5271
|
}, "Failed to cleanup worktree for task");
|
|
@@ -5192,7 +5275,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5192
5275
|
if (!taskId || taskId.trim().length === 0) throw new Error("createWorktree: taskId must be a non-empty string");
|
|
5193
5276
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5194
5277
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5195
|
-
logger$
|
|
5278
|
+
logger$8.debug({
|
|
5196
5279
|
taskId,
|
|
5197
5280
|
branchName,
|
|
5198
5281
|
worktreePath,
|
|
@@ -5212,7 +5295,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5212
5295
|
worktreePath,
|
|
5213
5296
|
createdAt
|
|
5214
5297
|
};
|
|
5215
|
-
logger$
|
|
5298
|
+
logger$8.info({
|
|
5216
5299
|
taskId,
|
|
5217
5300
|
branchName,
|
|
5218
5301
|
worktreePath
|
|
@@ -5222,7 +5305,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5222
5305
|
async cleanupWorktree(taskId) {
|
|
5223
5306
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5224
5307
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5225
|
-
logger$
|
|
5308
|
+
logger$8.debug({
|
|
5226
5309
|
taskId,
|
|
5227
5310
|
branchName,
|
|
5228
5311
|
worktreePath
|
|
@@ -5232,7 +5315,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5232
5315
|
await access$1(worktreePath);
|
|
5233
5316
|
worktreeExists = true;
|
|
5234
5317
|
} catch {
|
|
5235
|
-
logger$
|
|
5318
|
+
logger$8.debug({
|
|
5236
5319
|
taskId,
|
|
5237
5320
|
worktreePath
|
|
5238
5321
|
}, "cleanupWorktree: worktree does not exist, skipping removal");
|
|
@@ -5240,7 +5323,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5240
5323
|
if (worktreeExists) try {
|
|
5241
5324
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
5242
5325
|
} catch (err) {
|
|
5243
|
-
logger$
|
|
5326
|
+
logger$8.warn({
|
|
5244
5327
|
taskId,
|
|
5245
5328
|
worktreePath,
|
|
5246
5329
|
err
|
|
@@ -5249,7 +5332,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5249
5332
|
try {
|
|
5250
5333
|
await removeBranch(branchName, this._projectRoot);
|
|
5251
5334
|
} catch (err) {
|
|
5252
|
-
logger$
|
|
5335
|
+
logger$8.warn({
|
|
5253
5336
|
taskId,
|
|
5254
5337
|
branchName,
|
|
5255
5338
|
err
|
|
@@ -5259,13 +5342,13 @@ var GitWorktreeManagerImpl = class {
|
|
|
5259
5342
|
taskId,
|
|
5260
5343
|
branchName
|
|
5261
5344
|
});
|
|
5262
|
-
logger$
|
|
5345
|
+
logger$8.info({
|
|
5263
5346
|
taskId,
|
|
5264
5347
|
branchName
|
|
5265
5348
|
}, "Worktree cleaned up");
|
|
5266
5349
|
}
|
|
5267
5350
|
async cleanupAllWorktrees() {
|
|
5268
|
-
logger$
|
|
5351
|
+
logger$8.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
|
|
5269
5352
|
const orphanedPaths = await getOrphanedWorktrees(this._projectRoot, this._baseDirectory);
|
|
5270
5353
|
let cleaned = 0;
|
|
5271
5354
|
for (const worktreePath of orphanedPaths) {
|
|
@@ -5274,12 +5357,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
5274
5357
|
try {
|
|
5275
5358
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
5276
5359
|
worktreeRemoved = true;
|
|
5277
|
-
logger$
|
|
5360
|
+
logger$8.debug({
|
|
5278
5361
|
taskId,
|
|
5279
5362
|
worktreePath
|
|
5280
5363
|
}, "cleanupAllWorktrees: removed orphaned worktree");
|
|
5281
5364
|
} catch (err) {
|
|
5282
|
-
logger$
|
|
5365
|
+
logger$8.warn({
|
|
5283
5366
|
taskId,
|
|
5284
5367
|
worktreePath,
|
|
5285
5368
|
err
|
|
@@ -5289,12 +5372,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
5289
5372
|
let branchRemoved = false;
|
|
5290
5373
|
try {
|
|
5291
5374
|
branchRemoved = await removeBranch(branchName, this._projectRoot);
|
|
5292
|
-
if (branchRemoved) logger$
|
|
5375
|
+
if (branchRemoved) logger$8.debug({
|
|
5293
5376
|
taskId,
|
|
5294
5377
|
branchName
|
|
5295
5378
|
}, "cleanupAllWorktrees: removed orphaned branch");
|
|
5296
5379
|
} catch (err) {
|
|
5297
|
-
logger$
|
|
5380
|
+
logger$8.warn({
|
|
5298
5381
|
taskId,
|
|
5299
5382
|
branchName,
|
|
5300
5383
|
err
|
|
@@ -5302,14 +5385,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
5302
5385
|
}
|
|
5303
5386
|
if (worktreeRemoved) cleaned++;
|
|
5304
5387
|
}
|
|
5305
|
-
if (cleaned > 0) logger$
|
|
5388
|
+
if (cleaned > 0) logger$8.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
|
|
5306
5389
|
return cleaned;
|
|
5307
5390
|
}
|
|
5308
5391
|
async detectConflicts(taskId, targetBranch = "main") {
|
|
5309
5392
|
if (!taskId || taskId.trim().length === 0) throw new Error("detectConflicts: taskId must be a non-empty string");
|
|
5310
5393
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5311
5394
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5312
|
-
logger$
|
|
5395
|
+
logger$8.debug({
|
|
5313
5396
|
taskId,
|
|
5314
5397
|
branchName,
|
|
5315
5398
|
targetBranch
|
|
@@ -5337,7 +5420,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5337
5420
|
branch: branchName,
|
|
5338
5421
|
conflictingFiles: report.conflictingFiles
|
|
5339
5422
|
});
|
|
5340
|
-
logger$
|
|
5423
|
+
logger$8.info({
|
|
5341
5424
|
taskId,
|
|
5342
5425
|
hasConflicts: report.hasConflicts,
|
|
5343
5426
|
conflictCount: conflictingFiles.length
|
|
@@ -5347,14 +5430,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
5347
5430
|
async mergeWorktree(taskId, targetBranch = "main") {
|
|
5348
5431
|
if (!taskId || taskId.trim().length === 0) throw new Error("mergeWorktree: taskId must be a non-empty string");
|
|
5349
5432
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5350
|
-
logger$
|
|
5433
|
+
logger$8.debug({
|
|
5351
5434
|
taskId,
|
|
5352
5435
|
branchName,
|
|
5353
5436
|
targetBranch
|
|
5354
5437
|
}, "mergeWorktree");
|
|
5355
5438
|
const conflictReport = await this.detectConflicts(taskId, targetBranch);
|
|
5356
5439
|
if (conflictReport.hasConflicts) {
|
|
5357
|
-
logger$
|
|
5440
|
+
logger$8.info({
|
|
5358
5441
|
taskId,
|
|
5359
5442
|
conflictCount: conflictReport.conflictingFiles.length
|
|
5360
5443
|
}, "Merge skipped due to conflicts");
|
|
@@ -5376,7 +5459,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5376
5459
|
success: true,
|
|
5377
5460
|
mergedFiles
|
|
5378
5461
|
};
|
|
5379
|
-
logger$
|
|
5462
|
+
logger$8.info({
|
|
5380
5463
|
taskId,
|
|
5381
5464
|
branchName,
|
|
5382
5465
|
mergedFileCount: mergedFiles.length
|
|
@@ -5384,7 +5467,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5384
5467
|
return result;
|
|
5385
5468
|
}
|
|
5386
5469
|
async listWorktrees() {
|
|
5387
|
-
logger$
|
|
5470
|
+
logger$8.debug({
|
|
5388
5471
|
projectRoot: this._projectRoot,
|
|
5389
5472
|
baseDirectory: this._baseDirectory
|
|
5390
5473
|
}, "listWorktrees");
|
|
@@ -5408,7 +5491,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5408
5491
|
createdAt
|
|
5409
5492
|
});
|
|
5410
5493
|
}
|
|
5411
|
-
logger$
|
|
5494
|
+
logger$8.debug({ count: results.length }, "listWorktrees: found worktrees");
|
|
5412
5495
|
return results;
|
|
5413
5496
|
}
|
|
5414
5497
|
getWorktreePath(taskId) {
|
|
@@ -5428,7 +5511,7 @@ function createGitWorktreeManager(options) {
|
|
|
5428
5511
|
|
|
5429
5512
|
//#endregion
|
|
5430
5513
|
//#region src/cli/commands/merge.ts
|
|
5431
|
-
const logger$
|
|
5514
|
+
const logger$7 = createLogger("merge-cmd");
|
|
5432
5515
|
const MERGE_EXIT_SUCCESS = 0;
|
|
5433
5516
|
const MERGE_EXIT_CONFLICT = 1;
|
|
5434
5517
|
const MERGE_EXIT_ERROR = 2;
|
|
@@ -5466,7 +5549,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5466
5549
|
projectRoot
|
|
5467
5550
|
});
|
|
5468
5551
|
try {
|
|
5469
|
-
logger$
|
|
5552
|
+
logger$7.info({
|
|
5470
5553
|
taskId,
|
|
5471
5554
|
targetBranch
|
|
5472
5555
|
}, "Running conflict detection...");
|
|
@@ -5488,7 +5571,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5488
5571
|
} catch (err) {
|
|
5489
5572
|
const message = err instanceof Error ? err.message : String(err);
|
|
5490
5573
|
console.error(`Error merging task "${taskId}": ${message}`);
|
|
5491
|
-
logger$
|
|
5574
|
+
logger$7.error({
|
|
5492
5575
|
taskId,
|
|
5493
5576
|
err
|
|
5494
5577
|
}, "merge --task failed");
|
|
@@ -5542,7 +5625,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
|
|
|
5542
5625
|
error: message
|
|
5543
5626
|
});
|
|
5544
5627
|
console.log(` Error for task "${taskId}": ${message}`);
|
|
5545
|
-
logger$
|
|
5628
|
+
logger$7.error({
|
|
5546
5629
|
taskId,
|
|
5547
5630
|
err
|
|
5548
5631
|
}, "merge --all: task failed");
|
|
@@ -5595,7 +5678,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {
|
|
|
5595
5678
|
|
|
5596
5679
|
//#endregion
|
|
5597
5680
|
//#region src/cli/commands/worktrees.ts
|
|
5598
|
-
const logger$
|
|
5681
|
+
const logger$6 = createLogger("worktrees-cmd");
|
|
5599
5682
|
const WORKTREES_EXIT_SUCCESS = 0;
|
|
5600
5683
|
const WORKTREES_EXIT_ERROR = 1;
|
|
5601
5684
|
/** Valid task statuses for filtering */
|
|
@@ -5722,7 +5805,7 @@ async function listWorktreesAction(options) {
|
|
|
5722
5805
|
try {
|
|
5723
5806
|
worktreeInfos = await manager.listWorktrees();
|
|
5724
5807
|
} catch (err) {
|
|
5725
|
-
logger$
|
|
5808
|
+
logger$6.error({ err }, "Failed to list worktrees");
|
|
5726
5809
|
const message = err instanceof Error ? err.message : String(err);
|
|
5727
5810
|
process.stderr.write(`Error listing worktrees: ${message}\n`);
|
|
5728
5811
|
return WORKTREES_EXIT_ERROR;
|
|
@@ -5749,7 +5832,7 @@ async function listWorktreesAction(options) {
|
|
|
5749
5832
|
} catch (err) {
|
|
5750
5833
|
const message = err instanceof Error ? err.message : String(err);
|
|
5751
5834
|
process.stderr.write(`Error: ${message}\n`);
|
|
5752
|
-
logger$
|
|
5835
|
+
logger$6.error({ err }, "listWorktreesAction failed");
|
|
5753
5836
|
return WORKTREES_EXIT_ERROR;
|
|
5754
5837
|
}
|
|
5755
5838
|
}
|
|
@@ -5790,7 +5873,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc
|
|
|
5790
5873
|
|
|
5791
5874
|
//#endregion
|
|
5792
5875
|
//#region src/cli/commands/brainstorm.ts
|
|
5793
|
-
const logger$
|
|
5876
|
+
const logger$5 = createLogger("brainstorm-cmd");
|
|
5794
5877
|
/**
|
|
5795
5878
|
* Detect whether the project has existing planning artifacts that indicate
|
|
5796
5879
|
* this is an amendment session (vs. a brand-new project brainstorm).
|
|
@@ -5836,13 +5919,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
|
|
|
5836
5919
|
try {
|
|
5837
5920
|
brief = await readFile(briefPath, "utf-8");
|
|
5838
5921
|
} catch {
|
|
5839
|
-
logger$
|
|
5922
|
+
logger$5.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
|
|
5840
5923
|
process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
|
|
5841
5924
|
}
|
|
5842
5925
|
try {
|
|
5843
5926
|
prd = await readFile(prdPath, "utf-8");
|
|
5844
5927
|
} catch {
|
|
5845
|
-
logger$
|
|
5928
|
+
logger$5.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
|
|
5846
5929
|
process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
|
|
5847
5930
|
}
|
|
5848
5931
|
return {
|
|
@@ -6051,7 +6134,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
6051
6134
|
}
|
|
6052
6135
|
];
|
|
6053
6136
|
const defaultDispatch = async (prompt, personaName) => {
|
|
6054
|
-
logger$
|
|
6137
|
+
logger$5.debug({
|
|
6055
6138
|
personaName,
|
|
6056
6139
|
promptLength: prompt.length
|
|
6057
6140
|
}, "Dispatching to persona (stub mode)");
|
|
@@ -6068,7 +6151,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
6068
6151
|
};
|
|
6069
6152
|
} catch (err) {
|
|
6070
6153
|
const msg = err instanceof Error ? err.message : String(err);
|
|
6071
|
-
logger$
|
|
6154
|
+
logger$5.error({
|
|
6072
6155
|
err,
|
|
6073
6156
|
personaName: persona.name
|
|
6074
6157
|
}, "Persona dispatch failed");
|
|
@@ -6220,7 +6303,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
|
|
|
6220
6303
|
}
|
|
6221
6304
|
});
|
|
6222
6305
|
rl.on("error", (err) => {
|
|
6223
|
-
logger$
|
|
6306
|
+
logger$5.error({ err }, "readline error");
|
|
6224
6307
|
if (!sessionEnded) endSession(false);
|
|
6225
6308
|
});
|
|
6226
6309
|
});
|
|
@@ -6810,7 +6893,7 @@ function renderReadinessReport(decisions) {
|
|
|
6810
6893
|
|
|
6811
6894
|
//#endregion
|
|
6812
6895
|
//#region src/cli/commands/export.ts
|
|
6813
|
-
const logger$
|
|
6896
|
+
const logger$4 = createLogger("export-cmd");
|
|
6814
6897
|
/**
|
|
6815
6898
|
* Execute the export action.
|
|
6816
6899
|
* Returns an exit code (0 = success, 1 = error).
|
|
@@ -6937,7 +7020,7 @@ async function runExportAction(options) {
|
|
|
6937
7020
|
const msg = err instanceof Error ? err.message : String(err);
|
|
6938
7021
|
if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: msg }) + "\n");
|
|
6939
7022
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
6940
|
-
logger$
|
|
7023
|
+
logger$4.error({ err }, "export action failed");
|
|
6941
7024
|
return 1;
|
|
6942
7025
|
} finally {
|
|
6943
7026
|
if (dbWrapper !== void 0) try {
|
|
@@ -7024,7 +7107,7 @@ function getRetryableEscalations(db, runId) {
|
|
|
7024
7107
|
|
|
7025
7108
|
//#endregion
|
|
7026
7109
|
//#region src/cli/commands/retry-escalated.ts
|
|
7027
|
-
const logger$
|
|
7110
|
+
const logger$3 = createLogger("retry-escalated-cmd");
|
|
7028
7111
|
async function runRetryEscalatedAction(options) {
|
|
7029
7112
|
const { runId, dryRun, outputFormat, projectRoot, concurrency, pack: packName, registry: injectedRegistry } = options;
|
|
7030
7113
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
@@ -7122,7 +7205,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
7122
7205
|
});
|
|
7123
7206
|
}
|
|
7124
7207
|
} catch (err) {
|
|
7125
|
-
logger$
|
|
7208
|
+
logger$3.warn({ err }, "Failed to record token usage");
|
|
7126
7209
|
}
|
|
7127
7210
|
});
|
|
7128
7211
|
if (outputFormat === "human") {
|
|
@@ -7144,7 +7227,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
7144
7227
|
const msg = err instanceof Error ? err.message : String(err);
|
|
7145
7228
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
7146
7229
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
7147
|
-
logger$
|
|
7230
|
+
logger$3.error({ err }, "retry-escalated failed");
|
|
7148
7231
|
return 1;
|
|
7149
7232
|
} finally {
|
|
7150
7233
|
try {
|
|
@@ -7423,6 +7506,170 @@ function registerHistoryCommand(program) {
|
|
|
7423
7506
|
});
|
|
7424
7507
|
}
|
|
7425
7508
|
|
|
7509
|
+
//#endregion
|
|
7510
|
+
//#region src/cli/commands/repo-map.ts
|
|
7511
|
+
const logger$2 = createLogger("cli:repo-map");
|
|
7512
|
+
/** Validate that a symbol name contains only safe identifier characters. */
|
|
7513
|
+
function isValidSymbolName(name) {
|
|
7514
|
+
return /^[a-zA-Z0-9_]+$/.test(name);
|
|
7515
|
+
}
|
|
7516
|
+
/**
 * Register the `substrate repo-map` command.
 *
 * Modes (mutually exclusive, checked in order after validation):
 * - `--show` (also the default when no flag given): print staleness status
 * - `--update`: run an incremental symbol-index update (Dolt backend only)
 * - `--query <symbol>`: look up a symbol name in the index
 * - `--dry-run <storyFile>`: preview the repo-map context for a story file
 * All modes except `--dry-run` honor `--output-format text|json`.
 *
 * @param program - Commander program to attach the command to.
 */
function registerRepoMapCommand(program) {
program.command("repo-map").description("Show, update, or query the repo-map symbol index").option("--show", "Show repo-map staleness status").option("--update", "Trigger an incremental repo-map update (Dolt backend only)").option("--query <symbol>", "Query the repo-map for a specific symbol name").option("--dry-run <storyFile>", "Preview repo-map context that would be injected for a story file").option("--output-format <format>", "Output format: text or json", "text").action(async (options) => {
// Reject unsafe --query values up front, before any backend work.
if (options.query !== void 0 && !isValidSymbolName(options.query)) {
process.stderr.write(`Error: --query value must match /^[a-zA-Z0-9_]+$/ (got: ${options.query})\n`);
process.exitCode = 1;
return;
}
// State lives under `<main repo root>/.substrate/state`; a `.dolt`
// directory there indicates the Dolt backend is enabled.
const dbRoot = await resolveMainRepoRoot(process.cwd());
const statePath = join$1(dbRoot, ".substrate", "state");
const doltStatePath = join$1(statePath, ".dolt");
const isDolt = existsSync$1(doltStatePath);
// Shared error emitter for every mode when the Dolt backend is missing.
// JSON mode prints a structured "unavailable" payload; text mode writes
// a flag-specific hint to stderr. Both set a non-zero exit code.
const notDoltError = (flag) => {
if (options.outputFormat === "json") console.log(JSON.stringify({
backend: "file",
status: "unavailable",
hint: "Repo-map requires the Dolt backend. Run `substrate init --dolt` to enable."
}));
else process.stderr.write(`Error: ${flag} requires the Dolt backend. Run \`substrate init --dolt\` to enable.\n`);
process.exitCode = 1;
};
if (!isDolt) {
// Pick the flag name to mention in the error, matching mode precedence.
const flag = options.update ? "--update" : options.query ? "--query" : options.dryRun ? "--dry-run" : "--show";
notDoltError(flag);
return;
}
// Wire up the Dolt-backed repositories and query engine once; each mode
// below reuses these.
const doltClient = new DoltClient({ repoPath: statePath });
const symbolRepo = new DoltSymbolRepository(doltClient, logger$2);
const metaRepo = new DoltRepoMapMetaRepository(doltClient);
const repoMapModule = new RepoMapModule(metaRepo, logger$2);
const queryEngine = new RepoMapQueryEngine(symbolRepo, logger$2);
// --show, or default when no other mode flag was passed.
if (options.show === true || !options.update && !options.query && !options.dryRun) {
const meta = await metaRepo.getMeta();
const staleResult = await repoMapModule.checkStaleness();
// Staleness: unknown when no meta stored yet; stale when checkStaleness
// reports a non-null result; otherwise current.
let staleness = "unknown";
if (meta === null) staleness = "unknown";
else if (staleResult !== null) staleness = "stale";
else staleness = "current";
// Only count symbols when meta exists (avoids a pointless query).
const symbolCount = meta !== null ? (await symbolRepo.getSymbols()).length : 0;
if (options.outputFormat === "json") console.log(JSON.stringify({
symbolCount,
commitSha: meta?.commitSha ?? null,
fileCount: meta?.fileCount ?? 0,
updatedAt: meta?.updatedAt?.toISOString() ?? null,
staleness
}));
else if (meta !== null) {
console.log(`Repo-map: ${symbolCount} symbols, ${meta.fileCount} files`);
console.log(`Commit: ${meta.commitSha}`);
console.log(`Updated: ${meta.updatedAt.toISOString()}`);
if (staleness === "stale") console.log("Status: STALE (run `substrate repo-map --update` to refresh)");
else console.log("Status: UP TO DATE");
} else console.log("Repo-map: no data stored yet");
return;
}
// --update: re-parse changed files and refresh the stored symbol index.
if (options.update === true) {
logger$2.info("repo-map --update: triggering incremental update");
const gitClient = new GitClient(logger$2);
const grammarLoader = new GrammarLoader(logger$2);
const parser = new SymbolParser(grammarLoader, logger$2);
const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$2);
await storage.incrementalUpdate(dbRoot, parser);
// Re-read meta/symbols after the update so the report reflects new state.
const meta = await metaRepo.getMeta();
const symbolCount = (await symbolRepo.getSymbols()).length;
if (options.outputFormat === "json") console.log(JSON.stringify({
result: "updated",
symbolCount,
fileCount: meta?.fileCount ?? 0,
commitSha: meta?.commitSha ?? null,
updatedAt: meta?.updatedAt?.toISOString() ?? null
}));
else console.log(`Repo-map updated: ${symbolCount} symbols across ${meta?.fileCount ?? 0} files`);
return;
}
// --query: look up a single (already validated) symbol name.
if (options.query !== void 0) {
logger$2.debug({ symbol: options.query }, "repo-map --query");
const result = await queryEngine.query({
symbols: [options.query],
maxTokens: 4e3
});
if (options.outputFormat === "json") console.log(JSON.stringify(result, null, 2));
else if (result.symbolCount === 0) console.log(`No symbols found matching '${options.query}'.`);
else {
console.log(`Found ${result.symbolCount} symbol(s) for '${options.query}':`);
for (const sym of result.symbols) console.log(` ${sym.filePath}:${sym.lineNumber} ${sym.symbolType} ${sym.symbolName}`);
}
return;
}
// --dry-run: show the context that would be injected for a story file.
// NOTE(review): this branch always emits JSON regardless of
// --output-format — presumably intentional for tooling; confirm.
if (options.dryRun !== void 0) {
let storyContent;
try {
storyContent = await readFile$1(options.dryRun, "utf-8");
} catch {
// Unreadable/missing story file is a user error, not a crash.
process.stderr.write(`Error: Cannot read story file: ${options.dryRun}\n`);
process.exitCode = 1;
return;
}
const injector = new RepoMapInjector(queryEngine, logger$2);
// 2e3 is the token budget for injected context — TODO confirm source.
const injectionResult = await injector.buildContext(storyContent, 2e3);
console.log(JSON.stringify({
text: injectionResult.text,
symbolCount: injectionResult.symbolCount,
truncated: injectionResult.truncated
}));
return;
}
});
}
|
|
7623
|
+
|
|
7624
|
+
//#endregion
|
|
7625
|
+
//#region src/cli/commands/routing.ts
|
|
7626
|
+
// Scoped logger for the `substrate routing` CLI command (bundler-renamed).
const logger$1 = createLogger("cli:routing");
|
|
7627
|
+
/**
 * Register the `substrate routing` command.
 *
 * Reads the `routing_tune_log` metric from the substrate state store:
 * - default (no flags): prints a one-line summary of how many auto-tune
 *   entries exist, with a hint to use `--history`
 * - `--history`: prints every applied model change, newest first
 * - `--output-format text|json` controls output shape in both modes
 *
 * Fix over previous revision: the history branch sorted the array returned
 * by `store.getMetric` in place (`Array#sort` mutates); it now sorts a copy
 * so the store's value is never reordered as a side effect.
 *
 * @param program - Commander program to attach the command to.
 */
function registerRoutingCommand(program) {
  program.command("routing").description("Show routing configuration and auto-tune history").option("--history", "Show the routing auto-tune log (model changes applied)").option("--output-format <format>", "Output format: text or json", "text").action(async (options) => {
    // State lives under `<main repo root>/.substrate/state`; a `.dolt`
    // directory there selects the Dolt backend, otherwise the file backend.
    const dbRoot = await resolveMainRepoRoot(process.cwd());
    const statePath = join$1(dbRoot, ".substrate", "state");
    const doltStatePath = join$1(statePath, ".dolt");
    const storeConfig = existsSync$1(doltStatePath) ? {
      backend: "dolt",
      basePath: statePath
    } : {
      backend: "file",
      basePath: statePath
    };
    const store = createStateStore(storeConfig);
    try {
      await store.initialize();
      if (options.history === true) {
        logger$1.debug("routing --history: fetching tune log");
        const rawHistory = await store.getMetric("global", "routing_tune_log");
        // Copy before sorting: Array#sort mutates in place, and the array
        // handed back by the store should not be reordered as a side effect.
        // Sort newest-first by ISO-ish appliedAt string.
        const entries = Array.isArray(rawHistory) ? [...rawHistory].sort((a, b) => b.appliedAt.localeCompare(a.appliedAt)) : [];
        if (options.outputFormat === "json") {
          console.log(JSON.stringify({ entries }, null, 2));
          return;
        }
        if (entries.length === 0) {
          console.log("No routing auto-tune history found.");
          return;
        }
        console.log("Routing auto-tune history:");
        for (const entry of entries) console.log(` ${entry.appliedAt} phase=${entry.phase} ${entry.oldModel} → ${entry.newModel} savings=${entry.estimatedSavingsPct.toFixed(1)}% run=${entry.runId}`);
        return;
      }
      // Default (summary) mode: report only how many tune-log entries exist.
      const raw = await store.getMetric("global", "routing_tune_log");
      const entryCount = Array.isArray(raw) ? raw.length : 0;
      if (options.outputFormat === "json") console.log(JSON.stringify({ tuneLogEntries: entryCount }));
      else {
        console.log(`Routing auto-tune log: ${entryCount} entr${entryCount === 1 ? "y" : "ies"}`);
        if (entryCount === 0) console.log("No auto-tune changes applied yet. Use --history for details.");
        else console.log("Run `substrate routing --history` to see full history.");
      }
    } finally {
      // Always release the store handle, even on error.
      await store.close();
    }
  });
}
|
|
7672
|
+
|
|
7426
7673
|
//#endregion
|
|
7427
7674
|
//#region src/cli/index.ts
|
|
7428
7675
|
process.setMaxListeners(20);
|
|
@@ -7475,6 +7722,8 @@ async function createProgram() {
|
|
|
7475
7722
|
registerDiffCommand(program);
|
|
7476
7723
|
registerHistoryCommand(program);
|
|
7477
7724
|
registerMigrateCommand(program);
|
|
7725
|
+
registerRepoMapCommand(program);
|
|
7726
|
+
registerRoutingCommand(program);
|
|
7478
7727
|
registerCostCommand(program, version);
|
|
7479
7728
|
registerMonitorCommand(program, version);
|
|
7480
7729
|
registerMergeCommand(program);
|
|
@@ -7487,8 +7736,8 @@ async function createProgram() {
|
|
|
7487
7736
|
/** Fire-and-forget startup version check (story 8.3, AC3/AC5) */
|
|
7488
7737
|
function checkForUpdatesInBackground(currentVersion) {
|
|
7489
7738
|
if (process.env.SUBSTRATE_NO_UPDATE_CHECK === "1") return;
|
|
7490
|
-
import("../upgrade-
|
|
7491
|
-
const { createVersionManager } = await import("../version-manager-impl-
|
|
7739
|
+
import("../upgrade-njy4XENS.js").then(async () => {
|
|
7740
|
+
const { createVersionManager } = await import("../version-manager-impl-QwroczYS.js");
|
|
7492
7741
|
const vm = createVersionManager();
|
|
7493
7742
|
const result = await vm.checkForUpdates();
|
|
7494
7743
|
if (result.updateAvailable) {
|