substrate-ai 0.3.7 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +361 -122
- package/dist/cli/templates/claude-md-substrate-section.md +6 -0
- package/dist/{decisions-CbysnTi5.js → decisions-D7Ao_KcL.js} +1 -1
- package/dist/{decisions-CdpiJIm5.js → decisions-Db8GTbH2.js} +1 -1
- package/dist/{experimenter-jto3orYl.js → experimenter-CvxtqzXz.js} +4 -4
- package/dist/{git-utils-UbKLSGsD.js → git-utils-C-fdrHF_.js} +1 -1
- package/dist/index.d.ts +39 -0
- package/dist/{operational-DisxqtjC.js → operational-C0_y8DAs.js} +1 -1
- package/dist/routing-CZfJB3y9.js +477 -0
- package/dist/routing-DWCBjrt7.js +4 -0
- package/dist/run-CQJP37ZC.js +9 -0
- package/dist/{run-N7voPS_k.js → run-cqQmW8wL.js} +6674 -4243
- package/dist/{upgrade-DTzeenA-.js → upgrade-C8_VcI8B.js} +2 -2
- package/dist/{upgrade-BlJKjr6I.js → upgrade-njy4XENS.js} +2 -2
- package/dist/{version-manager-impl-zsJjBhak.js → version-manager-impl-DTlmGvHb.js} +1 -1
- package/dist/{version-manager-impl-BsHqAeGT.js → version-manager-impl-QwroczYS.js} +1 -1
- package/package.json +1 -1
- package/packs/bmad/prompts/code-review.md +2 -0
- package/packs/bmad/prompts/dev-story.md +2 -0
- package/dist/run-C_hKt2wY.js +0 -8
package/dist/cli/index.js
CHANGED
|
@@ -1,14 +1,15 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltNotInstalled, FileStateStore, IngestionServer, SUBSTRATE_OWNED_SETTINGS_KEYS, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-
|
|
2
|
+
import { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-cqQmW8wL.js";
|
|
3
3
|
import { createLogger } from "../logger-D2fS2ccL.js";
|
|
4
4
|
import { AdapterRegistry } from "../adapter-registry-rSOJ9Kvz.js";
|
|
5
5
|
import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-DtZW1maj.js";
|
|
6
6
|
import { ConfigError, createEventBus } from "../helpers-BihqWgVe.js";
|
|
7
|
-
import {
|
|
8
|
-
import {
|
|
9
|
-
import {
|
|
10
|
-
import "../
|
|
11
|
-
import
|
|
7
|
+
import { RoutingRecommender } from "../routing-CZfJB3y9.js";
|
|
8
|
+
import { addTokenUsage, createDecision, createPipelineRun, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestRun, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-Db8GTbH2.js";
|
|
9
|
+
import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-C0_y8DAs.js";
|
|
10
|
+
import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-C-fdrHF_.js";
|
|
11
|
+
import "../version-manager-impl-DTlmGvHb.js";
|
|
12
|
+
import { registerUpgradeCommand } from "../upgrade-C8_VcI8B.js";
|
|
12
13
|
import { Command } from "commander";
|
|
13
14
|
import { fileURLToPath } from "url";
|
|
14
15
|
import { dirname, join, resolve } from "path";
|
|
@@ -19,7 +20,7 @@ import { createRequire } from "node:module";
|
|
|
19
20
|
import * as path$1 from "node:path";
|
|
20
21
|
import { isAbsolute, join as join$1 } from "node:path";
|
|
21
22
|
import Database from "better-sqlite3";
|
|
22
|
-
import { access as access$1 } from "node:fs/promises";
|
|
23
|
+
import { access as access$1, readFile as readFile$1 } from "node:fs/promises";
|
|
23
24
|
import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
|
|
24
25
|
import { createInterface } from "node:readline";
|
|
25
26
|
import { homedir } from "os";
|
|
@@ -259,7 +260,7 @@ function registerAdaptersCommand(program, version, registry) {
|
|
|
259
260
|
|
|
260
261
|
//#endregion
|
|
261
262
|
//#region src/cli/commands/init.ts
|
|
262
|
-
const logger$
|
|
263
|
+
const logger$18 = createLogger("init");
|
|
263
264
|
const __dirname = dirname(new URL(import.meta.url).pathname);
|
|
264
265
|
const INIT_EXIT_SUCCESS = 0;
|
|
265
266
|
const INIT_EXIT_ERROR = 1;
|
|
@@ -280,7 +281,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
280
281
|
const version = resolveBmadMethodVersion();
|
|
281
282
|
if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
|
|
282
283
|
process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
|
|
283
|
-
logger$
|
|
284
|
+
logger$18.info({
|
|
284
285
|
version,
|
|
285
286
|
dest: bmadDest
|
|
286
287
|
}, "Scaffolding BMAD framework");
|
|
@@ -290,7 +291,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
290
291
|
const destDir = join(bmadDest, dir);
|
|
291
292
|
mkdirSync(destDir, { recursive: true });
|
|
292
293
|
cpSync(srcDir, destDir, { recursive: true });
|
|
293
|
-
logger$
|
|
294
|
+
logger$18.info({
|
|
294
295
|
dir,
|
|
295
296
|
dest: destDir
|
|
296
297
|
}, "Scaffolded BMAD framework directory");
|
|
@@ -309,7 +310,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
|
|
|
309
310
|
"document_output_language: English"
|
|
310
311
|
].join("\n") + "\n";
|
|
311
312
|
await writeFile(configFile, configStub, "utf8");
|
|
312
|
-
logger$
|
|
313
|
+
logger$18.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
|
|
313
314
|
}
|
|
314
315
|
}
|
|
315
316
|
const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
|
|
@@ -324,7 +325,7 @@ async function scaffoldClaudeMd(projectRoot) {
|
|
|
324
325
|
try {
|
|
325
326
|
sectionContent = await readFile(templatePath, "utf8");
|
|
326
327
|
} catch {
|
|
327
|
-
logger$
|
|
328
|
+
logger$18.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
|
|
328
329
|
return;
|
|
329
330
|
}
|
|
330
331
|
if (!sectionContent.endsWith("\n")) sectionContent += "\n";
|
|
@@ -342,7 +343,7 @@ async function scaffoldClaudeMd(projectRoot) {
|
|
|
342
343
|
newContent = existingContent + separator + sectionContent;
|
|
343
344
|
}
|
|
344
345
|
await writeFile(claudeMdPath, newContent, "utf8");
|
|
345
|
-
logger$
|
|
346
|
+
logger$18.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
|
|
346
347
|
}
|
|
347
348
|
async function scaffoldStatuslineScript(projectRoot) {
|
|
348
349
|
const pkgRoot = findPackageRoot(__dirname);
|
|
@@ -353,7 +354,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
353
354
|
try {
|
|
354
355
|
content = await readFile(templatePath, "utf8");
|
|
355
356
|
} catch {
|
|
356
|
-
logger$
|
|
357
|
+
logger$18.warn({ templatePath }, "statusline.sh template not found; skipping");
|
|
357
358
|
return;
|
|
358
359
|
}
|
|
359
360
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -361,7 +362,7 @@ async function scaffoldStatuslineScript(projectRoot) {
|
|
|
361
362
|
mkdirSync(claudeDir, { recursive: true });
|
|
362
363
|
await writeFile(statuslinePath, content, "utf8");
|
|
363
364
|
chmodSync(statuslinePath, 493);
|
|
364
|
-
logger$
|
|
365
|
+
logger$18.info({ statuslinePath }, "Wrote .claude/statusline.sh");
|
|
365
366
|
}
|
|
366
367
|
async function scaffoldClaudeSettings(projectRoot) {
|
|
367
368
|
const claudeDir = join(projectRoot, ".claude");
|
|
@@ -377,7 +378,7 @@ async function scaffoldClaudeSettings(projectRoot) {
|
|
|
377
378
|
if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
|
|
378
379
|
mkdirSync(claudeDir, { recursive: true });
|
|
379
380
|
await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
|
|
380
|
-
logger$
|
|
381
|
+
logger$18.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
|
|
381
382
|
}
|
|
382
383
|
function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
|
|
383
384
|
try {
|
|
@@ -447,7 +448,7 @@ async function compileBmadAgents(bmadDir) {
|
|
|
447
448
|
writeFileSync(mdPath, result.xml, "utf-8");
|
|
448
449
|
compiled++;
|
|
449
450
|
} catch (compileErr) {
|
|
450
|
-
logger$
|
|
451
|
+
logger$18.debug({
|
|
451
452
|
err: compileErr,
|
|
452
453
|
file
|
|
453
454
|
}, "Failed to compile agent YAML");
|
|
@@ -468,9 +469,9 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
468
469
|
const _require = createRequire(join(__dirname, "synthetic.js"));
|
|
469
470
|
try {
|
|
470
471
|
const compiledCount = await compileBmadAgents(bmadDir);
|
|
471
|
-
if (compiledCount > 0) logger$
|
|
472
|
+
if (compiledCount > 0) logger$18.info({ compiledCount }, "Compiled agent YAML files to MD");
|
|
472
473
|
} catch (compileErr) {
|
|
473
|
-
logger$
|
|
474
|
+
logger$18.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
|
|
474
475
|
}
|
|
475
476
|
const { AgentCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "agent-command-generator.js"));
|
|
476
477
|
const { WorkflowCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "workflow-command-generator.js"));
|
|
@@ -482,7 +483,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
482
483
|
const manifestGen = new ManifestGenerator();
|
|
483
484
|
await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
|
|
484
485
|
} catch (manifestErr) {
|
|
485
|
-
logger$
|
|
486
|
+
logger$18.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
|
|
486
487
|
}
|
|
487
488
|
const commandsDir = join(projectRoot, ".claude", "commands");
|
|
488
489
|
mkdirSync(commandsDir, { recursive: true });
|
|
@@ -498,7 +499,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
498
499
|
const taskToolCount = await taskToolGen.writeDashArtifacts(commandsDir, taskToolArtifacts);
|
|
499
500
|
const total = agentCount + workflowCount + taskToolCount;
|
|
500
501
|
if (outputFormat !== "json") process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
|
|
501
|
-
logger$
|
|
502
|
+
logger$18.info({
|
|
502
503
|
agentCount,
|
|
503
504
|
workflowCount,
|
|
504
505
|
taskToolCount,
|
|
@@ -508,7 +509,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
|
|
|
508
509
|
} catch (err) {
|
|
509
510
|
const msg = err instanceof Error ? err.message : String(err);
|
|
510
511
|
if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
|
|
511
|
-
logger$
|
|
512
|
+
logger$18.warn({ err }, "scaffoldClaudeCommands failed; init continues");
|
|
512
513
|
}
|
|
513
514
|
}
|
|
514
515
|
const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
|
|
@@ -583,7 +584,7 @@ async function runInitAction(options) {
|
|
|
583
584
|
discoveryReport = await registry.discoverAndRegister();
|
|
584
585
|
} catch (err) {
|
|
585
586
|
const message = err instanceof Error ? err.message : String(err);
|
|
586
|
-
logger$
|
|
587
|
+
logger$18.error({ err }, "Adapter discovery failed");
|
|
587
588
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
|
|
588
589
|
else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
|
|
589
590
|
return INIT_EXIT_ERROR;
|
|
@@ -632,12 +633,12 @@ async function runInitAction(options) {
|
|
|
632
633
|
return INIT_EXIT_ERROR;
|
|
633
634
|
}
|
|
634
635
|
if (force && existsSync(localManifest)) {
|
|
635
|
-
logger$
|
|
636
|
+
logger$18.info({ pack: packName }, "Replacing existing pack with bundled version");
|
|
636
637
|
process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
|
|
637
638
|
}
|
|
638
639
|
mkdirSync(dirname(packPath), { recursive: true });
|
|
639
640
|
cpSync(bundledPackPath, packPath, { recursive: true });
|
|
640
|
-
logger$
|
|
641
|
+
logger$18.info({
|
|
641
642
|
pack: packName,
|
|
642
643
|
dest: packPath
|
|
643
644
|
}, "Scaffolded methodology pack");
|
|
@@ -674,17 +675,17 @@ async function runInitAction(options) {
|
|
|
674
675
|
process.stderr.write(`${err.message}\n`);
|
|
675
676
|
return INIT_EXIT_ERROR;
|
|
676
677
|
}
|
|
677
|
-
logger$
|
|
678
|
+
logger$18.debug("Dolt not installed, skipping auto-init");
|
|
678
679
|
} else {
|
|
679
680
|
const msg = err instanceof Error ? err.message : String(err);
|
|
680
681
|
if (doltMode === "force") {
|
|
681
682
|
process.stderr.write(`✗ Dolt initialization failed: ${msg}\n`);
|
|
682
683
|
return INIT_EXIT_ERROR;
|
|
683
684
|
}
|
|
684
|
-
logger$
|
|
685
|
+
logger$18.warn({ error: msg }, "Dolt auto-init failed (non-blocking)");
|
|
685
686
|
}
|
|
686
687
|
}
|
|
687
|
-
else logger$
|
|
688
|
+
else logger$18.debug("Dolt step was skipped (--no-dolt)");
|
|
688
689
|
const successMsg = `Pack '${packName}' and database initialized successfully at ${dbPath}`;
|
|
689
690
|
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
690
691
|
pack: packName,
|
|
@@ -708,6 +709,7 @@ async function runInitAction(options) {
|
|
|
708
709
|
process.stdout.write(` .claude/commands/ /substrate-run, /substrate-supervisor, /substrate-metrics\n`);
|
|
709
710
|
process.stdout.write(` .substrate/ config, database, routing policy\n`);
|
|
710
711
|
if (doltInitialized) process.stdout.write(`✓ Dolt state store initialized at .substrate/state/\n`);
|
|
712
|
+
else if (doltMode !== "skip") process.stdout.write(`ℹ Dolt not detected — install Dolt for versioned state, \`substrate diff\`, and observability persistence. See: https://docs.dolthub.com/introduction/installation\n`);
|
|
711
713
|
process.stdout.write("\n Next steps:\n 1. Start a Claude Code session in this project\n 2. Tell Claude: \"Run the substrate pipeline\"\n 3. Or use the /substrate-run slash command for a guided run\n");
|
|
712
714
|
}
|
|
713
715
|
return INIT_EXIT_SUCCESS;
|
|
@@ -715,7 +717,7 @@ async function runInitAction(options) {
|
|
|
715
717
|
const msg = err instanceof Error ? err.message : String(err);
|
|
716
718
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
717
719
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
718
|
-
logger$
|
|
720
|
+
logger$18.error({ err }, "init failed");
|
|
719
721
|
return INIT_EXIT_ERROR;
|
|
720
722
|
}
|
|
721
723
|
}
|
|
@@ -738,7 +740,7 @@ function registerInitCommand(program, _version, registry) {
|
|
|
738
740
|
|
|
739
741
|
//#endregion
|
|
740
742
|
//#region src/cli/commands/config.ts
|
|
741
|
-
const logger$
|
|
743
|
+
const logger$17 = createLogger("config-cmd");
|
|
742
744
|
const CONFIG_EXIT_SUCCESS = 0;
|
|
743
745
|
const CONFIG_EXIT_ERROR = 1;
|
|
744
746
|
const CONFIG_EXIT_INVALID = 2;
|
|
@@ -764,7 +766,7 @@ async function runConfigShow(opts = {}) {
|
|
|
764
766
|
return CONFIG_EXIT_INVALID;
|
|
765
767
|
}
|
|
766
768
|
const message = err instanceof Error ? err.message : String(err);
|
|
767
|
-
logger$
|
|
769
|
+
logger$17.error({ err }, "Failed to load configuration");
|
|
768
770
|
process.stderr.write(` Error loading configuration: ${message}\n`);
|
|
769
771
|
return CONFIG_EXIT_ERROR;
|
|
770
772
|
}
|
|
@@ -838,7 +840,7 @@ async function runConfigExport(opts = {}) {
|
|
|
838
840
|
return CONFIG_EXIT_INVALID;
|
|
839
841
|
}
|
|
840
842
|
const message = err instanceof Error ? err.message : String(err);
|
|
841
|
-
logger$
|
|
843
|
+
logger$17.error({ err }, "Failed to load configuration");
|
|
842
844
|
process.stderr.write(`Error loading configuration: ${message}\n`);
|
|
843
845
|
return CONFIG_EXIT_ERROR;
|
|
844
846
|
}
|
|
@@ -992,7 +994,7 @@ function registerConfigCommand(program, _version) {
|
|
|
992
994
|
|
|
993
995
|
//#endregion
|
|
994
996
|
//#region src/cli/commands/resume.ts
|
|
995
|
-
const logger$
|
|
997
|
+
const logger$16 = createLogger("resume-cmd");
|
|
996
998
|
async function runResumeAction(options) {
|
|
997
999
|
const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName, registry } = options;
|
|
998
1000
|
if (stopAfter !== void 0 && !VALID_PHASES.includes(stopAfter)) {
|
|
@@ -1075,7 +1077,7 @@ async function runResumeAction(options) {
|
|
|
1075
1077
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1076
1078
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1077
1079
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1078
|
-
logger$
|
|
1080
|
+
logger$16.error({ err }, "auto resume failed");
|
|
1079
1081
|
return 1;
|
|
1080
1082
|
} finally {
|
|
1081
1083
|
try {
|
|
@@ -1240,7 +1242,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1240
1242
|
});
|
|
1241
1243
|
}
|
|
1242
1244
|
} catch (err) {
|
|
1243
|
-
logger$
|
|
1245
|
+
logger$16.warn({ err }, "Failed to record token usage");
|
|
1244
1246
|
}
|
|
1245
1247
|
});
|
|
1246
1248
|
const storyKeys = resolveStoryKeys(db, projectRoot, { pipelineRunId: runId });
|
|
@@ -1295,7 +1297,7 @@ async function runFullPipelineFromPhase(options) {
|
|
|
1295
1297
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1296
1298
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1297
1299
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1298
|
-
logger$
|
|
1300
|
+
logger$16.error({ err }, "pipeline from phase failed");
|
|
1299
1301
|
return 1;
|
|
1300
1302
|
} finally {
|
|
1301
1303
|
try {
|
|
@@ -1321,7 +1323,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
|
|
|
1321
1323
|
|
|
1322
1324
|
//#endregion
|
|
1323
1325
|
//#region src/cli/commands/status.ts
|
|
1324
|
-
const logger$
|
|
1326
|
+
const logger$15 = createLogger("status-cmd");
|
|
1325
1327
|
async function runStatusAction(options) {
|
|
1326
1328
|
const { outputFormat, runId, projectRoot, stateStore, history } = options;
|
|
1327
1329
|
if (history === true) {
|
|
@@ -1377,7 +1379,7 @@ async function runStatusAction(options) {
|
|
|
1377
1379
|
if (stateStore) try {
|
|
1378
1380
|
storeStories = await stateStore.queryStories({});
|
|
1379
1381
|
} catch (err) {
|
|
1380
|
-
logger$
|
|
1382
|
+
logger$15.debug({ err }, "StateStore query failed, continuing without store data");
|
|
1381
1383
|
}
|
|
1382
1384
|
if (outputFormat === "json") {
|
|
1383
1385
|
const statusOutput = buildPipelineStatusOutput(run, tokenSummary, decisionsCount, storiesCount);
|
|
@@ -1480,7 +1482,7 @@ async function runStatusAction(options) {
|
|
|
1480
1482
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1481
1483
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
1482
1484
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
1483
|
-
logger$
|
|
1485
|
+
logger$15.error({ err }, "status action failed");
|
|
1484
1486
|
return 1;
|
|
1485
1487
|
} finally {
|
|
1486
1488
|
try {
|
|
@@ -1924,7 +1926,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
|
|
|
1924
1926
|
|
|
1925
1927
|
//#endregion
|
|
1926
1928
|
//#region src/cli/commands/amend.ts
|
|
1927
|
-
const logger$
|
|
1929
|
+
const logger$14 = createLogger("amend-cmd");
|
|
1928
1930
|
/**
|
|
1929
1931
|
* Detect and apply supersessions after a phase completes in an amendment run.
|
|
1930
1932
|
*
|
|
@@ -1955,7 +1957,7 @@ function runPostPhaseSupersessionDetection(db, amendmentRunId, currentPhase, han
|
|
|
1955
1957
|
});
|
|
1956
1958
|
} catch (err) {
|
|
1957
1959
|
const msg = err instanceof Error ? err.message : String(err);
|
|
1958
|
-
logger$
|
|
1960
|
+
logger$14.warn({
|
|
1959
1961
|
err,
|
|
1960
1962
|
originalId: parentMatch.id,
|
|
1961
1963
|
supersedingId: newDec.id
|
|
@@ -2089,7 +2091,7 @@ async function runAmendAction(options) {
|
|
|
2089
2091
|
for (let i = startIdx; i < phaseOrder.length; i++) {
|
|
2090
2092
|
const currentPhase = phaseOrder[i];
|
|
2091
2093
|
const amendmentContext = handler.loadContextForPhase(currentPhase);
|
|
2092
|
-
logger$
|
|
2094
|
+
logger$14.info({
|
|
2093
2095
|
phase: currentPhase,
|
|
2094
2096
|
amendmentContextLen: amendmentContext.length
|
|
2095
2097
|
}, "Amendment context loaded for phase");
|
|
@@ -2209,7 +2211,7 @@ async function runAmendAction(options) {
|
|
|
2209
2211
|
} catch (err) {
|
|
2210
2212
|
const msg = err instanceof Error ? err.message : String(err);
|
|
2211
2213
|
process.stderr.write(`Error: ${msg}\n`);
|
|
2212
|
-
logger$
|
|
2214
|
+
logger$14.error({ err }, "amend failed");
|
|
2213
2215
|
return 1;
|
|
2214
2216
|
} finally {
|
|
2215
2217
|
try {
|
|
@@ -2682,11 +2684,11 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
2682
2684
|
try {
|
|
2683
2685
|
const { createExperimenter } = await import(
|
|
2684
2686
|
/* @vite-ignore */
|
|
2685
|
-
"../experimenter-
|
|
2687
|
+
"../experimenter-CvxtqzXz.js"
|
|
2686
2688
|
);
|
|
2687
2689
|
const { getLatestRun: getLatest } = await import(
|
|
2688
2690
|
/* @vite-ignore */
|
|
2689
|
-
"../decisions-
|
|
2691
|
+
"../decisions-D7Ao_KcL.js"
|
|
2690
2692
|
);
|
|
2691
2693
|
const dbPath = join(projectRoot, ".substrate", "substrate.db");
|
|
2692
2694
|
const expDbWrapper = new DatabaseWrapper(dbPath);
|
|
@@ -2696,7 +2698,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
2696
2698
|
const expDb = expDbWrapper.db;
|
|
2697
2699
|
const { runRunAction: runPipeline } = await import(
|
|
2698
2700
|
/* @vite-ignore */
|
|
2699
|
-
"../run-
|
|
2701
|
+
"../run-CQJP37ZC.js"
|
|
2700
2702
|
);
|
|
2701
2703
|
const runStoryFn = async (opts) => {
|
|
2702
2704
|
const exitCode = await runPipeline({
|
|
@@ -2943,7 +2945,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
|
|
|
2943
2945
|
|
|
2944
2946
|
//#endregion
|
|
2945
2947
|
//#region src/cli/commands/metrics.ts
|
|
2946
|
-
const logger$
|
|
2948
|
+
const logger$13 = createLogger("metrics-cmd");
|
|
2947
2949
|
async function openTelemetryDb(dbPath) {
|
|
2948
2950
|
if (!existsSync(dbPath)) return null;
|
|
2949
2951
|
try {
|
|
@@ -3015,7 +3017,7 @@ function printCategoryTable(stats, label) {
|
|
|
3015
3017
|
}
|
|
3016
3018
|
}
|
|
3017
3019
|
async function runMetricsAction(options) {
|
|
3018
|
-
const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis, sprint, story, taskType, since, aggregate, efficiency, recommendations, turns, consumers, categories, compareStories } = options;
|
|
3020
|
+
const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis, sprint, story, taskType, since, aggregate, efficiency, recommendations, turns, consumers, categories, compareStories, routingRecommendations } = options;
|
|
3019
3021
|
const telemetryModes = [
|
|
3020
3022
|
efficiency,
|
|
3021
3023
|
recommendations,
|
|
@@ -3154,6 +3156,59 @@ async function runMetricsAction(options) {
|
|
|
3154
3156
|
} catch {}
|
|
3155
3157
|
}
|
|
3156
3158
|
}
|
|
3159
|
+
if (routingRecommendations === true) {
|
|
3160
|
+
const dbRoot$1 = await resolveMainRepoRoot(projectRoot);
|
|
3161
|
+
const dbDir = join(dbRoot$1, ".substrate");
|
|
3162
|
+
const doltStatePath = join(dbDir, "state", ".dolt");
|
|
3163
|
+
const doltExists = existsSync(doltStatePath);
|
|
3164
|
+
const stateBackend = doltExists ? "dolt" : "file";
|
|
3165
|
+
const stateBasePath = join(dbDir, "state");
|
|
3166
|
+
const stateStore = createStateStore({
|
|
3167
|
+
backend: stateBackend,
|
|
3168
|
+
basePath: stateBasePath
|
|
3169
|
+
});
|
|
3170
|
+
await stateStore.initialize();
|
|
3171
|
+
try {
|
|
3172
|
+
const runIndexRaw = await stateStore.getMetric("__global__", "phase_token_breakdown_runs");
|
|
3173
|
+
const runIds = Array.isArray(runIndexRaw) ? runIndexRaw : [];
|
|
3174
|
+
const recentRunIds = runIds.slice(-20);
|
|
3175
|
+
const breakdowns = [];
|
|
3176
|
+
for (const runId of recentRunIds) try {
|
|
3177
|
+
const raw = await stateStore.getMetric(runId, "phase_token_breakdown");
|
|
3178
|
+
if (raw !== void 0 && raw !== null) {
|
|
3179
|
+
const parsed = typeof raw === "string" ? JSON.parse(raw) : raw;
|
|
3180
|
+
breakdowns.push(parsed);
|
|
3181
|
+
}
|
|
3182
|
+
} catch {}
|
|
3183
|
+
const routingConfigPath = join(dbDir, "routing.yml");
|
|
3184
|
+
let routingConfig = null;
|
|
3185
|
+
if (existsSync(routingConfigPath)) try {
|
|
3186
|
+
const { loadModelRoutingConfig } = await import("../routing-DWCBjrt7.js");
|
|
3187
|
+
routingConfig = loadModelRoutingConfig(routingConfigPath);
|
|
3188
|
+
} catch {}
|
|
3189
|
+
if (routingConfig === null) routingConfig = {
|
|
3190
|
+
version: 1,
|
|
3191
|
+
phases: {},
|
|
3192
|
+
baseline_model: "claude-sonnet"
|
|
3193
|
+
};
|
|
3194
|
+
const recommender = new RoutingRecommender(createLogger("routing:recommender"));
|
|
3195
|
+
const analysis$1 = recommender.analyze(breakdowns, routingConfig);
|
|
3196
|
+
if (outputFormat === "json") process.stdout.write(formatOutput({
|
|
3197
|
+
recommendations: analysis$1.recommendations,
|
|
3198
|
+
analysisRuns: analysis$1.analysisRuns,
|
|
3199
|
+
insufficientData: analysis$1.insufficientData
|
|
3200
|
+
}, "json", true) + "\n");
|
|
3201
|
+
else {
|
|
3202
|
+
process.stdout.write(`Routing Recommendations:\n`);
|
|
3203
|
+
if (analysis$1.insufficientData) process.stdout.write(`No recommendations yet — need at least 3 pipeline runs\n`);
|
|
3204
|
+
else if (analysis$1.recommendations.length === 0) process.stdout.write(` No recommendations — all phases are in the neutral zone\n`);
|
|
3205
|
+
else for (const rec of analysis$1.recommendations) process.stdout.write(` ${rec.phase} | ${rec.currentModel} → ${rec.suggestedModel} | est. savings: ${Math.round(rec.estimatedSavingsPct)}%\n`);
|
|
3206
|
+
}
|
|
3207
|
+
} finally {
|
|
3208
|
+
await stateStore.close().catch(() => {});
|
|
3209
|
+
}
|
|
3210
|
+
return 0;
|
|
3211
|
+
}
|
|
3157
3212
|
if (analysis !== void 0) {
|
|
3158
3213
|
const dbRoot$1 = await resolveMainRepoRoot(projectRoot);
|
|
3159
3214
|
const reportBase = join(dbRoot$1, "_bmad-output", "supervisor-reports", `${analysis}-analysis`);
|
|
@@ -3251,7 +3306,7 @@ async function runMetricsAction(options) {
|
|
|
3251
3306
|
doltMetrics = await stateStore.queryMetrics(doltFilter);
|
|
3252
3307
|
await stateStore.close();
|
|
3253
3308
|
} catch (doltErr) {
|
|
3254
|
-
logger$
|
|
3309
|
+
logger$13.warn({ err: doltErr }, "StateStore query failed — falling back to SQLite metrics only");
|
|
3255
3310
|
}
|
|
3256
3311
|
const storyMetricDecisions = getDecisionsByCategory(db, STORY_METRICS);
|
|
3257
3312
|
const storyMetrics = storyMetricDecisions.map((d) => {
|
|
@@ -3282,9 +3337,21 @@ async function runMetricsAction(options) {
|
|
|
3282
3337
|
};
|
|
3283
3338
|
}
|
|
3284
3339
|
});
|
|
3340
|
+
const phaseBreakdownMap = {};
|
|
3341
|
+
try {
|
|
3342
|
+
const kvStore = new FileStateStore({ basePath: join(dbRoot, ".substrate") });
|
|
3343
|
+
for (const run of runs) {
|
|
3344
|
+
const raw = await kvStore.getMetric(run.run_id, "phase_token_breakdown");
|
|
3345
|
+
phaseBreakdownMap[run.run_id] = raw !== void 0 ? raw : null;
|
|
3346
|
+
}
|
|
3347
|
+
} catch {}
|
|
3285
3348
|
if (outputFormat === "json") {
|
|
3349
|
+
const runsWithBreakdown = runs.map((run) => ({
|
|
3350
|
+
...run,
|
|
3351
|
+
phase_token_breakdown: phaseBreakdownMap[run.run_id] ?? null
|
|
3352
|
+
}));
|
|
3286
3353
|
const jsonPayload = {
|
|
3287
|
-
runs,
|
|
3354
|
+
runs: runsWithBreakdown,
|
|
3288
3355
|
story_metrics: storyMetrics
|
|
3289
3356
|
};
|
|
3290
3357
|
if (doltMetrics !== void 0) if (aggregate) {
|
|
@@ -3322,6 +3389,11 @@ async function runMetricsAction(options) {
|
|
|
3322
3389
|
process.stdout.write(` Stories: attempted=${run.stories_attempted} succeeded=${run.stories_succeeded} failed=${run.stories_failed} escalated=${run.stories_escalated}\n`);
|
|
3323
3390
|
process.stdout.write(` Tokens: ${(run.total_input_tokens ?? 0).toLocaleString()} in / ${(run.total_output_tokens ?? 0).toLocaleString()} out $${(run.total_cost_usd ?? 0).toFixed(4)}\n`);
|
|
3324
3391
|
process.stdout.write(` Cycles: ${run.total_review_cycles} | Dispatches: ${run.total_dispatches} | Concurrency: ${run.concurrency_setting}\n`);
|
|
3392
|
+
const breakdown = phaseBreakdownMap[run.run_id];
|
|
3393
|
+
if (breakdown !== null && breakdown !== void 0 && breakdown.entries.length > 0) {
|
|
3394
|
+
process.stdout.write(" Phase Token Breakdown:\n");
|
|
3395
|
+
for (const entry of breakdown.entries) process.stdout.write(` ${entry.phase.padEnd(10)} | ${entry.model.padEnd(30)} | in: ${entry.inputTokens} | out: ${entry.outputTokens} | dispatches: ${entry.dispatchCount}\n`);
|
|
3396
|
+
}
|
|
3325
3397
|
}
|
|
3326
3398
|
}
|
|
3327
3399
|
if (storyMetrics.length > 0) {
|
|
@@ -3377,7 +3449,7 @@ async function runMetricsAction(options) {
|
|
|
3377
3449
|
const msg = err instanceof Error ? err.message : String(err);
|
|
3378
3450
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
3379
3451
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
3380
|
-
logger$
|
|
3452
|
+
logger$13.error({ err }, "metrics action failed");
|
|
3381
3453
|
return 1;
|
|
3382
3454
|
} finally {
|
|
3383
3455
|
try {
|
|
@@ -3386,7 +3458,7 @@ async function runMetricsAction(options) {
|
|
|
3386
3458
|
}
|
|
3387
3459
|
}
|
|
3388
3460
|
function registerMetricsCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
|
|
3389
|
-
program.command("metrics").description("Show historical pipeline run metrics and cross-run comparison").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").option("--limit <n>", "Number of runs to show (default: 10)", (v) => parseInt(v, 10), 10).option("--compare <run-id-a,run-id-b>", "Compare two runs side-by-side (comma-separated IDs, e.g. abc123,def456)").option("--tag-baseline <run-id>", "Mark a run as the performance baseline").option("--analysis <run-id>", "Read and output the analysis report for the specified run (AC5 of Story 17-3)").option("--sprint <sprint>", "Filter StateStore metrics by sprint (e.g. sprint-1)").option("--story <story-key>", "Filter StateStore metrics by story key (e.g. 26-1)").option("--task-type <type>", "Filter StateStore metrics by task type (e.g. dev-story)").option("--since <iso-date>", "Filter StateStore metrics at or after this ISO timestamp").option("--aggregate", "Aggregate StateStore metrics grouped by task_type").option("--efficiency", "Show telemetry efficiency scores for recent stories").option("--recommendations", "Show all telemetry recommendations across stories").option("--turns <storyKey>", "Show per-turn analysis for a specific story").option("--consumers <storyKey>", "Show consumer stats for a specific story").option("--categories", "Show category stats (optionally scoped by --story <storyKey>)").option("--compare-stories <storyA,storyB>", "Compare efficiency scores of two stories side-by-side (comma-separated keys)").action(async (opts) => {
|
|
3461
|
+
program.command("metrics").description("Show historical pipeline run metrics and cross-run comparison").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").option("--limit <n>", "Number of runs to show (default: 10)", (v) => parseInt(v, 10), 10).option("--compare <run-id-a,run-id-b>", "Compare two runs side-by-side (comma-separated IDs, e.g. abc123,def456)").option("--tag-baseline <run-id>", "Mark a run as the performance baseline").option("--analysis <run-id>", "Read and output the analysis report for the specified run (AC5 of Story 17-3)").option("--sprint <sprint>", "Filter StateStore metrics by sprint (e.g. sprint-1)").option("--story <story-key>", "Filter StateStore metrics by story key (e.g. 26-1)").option("--task-type <type>", "Filter StateStore metrics by task type (e.g. dev-story)").option("--since <iso-date>", "Filter StateStore metrics at or after this ISO timestamp").option("--aggregate", "Aggregate StateStore metrics grouped by task_type").option("--efficiency", "Show telemetry efficiency scores for recent stories").option("--recommendations", "Show all telemetry recommendations across stories").option("--turns <storyKey>", "Show per-turn analysis for a specific story").option("--consumers <storyKey>", "Show consumer stats for a specific story").option("--categories", "Show category stats (optionally scoped by --story <storyKey>)").option("--compare-stories <storyA,storyB>", "Compare efficiency scores of two stories side-by-side (comma-separated keys)").option("--routing-recommendations", "Show routing recommendations derived from phase token breakdown history").action(async (opts) => {
|
|
3390
3462
|
const outputFormat = opts.outputFormat === "json" ? "json" : "human";
|
|
3391
3463
|
let compareIds;
|
|
3392
3464
|
if (opts.compare !== void 0) {
|
|
@@ -3420,7 +3492,8 @@ function registerMetricsCommand(program, _version = "0.0.0", projectRoot = proce
|
|
|
3420
3492
|
...opts.turns !== void 0 && { turns: opts.turns },
|
|
3421
3493
|
...opts.consumers !== void 0 && { consumers: opts.consumers },
|
|
3422
3494
|
...opts.categories !== void 0 && { categories: opts.categories },
|
|
3423
|
-
...compareStoriesIds !== void 0 && { compareStories: compareStoriesIds }
|
|
3495
|
+
...compareStoriesIds !== void 0 && { compareStories: compareStoriesIds },
|
|
3496
|
+
...opts.routingRecommendations !== void 0 && { routingRecommendations: opts.routingRecommendations }
|
|
3424
3497
|
};
|
|
3425
3498
|
const exitCode = await runMetricsAction(metricsOpts);
|
|
3426
3499
|
process.exitCode = exitCode;
|
|
@@ -3805,7 +3878,7 @@ function getPlanningCostTotal(db, sessionId) {
|
|
|
3805
3878
|
function getLatestSessionId(_db) {
|
|
3806
3879
|
return null;
|
|
3807
3880
|
}
|
|
3808
|
-
const logger$
|
|
3881
|
+
const logger$12 = createLogger("cost-cmd");
|
|
3809
3882
|
const COST_EXIT_SUCCESS = 0;
|
|
3810
3883
|
const COST_EXIT_ERROR = 1;
|
|
3811
3884
|
/**
|
|
@@ -4051,7 +4124,7 @@ async function runCostAction(options) {
|
|
|
4051
4124
|
} catch (err) {
|
|
4052
4125
|
const message = err instanceof Error ? err.message : String(err);
|
|
4053
4126
|
process.stderr.write(`Error: ${message}\n`);
|
|
4054
|
-
logger$
|
|
4127
|
+
logger$12.error({ err }, "runCostAction failed");
|
|
4055
4128
|
return COST_EXIT_ERROR;
|
|
4056
4129
|
} finally {
|
|
4057
4130
|
if (wrapper !== null) try {
|
|
@@ -4153,7 +4226,7 @@ function applyMonitorSchema(db) {
|
|
|
4153
4226
|
|
|
4154
4227
|
//#endregion
|
|
4155
4228
|
//#region src/persistence/monitor-database.ts
|
|
4156
|
-
const logger$
|
|
4229
|
+
const logger$11 = createLogger("persistence:monitor-db");
|
|
4157
4230
|
var MonitorDatabaseImpl = class {
|
|
4158
4231
|
_db = null;
|
|
4159
4232
|
_path;
|
|
@@ -4164,10 +4237,10 @@ var MonitorDatabaseImpl = class {
|
|
|
4164
4237
|
this._open();
|
|
4165
4238
|
}
|
|
4166
4239
|
_open() {
|
|
4167
|
-
logger$
|
|
4240
|
+
logger$11.info({ path: this._path }, "Opening monitor database");
|
|
4168
4241
|
this._db = new Database(this._path);
|
|
4169
4242
|
const walResult = this._db.pragma("journal_mode = WAL");
|
|
4170
|
-
if (walResult?.[0]?.journal_mode !== "wal") logger$
|
|
4243
|
+
if (walResult?.[0]?.journal_mode !== "wal") logger$11.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
|
|
4171
4244
|
this._db.pragma("synchronous = NORMAL");
|
|
4172
4245
|
this._db.pragma("busy_timeout = 5000");
|
|
4173
4246
|
this._db.pragma("foreign_keys = ON");
|
|
@@ -4202,7 +4275,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4202
4275
|
total_retries = total_retries + @retries,
|
|
4203
4276
|
last_updated = @lastUpdated
|
|
4204
4277
|
`);
|
|
4205
|
-
logger$
|
|
4278
|
+
logger$11.info({ path: this._path }, "Monitor database ready");
|
|
4206
4279
|
}
|
|
4207
4280
|
_assertOpen() {
|
|
4208
4281
|
if (this._db === null) throw new Error("MonitorDatabase: connection is closed");
|
|
@@ -4351,7 +4424,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4351
4424
|
const db = this._assertOpen();
|
|
4352
4425
|
const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
4353
4426
|
const result = db.prepare("DELETE FROM task_metrics WHERE recorded_at < @cutoff").run({ cutoff });
|
|
4354
|
-
logger$
|
|
4427
|
+
logger$11.info({
|
|
4355
4428
|
cutoff,
|
|
4356
4429
|
deleted: result.changes
|
|
4357
4430
|
}, "Pruned old task_metrics rows");
|
|
@@ -4390,13 +4463,13 @@ var MonitorDatabaseImpl = class {
|
|
|
4390
4463
|
db.exec("ROLLBACK");
|
|
4391
4464
|
throw err;
|
|
4392
4465
|
}
|
|
4393
|
-
logger$
|
|
4466
|
+
logger$11.info("Rebuilt performance_aggregates from task_metrics");
|
|
4394
4467
|
}
|
|
4395
4468
|
resetAllData() {
|
|
4396
4469
|
const db = this._assertOpen();
|
|
4397
4470
|
db.exec("DELETE FROM task_metrics");
|
|
4398
4471
|
db.exec("DELETE FROM performance_aggregates");
|
|
4399
|
-
logger$
|
|
4472
|
+
logger$11.info({ path: this._path }, "Monitor data reset — all rows deleted");
|
|
4400
4473
|
}
|
|
4401
4474
|
getTaskMetricsDateRange() {
|
|
4402
4475
|
const db = this._assertOpen();
|
|
@@ -4413,7 +4486,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4413
4486
|
if (this._db === null) return;
|
|
4414
4487
|
this._db.close();
|
|
4415
4488
|
this._db = null;
|
|
4416
|
-
logger$
|
|
4489
|
+
logger$11.info({ path: this._path }, "Monitor database closed");
|
|
4417
4490
|
}
|
|
4418
4491
|
/**
|
|
4419
4492
|
* Access the raw underlying database for testing purposes only.
|
|
@@ -4426,7 +4499,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4426
4499
|
|
|
4427
4500
|
//#endregion
|
|
4428
4501
|
//#region src/modules/monitor/recommendation-engine.ts
|
|
4429
|
-
const logger$
|
|
4502
|
+
const logger$10 = createLogger("monitor:recommendations");
|
|
4430
4503
|
var RecommendationEngine = class {
|
|
4431
4504
|
_monitorDb;
|
|
4432
4505
|
_filters;
|
|
@@ -4459,7 +4532,7 @@ var RecommendationEngine = class {
|
|
|
4459
4532
|
const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
4460
4533
|
const aggregates = this._monitorDb.getAggregates({ sinceDate });
|
|
4461
4534
|
if (aggregates.length === 0) {
|
|
4462
|
-
logger$
|
|
4535
|
+
logger$10.debug("No performance aggregates found — no recommendations to generate");
|
|
4463
4536
|
return [];
|
|
4464
4537
|
}
|
|
4465
4538
|
const byTaskType = new Map();
|
|
@@ -4524,7 +4597,7 @@ var RecommendationEngine = class {
|
|
|
4524
4597
|
if (confDiff !== 0) return confDiff;
|
|
4525
4598
|
return b.improvement_percentage - a.improvement_percentage;
|
|
4526
4599
|
});
|
|
4527
|
-
logger$
|
|
4600
|
+
logger$10.debug({ count: recommendations.length }, "Generated routing recommendations");
|
|
4528
4601
|
return recommendations;
|
|
4529
4602
|
}
|
|
4530
4603
|
/**
|
|
@@ -4690,7 +4763,7 @@ function generateMonitorReport(monitorDb, options = {}) {
|
|
|
4690
4763
|
|
|
4691
4764
|
//#endregion
|
|
4692
4765
|
//#region src/cli/commands/monitor.ts
|
|
4693
|
-
const logger$
|
|
4766
|
+
const logger$9 = createLogger("monitor-cmd");
|
|
4694
4767
|
const MONITOR_EXIT_SUCCESS = 0;
|
|
4695
4768
|
const MONITOR_EXIT_ERROR = 1;
|
|
4696
4769
|
/**
|
|
@@ -4893,7 +4966,7 @@ async function runMonitorReportAction(options) {
|
|
|
4893
4966
|
} catch (err) {
|
|
4894
4967
|
const message = err instanceof Error ? err.message : String(err);
|
|
4895
4968
|
process.stderr.write(`Error: ${message}\n`);
|
|
4896
|
-
logger$
|
|
4969
|
+
logger$9.error({ err }, "runMonitorReportAction failed");
|
|
4897
4970
|
return MONITOR_EXIT_ERROR;
|
|
4898
4971
|
} finally {
|
|
4899
4972
|
if (monitorDb !== null) try {
|
|
@@ -4955,7 +5028,7 @@ async function runMonitorStatusAction(options) {
|
|
|
4955
5028
|
} catch (err) {
|
|
4956
5029
|
const message = err instanceof Error ? err.message : String(err);
|
|
4957
5030
|
process.stderr.write(`Error: ${message}\n`);
|
|
4958
|
-
logger$
|
|
5031
|
+
logger$9.error({ err }, "runMonitorStatusAction failed");
|
|
4959
5032
|
return MONITOR_EXIT_ERROR;
|
|
4960
5033
|
} finally {
|
|
4961
5034
|
if (monitorDb !== null) try {
|
|
@@ -4990,7 +5063,7 @@ async function runMonitorResetAction(options) {
|
|
|
4990
5063
|
} catch (err) {
|
|
4991
5064
|
const message = err instanceof Error ? err.message : String(err);
|
|
4992
5065
|
process.stderr.write(`Error: ${message}\n`);
|
|
4993
|
-
logger$
|
|
5066
|
+
logger$9.error({ err }, "runMonitorResetAction failed");
|
|
4994
5067
|
return MONITOR_EXIT_ERROR;
|
|
4995
5068
|
} finally {
|
|
4996
5069
|
if (monitorDb !== null) try {
|
|
@@ -5038,7 +5111,7 @@ async function runMonitorRecommendationsAction(options) {
|
|
|
5038
5111
|
} catch (err) {
|
|
5039
5112
|
const message = err instanceof Error ? err.message : String(err);
|
|
5040
5113
|
process.stderr.write(`Error: ${message}\n`);
|
|
5041
|
-
logger$
|
|
5114
|
+
logger$9.error({ err }, "runMonitorRecommendationsAction failed");
|
|
5042
5115
|
return MONITOR_EXIT_ERROR;
|
|
5043
5116
|
} finally {
|
|
5044
5117
|
if (monitorDb !== null) try {
|
|
@@ -5116,7 +5189,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
|
|
|
5116
5189
|
|
|
5117
5190
|
//#endregion
|
|
5118
5191
|
//#region src/modules/git-worktree/git-worktree-manager-impl.ts
|
|
5119
|
-
const logger$
|
|
5192
|
+
const logger$8 = createLogger("git-worktree");
|
|
5120
5193
|
const BRANCH_PREFIX = "substrate/task-";
|
|
5121
5194
|
const DEFAULT_WORKTREE_BASE = ".substrate-worktrees";
|
|
5122
5195
|
var GitWorktreeManagerImpl = class {
|
|
@@ -5135,7 +5208,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5135
5208
|
this._db = db;
|
|
5136
5209
|
this._onTaskReady = ({ taskId }) => {
|
|
5137
5210
|
this._handleTaskReady(taskId).catch((err) => {
|
|
5138
|
-
logger$
|
|
5211
|
+
logger$8.error({
|
|
5139
5212
|
taskId,
|
|
5140
5213
|
err
|
|
5141
5214
|
}, "Unhandled error in _handleTaskReady");
|
|
@@ -5149,40 +5222,40 @@ var GitWorktreeManagerImpl = class {
|
|
|
5149
5222
|
};
|
|
5150
5223
|
}
|
|
5151
5224
|
async initialize() {
|
|
5152
|
-
logger$
|
|
5225
|
+
logger$8.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
|
|
5153
5226
|
await this.verifyGitVersion();
|
|
5154
5227
|
const cleaned = await this.cleanupAllWorktrees();
|
|
5155
|
-
if (cleaned > 0) logger$
|
|
5228
|
+
if (cleaned > 0) logger$8.info({ cleaned }, "Recovered orphaned worktrees on startup");
|
|
5156
5229
|
this._eventBus.on("task:ready", this._onTaskReady);
|
|
5157
5230
|
this._eventBus.on("task:complete", this._onTaskComplete);
|
|
5158
5231
|
this._eventBus.on("task:failed", this._onTaskFailed);
|
|
5159
|
-
logger$
|
|
5232
|
+
logger$8.info("GitWorktreeManager initialized");
|
|
5160
5233
|
}
|
|
5161
5234
|
async shutdown() {
|
|
5162
|
-
logger$
|
|
5235
|
+
logger$8.info("GitWorktreeManager.shutdown()");
|
|
5163
5236
|
this._eventBus.off("task:ready", this._onTaskReady);
|
|
5164
5237
|
this._eventBus.off("task:complete", this._onTaskComplete);
|
|
5165
5238
|
this._eventBus.off("task:failed", this._onTaskFailed);
|
|
5166
5239
|
await this.cleanupAllWorktrees();
|
|
5167
|
-
logger$
|
|
5240
|
+
logger$8.info("GitWorktreeManager shutdown complete");
|
|
5168
5241
|
}
|
|
5169
5242
|
async _handleTaskReady(taskId) {
|
|
5170
|
-
logger$
|
|
5243
|
+
logger$8.debug({ taskId }, "task:ready — creating worktree");
|
|
5171
5244
|
try {
|
|
5172
5245
|
await this.createWorktree(taskId);
|
|
5173
5246
|
} catch (err) {
|
|
5174
|
-
logger$
|
|
5247
|
+
logger$8.error({
|
|
5175
5248
|
taskId,
|
|
5176
5249
|
err
|
|
5177
5250
|
}, "Failed to create worktree for task");
|
|
5178
5251
|
}
|
|
5179
5252
|
}
|
|
5180
5253
|
async _handleTaskDone(taskId) {
|
|
5181
|
-
logger$
|
|
5254
|
+
logger$8.debug({ taskId }, "task done — cleaning up worktree");
|
|
5182
5255
|
try {
|
|
5183
5256
|
await this.cleanupWorktree(taskId);
|
|
5184
5257
|
} catch (err) {
|
|
5185
|
-
logger$
|
|
5258
|
+
logger$8.warn({
|
|
5186
5259
|
taskId,
|
|
5187
5260
|
err
|
|
5188
5261
|
}, "Failed to cleanup worktree for task");
|
|
@@ -5192,7 +5265,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5192
5265
|
if (!taskId || taskId.trim().length === 0) throw new Error("createWorktree: taskId must be a non-empty string");
|
|
5193
5266
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5194
5267
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5195
|
-
logger$
|
|
5268
|
+
logger$8.debug({
|
|
5196
5269
|
taskId,
|
|
5197
5270
|
branchName,
|
|
5198
5271
|
worktreePath,
|
|
@@ -5212,7 +5285,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5212
5285
|
worktreePath,
|
|
5213
5286
|
createdAt
|
|
5214
5287
|
};
|
|
5215
|
-
logger$
|
|
5288
|
+
logger$8.info({
|
|
5216
5289
|
taskId,
|
|
5217
5290
|
branchName,
|
|
5218
5291
|
worktreePath
|
|
@@ -5222,7 +5295,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5222
5295
|
async cleanupWorktree(taskId) {
|
|
5223
5296
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5224
5297
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5225
|
-
logger$
|
|
5298
|
+
logger$8.debug({
|
|
5226
5299
|
taskId,
|
|
5227
5300
|
branchName,
|
|
5228
5301
|
worktreePath
|
|
@@ -5232,7 +5305,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5232
5305
|
await access$1(worktreePath);
|
|
5233
5306
|
worktreeExists = true;
|
|
5234
5307
|
} catch {
|
|
5235
|
-
logger$
|
|
5308
|
+
logger$8.debug({
|
|
5236
5309
|
taskId,
|
|
5237
5310
|
worktreePath
|
|
5238
5311
|
}, "cleanupWorktree: worktree does not exist, skipping removal");
|
|
@@ -5240,7 +5313,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5240
5313
|
if (worktreeExists) try {
|
|
5241
5314
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
5242
5315
|
} catch (err) {
|
|
5243
|
-
logger$
|
|
5316
|
+
logger$8.warn({
|
|
5244
5317
|
taskId,
|
|
5245
5318
|
worktreePath,
|
|
5246
5319
|
err
|
|
@@ -5249,7 +5322,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5249
5322
|
try {
|
|
5250
5323
|
await removeBranch(branchName, this._projectRoot);
|
|
5251
5324
|
} catch (err) {
|
|
5252
|
-
logger$
|
|
5325
|
+
logger$8.warn({
|
|
5253
5326
|
taskId,
|
|
5254
5327
|
branchName,
|
|
5255
5328
|
err
|
|
@@ -5259,13 +5332,13 @@ var GitWorktreeManagerImpl = class {
|
|
|
5259
5332
|
taskId,
|
|
5260
5333
|
branchName
|
|
5261
5334
|
});
|
|
5262
|
-
logger$
|
|
5335
|
+
logger$8.info({
|
|
5263
5336
|
taskId,
|
|
5264
5337
|
branchName
|
|
5265
5338
|
}, "Worktree cleaned up");
|
|
5266
5339
|
}
|
|
5267
5340
|
async cleanupAllWorktrees() {
|
|
5268
|
-
logger$
|
|
5341
|
+
logger$8.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
|
|
5269
5342
|
const orphanedPaths = await getOrphanedWorktrees(this._projectRoot, this._baseDirectory);
|
|
5270
5343
|
let cleaned = 0;
|
|
5271
5344
|
for (const worktreePath of orphanedPaths) {
|
|
@@ -5274,12 +5347,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
5274
5347
|
try {
|
|
5275
5348
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
5276
5349
|
worktreeRemoved = true;
|
|
5277
|
-
logger$
|
|
5350
|
+
logger$8.debug({
|
|
5278
5351
|
taskId,
|
|
5279
5352
|
worktreePath
|
|
5280
5353
|
}, "cleanupAllWorktrees: removed orphaned worktree");
|
|
5281
5354
|
} catch (err) {
|
|
5282
|
-
logger$
|
|
5355
|
+
logger$8.warn({
|
|
5283
5356
|
taskId,
|
|
5284
5357
|
worktreePath,
|
|
5285
5358
|
err
|
|
@@ -5289,12 +5362,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
5289
5362
|
let branchRemoved = false;
|
|
5290
5363
|
try {
|
|
5291
5364
|
branchRemoved = await removeBranch(branchName, this._projectRoot);
|
|
5292
|
-
if (branchRemoved) logger$
|
|
5365
|
+
if (branchRemoved) logger$8.debug({
|
|
5293
5366
|
taskId,
|
|
5294
5367
|
branchName
|
|
5295
5368
|
}, "cleanupAllWorktrees: removed orphaned branch");
|
|
5296
5369
|
} catch (err) {
|
|
5297
|
-
logger$
|
|
5370
|
+
logger$8.warn({
|
|
5298
5371
|
taskId,
|
|
5299
5372
|
branchName,
|
|
5300
5373
|
err
|
|
@@ -5302,14 +5375,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
5302
5375
|
}
|
|
5303
5376
|
if (worktreeRemoved) cleaned++;
|
|
5304
5377
|
}
|
|
5305
|
-
if (cleaned > 0) logger$
|
|
5378
|
+
if (cleaned > 0) logger$8.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
|
|
5306
5379
|
return cleaned;
|
|
5307
5380
|
}
|
|
5308
5381
|
async detectConflicts(taskId, targetBranch = "main") {
|
|
5309
5382
|
if (!taskId || taskId.trim().length === 0) throw new Error("detectConflicts: taskId must be a non-empty string");
|
|
5310
5383
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5311
5384
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5312
|
-
logger$
|
|
5385
|
+
logger$8.debug({
|
|
5313
5386
|
taskId,
|
|
5314
5387
|
branchName,
|
|
5315
5388
|
targetBranch
|
|
@@ -5337,7 +5410,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5337
5410
|
branch: branchName,
|
|
5338
5411
|
conflictingFiles: report.conflictingFiles
|
|
5339
5412
|
});
|
|
5340
|
-
logger$
|
|
5413
|
+
logger$8.info({
|
|
5341
5414
|
taskId,
|
|
5342
5415
|
hasConflicts: report.hasConflicts,
|
|
5343
5416
|
conflictCount: conflictingFiles.length
|
|
@@ -5347,14 +5420,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
5347
5420
|
async mergeWorktree(taskId, targetBranch = "main") {
|
|
5348
5421
|
if (!taskId || taskId.trim().length === 0) throw new Error("mergeWorktree: taskId must be a non-empty string");
|
|
5349
5422
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5350
|
-
logger$
|
|
5423
|
+
logger$8.debug({
|
|
5351
5424
|
taskId,
|
|
5352
5425
|
branchName,
|
|
5353
5426
|
targetBranch
|
|
5354
5427
|
}, "mergeWorktree");
|
|
5355
5428
|
const conflictReport = await this.detectConflicts(taskId, targetBranch);
|
|
5356
5429
|
if (conflictReport.hasConflicts) {
|
|
5357
|
-
logger$
|
|
5430
|
+
logger$8.info({
|
|
5358
5431
|
taskId,
|
|
5359
5432
|
conflictCount: conflictReport.conflictingFiles.length
|
|
5360
5433
|
}, "Merge skipped due to conflicts");
|
|
@@ -5376,7 +5449,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5376
5449
|
success: true,
|
|
5377
5450
|
mergedFiles
|
|
5378
5451
|
};
|
|
5379
|
-
logger$
|
|
5452
|
+
logger$8.info({
|
|
5380
5453
|
taskId,
|
|
5381
5454
|
branchName,
|
|
5382
5455
|
mergedFileCount: mergedFiles.length
|
|
@@ -5384,7 +5457,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5384
5457
|
return result;
|
|
5385
5458
|
}
|
|
5386
5459
|
async listWorktrees() {
|
|
5387
|
-
logger$
|
|
5460
|
+
logger$8.debug({
|
|
5388
5461
|
projectRoot: this._projectRoot,
|
|
5389
5462
|
baseDirectory: this._baseDirectory
|
|
5390
5463
|
}, "listWorktrees");
|
|
@@ -5408,7 +5481,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5408
5481
|
createdAt
|
|
5409
5482
|
});
|
|
5410
5483
|
}
|
|
5411
|
-
logger$
|
|
5484
|
+
logger$8.debug({ count: results.length }, "listWorktrees: found worktrees");
|
|
5412
5485
|
return results;
|
|
5413
5486
|
}
|
|
5414
5487
|
getWorktreePath(taskId) {
|
|
@@ -5428,7 +5501,7 @@ function createGitWorktreeManager(options) {
|
|
|
5428
5501
|
|
|
5429
5502
|
//#endregion
|
|
5430
5503
|
//#region src/cli/commands/merge.ts
|
|
5431
|
-
const logger$
|
|
5504
|
+
const logger$7 = createLogger("merge-cmd");
|
|
5432
5505
|
const MERGE_EXIT_SUCCESS = 0;
|
|
5433
5506
|
const MERGE_EXIT_CONFLICT = 1;
|
|
5434
5507
|
const MERGE_EXIT_ERROR = 2;
|
|
@@ -5466,7 +5539,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5466
5539
|
projectRoot
|
|
5467
5540
|
});
|
|
5468
5541
|
try {
|
|
5469
|
-
logger$
|
|
5542
|
+
logger$7.info({
|
|
5470
5543
|
taskId,
|
|
5471
5544
|
targetBranch
|
|
5472
5545
|
}, "Running conflict detection...");
|
|
@@ -5488,7 +5561,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5488
5561
|
} catch (err) {
|
|
5489
5562
|
const message = err instanceof Error ? err.message : String(err);
|
|
5490
5563
|
console.error(`Error merging task "${taskId}": ${message}`);
|
|
5491
|
-
logger$
|
|
5564
|
+
logger$7.error({
|
|
5492
5565
|
taskId,
|
|
5493
5566
|
err
|
|
5494
5567
|
}, "merge --task failed");
|
|
@@ -5542,7 +5615,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
|
|
|
5542
5615
|
error: message
|
|
5543
5616
|
});
|
|
5544
5617
|
console.log(` Error for task "${taskId}": ${message}`);
|
|
5545
|
-
logger$
|
|
5618
|
+
logger$7.error({
|
|
5546
5619
|
taskId,
|
|
5547
5620
|
err
|
|
5548
5621
|
}, "merge --all: task failed");
|
|
@@ -5595,7 +5668,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {
|
|
|
5595
5668
|
|
|
5596
5669
|
//#endregion
|
|
5597
5670
|
//#region src/cli/commands/worktrees.ts
|
|
5598
|
-
const logger$
|
|
5671
|
+
const logger$6 = createLogger("worktrees-cmd");
|
|
5599
5672
|
const WORKTREES_EXIT_SUCCESS = 0;
|
|
5600
5673
|
const WORKTREES_EXIT_ERROR = 1;
|
|
5601
5674
|
/** Valid task statuses for filtering */
|
|
@@ -5722,7 +5795,7 @@ async function listWorktreesAction(options) {
|
|
|
5722
5795
|
try {
|
|
5723
5796
|
worktreeInfos = await manager.listWorktrees();
|
|
5724
5797
|
} catch (err) {
|
|
5725
|
-
logger$
|
|
5798
|
+
logger$6.error({ err }, "Failed to list worktrees");
|
|
5726
5799
|
const message = err instanceof Error ? err.message : String(err);
|
|
5727
5800
|
process.stderr.write(`Error listing worktrees: ${message}\n`);
|
|
5728
5801
|
return WORKTREES_EXIT_ERROR;
|
|
@@ -5749,7 +5822,7 @@ async function listWorktreesAction(options) {
|
|
|
5749
5822
|
} catch (err) {
|
|
5750
5823
|
const message = err instanceof Error ? err.message : String(err);
|
|
5751
5824
|
process.stderr.write(`Error: ${message}\n`);
|
|
5752
|
-
logger$
|
|
5825
|
+
logger$6.error({ err }, "listWorktreesAction failed");
|
|
5753
5826
|
return WORKTREES_EXIT_ERROR;
|
|
5754
5827
|
}
|
|
5755
5828
|
}
|
|
@@ -5790,7 +5863,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc
|
|
|
5790
5863
|
|
|
5791
5864
|
//#endregion
|
|
5792
5865
|
//#region src/cli/commands/brainstorm.ts
|
|
5793
|
-
const logger$
|
|
5866
|
+
const logger$5 = createLogger("brainstorm-cmd");
|
|
5794
5867
|
/**
|
|
5795
5868
|
* Detect whether the project has existing planning artifacts that indicate
|
|
5796
5869
|
* this is an amendment session (vs. a brand-new project brainstorm).
|
|
@@ -5836,13 +5909,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
|
|
|
5836
5909
|
try {
|
|
5837
5910
|
brief = await readFile(briefPath, "utf-8");
|
|
5838
5911
|
} catch {
|
|
5839
|
-
logger$
|
|
5912
|
+
logger$5.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
|
|
5840
5913
|
process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
|
|
5841
5914
|
}
|
|
5842
5915
|
try {
|
|
5843
5916
|
prd = await readFile(prdPath, "utf-8");
|
|
5844
5917
|
} catch {
|
|
5845
|
-
logger$
|
|
5918
|
+
logger$5.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
|
|
5846
5919
|
process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
|
|
5847
5920
|
}
|
|
5848
5921
|
return {
|
|
@@ -6051,7 +6124,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
6051
6124
|
}
|
|
6052
6125
|
];
|
|
6053
6126
|
const defaultDispatch = async (prompt, personaName) => {
|
|
6054
|
-
logger$
|
|
6127
|
+
logger$5.debug({
|
|
6055
6128
|
personaName,
|
|
6056
6129
|
promptLength: prompt.length
|
|
6057
6130
|
}, "Dispatching to persona (stub mode)");
|
|
@@ -6068,7 +6141,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
|
|
|
6068
6141
|
};
|
|
6069
6142
|
} catch (err) {
|
|
6070
6143
|
const msg = err instanceof Error ? err.message : String(err);
|
|
6071
|
-
logger$
|
|
6144
|
+
logger$5.error({
|
|
6072
6145
|
err,
|
|
6073
6146
|
personaName: persona.name
|
|
6074
6147
|
}, "Persona dispatch failed");
|
|
@@ -6220,7 +6293,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
|
|
|
6220
6293
|
}
|
|
6221
6294
|
});
|
|
6222
6295
|
rl.on("error", (err) => {
|
|
6223
|
-
logger$
|
|
6296
|
+
logger$5.error({ err }, "readline error");
|
|
6224
6297
|
if (!sessionEnded) endSession(false);
|
|
6225
6298
|
});
|
|
6226
6299
|
});
|
|
@@ -6810,7 +6883,7 @@ function renderReadinessReport(decisions) {
|
|
|
6810
6883
|
|
|
6811
6884
|
//#endregion
|
|
6812
6885
|
//#region src/cli/commands/export.ts
|
|
6813
|
-
const logger$
|
|
6886
|
+
const logger$4 = createLogger("export-cmd");
|
|
6814
6887
|
/**
|
|
6815
6888
|
* Execute the export action.
|
|
6816
6889
|
* Returns an exit code (0 = success, 1 = error).
|
|
@@ -6937,7 +7010,7 @@ async function runExportAction(options) {
|
|
|
6937
7010
|
const msg = err instanceof Error ? err.message : String(err);
|
|
6938
7011
|
if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: msg }) + "\n");
|
|
6939
7012
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
6940
|
-
logger$
|
|
7013
|
+
logger$4.error({ err }, "export action failed");
|
|
6941
7014
|
return 1;
|
|
6942
7015
|
} finally {
|
|
6943
7016
|
if (dbWrapper !== void 0) try {
|
|
@@ -7024,7 +7097,7 @@ function getRetryableEscalations(db, runId) {
|
|
|
7024
7097
|
|
|
7025
7098
|
//#endregion
|
|
7026
7099
|
//#region src/cli/commands/retry-escalated.ts
|
|
7027
|
-
const logger$
|
|
7100
|
+
const logger$3 = createLogger("retry-escalated-cmd");
|
|
7028
7101
|
async function runRetryEscalatedAction(options) {
|
|
7029
7102
|
const { runId, dryRun, outputFormat, projectRoot, concurrency, pack: packName, registry: injectedRegistry } = options;
|
|
7030
7103
|
const dbRoot = await resolveMainRepoRoot(projectRoot);
|
|
@@ -7122,7 +7195,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
7122
7195
|
});
|
|
7123
7196
|
}
|
|
7124
7197
|
} catch (err) {
|
|
7125
|
-
logger$
|
|
7198
|
+
logger$3.warn({ err }, "Failed to record token usage");
|
|
7126
7199
|
}
|
|
7127
7200
|
});
|
|
7128
7201
|
if (outputFormat === "human") {
|
|
@@ -7144,7 +7217,7 @@ async function runRetryEscalatedAction(options) {
|
|
|
7144
7217
|
const msg = err instanceof Error ? err.message : String(err);
|
|
7145
7218
|
if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
|
|
7146
7219
|
else process.stderr.write(`Error: ${msg}\n`);
|
|
7147
|
-
logger$
|
|
7220
|
+
logger$3.error({ err }, "retry-escalated failed");
|
|
7148
7221
|
return 1;
|
|
7149
7222
|
} finally {
|
|
7150
7223
|
try {
|
|
@@ -7423,6 +7496,170 @@ function registerHistoryCommand(program) {
|
|
|
7423
7496
|
});
|
|
7424
7497
|
}
|
|
7425
7498
|
|
|
7499
|
+
//#endregion
|
|
7500
|
+
//#region src/cli/commands/repo-map.ts
|
|
7501
|
+
const logger$2 = createLogger("cli:repo-map");
|
|
7502
|
+
/** Validate that a symbol name contains only safe identifier characters. */
|
|
7503
|
+
function isValidSymbolName(name) {
|
|
7504
|
+
return /^[a-zA-Z0-9_]+$/.test(name);
|
|
7505
|
+
}
|
|
7506
|
+
function registerRepoMapCommand(program) {
|
|
7507
|
+
// Register the `substrate repo-map` command: shows staleness status (--show, the
// default), triggers an incremental index update (--update), looks up a symbol
// (--query), or previews story-context injection (--dry-run). All modes except
// the file-backend error path require the Dolt backend.
program.command("repo-map").description("Show, update, or query the repo-map symbol index").option("--show", "Show repo-map staleness status").option("--update", "Trigger an incremental repo-map update (Dolt backend only)").option("--query <symbol>", "Query the repo-map for a specific symbol name").option("--dry-run <storyFile>", "Preview repo-map context that would be injected for a story file").option("--output-format <format>", "Output format: text or json", "text").action(async (options) => {
  // Validate --query up front so an injection-unsafe / malformed symbol name
  // never reaches the query engine. Empty strings also fail this regex.
  if (options.query !== void 0 && !isValidSymbolName(options.query)) {
    process.stderr.write(`Error: --query value must match /^[a-zA-Z0-9_]+$/ (got: ${options.query})\n`);
    process.exitCode = 1;
    return;
  }
  // State lives under <repo-root>/.substrate/state; a .dolt directory inside
  // it marks the Dolt backend as enabled.
  const dbRoot = await resolveMainRepoRoot(process.cwd());
  const statePath = join$1(dbRoot, ".substrate", "state");
  const doltStatePath = join$1(statePath, ".dolt");
  const isDolt = existsSync$1(doltStatePath);
  // Emits a backend-unavailable error (JSON object or stderr text depending on
  // --output-format) and sets a failing exit code without throwing.
  const notDoltError = (flag) => {
    if (options.outputFormat === "json") console.log(JSON.stringify({
      backend: "file",
      status: "unavailable",
      hint: "Repo-map requires the Dolt backend. Run `substrate init --dolt` to enable."
    }));
    else process.stderr.write(`Error: ${flag} requires the Dolt backend. Run \`substrate init --dolt\` to enable.\n`);
    process.exitCode = 1;
  };
  if (!isDolt) {
    // Report which flag the user actually passed; --show is the implicit default.
    const flag = options.update ? "--update" : options.query ? "--query" : options.dryRun ? "--dry-run" : "--show";
    notDoltError(flag);
    return;
  }
  // Wire up the Dolt-backed repositories and query engine used by every mode below.
  const doltClient = new DoltClient({ repoPath: statePath });
  const symbolRepo = new DoltSymbolRepository(doltClient, logger$2);
  const metaRepo = new DoltRepoMapMetaRepository(doltClient);
  const repoMapModule = new RepoMapModule(metaRepo, logger$2);
  const queryEngine = new RepoMapQueryEngine(symbolRepo, logger$2);
  // --show is also the default when no other mode flag is given.
  if (options.show === true || !options.update && !options.query && !options.dryRun) {
    const meta = await metaRepo.getMeta();
    const staleResult = await repoMapModule.checkStaleness();
    // Staleness: "unknown" when no metadata exists, "stale" when the staleness
    // check reports a non-null result, otherwise "current".
    let staleness = "unknown";
    if (meta === null) staleness = "unknown";
    else if (staleResult !== null) staleness = "stale";
    else staleness = "current";
    // NOTE(review): counts by loading every symbol row; presumably acceptable
    // at repo-map scale — confirm there is no cheaper count query.
    const symbolCount = meta !== null ? (await symbolRepo.getSymbols()).length : 0;
    if (options.outputFormat === "json") console.log(JSON.stringify({
      symbolCount,
      commitSha: meta?.commitSha ?? null,
      fileCount: meta?.fileCount ?? 0,
      updatedAt: meta?.updatedAt?.toISOString() ?? null,
      staleness
    }));
    else if (meta !== null) {
      console.log(`Repo-map: ${symbolCount} symbols, ${meta.fileCount} files`);
      console.log(`Commit: ${meta.commitSha}`);
      console.log(`Updated: ${meta.updatedAt.toISOString()}`);
      if (staleness === "stale") console.log("Status: STALE (run `substrate repo-map --update` to refresh)");
      else console.log("Status: UP TO DATE");
    } else console.log("Repo-map: no data stored yet");
    return;
  }
  if (options.update === true) {
    logger$2.info("repo-map --update: triggering incremental update");
    // Build the parse/storage pipeline and re-index only what changed since
    // the last recorded commit.
    const gitClient = new GitClient(logger$2);
    const grammarLoader = new GrammarLoader(logger$2);
    const parser = new SymbolParser(grammarLoader, logger$2);
    const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$2);
    await storage.incrementalUpdate(dbRoot, parser);
    // Re-read metadata and symbols so the summary reflects the fresh state.
    const meta = await metaRepo.getMeta();
    const symbolCount = (await symbolRepo.getSymbols()).length;
    if (options.outputFormat === "json") console.log(JSON.stringify({
      result: "updated",
      symbolCount,
      fileCount: meta?.fileCount ?? 0,
      commitSha: meta?.commitSha ?? null,
      updatedAt: meta?.updatedAt?.toISOString() ?? null
    }));
    else console.log(`Repo-map updated: ${symbolCount} symbols across ${meta?.fileCount ?? 0} files`);
    return;
  }
  if (options.query !== void 0) {
    logger$2.debug({ symbol: options.query }, "repo-map --query");
    // Single-symbol lookup with a fixed 4000-token budget for the result set.
    const result = await queryEngine.query({
      symbols: [options.query],
      maxTokens: 4e3
    });
    if (options.outputFormat === "json") console.log(JSON.stringify(result, null, 2));
    else if (result.symbolCount === 0) console.log(`No symbols found matching '${options.query}'.`);
    else {
      console.log(`Found ${result.symbolCount} symbol(s) for '${options.query}':`);
      for (const sym of result.symbols) console.log(` ${sym.filePath}:${sym.lineNumber} ${sym.symbolType} ${sym.symbolName}`);
    }
    return;
  }
  if (options.dryRun !== void 0) {
    // Read the story file; any read failure (missing file, permissions) is
    // reported uniformly rather than surfacing the raw fs error.
    let storyContent;
    try {
      storyContent = await readFile$1(options.dryRun, "utf-8");
    } catch {
      process.stderr.write(`Error: Cannot read story file: ${options.dryRun}\n`);
      process.exitCode = 1;
      return;
    }
    // Build the injection preview with a 2000-token budget; --dry-run always
    // emits JSON regardless of --output-format.
    const injector = new RepoMapInjector(queryEngine, logger$2);
    const injectionResult = await injector.buildContext(storyContent, 2e3);
    console.log(JSON.stringify({
      text: injectionResult.text,
      symbolCount: injectionResult.symbolCount,
      truncated: injectionResult.truncated
    }));
    return;
  }
});
|
|
7612
|
+
}
|
|
7613
|
+
|
|
7614
|
+
//#endregion
|
|
7615
|
+
//#region src/cli/commands/routing.ts
|
|
7616
|
+
const logger$1 = createLogger("cli:routing");
/**
 * Registers the `substrate routing` command.
 *
 * Without flags it prints a one-line summary of how many auto-tune log entries
 * exist; with --history it prints every recorded model change, newest first.
 * Both modes honour --output-format (text | json).
 */
function registerRoutingCommand(program) {
  program.command("routing").description("Show routing configuration and auto-tune history").option("--history", "Show the routing auto-tune log (model changes applied)").option("--output-format <format>", "Output format: text or json", "text").action(async (opts) => {
    // State lives under <repo-root>/.substrate/state; a .dolt directory
    // inside it selects the Dolt backend, otherwise the file backend is used.
    const dbRoot = await resolveMainRepoRoot(process.cwd());
    const statePath = join$1(dbRoot, ".substrate", "state");
    const doltStatePath = join$1(statePath, ".dolt");
    const backend = existsSync$1(doltStatePath) ? "dolt" : "file";
    const store = createStateStore({
      backend,
      basePath: statePath
    });
    try {
      await store.initialize();
      const wantsJson = opts.outputFormat === "json";
      if (opts.history === true) {
        logger$1.debug("routing --history: fetching tune log");
        const logValue = await store.getMetric("global", "routing_tune_log");
        // Newest entries first; a missing or malformed metric yields an empty list.
        const entries = Array.isArray(logValue) ? [...logValue].sort((x, y) => y.appliedAt.localeCompare(x.appliedAt)) : [];
        if (wantsJson) {
          console.log(JSON.stringify({ entries }, null, 2));
          return;
        }
        if (entries.length === 0) {
          console.log("No routing auto-tune history found.");
          return;
        }
        console.log("Routing auto-tune history:");
        entries.forEach((entry) => {
          console.log(` ${entry.appliedAt} phase=${entry.phase} ${entry.oldModel} → ${entry.newModel} savings=${entry.estimatedSavingsPct.toFixed(1)}% run=${entry.runId}`);
        });
        return;
      }
      // Summary mode: only report how many auto-tune entries are stored.
      const logRaw = await store.getMetric("global", "routing_tune_log");
      const entryCount = Array.isArray(logRaw) ? logRaw.length : 0;
      if (wantsJson) {
        console.log(JSON.stringify({ tuneLogEntries: entryCount }));
      } else {
        const suffix = entryCount === 1 ? "y" : "ies";
        console.log(`Routing auto-tune log: ${entryCount} entr${suffix}`);
        if (entryCount === 0) console.log("No auto-tune changes applied yet. Use --history for details.");
        else console.log("Run `substrate routing --history` to see full history.");
      }
    } finally {
      // Always release the state store, even if a mode above threw.
      await store.close();
    }
  });
}
|
|
7662
|
+
|
|
7426
7663
|
//#endregion
|
|
7427
7664
|
//#region src/cli/index.ts
|
|
7428
7665
|
process.setMaxListeners(20);
|
|
@@ -7475,6 +7712,8 @@ async function createProgram() {
|
|
|
7475
7712
|
registerDiffCommand(program);
|
|
7476
7713
|
registerHistoryCommand(program);
|
|
7477
7714
|
registerMigrateCommand(program);
|
|
7715
|
+
registerRepoMapCommand(program);
|
|
7716
|
+
registerRoutingCommand(program);
|
|
7478
7717
|
registerCostCommand(program, version);
|
|
7479
7718
|
registerMonitorCommand(program, version);
|
|
7480
7719
|
registerMergeCommand(program);
|
|
@@ -7487,8 +7726,8 @@ async function createProgram() {
|
|
|
7487
7726
|
/** Fire-and-forget startup version check (story 8.3, AC3/AC5) */
|
|
7488
7727
|
function checkForUpdatesInBackground(currentVersion) {
|
|
7489
7728
|
if (process.env.SUBSTRATE_NO_UPDATE_CHECK === "1") return;
|
|
7490
|
-
import("../upgrade-
|
|
7491
|
-
const { createVersionManager } = await import("../version-manager-impl-
|
|
7729
|
+
import("../upgrade-njy4XENS.js").then(async () => {
|
|
7730
|
+
const { createVersionManager } = await import("../version-manager-impl-QwroczYS.js");
|
|
7492
7731
|
const vm = createVersionManager();
|
|
7493
7732
|
const result = await vm.checkForUpdates();
|
|
7494
7733
|
if (result.updateAvailable) {
|