substrate-ai 0.3.6 → 0.4.0
This diff shows the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- package/dist/adapter-registry-CctHT8ZH.js +3 -0
- package/dist/{adapter-registry-a2WX0qo_.js → adapter-registry-rSOJ9Kvz.js} +3 -4
- package/dist/cli/index.js +379 -125
- package/dist/cli/templates/claude-md-substrate-section.md +6 -0
- package/dist/{decisions-CbysnTi5.js → decisions-D7Ao_KcL.js} +1 -1
- package/dist/{decisions-CdpiJIm5.js → decisions-Db8GTbH2.js} +1 -1
- package/dist/{experimenter-jto3orYl.js → experimenter-CvxtqzXz.js} +4 -4
- package/dist/{git-utils-UbKLSGsD.js → git-utils-C-fdrHF_.js} +1 -1
- package/dist/index.d.ts +41 -1
- package/dist/index.js +1 -1
- package/dist/{operational-DisxqtjC.js → operational-C0_y8DAs.js} +1 -1
- package/dist/routing-CZfJB3y9.js +477 -0
- package/dist/routing-DWCBjrt7.js +4 -0
- package/dist/run-CQJP37ZC.js +9 -0
- package/dist/{run-DzzmgEOd.js → run-cqQmW8wL.js} +6674 -4243
- package/dist/{upgrade-DTzeenA-.js → upgrade-C8_VcI8B.js} +2 -2
- package/dist/{upgrade-BlJKjr6I.js → upgrade-njy4XENS.js} +2 -2
- package/dist/{version-manager-impl-zsJjBhak.js → version-manager-impl-DTlmGvHb.js} +1 -1
- package/dist/{version-manager-impl-BsHqAeGT.js → version-manager-impl-QwroczYS.js} +1 -1
- package/package.json +7 -1
- package/packs/bmad/manifest.yaml +1 -0
- package/packs/bmad/prompts/code-review.md +2 -0
- package/packs/bmad/prompts/dev-story.md +2 -0
- package/dist/adapter-registry-BBn0Rmqj.js +0 -3
- package/dist/run-D3rMGI6c.js +0 -8
package/dist/cli/index.js
CHANGED
@@ -1,14 +1,15 @@
 #!/usr/bin/env node
-import { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltNotInstalled, FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-DzzmgEOd.js";
+import { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-cqQmW8wL.js";
 import { createLogger } from "../logger-D2fS2ccL.js";
-import { AdapterRegistry } from "../adapter-registry-a2WX0qo_.js";
+import { AdapterRegistry } from "../adapter-registry-rSOJ9Kvz.js";
 import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-DtZW1maj.js";
 import { ConfigError, createEventBus } from "../helpers-BihqWgVe.js";
-import {
-import {
-import {
-import "../
-import
+import { RoutingRecommender } from "../routing-CZfJB3y9.js";
+import { addTokenUsage, createDecision, createPipelineRun, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestRun, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-Db8GTbH2.js";
+import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-C0_y8DAs.js";
+import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-C-fdrHF_.js";
+import "../version-manager-impl-DTlmGvHb.js";
+import { registerUpgradeCommand } from "../upgrade-C8_VcI8B.js";
 import { Command } from "commander";
 import { fileURLToPath } from "url";
 import { dirname, join, resolve } from "path";
@@ -19,7 +20,7 @@ import { createRequire } from "node:module";
 import * as path$1 from "node:path";
 import { isAbsolute, join as join$1 } from "node:path";
 import Database from "better-sqlite3";
-import { access as access$1 } from "node:fs/promises";
+import { access as access$1, readFile as readFile$1 } from "node:fs/promises";
 import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, writeFileSync as writeFileSync$1 } from "node:fs";
 import { createInterface } from "node:readline";
 import { homedir } from "os";
@@ -259,7 +260,7 @@ function registerAdaptersCommand(program, version, registry) {
 
 //#endregion
 //#region src/cli/commands/init.ts
-const logger$
+const logger$18 = createLogger("init");
 const __dirname = dirname(new URL(import.meta.url).pathname);
 const INIT_EXIT_SUCCESS = 0;
 const INIT_EXIT_ERROR = 1;
@@ -280,7 +281,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
 const version = resolveBmadMethodVersion();
 if (force && bmadExists) process.stderr.write(`Warning: Replacing existing _bmad/ framework with bmad-method@${version}\n`);
 process.stdout.write(`Scaffolding BMAD framework from bmad-method@${version}\n`);
-logger$
+logger$18.info({
 version,
 dest: bmadDest
 }, "Scaffolding BMAD framework");
@@ -290,7 +291,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
 const destDir = join(bmadDest, dir);
 mkdirSync(destDir, { recursive: true });
 cpSync(srcDir, destDir, { recursive: true });
-logger$
+logger$18.info({
 dir,
 dest: destDir
 }, "Scaffolded BMAD framework directory");
@@ -309,7 +310,7 @@ async function scaffoldBmadFramework(projectRoot, force, outputFormat) {
 "document_output_language: English"
 ].join("\n") + "\n";
 await writeFile(configFile, configStub, "utf8");
-logger$
+logger$18.info({ configFile }, "Generated _bmad/_config/config.yaml stub");
 }
 }
 const CLAUDE_MD_START_MARKER = "<!-- substrate:start -->";
@@ -324,7 +325,7 @@ async function scaffoldClaudeMd(projectRoot) {
 try {
 sectionContent = await readFile(templatePath, "utf8");
 } catch {
-logger$
+logger$18.warn({ templatePath }, "CLAUDE.md substrate section template not found; skipping");
 return;
 }
 if (!sectionContent.endsWith("\n")) sectionContent += "\n";
@@ -342,7 +343,7 @@ async function scaffoldClaudeMd(projectRoot) {
 newContent = existingContent + separator + sectionContent;
 }
 await writeFile(claudeMdPath, newContent, "utf8");
-logger$
+logger$18.info({ claudeMdPath }, "Wrote substrate section to CLAUDE.md");
 }
 async function scaffoldStatuslineScript(projectRoot) {
 const pkgRoot = findPackageRoot(__dirname);
@@ -353,7 +354,7 @@ async function scaffoldStatuslineScript(projectRoot) {
 try {
 content = await readFile(templatePath, "utf8");
 } catch {
-logger$
+logger$18.warn({ templatePath }, "statusline.sh template not found; skipping");
 return;
 }
 const claudeDir = join(projectRoot, ".claude");
@@ -361,7 +362,7 @@ async function scaffoldStatuslineScript(projectRoot) {
 mkdirSync(claudeDir, { recursive: true });
 await writeFile(statuslinePath, content, "utf8");
 chmodSync(statuslinePath, 493);
-logger$
+logger$18.info({ statuslinePath }, "Wrote .claude/statusline.sh");
 }
 async function scaffoldClaudeSettings(projectRoot) {
 const claudeDir = join(projectRoot, ".claude");
@@ -377,7 +378,7 @@ async function scaffoldClaudeSettings(projectRoot) {
 if (!merged["$schema"]) merged["$schema"] = "https://json.schemastore.org/claude-code-settings.json";
 mkdirSync(claudeDir, { recursive: true });
 await writeFile(settingsPath, JSON.stringify(merged, null, 2) + "\n", "utf8");
-logger$
+logger$18.info({ settingsPath }, "Wrote substrate settings to .claude/settings.json");
 }
 function resolveBmadMethodInstallerLibPath(fromDir = __dirname) {
 try {
@@ -447,7 +448,7 @@ async function compileBmadAgents(bmadDir) {
 writeFileSync(mdPath, result.xml, "utf-8");
 compiled++;
 } catch (compileErr) {
-logger$
+logger$18.debug({
 err: compileErr,
 file
 }, "Failed to compile agent YAML");
@@ -468,9 +469,9 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
 const _require = createRequire(join(__dirname, "synthetic.js"));
 try {
 const compiledCount = await compileBmadAgents(bmadDir);
-if (compiledCount > 0) logger$
+if (compiledCount > 0) logger$18.info({ compiledCount }, "Compiled agent YAML files to MD");
 } catch (compileErr) {
-logger$
+logger$18.warn({ err: compileErr }, "Agent compilation failed; agent commands may be incomplete");
 }
 const { AgentCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "agent-command-generator.js"));
 const { WorkflowCommandGenerator } = _require(join(installerLibPath, "ide", "shared", "workflow-command-generator.js"));
@@ -482,7 +483,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
 const manifestGen = new ManifestGenerator();
 await manifestGen.generateManifests(bmadDir, allModules, [], { ides: ["claude-code"] });
 } catch (manifestErr) {
-logger$
+logger$18.warn({ err: manifestErr }, "ManifestGenerator failed; workflow/task commands may be incomplete");
 }
 const commandsDir = join(projectRoot, ".claude", "commands");
 mkdirSync(commandsDir, { recursive: true });
@@ -498,7 +499,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
 const taskToolCount = await taskToolGen.writeDashArtifacts(commandsDir, taskToolArtifacts);
 const total = agentCount + workflowCount + taskToolCount;
 if (outputFormat !== "json") process.stdout.write(`Generated ${String(total)} Claude Code commands (${String(agentCount)} agents, ${String(workflowCount)} workflows, ${String(taskToolCount)} tasks/tools)\n`);
-logger$
+logger$18.info({
 agentCount,
 workflowCount,
 taskToolCount,
@@ -508,7 +509,7 @@ async function scaffoldClaudeCommands(projectRoot, outputFormat) {
 } catch (err) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat !== "json") process.stderr.write(`Warning: .claude/commands/ generation failed: ${msg}\n`);
-logger$
+logger$18.warn({ err }, "scaffoldClaudeCommands failed; init continues");
 }
 }
 const PROVIDER_DEFAULTS = DEFAULT_CONFIG.providers;
@@ -583,7 +584,7 @@ async function runInitAction(options) {
 discoveryReport = await registry.discoverAndRegister();
 } catch (err) {
 const message = err instanceof Error ? err.message : String(err);
-logger$
+logger$18.error({ err }, "Adapter discovery failed");
 if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, `Adapter discovery failed: ${message}`) + "\n");
 else process.stderr.write(` Error: adapter discovery failed — ${message}\n`);
 return INIT_EXIT_ERROR;
@@ -632,12 +633,12 @@ async function runInitAction(options) {
 return INIT_EXIT_ERROR;
 }
 if (force && existsSync(localManifest)) {
-logger$
+logger$18.info({ pack: packName }, "Replacing existing pack with bundled version");
 process.stderr.write(`Warning: Replacing existing pack '${packName}' with bundled version\n`);
 }
 mkdirSync(dirname(packPath), { recursive: true });
 cpSync(bundledPackPath, packPath, { recursive: true });
-logger$
+logger$18.info({
 pack: packName,
 dest: packPath
 }, "Scaffolded methodology pack");
@@ -674,17 +675,17 @@ async function runInitAction(options) {
 process.stderr.write(`${err.message}\n`);
 return INIT_EXIT_ERROR;
 }
-logger$
+logger$18.debug("Dolt not installed, skipping auto-init");
 } else {
 const msg = err instanceof Error ? err.message : String(err);
 if (doltMode === "force") {
 process.stderr.write(`✗ Dolt initialization failed: ${msg}\n`);
 return INIT_EXIT_ERROR;
 }
-logger$
+logger$18.warn({ error: msg }, "Dolt auto-init failed (non-blocking)");
 }
 }
-else logger$
+else logger$18.debug("Dolt step was skipped (--no-dolt)");
 const successMsg = `Pack '${packName}' and database initialized successfully at ${dbPath}`;
 if (outputFormat === "json") process.stdout.write(formatOutput({
 pack: packName,
@@ -708,6 +709,7 @@ async function runInitAction(options) {
 process.stdout.write(` .claude/commands/ /substrate-run, /substrate-supervisor, /substrate-metrics\n`);
 process.stdout.write(` .substrate/ config, database, routing policy\n`);
 if (doltInitialized) process.stdout.write(`✓ Dolt state store initialized at .substrate/state/\n`);
+else if (doltMode !== "skip") process.stdout.write(`ℹ Dolt not detected — install Dolt for versioned state, \`substrate diff\`, and observability persistence. See: https://docs.dolthub.com/introduction/installation\n`);
 process.stdout.write("\n Next steps:\n 1. Start a Claude Code session in this project\n 2. Tell Claude: \"Run the substrate pipeline\"\n 3. Or use the /substrate-run slash command for a guided run\n");
 }
 return INIT_EXIT_SUCCESS;
@@ -715,7 +717,7 @@ async function runInitAction(options) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
 else process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$18.error({ err }, "init failed");
 return INIT_EXIT_ERROR;
 }
 }
@@ -738,7 +740,7 @@ function registerInitCommand(program, _version, registry) {
 
 //#endregion
 //#region src/cli/commands/config.ts
-const logger$
+const logger$17 = createLogger("config-cmd");
 const CONFIG_EXIT_SUCCESS = 0;
 const CONFIG_EXIT_ERROR = 1;
 const CONFIG_EXIT_INVALID = 2;
@@ -764,7 +766,7 @@ async function runConfigShow(opts = {}) {
 return CONFIG_EXIT_INVALID;
 }
 const message = err instanceof Error ? err.message : String(err);
-logger$
+logger$17.error({ err }, "Failed to load configuration");
 process.stderr.write(` Error loading configuration: ${message}\n`);
 return CONFIG_EXIT_ERROR;
 }
@@ -838,7 +840,7 @@ async function runConfigExport(opts = {}) {
 return CONFIG_EXIT_INVALID;
 }
 const message = err instanceof Error ? err.message : String(err);
-logger$
+logger$17.error({ err }, "Failed to load configuration");
 process.stderr.write(`Error loading configuration: ${message}\n`);
 return CONFIG_EXIT_ERROR;
 }
@@ -992,7 +994,7 @@ function registerConfigCommand(program, _version) {
 
 //#endregion
 //#region src/cli/commands/resume.ts
-const logger$
+const logger$16 = createLogger("resume-cmd");
 async function runResumeAction(options) {
 const { runId: specifiedRunId, stopAfter, outputFormat, projectRoot, concurrency, pack: packName, registry } = options;
 if (stopAfter !== void 0 && !VALID_PHASES.includes(stopAfter)) {
@@ -1075,7 +1077,7 @@ async function runResumeAction(options) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
 else process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$16.error({ err }, "auto resume failed");
 return 1;
 } finally {
 try {
@@ -1197,6 +1199,19 @@ async function runFullPipelineFromPhase(options) {
 }
 if (outputFormat === "human") process.stdout.write(`[SOLUTIONING] Complete\n`);
 } else if (currentPhase === "implementation") {
+let telemetryEnabled = false;
+let telemetryPort = 4318;
+try {
+const configSystem = createConfigSystem({ projectConfigDir: dbDir });
+await configSystem.load();
+const cfg = configSystem.getConfig();
+if (cfg.telemetry?.enabled === true) {
+telemetryEnabled = true;
+telemetryPort = cfg.telemetry.port ?? 4318;
+}
+} catch {}
+const ingestionServer = telemetryEnabled ? new IngestionServer({ port: telemetryPort }) : void 0;
+const telemetryPersistence = telemetryEnabled ? new TelemetryPersistence(db) : void 0;
 const orchestrator = createImplementationOrchestrator({
 db,
 pack,
@@ -1208,7 +1223,9 @@ async function runFullPipelineFromPhase(options) {
 maxReviewCycles: 2,
 pipelineRunId: runId
 },
-projectRoot
+projectRoot,
+...ingestionServer !== void 0 ? { ingestionServer } : {},
+...telemetryPersistence !== void 0 ? { telemetryPersistence } : {}
 });
 eventBus.on("orchestrator:story-phase-complete", (payload) => {
 try {
@@ -1225,7 +1242,7 @@ async function runFullPipelineFromPhase(options) {
 });
 }
 } catch (err) {
-logger$
+logger$16.warn({ err }, "Failed to record token usage");
 }
 });
 const storyKeys = resolveStoryKeys(db, projectRoot, { pipelineRunId: runId });
@@ -1280,7 +1297,7 @@ async function runFullPipelineFromPhase(options) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
 else process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$16.error({ err }, "pipeline from phase failed");
 return 1;
 } finally {
 try {
@@ -1306,7 +1323,7 @@ function registerResumeCommand(program, _version = "0.0.0", projectRoot = proces
 
 //#endregion
 //#region src/cli/commands/status.ts
-const logger$
+const logger$15 = createLogger("status-cmd");
 async function runStatusAction(options) {
 const { outputFormat, runId, projectRoot, stateStore, history } = options;
 if (history === true) {
@@ -1362,7 +1379,7 @@ async function runStatusAction(options) {
 if (stateStore) try {
 storeStories = await stateStore.queryStories({});
 } catch (err) {
-logger$
+logger$15.debug({ err }, "StateStore query failed, continuing without store data");
 }
 if (outputFormat === "json") {
 const statusOutput = buildPipelineStatusOutput(run, tokenSummary, decisionsCount, storiesCount);
@@ -1465,7 +1482,7 @@ async function runStatusAction(options) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
 else process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$15.error({ err }, "status action failed");
 return 1;
 } finally {
 try {
@@ -1909,7 +1926,7 @@ Analyze thoroughly and return ONLY the JSON array with no additional text.`;
 
 //#endregion
 //#region src/cli/commands/amend.ts
-const logger$
+const logger$14 = createLogger("amend-cmd");
 /**
 * Detect and apply supersessions after a phase completes in an amendment run.
 *
@@ -1940,7 +1957,7 @@ function runPostPhaseSupersessionDetection(db, amendmentRunId, currentPhase, han
 });
 } catch (err) {
 const msg = err instanceof Error ? err.message : String(err);
-logger$
+logger$14.warn({
 err,
 originalId: parentMatch.id,
 supersedingId: newDec.id
@@ -2074,7 +2091,7 @@ async function runAmendAction(options) {
 for (let i = startIdx; i < phaseOrder.length; i++) {
 const currentPhase = phaseOrder[i];
 const amendmentContext = handler.loadContextForPhase(currentPhase);
-logger$
+logger$14.info({
 phase: currentPhase,
 amendmentContextLen: amendmentContext.length
 }, "Amendment context loaded for phase");
@@ -2194,7 +2211,7 @@ async function runAmendAction(options) {
 } catch (err) {
 const msg = err instanceof Error ? err.message : String(err);
 process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$14.error({ err }, "amend failed");
 return 1;
 } finally {
 try {
@@ -2284,7 +2301,7 @@ function defaultSupervisorDeps() {
 if (cached === null) {
 const { AdapterRegistry: AR } = await import(
 /* @vite-ignore */
-"../adapter-registry-
+"../adapter-registry-CctHT8ZH.js"
 );
 cached = new AR();
 await cached.discoverAndRegister();
@@ -2667,11 +2684,11 @@ async function runSupervisorAction(options, deps = {}) {
 try {
 const { createExperimenter } = await import(
 /* @vite-ignore */
-"../experimenter-jto3orYl.js"
+"../experimenter-CvxtqzXz.js"
 );
 const { getLatestRun: getLatest } = await import(
 /* @vite-ignore */
-"../decisions-CbysnTi5.js"
+"../decisions-D7Ao_KcL.js"
 );
 const dbPath = join(projectRoot, ".substrate", "substrate.db");
 const expDbWrapper = new DatabaseWrapper(dbPath);
@@ -2681,7 +2698,7 @@ async function runSupervisorAction(options, deps = {}) {
 const expDb = expDbWrapper.db;
 const { runRunAction: runPipeline } = await import(
 /* @vite-ignore */
-"../run-
+"../run-CQJP37ZC.js"
 );
 const runStoryFn = async (opts) => {
 const exitCode = await runPipeline({
@@ -2928,7 +2945,7 @@ function registerSupervisorCommand(program, _version = "0.0.0", projectRoot = pr
 
 //#endregion
 //#region src/cli/commands/metrics.ts
-const logger$
+const logger$13 = createLogger("metrics-cmd");
 async function openTelemetryDb(dbPath) {
 if (!existsSync(dbPath)) return null;
 try {
@@ -3000,7 +3017,7 @@ function printCategoryTable(stats, label) {
 }
 }
 async function runMetricsAction(options) {
-const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis, sprint, story, taskType, since, aggregate, efficiency, recommendations, turns, consumers, categories, compareStories } = options;
+const { outputFormat, projectRoot, limit = 10, compare, tagBaseline, analysis, sprint, story, taskType, since, aggregate, efficiency, recommendations, turns, consumers, categories, compareStories, routingRecommendations } = options;
 const telemetryModes = [
 efficiency,
 recommendations,
@@ -3139,6 +3156,59 @@ async function runMetricsAction(options) {
 } catch {}
 }
 }
+if (routingRecommendations === true) {
+const dbRoot$1 = await resolveMainRepoRoot(projectRoot);
+const dbDir = join(dbRoot$1, ".substrate");
+const doltStatePath = join(dbDir, "state", ".dolt");
+const doltExists = existsSync(doltStatePath);
+const stateBackend = doltExists ? "dolt" : "file";
+const stateBasePath = join(dbDir, "state");
+const stateStore = createStateStore({
+backend: stateBackend,
+basePath: stateBasePath
+});
+await stateStore.initialize();
+try {
+const runIndexRaw = await stateStore.getMetric("__global__", "phase_token_breakdown_runs");
+const runIds = Array.isArray(runIndexRaw) ? runIndexRaw : [];
+const recentRunIds = runIds.slice(-20);
+const breakdowns = [];
+for (const runId of recentRunIds) try {
+const raw = await stateStore.getMetric(runId, "phase_token_breakdown");
+if (raw !== void 0 && raw !== null) {
+const parsed = typeof raw === "string" ? JSON.parse(raw) : raw;
+breakdowns.push(parsed);
+}
+} catch {}
+const routingConfigPath = join(dbDir, "routing.yml");
+let routingConfig = null;
+if (existsSync(routingConfigPath)) try {
+const { loadModelRoutingConfig } = await import("../routing-DWCBjrt7.js");
+routingConfig = loadModelRoutingConfig(routingConfigPath);
+} catch {}
+if (routingConfig === null) routingConfig = {
+version: 1,
+phases: {},
+baseline_model: "claude-sonnet"
+};
+const recommender = new RoutingRecommender(createLogger("routing:recommender"));
+const analysis$1 = recommender.analyze(breakdowns, routingConfig);
+if (outputFormat === "json") process.stdout.write(formatOutput({
+recommendations: analysis$1.recommendations,
+analysisRuns: analysis$1.analysisRuns,
+insufficientData: analysis$1.insufficientData
+}, "json", true) + "\n");
+else {
+process.stdout.write(`Routing Recommendations:\n`);
+if (analysis$1.insufficientData) process.stdout.write(`No recommendations yet — need at least 3 pipeline runs\n`);
+else if (analysis$1.recommendations.length === 0) process.stdout.write(` No recommendations — all phases are in the neutral zone\n`);
+else for (const rec of analysis$1.recommendations) process.stdout.write(` ${rec.phase} | ${rec.currentModel} → ${rec.suggestedModel} | est. savings: ${Math.round(rec.estimatedSavingsPct)}%\n`);
+}
+} finally {
+await stateStore.close().catch(() => {});
+}
+return 0;
+}
 if (analysis !== void 0) {
 const dbRoot$1 = await resolveMainRepoRoot(projectRoot);
 const reportBase = join(dbRoot$1, "_bmad-output", "supervisor-reports", `${analysis}-analysis`);
@@ -3236,7 +3306,7 @@ async function runMetricsAction(options) {
 doltMetrics = await stateStore.queryMetrics(doltFilter);
 await stateStore.close();
 } catch (doltErr) {
-logger$
+logger$13.warn({ err: doltErr }, "StateStore query failed — falling back to SQLite metrics only");
 }
 const storyMetricDecisions = getDecisionsByCategory(db, STORY_METRICS);
 const storyMetrics = storyMetricDecisions.map((d) => {
@@ -3267,9 +3337,21 @@ async function runMetricsAction(options) {
 };
 }
 });
+const phaseBreakdownMap = {};
+try {
+const kvStore = new FileStateStore({ basePath: join(dbRoot, ".substrate") });
+for (const run of runs) {
+const raw = await kvStore.getMetric(run.run_id, "phase_token_breakdown");
+phaseBreakdownMap[run.run_id] = raw !== void 0 ? raw : null;
+}
+} catch {}
 if (outputFormat === "json") {
+const runsWithBreakdown = runs.map((run) => ({
+...run,
+phase_token_breakdown: phaseBreakdownMap[run.run_id] ?? null
+}));
 const jsonPayload = {
-runs,
+runs: runsWithBreakdown,
 story_metrics: storyMetrics
 };
 if (doltMetrics !== void 0) if (aggregate) {
@@ -3307,6 +3389,11 @@ async function runMetricsAction(options) {
 process.stdout.write(` Stories: attempted=${run.stories_attempted} succeeded=${run.stories_succeeded} failed=${run.stories_failed} escalated=${run.stories_escalated}\n`);
 process.stdout.write(` Tokens: ${(run.total_input_tokens ?? 0).toLocaleString()} in / ${(run.total_output_tokens ?? 0).toLocaleString()} out $${(run.total_cost_usd ?? 0).toFixed(4)}\n`);
 process.stdout.write(` Cycles: ${run.total_review_cycles} | Dispatches: ${run.total_dispatches} | Concurrency: ${run.concurrency_setting}\n`);
+const breakdown = phaseBreakdownMap[run.run_id];
+if (breakdown !== null && breakdown !== void 0 && breakdown.entries.length > 0) {
+process.stdout.write(" Phase Token Breakdown:\n");
+for (const entry of breakdown.entries) process.stdout.write(` ${entry.phase.padEnd(10)} | ${entry.model.padEnd(30)} | in: ${entry.inputTokens} | out: ${entry.outputTokens} | dispatches: ${entry.dispatchCount}\n`);
+}
 }
 }
 if (storyMetrics.length > 0) {
@@ -3362,7 +3449,7 @@ async function runMetricsAction(options) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
 else process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$13.error({ err }, "metrics action failed");
 return 1;
 } finally {
 try {
@@ -3371,7 +3458,7 @@ async function runMetricsAction(options) {
 }
 }
 function registerMetricsCommand(program, _version = "0.0.0", projectRoot = process.cwd()) {
-program.command("metrics").description("Show historical pipeline run metrics and cross-run comparison").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").option("--limit <n>", "Number of runs to show (default: 10)", (v) => parseInt(v, 10), 10).option("--compare <run-id-a,run-id-b>", "Compare two runs side-by-side (comma-separated IDs, e.g. abc123,def456)").option("--tag-baseline <run-id>", "Mark a run as the performance baseline").option("--analysis <run-id>", "Read and output the analysis report for the specified run (AC5 of Story 17-3)").option("--sprint <sprint>", "Filter StateStore metrics by sprint (e.g. sprint-1)").option("--story <story-key>", "Filter StateStore metrics by story key (e.g. 26-1)").option("--task-type <type>", "Filter StateStore metrics by task type (e.g. dev-story)").option("--since <iso-date>", "Filter StateStore metrics at or after this ISO timestamp").option("--aggregate", "Aggregate StateStore metrics grouped by task_type").option("--efficiency", "Show telemetry efficiency scores for recent stories").option("--recommendations", "Show all telemetry recommendations across stories").option("--turns <storyKey>", "Show per-turn analysis for a specific story").option("--consumers <storyKey>", "Show consumer stats for a specific story").option("--categories", "Show category stats (optionally scoped by --story <storyKey>)").option("--compare-stories <storyA,storyB>", "Compare efficiency scores of two stories side-by-side (comma-separated keys)").action(async (opts) => {
+program.command("metrics").description("Show historical pipeline run metrics and cross-run comparison").option("--project-root <path>", "Project root directory", projectRoot).option("--output-format <format>", "Output format: human (default) or json", "human").option("--limit <n>", "Number of runs to show (default: 10)", (v) => parseInt(v, 10), 10).option("--compare <run-id-a,run-id-b>", "Compare two runs side-by-side (comma-separated IDs, e.g. abc123,def456)").option("--tag-baseline <run-id>", "Mark a run as the performance baseline").option("--analysis <run-id>", "Read and output the analysis report for the specified run (AC5 of Story 17-3)").option("--sprint <sprint>", "Filter StateStore metrics by sprint (e.g. sprint-1)").option("--story <story-key>", "Filter StateStore metrics by story key (e.g. 26-1)").option("--task-type <type>", "Filter StateStore metrics by task type (e.g. dev-story)").option("--since <iso-date>", "Filter StateStore metrics at or after this ISO timestamp").option("--aggregate", "Aggregate StateStore metrics grouped by task_type").option("--efficiency", "Show telemetry efficiency scores for recent stories").option("--recommendations", "Show all telemetry recommendations across stories").option("--turns <storyKey>", "Show per-turn analysis for a specific story").option("--consumers <storyKey>", "Show consumer stats for a specific story").option("--categories", "Show category stats (optionally scoped by --story <storyKey>)").option("--compare-stories <storyA,storyB>", "Compare efficiency scores of two stories side-by-side (comma-separated keys)").option("--routing-recommendations", "Show routing recommendations derived from phase token breakdown history").action(async (opts) => {
 const outputFormat = opts.outputFormat === "json" ? "json" : "human";
 let compareIds;
 if (opts.compare !== void 0) {
@@ -3405,7 +3492,8 @@ function registerMetricsCommand(program, _version = "0.0.0", projectRoot = proce
 ...opts.turns !== void 0 && { turns: opts.turns },
 ...opts.consumers !== void 0 && { consumers: opts.consumers },
 ...opts.categories !== void 0 && { categories: opts.categories },
-...compareStoriesIds !== void 0 && { compareStories: compareStoriesIds }
+...compareStoriesIds !== void 0 && { compareStories: compareStoriesIds },
+...opts.routingRecommendations !== void 0 && { routingRecommendations: opts.routingRecommendations }
 };
 const exitCode = await runMetricsAction(metricsOpts);
 process.exitCode = exitCode;
@@ -3790,7 +3878,7 @@ function getPlanningCostTotal(db, sessionId) {
|
|
|
3790
3878
|
function getLatestSessionId(_db) {
|
|
3791
3879
|
return null;
|
|
3792
3880
|
}
|
|
3793
|
-
const logger$
|
|
3881
|
+
const logger$12 = createLogger("cost-cmd");
|
|
3794
3882
|
const COST_EXIT_SUCCESS = 0;
|
|
3795
3883
|
const COST_EXIT_ERROR = 1;
|
|
3796
3884
|
/**
|
|
@@ -4036,7 +4124,7 @@ async function runCostAction(options) {
|
|
|
4036
4124
|
} catch (err) {
|
|
4037
4125
|
const message = err instanceof Error ? err.message : String(err);
|
|
4038
4126
|
process.stderr.write(`Error: ${message}\n`);
|
|
4039
|
-
logger$
|
|
4127
|
+
logger$12.error({ err }, "runCostAction failed");
|
|
4040
4128
|
return COST_EXIT_ERROR;
|
|
4041
4129
|
} finally {
|
|
4042
4130
|
if (wrapper !== null) try {
|
|
@@ -4138,7 +4226,7 @@ function applyMonitorSchema(db) {
|
|
|
4138
4226
|
|
|
4139
4227
|
//#endregion
|
|
4140
4228
|
//#region src/persistence/monitor-database.ts
|
|
4141
|
-
const logger$
|
|
4229
|
+
const logger$11 = createLogger("persistence:monitor-db");
|
|
4142
4230
|
var MonitorDatabaseImpl = class {
|
|
4143
4231
|
_db = null;
|
|
4144
4232
|
_path;
|
|
@@ -4149,10 +4237,10 @@ var MonitorDatabaseImpl = class {
|
|
|
4149
4237
|
this._open();
|
|
4150
4238
|
}
|
|
4151
4239
|
_open() {
|
|
4152
|
-
logger$
|
|
4240
|
+
logger$11.info({ path: this._path }, "Opening monitor database");
|
|
4153
4241
|
this._db = new Database(this._path);
|
|
4154
4242
|
const walResult = this._db.pragma("journal_mode = WAL");
|
|
4155
|
-
if (walResult?.[0]?.journal_mode !== "wal") logger$
|
|
4243
|
+
if (walResult?.[0]?.journal_mode !== "wal") logger$11.warn({ result: walResult?.[0]?.journal_mode }, "Monitor DB: WAL pragma did not confirm wal mode");
|
|
4156
4244
|
this._db.pragma("synchronous = NORMAL");
|
|
4157
4245
|
this._db.pragma("busy_timeout = 5000");
|
|
4158
4246
|
this._db.pragma("foreign_keys = ON");
|
|
@@ -4187,7 +4275,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4187
4275
|
total_retries = total_retries + @retries,
|
|
4188
4276
|
last_updated = @lastUpdated
|
|
4189
4277
|
`);
|
|
4190
|
-
logger$
|
|
4278
|
+
logger$11.info({ path: this._path }, "Monitor database ready");
|
|
4191
4279
|
}
|
|
4192
4280
|
_assertOpen() {
|
|
4193
4281
|
if (this._db === null) throw new Error("MonitorDatabase: connection is closed");
|
|
@@ -4336,7 +4424,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4336
4424
|
const db = this._assertOpen();
|
|
4337
4425
|
const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
4338
4426
|
const result = db.prepare("DELETE FROM task_metrics WHERE recorded_at < @cutoff").run({ cutoff });
|
|
4339
|
-
logger$
|
|
4427
|
+
logger$11.info({
|
|
4340
4428
|
cutoff,
|
|
4341
4429
|
deleted: result.changes
|
|
4342
4430
|
}, "Pruned old task_metrics rows");
|
|
@@ -4375,13 +4463,13 @@ var MonitorDatabaseImpl = class {
|
|
|
4375
4463
|
db.exec("ROLLBACK");
|
|
4376
4464
|
throw err;
|
|
4377
4465
|
}
|
|
4378
|
-
logger$
|
|
4466
|
+
logger$11.info("Rebuilt performance_aggregates from task_metrics");
|
|
4379
4467
|
}
|
|
4380
4468
|
resetAllData() {
|
|
4381
4469
|
const db = this._assertOpen();
|
|
4382
4470
|
db.exec("DELETE FROM task_metrics");
|
|
4383
4471
|
db.exec("DELETE FROM performance_aggregates");
|
|
4384
|
-
logger$
|
|
4472
|
+
logger$11.info({ path: this._path }, "Monitor data reset — all rows deleted");
|
|
4385
4473
|
}
|
|
4386
4474
|
getTaskMetricsDateRange() {
|
|
4387
4475
|
const db = this._assertOpen();
|
|
@@ -4398,7 +4486,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4398
4486
|
if (this._db === null) return;
|
|
4399
4487
|
this._db.close();
|
|
4400
4488
|
this._db = null;
|
|
4401
|
-
logger$
|
|
4489
|
+
logger$11.info({ path: this._path }, "Monitor database closed");
|
|
4402
4490
|
}
|
|
4403
4491
|
/**
|
|
4404
4492
|
* Access the raw underlying database for testing purposes only.
|
|
@@ -4411,7 +4499,7 @@ var MonitorDatabaseImpl = class {
|
|
|
4411
4499
|
|
|
4412
4500
|
//#endregion
|
|
4413
4501
|
//#region src/modules/monitor/recommendation-engine.ts
|
|
4414
|
-
const logger$
|
|
4502
|
+
const logger$10 = createLogger("monitor:recommendations");
|
|
4415
4503
|
var RecommendationEngine = class {
|
|
4416
4504
|
_monitorDb;
|
|
4417
4505
|
_filters;
|
|
@@ -4444,7 +4532,7 @@ var RecommendationEngine = class {
|
|
|
4444
4532
|
const sinceDate = new Date(Date.now() - this._historyDays * 24 * 60 * 60 * 1e3).toISOString();
|
|
4445
4533
|
const aggregates = this._monitorDb.getAggregates({ sinceDate });
|
|
4446
4534
|
if (aggregates.length === 0) {
|
|
4447
|
-
logger$
|
|
4535
|
+
logger$10.debug("No performance aggregates found — no recommendations to generate");
|
|
4448
4536
|
return [];
|
|
4449
4537
|
}
|
|
4450
4538
|
const byTaskType = new Map();
|
|
@@ -4509,7 +4597,7 @@ var RecommendationEngine = class {
|
|
|
4509
4597
|
if (confDiff !== 0) return confDiff;
|
|
4510
4598
|
return b.improvement_percentage - a.improvement_percentage;
|
|
4511
4599
|
});
|
|
4512
|
-
logger$
|
|
4600
|
+
logger$10.debug({ count: recommendations.length }, "Generated routing recommendations");
|
|
4513
4601
|
return recommendations;
|
|
4514
4602
|
}
|
|
4515
4603
|
/**
|
|
@@ -4675,7 +4763,7 @@ function generateMonitorReport(monitorDb, options = {}) {
|
|
|
4675
4763
|
|
|
4676
4764
|
//#endregion
|
|
4677
4765
|
//#region src/cli/commands/monitor.ts
|
|
4678
|
-
const logger$
|
|
4766
|
+
const logger$9 = createLogger("monitor-cmd");
|
|
4679
4767
|
const MONITOR_EXIT_SUCCESS = 0;
|
|
4680
4768
|
const MONITOR_EXIT_ERROR = 1;
|
|
4681
4769
|
/**
|
|
@@ -4878,7 +4966,7 @@ async function runMonitorReportAction(options) {
|
|
|
4878
4966
|
} catch (err) {
|
|
4879
4967
|
const message = err instanceof Error ? err.message : String(err);
|
|
4880
4968
|
process.stderr.write(`Error: ${message}\n`);
|
|
4881
|
-
logger$
|
|
4969
|
+
logger$9.error({ err }, "runMonitorReportAction failed");
|
|
4882
4970
|
return MONITOR_EXIT_ERROR;
|
|
4883
4971
|
} finally {
|
|
4884
4972
|
if (monitorDb !== null) try {
|
|
@@ -4940,7 +5028,7 @@ async function runMonitorStatusAction(options) {
|
|
|
4940
5028
|
} catch (err) {
|
|
4941
5029
|
const message = err instanceof Error ? err.message : String(err);
|
|
4942
5030
|
process.stderr.write(`Error: ${message}\n`);
|
|
4943
|
-
logger$
|
|
5031
|
+
logger$9.error({ err }, "runMonitorStatusAction failed");
|
|
4944
5032
|
return MONITOR_EXIT_ERROR;
|
|
4945
5033
|
} finally {
|
|
4946
5034
|
if (monitorDb !== null) try {
|
|
@@ -4975,7 +5063,7 @@ async function runMonitorResetAction(options) {
|
|
|
4975
5063
|
} catch (err) {
|
|
4976
5064
|
const message = err instanceof Error ? err.message : String(err);
|
|
4977
5065
|
process.stderr.write(`Error: ${message}\n`);
|
|
4978
|
-
logger$
|
|
5066
|
+
logger$9.error({ err }, "runMonitorResetAction failed");
|
|
4979
5067
|
return MONITOR_EXIT_ERROR;
|
|
4980
5068
|
} finally {
|
|
4981
5069
|
if (monitorDb !== null) try {
|
|
@@ -5023,7 +5111,7 @@ async function runMonitorRecommendationsAction(options) {
|
|
|
5023
5111
|
} catch (err) {
|
|
5024
5112
|
const message = err instanceof Error ? err.message : String(err);
|
|
5025
5113
|
process.stderr.write(`Error: ${message}\n`);
|
|
5026
|
-
logger$
|
|
5114
|
+
logger$9.error({ err }, "runMonitorRecommendationsAction failed");
|
|
5027
5115
|
return MONITOR_EXIT_ERROR;
|
|
5028
5116
|
} finally {
|
|
5029
5117
|
if (monitorDb !== null) try {
|
|
@@ -5101,7 +5189,7 @@ function registerMonitorCommand(program, version = "0.0.0", projectRoot = proces
|
|
|
5101
5189
|
|
|
5102
5190
|
//#endregion
|
|
5103
5191
|
//#region src/modules/git-worktree/git-worktree-manager-impl.ts
|
|
5104
|
-
const logger$
|
|
5192
|
+
const logger$8 = createLogger("git-worktree");
|
|
5105
5193
|
const BRANCH_PREFIX = "substrate/task-";
|
|
5106
5194
|
const DEFAULT_WORKTREE_BASE = ".substrate-worktrees";
|
|
5107
5195
|
var GitWorktreeManagerImpl = class {
|
|
@@ -5120,7 +5208,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5120
5208
|
this._db = db;
|
|
5121
5209
|
this._onTaskReady = ({ taskId }) => {
|
|
5122
5210
|
this._handleTaskReady(taskId).catch((err) => {
|
|
5123
|
-
logger$
|
|
5211
|
+
logger$8.error({
|
|
5124
5212
|
taskId,
|
|
5125
5213
|
err
|
|
5126
5214
|
}, "Unhandled error in _handleTaskReady");
|
|
@@ -5134,40 +5222,40 @@ var GitWorktreeManagerImpl = class {
|
|
|
5134
5222
|
};
|
|
5135
5223
|
}
|
|
5136
5224
|
async initialize() {
|
|
5137
|
-
logger$
|
|
5225
|
+
logger$8.info({ projectRoot: this._projectRoot }, "GitWorktreeManager.initialize()");
|
|
5138
5226
|
await this.verifyGitVersion();
|
|
5139
5227
|
const cleaned = await this.cleanupAllWorktrees();
|
|
5140
|
-
if (cleaned > 0) logger$
|
|
5228
|
+
if (cleaned > 0) logger$8.info({ cleaned }, "Recovered orphaned worktrees on startup");
|
|
5141
5229
|
this._eventBus.on("task:ready", this._onTaskReady);
|
|
5142
5230
|
this._eventBus.on("task:complete", this._onTaskComplete);
|
|
5143
5231
|
this._eventBus.on("task:failed", this._onTaskFailed);
|
|
5144
|
-
logger$
|
|
5232
|
+
logger$8.info("GitWorktreeManager initialized");
|
|
5145
5233
|
}
|
|
5146
5234
|
async shutdown() {
|
|
5147
|
-
logger$
|
|
5235
|
+
logger$8.info("GitWorktreeManager.shutdown()");
|
|
5148
5236
|
this._eventBus.off("task:ready", this._onTaskReady);
|
|
5149
5237
|
this._eventBus.off("task:complete", this._onTaskComplete);
|
|
5150
5238
|
this._eventBus.off("task:failed", this._onTaskFailed);
|
|
5151
5239
|
await this.cleanupAllWorktrees();
|
|
5152
|
-
logger$
|
|
5240
|
+
logger$8.info("GitWorktreeManager shutdown complete");
|
|
5153
5241
|
}
|
|
5154
5242
|
async _handleTaskReady(taskId) {
|
|
5155
|
-
logger$
|
|
5243
|
+
logger$8.debug({ taskId }, "task:ready — creating worktree");
|
|
5156
5244
|
try {
|
|
5157
5245
|
await this.createWorktree(taskId);
|
|
5158
5246
|
} catch (err) {
|
|
5159
|
-
logger$
|
|
5247
|
+
logger$8.error({
|
|
5160
5248
|
taskId,
|
|
5161
5249
|
err
|
|
5162
5250
|
}, "Failed to create worktree for task");
|
|
5163
5251
|
}
|
|
5164
5252
|
}
|
|
5165
5253
|
async _handleTaskDone(taskId) {
|
|
5166
|
-
logger$
|
|
5254
|
+
logger$8.debug({ taskId }, "task done — cleaning up worktree");
|
|
5167
5255
|
try {
|
|
5168
5256
|
await this.cleanupWorktree(taskId);
|
|
5169
5257
|
} catch (err) {
|
|
5170
|
-
logger$
|
|
5258
|
+
logger$8.warn({
|
|
5171
5259
|
taskId,
|
|
5172
5260
|
err
|
|
5173
5261
|
}, "Failed to cleanup worktree for task");
|
|
@@ -5177,7 +5265,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5177
5265
|
if (!taskId || taskId.trim().length === 0) throw new Error("createWorktree: taskId must be a non-empty string");
|
|
5178
5266
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5179
5267
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5180
|
-
logger$
|
|
5268
|
+
logger$8.debug({
|
|
5181
5269
|
taskId,
|
|
5182
5270
|
branchName,
|
|
5183
5271
|
worktreePath,
|
|
@@ -5197,7 +5285,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5197
5285
|
worktreePath,
|
|
5198
5286
|
createdAt
|
|
5199
5287
|
};
|
|
5200
|
-
logger$
|
|
5288
|
+
logger$8.info({
|
|
5201
5289
|
taskId,
|
|
5202
5290
|
branchName,
|
|
5203
5291
|
worktreePath
|
|
@@ -5207,7 +5295,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5207
5295
|
async cleanupWorktree(taskId) {
|
|
5208
5296
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5209
5297
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5210
|
-
logger$
|
|
5298
|
+
logger$8.debug({
|
|
5211
5299
|
taskId,
|
|
5212
5300
|
branchName,
|
|
5213
5301
|
worktreePath
|
|
@@ -5217,7 +5305,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5217
5305
|
await access$1(worktreePath);
|
|
5218
5306
|
worktreeExists = true;
|
|
5219
5307
|
} catch {
|
|
5220
|
-
logger$
|
|
5308
|
+
logger$8.debug({
|
|
5221
5309
|
taskId,
|
|
5222
5310
|
worktreePath
|
|
5223
5311
|
}, "cleanupWorktree: worktree does not exist, skipping removal");
|
|
@@ -5225,7 +5313,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5225
5313
|
if (worktreeExists) try {
|
|
5226
5314
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
5227
5315
|
} catch (err) {
|
|
5228
|
-
logger$
|
|
5316
|
+
logger$8.warn({
|
|
5229
5317
|
taskId,
|
|
5230
5318
|
worktreePath,
|
|
5231
5319
|
err
|
|
@@ -5234,7 +5322,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5234
5322
|
try {
|
|
5235
5323
|
await removeBranch(branchName, this._projectRoot);
|
|
5236
5324
|
} catch (err) {
|
|
5237
|
-
logger$
|
|
5325
|
+
logger$8.warn({
|
|
5238
5326
|
taskId,
|
|
5239
5327
|
branchName,
|
|
5240
5328
|
err
|
|
@@ -5244,13 +5332,13 @@ var GitWorktreeManagerImpl = class {
|
|
|
5244
5332
|
taskId,
|
|
5245
5333
|
branchName
|
|
5246
5334
|
});
|
|
5247
|
-
logger$
|
|
5335
|
+
logger$8.info({
|
|
5248
5336
|
taskId,
|
|
5249
5337
|
branchName
|
|
5250
5338
|
}, "Worktree cleaned up");
|
|
5251
5339
|
}
|
|
5252
5340
|
async cleanupAllWorktrees() {
|
|
5253
|
-
logger$
|
|
5341
|
+
logger$8.debug({ projectRoot: this._projectRoot }, "cleanupAllWorktrees");
|
|
5254
5342
|
const orphanedPaths = await getOrphanedWorktrees(this._projectRoot, this._baseDirectory);
|
|
5255
5343
|
let cleaned = 0;
|
|
5256
5344
|
for (const worktreePath of orphanedPaths) {
|
|
@@ -5259,12 +5347,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
5259
5347
|
try {
|
|
5260
5348
|
await removeWorktree(worktreePath, this._projectRoot);
|
|
5261
5349
|
worktreeRemoved = true;
|
|
5262
|
-
logger$
|
|
5350
|
+
logger$8.debug({
|
|
5263
5351
|
taskId,
|
|
5264
5352
|
worktreePath
|
|
5265
5353
|
}, "cleanupAllWorktrees: removed orphaned worktree");
|
|
5266
5354
|
} catch (err) {
|
|
5267
|
-
logger$
|
|
5355
|
+
logger$8.warn({
|
|
5268
5356
|
taskId,
|
|
5269
5357
|
worktreePath,
|
|
5270
5358
|
err
|
|
@@ -5274,12 +5362,12 @@ var GitWorktreeManagerImpl = class {
|
|
|
5274
5362
|
let branchRemoved = false;
|
|
5275
5363
|
try {
|
|
5276
5364
|
branchRemoved = await removeBranch(branchName, this._projectRoot);
|
|
5277
|
-
if (branchRemoved) logger$
|
|
5365
|
+
if (branchRemoved) logger$8.debug({
|
|
5278
5366
|
taskId,
|
|
5279
5367
|
branchName
|
|
5280
5368
|
}, "cleanupAllWorktrees: removed orphaned branch");
|
|
5281
5369
|
} catch (err) {
|
|
5282
|
-
logger$
|
|
5370
|
+
logger$8.warn({
|
|
5283
5371
|
taskId,
|
|
5284
5372
|
branchName,
|
|
5285
5373
|
err
|
|
@@ -5287,14 +5375,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
5287
5375
|
}
|
|
5288
5376
|
if (worktreeRemoved) cleaned++;
|
|
5289
5377
|
}
|
|
5290
|
-
if (cleaned > 0) logger$
|
|
5378
|
+
if (cleaned > 0) logger$8.info({ cleaned }, "cleanupAllWorktrees: recovered orphaned worktrees");
|
|
5291
5379
|
return cleaned;
|
|
5292
5380
|
}
|
|
5293
5381
|
async detectConflicts(taskId, targetBranch = "main") {
|
|
5294
5382
|
if (!taskId || taskId.trim().length === 0) throw new Error("detectConflicts: taskId must be a non-empty string");
|
|
5295
5383
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5296
5384
|
const worktreePath = this.getWorktreePath(taskId);
|
|
5297
|
-
logger$
|
|
5385
|
+
logger$8.debug({
|
|
5298
5386
|
taskId,
|
|
5299
5387
|
branchName,
|
|
5300
5388
|
targetBranch
|
|
@@ -5322,7 +5410,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5322
5410
|
branch: branchName,
|
|
5323
5411
|
conflictingFiles: report.conflictingFiles
|
|
5324
5412
|
});
|
|
5325
|
-
logger$
|
|
5413
|
+
logger$8.info({
|
|
5326
5414
|
taskId,
|
|
5327
5415
|
hasConflicts: report.hasConflicts,
|
|
5328
5416
|
conflictCount: conflictingFiles.length
|
|
@@ -5332,14 +5420,14 @@ var GitWorktreeManagerImpl = class {
|
|
|
5332
5420
|
async mergeWorktree(taskId, targetBranch = "main") {
|
|
5333
5421
|
if (!taskId || taskId.trim().length === 0) throw new Error("mergeWorktree: taskId must be a non-empty string");
|
|
5334
5422
|
const branchName = BRANCH_PREFIX + taskId;
|
|
5335
|
-
logger$
|
|
5423
|
+
logger$8.debug({
|
|
5336
5424
|
taskId,
|
|
5337
5425
|
branchName,
|
|
5338
5426
|
targetBranch
|
|
5339
5427
|
}, "mergeWorktree");
|
|
5340
5428
|
const conflictReport = await this.detectConflicts(taskId, targetBranch);
|
|
5341
5429
|
if (conflictReport.hasConflicts) {
|
|
5342
|
-
logger$
|
|
5430
|
+
logger$8.info({
|
|
5343
5431
|
taskId,
|
|
5344
5432
|
conflictCount: conflictReport.conflictingFiles.length
|
|
5345
5433
|
}, "Merge skipped due to conflicts");
|
|
@@ -5361,7 +5449,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5361
5449
|
success: true,
|
|
5362
5450
|
mergedFiles
|
|
5363
5451
|
};
|
|
5364
|
-
logger$
|
|
5452
|
+
logger$8.info({
|
|
5365
5453
|
taskId,
|
|
5366
5454
|
branchName,
|
|
5367
5455
|
mergedFileCount: mergedFiles.length
|
|
@@ -5369,7 +5457,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5369
5457
|
return result;
|
|
5370
5458
|
}
|
|
5371
5459
|
async listWorktrees() {
|
|
5372
|
-
logger$
|
|
5460
|
+
logger$8.debug({
|
|
5373
5461
|
projectRoot: this._projectRoot,
|
|
5374
5462
|
baseDirectory: this._baseDirectory
|
|
5375
5463
|
}, "listWorktrees");
|
|
@@ -5393,7 +5481,7 @@ var GitWorktreeManagerImpl = class {
|
|
|
5393
5481
|
createdAt
|
|
5394
5482
|
});
|
|
5395
5483
|
}
|
|
5396
|
-
logger$
|
|
5484
|
+
logger$8.debug({ count: results.length }, "listWorktrees: found worktrees");
|
|
5397
5485
|
return results;
|
|
5398
5486
|
}
|
|
5399
5487
|
getWorktreePath(taskId) {
|
|
@@ -5413,7 +5501,7 @@ function createGitWorktreeManager(options) {
|
|
|
5413
5501
|
|
|
5414
5502
|
//#endregion
|
|
5415
5503
|
//#region src/cli/commands/merge.ts
|
|
5416
|
-
const logger$
|
|
5504
|
+
const logger$7 = createLogger("merge-cmd");
|
|
5417
5505
|
const MERGE_EXIT_SUCCESS = 0;
|
|
5418
5506
|
const MERGE_EXIT_CONFLICT = 1;
|
|
5419
5507
|
const MERGE_EXIT_ERROR = 2;
|
|
@@ -5451,7 +5539,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5451
5539
|
projectRoot
|
|
5452
5540
|
});
|
|
5453
5541
|
try {
|
|
5454
|
-
logger$
|
|
5542
|
+
logger$7.info({
|
|
5455
5543
|
taskId,
|
|
5456
5544
|
targetBranch
|
|
5457
5545
|
}, "Running conflict detection...");
|
|
@@ -5473,7 +5561,7 @@ async function mergeTask(taskId, targetBranch, projectRoot) {
|
|
|
5473
5561
|
} catch (err) {
|
|
5474
5562
|
const message = err instanceof Error ? err.message : String(err);
|
|
5475
5563
|
console.error(`Error merging task "${taskId}": ${message}`);
|
|
5476
|
-
logger$
|
|
5564
|
+
logger$7.error({
|
|
5477
5565
|
taskId,
|
|
5478
5566
|
err
|
|
5479
5567
|
}, "merge --task failed");
|
|
@@ -5527,7 +5615,7 @@ async function mergeAll(targetBranch, projectRoot, taskIds) {
 error: message
 });
 console.log(` Error for task "${taskId}": ${message}`);
-logger$
+logger$7.error({
 taskId,
 err
 }, "merge --all: task failed");
@@ -5580,7 +5668,7 @@ function registerMergeCommand(program, projectRoot = process.cwd()) {

 //#endregion
 //#region src/cli/commands/worktrees.ts
-const logger$
+const logger$6 = createLogger("worktrees-cmd");
 const WORKTREES_EXIT_SUCCESS = 0;
 const WORKTREES_EXIT_ERROR = 1;
 /** Valid task statuses for filtering */
@@ -5707,7 +5795,7 @@ async function listWorktreesAction(options) {
 try {
 worktreeInfos = await manager.listWorktrees();
 } catch (err) {
-logger$
+logger$6.error({ err }, "Failed to list worktrees");
 const message = err instanceof Error ? err.message : String(err);
 process.stderr.write(`Error listing worktrees: ${message}\n`);
 return WORKTREES_EXIT_ERROR;
@@ -5734,7 +5822,7 @@ async function listWorktreesAction(options) {
 } catch (err) {
 const message = err instanceof Error ? err.message : String(err);
 process.stderr.write(`Error: ${message}\n`);
-logger$
+logger$6.error({ err }, "listWorktreesAction failed");
 return WORKTREES_EXIT_ERROR;
 }
 }
@@ -5775,7 +5863,7 @@ function registerWorktreesCommand(program, version = "0.0.0", projectRoot = proc

 //#endregion
 //#region src/cli/commands/brainstorm.ts
-const logger$
+const logger$5 = createLogger("brainstorm-cmd");
 /**
 * Detect whether the project has existing planning artifacts that indicate
 * this is an amendment session (vs. a brand-new project brainstorm).
@@ -5821,13 +5909,13 @@ async function loadAmendmentContextDocuments(projectRoot) {
 try {
 brief = await readFile(briefPath, "utf-8");
 } catch {
-logger$
+logger$5.warn({ briefPath }, "product-brief.md not found — continuing without brief context");
 process.stderr.write(`Warning: product-brief.md not found at ${briefPath}\n`);
 }
 try {
 prd = await readFile(prdPath, "utf-8");
 } catch {
-logger$
+logger$5.warn({ prdPath }, "requirements.md not found — continuing without PRD context");
 process.stderr.write(`Warning: requirements.md not found at ${prdPath}\n`);
 }
 return {
@@ -6036,7 +6124,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
 }
 ];
 const defaultDispatch = async (prompt, personaName) => {
-logger$
+logger$5.debug({
 personaName,
 promptLength: prompt.length
 }, "Dispatching to persona (stub mode)");
@@ -6053,7 +6141,7 @@ async function dispatchToPersonas(userPrompt, context, llmDispatch) {
 };
 } catch (err) {
 const msg = err instanceof Error ? err.message : String(err);
-logger$
+logger$5.error({
 err,
 personaName: persona.name
 }, "Persona dispatch failed");
@@ -6205,7 +6293,7 @@ async function runBrainstormSession(options, llmDispatch, rlInterface) {
 }
 });
 rl.on("error", (err) => {
-logger$
+logger$5.error({ err }, "readline error");
 if (!sessionEnded) endSession(false);
 });
 });
@@ -6795,7 +6883,7 @@ function renderReadinessReport(decisions) {

 //#endregion
 //#region src/cli/commands/export.ts
-const logger$
+const logger$4 = createLogger("export-cmd");
 /**
 * Execute the export action.
 * Returns an exit code (0 = success, 1 = error).
@@ -6922,7 +7010,7 @@ async function runExportAction(options) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat === "json") process.stdout.write(JSON.stringify({ error: msg }) + "\n");
 else process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$4.error({ err }, "export action failed");
 return 1;
 } finally {
 if (dbWrapper !== void 0) try {
@@ -7009,7 +7097,7 @@ function getRetryableEscalations(db, runId) {

 //#endregion
 //#region src/cli/commands/retry-escalated.ts
-const logger$
+const logger$3 = createLogger("retry-escalated-cmd");
 async function runRetryEscalatedAction(options) {
 const { runId, dryRun, outputFormat, projectRoot, concurrency, pack: packName, registry: injectedRegistry } = options;
 const dbRoot = await resolveMainRepoRoot(projectRoot);
@@ -7107,7 +7195,7 @@ async function runRetryEscalatedAction(options) {
 });
 }
 } catch (err) {
-logger$
+logger$3.warn({ err }, "Failed to record token usage");
 }
 });
 if (outputFormat === "human") {
@@ -7129,7 +7217,7 @@ async function runRetryEscalatedAction(options) {
 const msg = err instanceof Error ? err.message : String(err);
 if (outputFormat === "json") process.stdout.write(formatOutput(null, "json", false, msg) + "\n");
 else process.stderr.write(`Error: ${msg}\n`);
-logger$
+logger$3.error({ err }, "retry-escalated failed");
 return 1;
 } finally {
 try {
@@ -7408,6 +7496,170 @@ function registerHistoryCommand(program) {
 });
 }

+//#endregion
+//#region src/cli/commands/repo-map.ts
+const logger$2 = createLogger("cli:repo-map");
+/** Validate that a symbol name contains only safe identifier characters. */
+function isValidSymbolName(name) {
+return /^[a-zA-Z0-9_]+$/.test(name);
+}
+function registerRepoMapCommand(program) {
+program.command("repo-map").description("Show, update, or query the repo-map symbol index").option("--show", "Show repo-map staleness status").option("--update", "Trigger an incremental repo-map update (Dolt backend only)").option("--query <symbol>", "Query the repo-map for a specific symbol name").option("--dry-run <storyFile>", "Preview repo-map context that would be injected for a story file").option("--output-format <format>", "Output format: text or json", "text").action(async (options) => {
+if (options.query !== void 0 && !isValidSymbolName(options.query)) {
+process.stderr.write(`Error: --query value must match /^[a-zA-Z0-9_]+$/ (got: ${options.query})\n`);
+process.exitCode = 1;
+return;
+}
+const dbRoot = await resolveMainRepoRoot(process.cwd());
+const statePath = join$1(dbRoot, ".substrate", "state");
+const doltStatePath = join$1(statePath, ".dolt");
+const isDolt = existsSync$1(doltStatePath);
+const notDoltError = (flag) => {
+if (options.outputFormat === "json") console.log(JSON.stringify({
+backend: "file",
+status: "unavailable",
+hint: "Repo-map requires the Dolt backend. Run `substrate init --dolt` to enable."
+}));
+else process.stderr.write(`Error: ${flag} requires the Dolt backend. Run \`substrate init --dolt\` to enable.\n`);
+process.exitCode = 1;
+};
+if (!isDolt) {
+const flag = options.update ? "--update" : options.query ? "--query" : options.dryRun ? "--dry-run" : "--show";
+notDoltError(flag);
+return;
+}
+const doltClient = new DoltClient({ repoPath: statePath });
+const symbolRepo = new DoltSymbolRepository(doltClient, logger$2);
+const metaRepo = new DoltRepoMapMetaRepository(doltClient);
+const repoMapModule = new RepoMapModule(metaRepo, logger$2);
+const queryEngine = new RepoMapQueryEngine(symbolRepo, logger$2);
+if (options.show === true || !options.update && !options.query && !options.dryRun) {
+const meta = await metaRepo.getMeta();
+const staleResult = await repoMapModule.checkStaleness();
+let staleness = "unknown";
+if (meta === null) staleness = "unknown";
+else if (staleResult !== null) staleness = "stale";
+else staleness = "current";
+const symbolCount = meta !== null ? (await symbolRepo.getSymbols()).length : 0;
+if (options.outputFormat === "json") console.log(JSON.stringify({
+symbolCount,
+commitSha: meta?.commitSha ?? null,
+fileCount: meta?.fileCount ?? 0,
+updatedAt: meta?.updatedAt?.toISOString() ?? null,
+staleness
+}));
+else if (meta !== null) {
+console.log(`Repo-map: ${symbolCount} symbols, ${meta.fileCount} files`);
+console.log(`Commit: ${meta.commitSha}`);
+console.log(`Updated: ${meta.updatedAt.toISOString()}`);
+if (staleness === "stale") console.log("Status: STALE (run `substrate repo-map --update` to refresh)");
+else console.log("Status: UP TO DATE");
+} else console.log("Repo-map: no data stored yet");
+return;
+}
+if (options.update === true) {
+logger$2.info("repo-map --update: triggering incremental update");
+const gitClient = new GitClient(logger$2);
+const grammarLoader = new GrammarLoader(logger$2);
+const parser = new SymbolParser(grammarLoader, logger$2);
+const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$2);
+await storage.incrementalUpdate(dbRoot, parser);
+const meta = await metaRepo.getMeta();
+const symbolCount = (await symbolRepo.getSymbols()).length;
+if (options.outputFormat === "json") console.log(JSON.stringify({
+result: "updated",
+symbolCount,
+fileCount: meta?.fileCount ?? 0,
+commitSha: meta?.commitSha ?? null,
+updatedAt: meta?.updatedAt?.toISOString() ?? null
+}));
+else console.log(`Repo-map updated: ${symbolCount} symbols across ${meta?.fileCount ?? 0} files`);
+return;
+}
+if (options.query !== void 0) {
+logger$2.debug({ symbol: options.query }, "repo-map --query");
+const result = await queryEngine.query({
+symbols: [options.query],
+maxTokens: 4e3
+});
+if (options.outputFormat === "json") console.log(JSON.stringify(result, null, 2));
+else if (result.symbolCount === 0) console.log(`No symbols found matching '${options.query}'.`);
+else {
+console.log(`Found ${result.symbolCount} symbol(s) for '${options.query}':`);
+for (const sym of result.symbols) console.log(` ${sym.filePath}:${sym.lineNumber} ${sym.symbolType} ${sym.symbolName}`);
+}
+return;
+}
+if (options.dryRun !== void 0) {
+let storyContent;
+try {
+storyContent = await readFile$1(options.dryRun, "utf-8");
+} catch {
+process.stderr.write(`Error: Cannot read story file: ${options.dryRun}\n`);
+process.exitCode = 1;
+return;
+}
+const injector = new RepoMapInjector(queryEngine, logger$2);
+const injectionResult = await injector.buildContext(storyContent, 2e3);
+console.log(JSON.stringify({
+text: injectionResult.text,
+symbolCount: injectionResult.symbolCount,
+truncated: injectionResult.truncated
+}));
+return;
+}
+});
+}
+
+//#endregion
+//#region src/cli/commands/routing.ts
+const logger$1 = createLogger("cli:routing");
+function registerRoutingCommand(program) {
+program.command("routing").description("Show routing configuration and auto-tune history").option("--history", "Show the routing auto-tune log (model changes applied)").option("--output-format <format>", "Output format: text or json", "text").action(async (options) => {
+const dbRoot = await resolveMainRepoRoot(process.cwd());
+const statePath = join$1(dbRoot, ".substrate", "state");
+const doltStatePath = join$1(statePath, ".dolt");
+const storeConfig = existsSync$1(doltStatePath) ? {
+backend: "dolt",
+basePath: statePath
+} : {
+backend: "file",
+basePath: statePath
+};
+const store = createStateStore(storeConfig);
+try {
+await store.initialize();
+if (options.history === true) {
+logger$1.debug("routing --history: fetching tune log");
+const raw$1 = await store.getMetric("global", "routing_tune_log");
+let entries = [];
+if (Array.isArray(raw$1)) entries = raw$1.sort((a, b) => b.appliedAt.localeCompare(a.appliedAt));
+if (options.outputFormat === "json") {
+console.log(JSON.stringify({ entries }, null, 2));
+return;
+}
+if (entries.length === 0) {
+console.log("No routing auto-tune history found.");
+return;
+}
+console.log("Routing auto-tune history:");
+for (const entry of entries) console.log(` ${entry.appliedAt} phase=${entry.phase} ${entry.oldModel} → ${entry.newModel} savings=${entry.estimatedSavingsPct.toFixed(1)}% run=${entry.runId}`);
+return;
+}
+const raw = await store.getMetric("global", "routing_tune_log");
+const entryCount = Array.isArray(raw) ? raw.length : 0;
+if (options.outputFormat === "json") console.log(JSON.stringify({ tuneLogEntries: entryCount }));
+else {
+console.log(`Routing auto-tune log: ${entryCount} entr${entryCount === 1 ? "y" : "ies"}`);
+if (entryCount === 0) console.log("No auto-tune changes applied yet. Use --history for details.");
+else console.log("Run `substrate routing --history` to see full history.");
+}
+} finally {
+await store.close();
+}
+});
+}
+
 //#endregion
 //#region src/cli/index.ts
 process.setMaxListeners(20);
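Editor's note: a minimal sketch of how the two registrars added in the hunk above plug into a Commander program. Only commander's documented Command/parseAsync API is assumed; the standalone driver and sample argv are illustrative and not part of the package (in the published bundle both registrars are called from createProgram(), as the next hunk shows).

// sketch.mjs — illustrative only, not shipped in substrate-ai.
// Assumes the commander package plus registerRepoMapCommand and
// registerRoutingCommand as defined in the hunk above.
import { Command } from "commander";

const program = new Command("substrate");
registerRepoMapCommand(program); // adds: repo-map --show | --update | --query <symbol> | --dry-run <storyFile>
registerRoutingCommand(program); // adds: routing [--history] [--output-format <format>]

// Equivalent of running `substrate repo-map --show --output-format json`;
// on a Dolt-backed project this prints the JSON shape built above:
// { symbolCount, commitSha, fileCount, updatedAt, staleness }.
await program.parseAsync(["node", "substrate", "repo-map", "--show", "--output-format", "json"]);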
@@ -7460,6 +7712,8 @@ async function createProgram() {
 registerDiffCommand(program);
 registerHistoryCommand(program);
 registerMigrateCommand(program);
+registerRepoMapCommand(program);
+registerRoutingCommand(program);
 registerCostCommand(program, version);
 registerMonitorCommand(program, version);
 registerMergeCommand(program);
@@ -7472,8 +7726,8 @@ async function createProgram() {
 /** Fire-and-forget startup version check (story 8.3, AC3/AC5) */
 function checkForUpdatesInBackground(currentVersion) {
 if (process.env.SUBSTRATE_NO_UPDATE_CHECK === "1") return;
-import("../upgrade-
-const { createVersionManager } = await import("../version-manager-impl-
+import("../upgrade-njy4XENS.js").then(async () => {
+const { createVersionManager } = await import("../version-manager-impl-QwroczYS.js");
 const vm = createVersionManager();
 const result = await vm.checkForUpdates();
 if (result.updateAvailable) {