substrate-ai 0.5.3 → 0.5.4

This diff shows the changes between publicly released versions of this package as they appear in one of the supported registries. The information is provided for informational purposes only.
@@ -0,0 +1,4 @@
1
+ import "./logger-D2fS2ccL.js";
2
+ import { AdapterRegistry } from "./adapter-registry-CDNPbixE.js";
3
+
4
+ export { AdapterRegistry };
@@ -1,8 +1,10 @@
1
+ import { createLogger } from "./logger-D2fS2ccL.js";
1
2
  import { exec } from "child_process";
2
3
  import { promisify } from "util";
3
4
 
4
5
  //#region src/adapters/claude-adapter.ts
5
6
  const execAsync$2 = promisify(exec);
7
+ const logger = createLogger("claude-adapter");
6
8
  /** Default model used when none is specified */
7
9
  const DEFAULT_MODEL$1 = "claude-sonnet-4-6";
8
10
  /** Approximate characters per token for estimation */
@@ -62,15 +64,21 @@ var ClaudeCodeAdapter = class {
62
64
  buildCommand(prompt, options) {
63
65
  const model = options.model ?? DEFAULT_MODEL$1;
64
66
  const systemPrompt = "You are an autonomous coding agent executing a single pipeline task. Ignore all session startup context, memory notes, and \"Next Up\" indicators. Follow the instructions in the user message exactly. Emit ONLY the YAML output specified in the Output Contract — no other text.";
67
+ const effectiveSystemPrompt = options.optimizationDirectives !== void 0 && options.optimizationDirectives.length > 0 ? `${systemPrompt}\n\n## Optimization Directives\n${options.optimizationDirectives}` : systemPrompt;
68
+ if (options.optimizationDirectives !== void 0 && options.optimizationDirectives.length > 0) logger.debug({
69
+ storyKey: options.storyKey,
70
+ directiveChars: options.optimizationDirectives.length
71
+ }, "Injecting optimization directives into system prompt");
65
72
  const args = [
66
73
  "-p",
67
74
  "--model",
68
75
  model,
69
76
  "--dangerously-skip-permissions",
70
77
  "--system-prompt",
71
- systemPrompt
78
+ effectiveSystemPrompt
72
79
  ];
73
80
  if (options.maxTurns !== void 0) args.push("--max-turns", String(options.maxTurns));
81
+ if (options.maxContextTokens !== void 0) args.push("--max-context-tokens", String(options.maxContextTokens));
74
82
  if (options.additionalFlags && options.additionalFlags.length > 0) args.push(...options.additionalFlags);
75
83
  const envEntries = {};
76
84
  const unsetKeys = ["CLAUDECODE", "CLAUDE_CODE_ENTRYPOINT"];
@@ -825,4 +833,4 @@ var AdapterRegistry = class {
825
833
 
826
834
  //#endregion
827
835
  export { AdapterRegistry, ClaudeCodeAdapter, CodexCLIAdapter, GeminiCLIAdapter };
828
- //# sourceMappingURL=adapter-registry-BkUvZSKJ.js.map
836
+ //# sourceMappingURL=adapter-registry-CDNPbixE.js.map
package/dist/cli/index.js CHANGED
@@ -1,7 +1,7 @@
1
1
  #!/usr/bin/env node
2
- import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, detectCycles, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-D7a-qzk9.js";
2
+ import { AdapterTelemetryPersistence, AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDatabaseAdapter, createDispatcher, createDoltClient, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, createTelemetryAdvisor, detectCycles, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initSchema, initializeDolt, isSyncAdapter, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-BVqGAkUO.js";
3
3
  import { createLogger } from "../logger-D2fS2ccL.js";
4
- import { AdapterRegistry } from "../adapter-registry-BkUvZSKJ.js";
4
+ import { AdapterRegistry } from "../adapter-registry-CDNPbixE.js";
5
5
  import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-DtZW1maj.js";
6
6
  import { ConfigError, createEventBus } from "../helpers-BihqWgVe.js";
7
7
  import { RoutingRecommender } from "../routing-BUE9pIxW.js";
@@ -1122,7 +1122,8 @@ async function runResumeAction(options) {
1122
1122
  events: eventsFlag,
1123
1123
  existingRunId: runId,
1124
1124
  projectRoot,
1125
- registry
1125
+ registry,
1126
+ stories: options.stories
1126
1127
  });
1127
1128
  } catch (err) {
1128
1129
  const msg = err instanceof Error ? err.message : String(err);
@@ -1137,7 +1138,7 @@ async function runResumeAction(options) {
1137
1138
  }
1138
1139
  }
1139
1140
  async function runFullPipelineFromPhase(options) {
1140
- const { packName, packPath, dbDir, dbPath, startPhase, stopAfter, concept, concurrency, outputFormat, events: eventsFlag, existingRunId, projectRoot, registry: injectedRegistry } = options;
1141
+ const { packName, packPath, dbDir, dbPath, startPhase, stopAfter, concept, concurrency, outputFormat, events: eventsFlag, existingRunId, projectRoot, registry: injectedRegistry, stories: explicitStories } = options;
1141
1142
  if (!existsSync(dbDir)) mkdirSync(dbDir, { recursive: true });
1142
1143
  const adapter = createDatabaseAdapter({
1143
1144
  backend: "auto",
@@ -1283,7 +1284,10 @@ async function runFullPipelineFromPhase(options) {
1283
1284
  ...telemetryPersistence !== void 0 ? { telemetryPersistence } : {}
1284
1285
  });
1285
1286
  if (ndjsonEmitter !== void 0) {
1286
- const resolvedKeys = await resolveStoryKeys(adapter, projectRoot, { pipelineRunId: runId });
1287
+ const resolvedKeys = await resolveStoryKeys(adapter, projectRoot, {
1288
+ explicit: explicitStories,
1289
+ pipelineRunId: runId
1290
+ });
1287
1291
  ndjsonEmitter.emit({
1288
1292
  type: "pipeline:start",
1289
1293
  ts: new Date().toISOString(),
@@ -1376,7 +1380,10 @@ async function runFullPipelineFromPhase(options) {
1376
1380
  logger$16.warn({ err }, "Failed to record token usage");
1377
1381
  }
1378
1382
  });
1379
- const storyKeys = await resolveStoryKeys(adapter, projectRoot, { pipelineRunId: runId });
1383
+ const storyKeys = await resolveStoryKeys(adapter, projectRoot, {
1384
+ explicit: explicitStories,
1385
+ pipelineRunId: runId
1386
+ });
1380
1387
  if (storyKeys.length === 0 && outputFormat === "human") process.stdout.write("[IMPLEMENTATION] No stories found for this run. Check solutioning phase output.\n");
1381
1388
  await orchestrator.run(storyKeys);
1382
1389
  if (ndjsonEmitter !== void 0) ndjsonEmitter.emit({
@@ -2518,7 +2525,7 @@ function defaultSupervisorDeps() {
2518
2525
  if (cached === null) {
2519
2526
  const { AdapterRegistry: AR } = await import(
2520
2527
  /* @vite-ignore */
2521
- "../adapter-registry-BRQXdPnB.js"
2528
+ "../adapter-registry-B0XmM7pb.js"
2522
2529
  );
2523
2530
  cached = new AR();
2524
2531
  await cached.discoverAndRegister();
@@ -2626,6 +2633,31 @@ function defaultSupervisorDeps() {
2626
2633
  await raAdapter.close();
2627
2634
  } catch {}
2628
2635
  }
2636
+ },
2637
+ getRunConfig: async (runId, projectRoot) => {
2638
+ try {
2639
+ const dbRoot = await resolveMainRepoRoot(projectRoot);
2640
+ const rcAdapter = createDatabaseAdapter({
2641
+ backend: "auto",
2642
+ basePath: dbRoot
2643
+ });
2644
+ try {
2645
+ await initSchema(rcAdapter);
2646
+ const rows = await rcAdapter.query("SELECT config_json FROM pipeline_runs WHERE id = ?", [runId]);
2647
+ if (rows.length === 0 || rows[0].config_json === null) return null;
2648
+ const config = JSON.parse(rows[0].config_json);
2649
+ return {
2650
+ explicitStories: config.explicitStories,
2651
+ epic: config.epic
2652
+ };
2653
+ } finally {
2654
+ try {
2655
+ await rcAdapter.close();
2656
+ } catch {}
2657
+ }
2658
+ } catch {
2659
+ return null;
2660
+ }
2629
2661
  }
2630
2662
  };
2631
2663
  }
@@ -2681,7 +2713,11 @@ async function handleStallRecovery(health, state, config, deps, io) {
2681
2713
  const { killPid, resumePipeline, sleep, incrementRestarts, getAllDescendants, writeStallFindings, getRegistry } = deps;
2682
2714
  const { emitEvent, log } = io;
2683
2715
  const { projectRoot } = state;
2684
- if (health.staleness_seconds < stallThreshold) return null;
2716
+ const REVIEW_PHASES = new Set(["IN_REVIEW", "code-review"]);
2717
+ const activePhases = Object.values(health.stories.details ?? {}).map((s) => s.phase);
2718
+ const inReviewPhase = activePhases.some((p) => REVIEW_PHASES.has(p));
2719
+ const effectiveThreshold = inReviewPhase ? stallThreshold * 2 : stallThreshold;
2720
+ if (health.staleness_seconds < effectiveThreshold) return null;
2685
2721
  const directPids = [...health.process.orchestrator_pid !== null ? [health.process.orchestrator_pid] : [], ...health.process.child_pids];
2686
2722
  const descendantPids = getAllDescendants(directPids);
2687
2723
  const directPidSet = new Set(directPids);
@@ -2747,6 +2783,15 @@ async function handleStallRecovery(health, state, config, deps, io) {
2747
2783
  });
2748
2784
  log(`Supervisor: Restarting pipeline (attempt ${newRestartCount}/${maxRestarts})`);
2749
2785
  try {
2786
+ let scopedStories;
2787
+ if (deps.getRunConfig !== void 0 && health.run_id !== null) try {
2788
+ const runConfig = await deps.getRunConfig(health.run_id, projectRoot);
2789
+ if (runConfig?.explicitStories !== void 0 && runConfig.explicitStories.length > 0) scopedStories = runConfig.explicitStories;
2790
+ } catch {}
2791
+ if (scopedStories === void 0) {
2792
+ const healthKeys = Object.keys(health.stories.details ?? {});
2793
+ if (healthKeys.length > 0) scopedStories = healthKeys;
2794
+ }
2750
2795
  const registry = await getRegistry();
2751
2796
  await resumePipeline({
2752
2797
  runId: health.run_id ?? void 0,
@@ -2754,7 +2799,8 @@ async function handleStallRecovery(health, state, config, deps, io) {
2754
2799
  projectRoot,
2755
2800
  concurrency: 3,
2756
2801
  pack,
2757
- registry
2802
+ registry,
2803
+ ...scopedStories !== void 0 ? { stories: scopedStories } : {}
2758
2804
  });
2759
2805
  if (writeStallFindings) await writeStallFindings({
2760
2806
  runId: health.run_id,
@@ -2923,7 +2969,7 @@ async function runSupervisorAction(options, deps = {}) {
2923
2969
  await initSchema(expAdapter);
2924
2970
  const { runRunAction: runPipeline } = await import(
2925
2971
  /* @vite-ignore */
2926
- "../run-DE9y1W6N.js"
2972
+ "../run-WMR5BAhL.js"
2927
2973
  );
2928
2974
  const runStoryFn = async (opts) => {
2929
2975
  const exitCode = await runPipeline({
@@ -3001,7 +3047,8 @@ async function runSupervisorAction(options, deps = {}) {
3001
3047
  incrementRestarts: resolvedDeps.incrementRestarts,
3002
3048
  getAllDescendants: resolvedDeps.getAllDescendants,
3003
3049
  writeStallFindings: resolvedDeps.writeStallFindings,
3004
- getRegistry: resolvedDeps.getRegistry
3050
+ getRegistry: resolvedDeps.getRegistry,
3051
+ getRunConfig: resolvedDeps.getRunConfig
3005
3052
  }, {
3006
3053
  emitEvent,
3007
3054
  log
@@ -3189,7 +3236,7 @@ async function openTelemetryAdapter(basePath) {
3189
3236
  function rowsToEfficiencyScore(rows) {
3190
3237
  return rows;
3191
3238
  }
3192
- function printEfficiencyTable(scores) {
3239
+ function printEfficiencyTable(scores, dispatchScoresByStory = new Map()) {
3193
3240
  process.stdout.write(`\nEfficiency Scores (${scores.length} records)\n`);
3194
3241
  process.stdout.write("─".repeat(80) + "\n");
3195
3242
  process.stdout.write(` ${"Story Key".padEnd(14)} ${"Score".padStart(6)} ${"Cache Hit%".padStart(11)} ${"I/O Ratio".padStart(10)} ${"Ctx Mgmt".padStart(9)} Model\n`);
@@ -3200,6 +3247,16 @@ function printEfficiencyTable(scores) {
3200
3247
  const ctxMgmt = String(Math.round(s.contextManagementSubScore));
3201
3248
  const model = s.perModelBreakdown.length > 0 ? s.perModelBreakdown[0]?.model ?? "unknown" : "unknown";
3202
3249
  process.stdout.write(` ${s.storyKey.padEnd(14)} ${String(s.compositeScore).padStart(6)} ${cacheHitPct.padStart(11)} ${ioRatio.padStart(10)} ${ctxMgmt.padStart(9)} ${model}\n`);
3250
+ const dispatchScores = dispatchScoresByStory.get(s.storyKey);
3251
+ if (dispatchScores !== void 0 && dispatchScores.length > 0) {
3252
+ const rows = dispatchScores.slice(0, 5);
3253
+ for (const ds of rows) {
3254
+ const taskType = ds.taskType ?? "unknown";
3255
+ const phase = ds.phase ?? "unknown";
3256
+ const dsCacheHitPct = ds.totalTurns > 0 ? `${(ds.avgCacheHitRate * 100).toFixed(1)}%` : "0.0%";
3257
+ process.stdout.write(` ↳ ${taskType}/${phase} score=${ds.compositeScore} cache=${dsCacheHitPct} turns=${ds.totalTurns}\n`);
3258
+ }
3259
+ }
3203
3260
  }
3204
3261
  }
3205
3262
  function printRecommendationTable(recs) {
@@ -3215,13 +3272,15 @@ function printRecommendationTable(recs) {
3215
3272
  }
3216
3273
  function printTurnTable(turns, storyKey) {
3217
3274
  process.stdout.write(`\nTurn Analysis: ${storyKey} (${turns.length} turns)\n`);
3218
- process.stdout.write("─".repeat(80) + "\n");
3219
- process.stdout.write(` ${"#".padStart(4)} ${"Tokens In".padStart(10)} ${"Tok Out".padStart(8)} ${"Cache Hit%".padStart(11)} ${"Ctx Size".padStart(9)} Spike\n`);
3220
- process.stdout.write(" " + "─".repeat(60) + "\n");
3275
+ process.stdout.write("─".repeat(100) + "\n");
3276
+ process.stdout.write(` ${"#".padStart(4)} ${"Tokens In".padStart(10)} ${"Tok Out".padStart(8)} ${"Cache Hit%".padStart(11)} ${"Ctx Size".padStart(9)} ${"Task Type".padEnd(16)} ${"Phase".padEnd(16)} Spike\n`);
3277
+ process.stdout.write(" " + "─".repeat(86) + "\n");
3221
3278
  for (const t of turns) {
3222
3279
  const cacheHitPct = t.inputTokens > 0 ? `${(t.cacheHitRate * 100).toFixed(1)}%` : "0.0%";
3223
3280
  const spike = t.isContextSpike ? " ⚠" : "";
3224
- process.stdout.write(` ${String(t.turnNumber).padStart(4)} ${t.inputTokens.toLocaleString().padStart(10)} ${t.outputTokens.toLocaleString().padStart(8)} ${cacheHitPct.padStart(11)} ${t.contextSize.toLocaleString().padStart(9)}${spike}\n`);
3281
+ const taskType = (t.taskType ?? "-").padEnd(16);
3282
+ const phase = (t.phase ?? "-").padEnd(16);
3283
+ process.stdout.write(` ${String(t.turnNumber).padStart(4)} ${t.inputTokens.toLocaleString().padStart(10)} ${t.outputTokens.toLocaleString().padStart(8)} ${cacheHitPct.padStart(11)} ${t.contextSize.toLocaleString().padStart(9)} ${taskType} ${phase}${spike}\n`);
3225
3284
  }
3226
3285
  }
3227
3286
  function printConsumerTable(consumers, storyKey) {
@@ -3295,8 +3354,19 @@ async function runMetricsAction(options) {
3295
3354
  try {
3296
3355
  if (efficiency === true) {
3297
3356
  const scores = await telemetryPersistence.getEfficiencyScores(20);
3298
- if (outputFormat === "json") process.stdout.write(formatOutput({ efficiency: rowsToEfficiencyScore(scores) }, "json", true) + "\n");
3299
- else printEfficiencyTable(scores);
3357
+ const storyKeys = [...new Set(scores.map((s) => s.storyKey))];
3358
+ const dispatchScoresByStory = new Map();
3359
+ await Promise.all(storyKeys.map(async (sk) => {
3360
+ const ds = await telemetryPersistence.getDispatchEfficiencyScores(sk);
3361
+ if (ds.length > 0) dispatchScoresByStory.set(sk, ds);
3362
+ }));
3363
+ if (outputFormat === "json") {
3364
+ const efficiencyWithDispatch = scores.map((s) => ({
3365
+ ...rowsToEfficiencyScore([s])[0],
3366
+ dispatchScores: dispatchScoresByStory.get(s.storyKey) ?? []
3367
+ }));
3368
+ process.stdout.write(formatOutput({ efficiency: efficiencyWithDispatch }, "json", true) + "\n");
3369
+ } else printEfficiencyTable(scores, dispatchScoresByStory);
3300
3370
  return 0;
3301
3371
  }
3302
3372
  if (recommendations === true) {
@@ -7279,7 +7349,7 @@ async function getRetryableEscalations(adapter, runId) {
7279
7349
  //#region src/cli/commands/retry-escalated.ts
7280
7350
  const logger$3 = createLogger("retry-escalated-cmd");
7281
7351
  async function runRetryEscalatedAction(options) {
7282
- const { runId, dryRun, outputFormat, projectRoot, concurrency, pack: packName, registry: injectedRegistry } = options;
7352
+ const { runId, dryRun, force, outputFormat, projectRoot, concurrency, pack: packName, registry: injectedRegistry } = options;
7283
7353
  const dbRoot = await resolveMainRepoRoot(projectRoot);
7284
7354
  const dbPath = join(dbRoot, ".substrate", "substrate.db");
7285
7355
  const doltDir = join(dbRoot, ".substrate", "state", ".dolt");
@@ -7304,6 +7374,26 @@ async function runRetryEscalatedAction(options) {
7304
7374
  else process.stdout.write("No retry-targeted escalations found.\n");
7305
7375
  return 0;
7306
7376
  }
7377
+ const perStoryContextCeilings = {};
7378
+ if (!dryRun && !force) {
7379
+ const advisor = createTelemetryAdvisor({ db: adapter });
7380
+ const CONTEXT_SPIKE_THRESHOLD = 1e5;
7381
+ const contextCeiling = Math.round(CONTEXT_SPIKE_THRESHOLD * .8);
7382
+ for (const storyKey of retryable) try {
7383
+ const profile = await advisor.getEfficiencyProfile(storyKey);
7384
+ if (profile === null) continue;
7385
+ if (profile.compositeScore < 50) process.stdout.write(`[WARN] ${storyKey}: Previous run had low efficiency (score: ${profile.compositeScore}). Retry may encounter the same issues.\n`);
7386
+ if (profile.contextManagementSubScore < 50) {
7387
+ perStoryContextCeilings[storyKey] = contextCeiling;
7388
+ process.stdout.write(`[INFO] ${storyKey}: Context ceiling set to ${contextCeiling} tokens due to prior context spike pattern.\n`);
7389
+ }
7390
+ } catch (err) {
7391
+ logger$3.warn({
7392
+ err,
7393
+ storyKey
7394
+ }, "Failed to read efficiency profile — skipping gate");
7395
+ }
7396
+ }
7307
7397
  if (dryRun) {
7308
7398
  if (outputFormat === "json") process.stdout.write(formatOutput({
7309
7399
  retryKeys: retryable,
@@ -7339,7 +7429,8 @@ async function runRetryEscalatedAction(options) {
7339
7429
  config_json: JSON.stringify({
7340
7430
  storyKeys: retryable,
7341
7431
  concurrency,
7342
- retryRun: true
7432
+ retryRun: true,
7433
+ explicitStories: retryable
7343
7434
  })
7344
7435
  });
7345
7436
  const eventBus = createEventBus();
@@ -7358,7 +7449,8 @@ async function runRetryEscalatedAction(options) {
7358
7449
  config: {
7359
7450
  maxConcurrency: concurrency,
7360
7451
  maxReviewCycles: 2,
7361
- pipelineRunId: pipelineRun.id
7452
+ pipelineRunId: pipelineRun.id,
7453
+ ...Object.keys(perStoryContextCeilings).length > 0 ? { perStoryContextCeilings } : {}
7362
7454
  },
7363
7455
  projectRoot
7364
7456
  });
@@ -7410,7 +7502,7 @@ async function runRetryEscalatedAction(options) {
7410
7502
  }
7411
7503
  }
7412
7504
  function registerRetryEscalatedCommand(program, _version = "0.0.0", projectRoot = process.cwd(), registry) {
7413
- program.command("retry-escalated").description("Retry escalated stories flagged as retry-targeted by escalation diagnosis").option("--run-id <id>", "Scope to a specific pipeline run ID (defaults to latest run with escalations)").option("--dry-run", "Print retryable and skipped stories without invoking the orchestrator").option("--concurrency <n>", "Maximum parallel story executions", (v) => {
7505
+ program.command("retry-escalated").description("Retry escalated stories flagged as retry-targeted by escalation diagnosis").option("--run-id <id>", "Scope to a specific pipeline run ID (defaults to latest run with escalations)").option("--dry-run", "Print retryable and skipped stories without invoking the orchestrator").option("--force", "Bypass efficiency-gate checks (warning and context ceiling)", false).option("--concurrency <n>", "Maximum parallel story executions", (v) => {
7414
7506
  const n = parseInt(v, 10);
7415
7507
  if (isNaN(n) || n < 1) throw new Error(`--concurrency must be a positive integer, got: ${v}`);
7416
7508
  return n;
@@ -7419,6 +7511,7 @@ function registerRetryEscalatedCommand(program, _version = "0.0.0", projectRoot
7419
7511
  const exitCode = await runRetryEscalatedAction({
7420
7512
  runId: opts.runId,
7421
7513
  dryRun: opts.dryRun === true,
7514
+ force: opts.force === true,
7422
7515
  outputFormat,
7423
7516
  projectRoot: opts.projectRoot,
7424
7517
  concurrency: opts.concurrency,
@@ -7928,7 +8021,7 @@ const SPRINT_HEADER_RE = /^(?:\*\*)?Sprint\s+(\d+)\s*[—–-]/i;
7928
8021
  * Regex for story lines: `- 31-2: Epic doc ingestion (P0, Medium)`
7929
8022
  * Captures: epicNum, storyNum, title, priority, size
7930
8023
  */
7931
- const STORY_LINE_RE = /^(?:-\s+)?(\d+)-(\d+):\s+(.+?)\s+\((P\d+),\s+([\w-]+)\)\s*$/;
8024
+ const STORY_LINE_RE = /^(?:-\s+)?(?:Story\s+)?(\d+)-(\d+):\s+(.+?)\s+\((P\d+),\s+([\w-]+)\)\s*$/;
7932
8025
  /** Regex to find the story map section heading */
7933
8026
  const STORY_MAP_HEADING_RE = /^#{1,6}\s+.*Story\s+Map/im;
7934
8027
  /** Regex to find the dependency chain line */
package/dist/index.d.ts CHANGED
@@ -1514,6 +1514,19 @@ interface AdapterOptions {
1514
1514
  * so the telemetry pipeline can group spans/events per story.
1515
1515
  */
1516
1516
  storyKey?: string;
1517
+ /**
1518
+ * Optional maximum context tokens (passed as --max-context-tokens to Claude CLI).
1519
+ * When set, constrains the context window to prevent runaway token usage.
1520
+ * Used by efficiency-gated retry logic (Story 30-8) to cap context for stories
1521
+ * that previously exhibited context spike patterns.
1522
+ */
1523
+ maxContextTokens?: number;
1524
+ /**
1525
+ * Optional optimization directives derived from prior stories' telemetry (Story 30-6).
1526
+ * When set, appended to the system prompt to guide the sub-agent toward efficient patterns.
1527
+ * Generated by TelemetryAdvisor.formatOptimizationDirectives().
1528
+ */
1529
+ optimizationDirectives?: string;
1517
1530
  }
1518
1531
  /**
1519
1532
  * Capabilities reported by an adapter for this CLI agent.
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { childLogger, createLogger, logger } from "./logger-D2fS2ccL.js";
2
- import { AdapterRegistry, ClaudeCodeAdapter, CodexCLIAdapter, GeminiCLIAdapter } from "./adapter-registry-BkUvZSKJ.js";
2
+ import { AdapterRegistry, ClaudeCodeAdapter, CodexCLIAdapter, GeminiCLIAdapter } from "./adapter-registry-CDNPbixE.js";
3
3
  import { AdtError, BudgetExceededError, ConfigError, ConfigIncompatibleFormatError, GitError, RecoveryError, TaskConfigError, TaskGraphCycleError, TaskGraphError, TaskGraphIncompatibleFormatError, WorkerError, WorkerNotFoundError, assertDefined, createEventBus, createTuiApp, deepClone, formatDuration, generateId, isPlainObject, isTuiCapable, printNonTtyWarning, sleep, withRetry } from "./helpers-BihqWgVe.js";
4
4
 
5
5
  //#region src/core/di.ts