substrate-ai 0.4.6 → 0.4.8

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
package/dist/cli/index.js CHANGED
@@ -1,10 +1,10 @@
1
1
  #!/usr/bin/env node
2
- import { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-CkYqARL5.js";
2
+ import { AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-CvQCHfTV.js";
3
3
  import { createLogger } from "../logger-D2fS2ccL.js";
4
4
  import { AdapterRegistry } from "../adapter-registry-Cd-7lG5v.js";
5
5
  import { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema } from "../config-migrator-DtZW1maj.js";
6
6
  import { ConfigError, createEventBus } from "../helpers-BihqWgVe.js";
7
- import { RoutingRecommender } from "../routing-CZfJB3y9.js";
7
+ import { RoutingRecommender } from "../routing-BUE9pIxW.js";
8
8
  import { addTokenUsage, createDecision, createPipelineRun, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestRun, getTokenUsageSummary, listRequirements, updatePipelineRun } from "../decisions-Db8GTbH2.js";
9
9
  import { ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, OPERATIONAL_FINDING, STORY_METRICS, aggregateTokenUsageForRun, compareRunMetrics, getBaselineRunMetrics, getRunMetrics, getStoryMetricsForRun, incrementRunRestarts, listRunMetrics, tagRunAsBaseline } from "../operational-C0_y8DAs.js";
10
10
  import { abortMerge, createWorktree, getConflictingFiles, getMergedFiles, getOrphanedWorktrees, performMerge, removeBranch, removeWorktree, simulateMerge, verifyGitVersion } from "../git-utils-C-fdrHF_.js";
@@ -2709,7 +2709,7 @@ async function runSupervisorAction(options, deps = {}) {
2709
2709
  const expDb = expDbWrapper.db;
2710
2710
  const { runRunAction: runPipeline } = await import(
2711
2711
  /* @vite-ignore */
2712
- "../run-B_W_-Pp9.js"
2712
+ "../run-BGgReI0e.js"
2713
2713
  );
2714
2714
  const runStoryFn = async (opts) => {
2715
2715
  const exitCode = await runPipeline({
@@ -3194,7 +3194,7 @@ async function runMetricsAction(options) {
3194
3194
  const routingConfigPath = join(dbDir, "routing.yml");
3195
3195
  let routingConfig = null;
3196
3196
  if (existsSync(routingConfigPath)) try {
3197
- const { loadModelRoutingConfig } = await import("../routing-DWCBjrt7.js");
3197
+ const { loadModelRoutingConfig } = await import("../routing-DbR9FPmj.js");
3198
3198
  routingConfig = loadModelRoutingConfig(routingConfigPath);
3199
3199
  } catch {}
3200
3200
  if (routingConfig === null) routingConfig = {
@@ -7540,6 +7540,15 @@ function registerRepoMapCommand(program) {
7540
7540
  return;
7541
7541
  }
7542
7542
  const doltClient = new DoltClient({ repoPath: statePath });
7543
+ try {
7544
+ const colRows = await doltClient.query(`SHOW COLUMNS FROM repo_map_symbols LIKE 'dependencies'`);
7545
+ if (colRows.length === 0) {
7546
+ await doltClient.query(`ALTER TABLE repo_map_symbols ADD COLUMN dependencies JSON`);
7547
+ logger$2.info("Applied migration: added dependencies column to repo_map_symbols");
7548
+ }
7549
+ } catch {
7550
+ logger$2.debug("Skipping repo_map_symbols migration: table not yet created");
7551
+ }
7543
7552
  const symbolRepo = new DoltSymbolRepository(doltClient, logger$2);
7544
7553
  const metaRepo = new DoltRepoMapMetaRepository(doltClient);
7545
7554
  const repoMapModule = new RepoMapModule(metaRepo, logger$2);
@@ -7572,19 +7581,41 @@ function registerRepoMapCommand(program) {
7572
7581
  logger$2.info("repo-map --update: triggering incremental update");
7573
7582
  const gitClient = new GitClient(logger$2);
7574
7583
  const grammarLoader = new GrammarLoader(logger$2);
7584
+ if (grammarLoader.getGrammar(".ts") === null) {
7585
+ const msg = "tree-sitter grammars not installed. Run `npm install tree-sitter tree-sitter-typescript tree-sitter-javascript tree-sitter-python` in the substrate installation directory.";
7586
+ if (options.outputFormat === "json") console.log(JSON.stringify({
7587
+ result: "error",
7588
+ error: msg
7589
+ }));
7590
+ else process.stderr.write(`Error: ${msg}\n`);
7591
+ process.exitCode = 1;
7592
+ return;
7593
+ }
7575
7594
  const parser = new SymbolParser(grammarLoader, logger$2);
7576
7595
  const storage = new RepoMapStorage(symbolRepo, metaRepo, gitClient, logger$2);
7577
- await storage.incrementalUpdate(dbRoot, parser);
7596
+ let updateWarning;
7597
+ try {
7598
+ await storage.incrementalUpdate(dbRoot, parser);
7599
+ } catch (err) {
7600
+ if (err instanceof AppError && err.code === ERR_REPO_MAP_STORAGE_WRITE) {
7601
+ updateWarning = err.message;
7602
+ logger$2.warn({ err }, "repo-map --update: storage write error (partial update)");
7603
+ } else throw err;
7604
+ }
7578
7605
  const meta = await metaRepo.getMeta();
7579
7606
  const symbolCount = (await symbolRepo.getSymbols()).length;
7580
7607
  if (options.outputFormat === "json") console.log(JSON.stringify({
7581
- result: "updated",
7608
+ result: updateWarning ? "partial" : "updated",
7582
7609
  symbolCount,
7583
7610
  fileCount: meta?.fileCount ?? 0,
7584
7611
  commitSha: meta?.commitSha ?? null,
7585
- updatedAt: meta?.updatedAt?.toISOString() ?? null
7612
+ updatedAt: meta?.updatedAt?.toISOString() ?? null,
7613
+ ...updateWarning ? { warning: updateWarning } : {}
7586
7614
  }));
7587
- else console.log(`Repo-map updated: ${symbolCount} symbols across ${meta?.fileCount ?? 0} files`);
7615
+ else if (updateWarning) {
7616
+ console.log(`Repo-map partially updated: ${symbolCount} symbols across ${meta?.fileCount ?? 0} files`);
7617
+ console.log(`Warning: ${updateWarning}`);
7618
+ } else console.log(`Repo-map updated: ${symbolCount} symbols across ${meta?.fileCount ?? 0} files`);
7588
7619
  return;
7589
7620
  }
7590
7621
  if (options.query !== void 0) {
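The hunk above changes `repo-map --update` so that a storage-write failure no longer aborts the command: the error message is carried as a warning and the result is reported as "partial" instead of "updated". A minimal sketch of that flow, assuming a simplified `AppError` shape (the real class and `ERR_REPO_MAP_STORAGE_WRITE` code are re-exported from the run chunk in this release):

```ts
// Simplified stand-ins for the AppError class and error code used by the CLI.
class AppError extends Error {
  constructor(public code: string, public exitCode: number, message: string) {
    super(message);
  }
}
const ERR_REPO_MAP_STORAGE_WRITE = "ERR_REPO_MAP_STORAGE_WRITE";

interface UpdateReport {
  result: "updated" | "partial";
  symbolCount: number;
  warning?: string;
}

// Runs the incremental update, downgrading a storage-write failure to a
// warning instead of failing the whole command.
async function runIncrementalUpdate(
  update: () => Promise<void>,
  countSymbols: () => Promise<number>
): Promise<UpdateReport> {
  let warning: string | undefined;
  try {
    await update();
  } catch (err) {
    if (err instanceof AppError && err.code === ERR_REPO_MAP_STORAGE_WRITE) {
      warning = err.message; // partial update: keep whatever was written
    } else {
      throw err; // unknown failures still abort
    }
  }
  const symbolCount = await countSymbols();
  return warning === undefined
    ? { result: "updated", symbolCount }
    : { result: "partial", symbolCount, warning };
}
```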
@@ -1,5 +1,5 @@
1
1
  import { createLogger } from "./logger-D2fS2ccL.js";
2
- import { load } from "js-yaml";
2
+ import { dump, load } from "js-yaml";
3
3
  import { z } from "zod";
4
4
  import { readFileSync, writeFileSync } from "node:fs";
5
5
 
@@ -280,13 +280,142 @@ var RoutingResolver = class RoutingResolver {
280
280
  }
281
281
  };
282
282
 
283
+ //#endregion
284
+ //#region src/modules/routing/routing-token-accumulator.ts
285
+ /**
286
+ * Accumulates per-dispatch routing decisions and agent token usage, then
287
+ * flushes an aggregated `PhaseTokenBreakdown` to the StateStore at run end.
288
+ *
289
+ * Thread-safety: all methods are synchronous accumulators; `flush` is async
290
+ * but should only be called once per run after all dispatches settle.
291
+ */
292
+ var RoutingTokenAccumulator = class {
293
+ _config;
294
+ _stateStore;
295
+ _logger;
296
+ /** Maps dispatchId → { phase, model } registered from routing:model-selected events */
297
+ _dispatchMap = new Map();
298
+ /**
299
+ * Bucket key = `"${phase}::${model}"`.
300
+ * Separate entries per (phase, model) combination so mixed-model runs
301
+ * produce distinct rows in the breakdown.
302
+ */
303
+ _buckets = new Map();
304
+ constructor(config, stateStore, logger$1) {
305
+ this._config = config;
306
+ this._stateStore = stateStore;
307
+ this._logger = logger$1;
308
+ }
309
+ /**
310
+ * Register the routing decision for a dispatch.
311
+ * A second event for the same `dispatchId` overwrites the prior entry (last-writer-wins).
312
+ *
313
+ * @param event - payload from `routing:model-selected`
314
+ */
315
+ onRoutingSelected(event) {
316
+ this._dispatchMap.set(event.dispatchId, {
317
+ phase: event.phase,
318
+ model: event.model
319
+ });
320
+ this._logger.debug({
321
+ dispatchId: event.dispatchId,
322
+ phase: event.phase,
323
+ model: event.model
324
+ }, "routing:model-selected registered");
325
+ }
326
+ /**
327
+ * Attribute token usage to the phase bucket for this dispatch.
328
+ * Unknown `dispatchId` values are attributed to `phase: 'default', model: 'unknown'`.
329
+ *
330
+ * @param event - payload from `agent:completed` (must include inputTokens / outputTokens)
331
+ */
332
+ onAgentCompleted(event) {
333
+ const mapping = this._dispatchMap.get(event.dispatchId);
334
+ const phase = mapping?.phase ?? "default";
335
+ const model = mapping?.model ?? "unknown";
336
+ this._upsertBucket(phase, model, event.inputTokens, event.outputTokens);
337
+ this._logger.debug({
338
+ dispatchId: event.dispatchId,
339
+ phase,
340
+ model,
341
+ inputTokens: event.inputTokens
342
+ }, "agent:completed attributed");
343
+ }
344
+ /**
345
+ * Construct the `PhaseTokenBreakdown` from the accumulated buckets and
346
+ * persist it to the StateStore via `setMetric`.
347
+ * Clears all in-memory state afterwards so a second call writes an empty entry.
348
+ *
349
+ * @param runId - the pipeline run ID used to scope the metric key
350
+ */
351
+ async flush(runId) {
352
+ const entries = Array.from(this._buckets.values());
353
+ const breakdown = {
354
+ entries,
355
+ baselineModel: this._config.baseline_model,
356
+ runId
357
+ };
358
+ await this._stateStore.setMetric(runId, "phase_token_breakdown", breakdown);
359
+ this._logger.debug({
360
+ runId,
361
+ entryCount: entries.length
362
+ }, "Phase token breakdown flushed to StateStore");
363
+ this._dispatchMap.clear();
364
+ this._buckets.clear();
365
+ }
366
+ _upsertBucket(phase, model, inputTokens, outputTokens) {
367
+ const key = `${phase}::${model}`;
368
+ const existing = this._buckets.get(key);
369
+ if (existing) {
370
+ existing.inputTokens += inputTokens;
371
+ existing.outputTokens += outputTokens;
372
+ existing.dispatchCount += 1;
373
+ } else this._buckets.set(key, {
374
+ phase,
375
+ model,
376
+ inputTokens,
377
+ outputTokens,
378
+ dispatchCount: 1
379
+ });
380
+ }
381
+ };
382
+
383
+ //#endregion
384
+ //#region src/modules/routing/routing-telemetry.ts
385
+ /**
386
+ * Emits `routing.model_resolved` OTEL spans via a TelemetryPersistence instance.
387
+ *
388
+ * Injected into the run command alongside RoutingResolver. When telemetry is
389
+ * not configured, pass null to the run command; no spans are emitted.
390
+ */
391
+ var RoutingTelemetry = class {
392
+ _telemetry;
393
+ _logger;
394
+ constructor(telemetry, logger$1) {
395
+ this._telemetry = telemetry;
396
+ this._logger = logger$1;
397
+ }
398
+ /**
399
+ * Emit a `routing.model_resolved` span for a single routing decision.
400
+ *
401
+ * @param attrs - span attributes including dispatchId, taskType, phase, model, source, latencyMs
402
+ */
403
+ recordModelResolved(attrs) {
404
+ this._telemetry.recordSpan({
405
+ name: "routing.model_resolved",
406
+ attributes: attrs
407
+ });
408
+ this._logger.debug(attrs, "routing.model_resolved span emitted");
409
+ }
410
+ };
411
+
283
412
  //#endregion
284
413
  //#region src/modules/routing/routing-recommender.ts
285
414
  /**
286
415
  * Ordered tier list: index 0 = cheapest / smallest, index N = most expensive / largest.
287
416
  * Tiers are determined by substring matching — e.g. 'claude-haiku-4-5' → tier 1.
288
417
  */
289
- const TIER_KEYWORDS = [
418
+ const TIER_KEYWORDS$1 = [
290
419
  {
291
420
  keyword: "haiku",
292
421
  tier: 1
@@ -330,7 +459,7 @@ var RoutingRecommender = class {
330
459
  */
331
460
  _getTier(model) {
332
461
  const lower = model.toLowerCase();
333
- for (const { keyword, tier } of TIER_KEYWORDS) if (lower.includes(keyword)) return tier;
462
+ for (const { keyword, tier } of TIER_KEYWORDS$1) if (lower.includes(keyword)) return tier;
334
463
  return 2;
335
464
  }
336
465
  /**
@@ -473,5 +602,231 @@ var RoutingRecommender = class {
473
602
  };
474
603
 
475
604
  //#endregion
476
- export { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, TASK_TYPE_PHASE_MAP, loadModelRoutingConfig };
477
- //# sourceMappingURL=routing-CZfJB3y9.js.map
605
+ //#region src/modules/routing/model-tier.ts
606
+ /**
607
+ * Shared model tier resolution utility.
608
+ *
609
+ * Determines whether a model string belongs to the haiku (1), sonnet (2),
610
+ * or opus (3) tier based on substring matching against well-known keywords.
611
+ *
612
+ * Used by both RoutingRecommender and RoutingTuner to ensure consistent
613
+ * tier comparisons — in particular the one-step guard in RoutingTuner.
614
+ */
615
+ /** Ordered tier keywords: index 0 = cheapest, index N = most expensive. */
616
+ const TIER_KEYWORDS = [
617
+ {
618
+ keyword: "haiku",
619
+ tier: 1
620
+ },
621
+ {
622
+ keyword: "sonnet",
623
+ tier: 2
624
+ },
625
+ {
626
+ keyword: "opus",
627
+ tier: 3
628
+ }
629
+ ];
630
+ /**
631
+ * Get the model tier for a given model name string.
632
+ *
633
+ * Returns:
634
+ * - 1 for haiku-tier models
635
+ * - 2 for sonnet-tier models (also the default when unrecognized)
636
+ * - 3 for opus-tier models
637
+ *
638
+ * Matching is case-insensitive substring search.
639
+ */
640
+ function getModelTier(model) {
641
+ const lower = model.toLowerCase();
642
+ for (const { keyword, tier } of TIER_KEYWORDS) if (lower.includes(keyword)) return tier;
643
+ return 2;
644
+ }
645
+
646
+ //#endregion
647
+ //#region src/modules/routing/routing-tuner.ts
648
+ /** Minimum number of breakdowns required before auto-tuning is attempted. */
649
+ const MIN_BREAKDOWNS_FOR_TUNING = 5;
650
+ /** Key used to store the list of known run IDs in the StateStore. */
651
+ const RUN_INDEX_KEY = "phase_token_breakdown_runs";
652
+ /** Key used to store the tune log in the StateStore. */
653
+ const TUNE_LOG_KEY = "routing_tune_log";
654
+ /**
655
+ * Auto-applies a single conservative model downgrade per invocation when
656
+ * `config.auto_tune` is `true` and sufficient historical data is available.
657
+ *
658
+ * The tuner reads the current routing YAML config, applies the change in memory,
659
+ * and writes it back to disk synchronously. It also appends a `TuneLogEntry`
660
+ * to the StateStore for audit purposes, and emits a `routing:auto-tuned` event.
661
+ */
662
+ var RoutingTuner = class {
663
+ _stateStore;
664
+ _recommender;
665
+ _eventEmitter;
666
+ _configPath;
667
+ _logger;
668
+ constructor(stateStore, recommender, eventEmitter, configPath, logger$1) {
669
+ this._stateStore = stateStore;
670
+ this._recommender = recommender;
671
+ this._eventEmitter = eventEmitter;
672
+ this._configPath = configPath;
673
+ this._logger = logger$1;
674
+ }
675
+ /**
676
+ * Called at the end of a pipeline run. When auto_tune is enabled and sufficient
677
+ * historical data exists, applies a single conservative model downgrade to the
678
+ * routing config YAML file.
679
+ *
680
+ * @param runId - ID of the just-completed pipeline run
681
+ * @param config - Current model routing config (already loaded from disk)
682
+ */
683
+ async maybeAutoTune(runId, config) {
684
+ if (config.auto_tune !== true) {
685
+ this._logger.debug({ runId }, "auto_tune_disabled — skipping RoutingTuner");
686
+ return;
687
+ }
688
+ await this._registerRunId(runId);
689
+ const breakdowns = await this._loadRecentBreakdowns(10);
690
+ if (breakdowns.length < MIN_BREAKDOWNS_FOR_TUNING) {
691
+ this._logger.debug({
692
+ runId,
693
+ available: breakdowns.length,
694
+ required: MIN_BREAKDOWNS_FOR_TUNING
695
+ }, "insufficient_data — not enough breakdowns for auto-tuning");
696
+ return;
697
+ }
698
+ const analysis = this._recommender.analyze(breakdowns, config);
699
+ if (analysis.insufficientData) {
700
+ this._logger.debug({ runId }, "Recommender returned insufficientData");
701
+ return;
702
+ }
703
+ const downgradeCandidates = analysis.recommendations.filter((rec) => {
704
+ if (rec.direction !== "downgrade") return false;
705
+ const tierDiff = Math.abs(getModelTier(rec.currentModel) - getModelTier(rec.suggestedModel));
706
+ return tierDiff === 1;
707
+ });
708
+ if (downgradeCandidates.length === 0) {
709
+ this._logger.debug({ runId }, "no_safe_recommendation");
710
+ return;
711
+ }
712
+ const topRec = downgradeCandidates.sort((a, b) => b.confidence - a.confidence)[0];
713
+ let rawContent;
714
+ try {
715
+ rawContent = readFileSync(this._configPath, "utf-8");
716
+ } catch (err) {
717
+ const msg = err instanceof Error ? err.message : String(err);
718
+ this._logger.warn({
719
+ err: msg,
720
+ configPath: this._configPath
721
+ }, "Failed to read routing config for auto-tune");
722
+ return;
723
+ }
724
+ let rawObject;
725
+ try {
726
+ rawObject = load(rawContent);
727
+ } catch (err) {
728
+ const msg = err instanceof Error ? err.message : String(err);
729
+ this._logger.warn({ err: msg }, "Failed to parse routing config YAML for auto-tune");
730
+ return;
731
+ }
732
+ const configObj = rawObject;
733
+ if (configObj.phases === void 0) configObj.phases = {};
734
+ const existingPhase = configObj.phases[topRec.phase];
735
+ if (existingPhase !== void 0) existingPhase.model = topRec.suggestedModel;
736
+ else configObj.phases[topRec.phase] = { model: topRec.suggestedModel };
737
+ try {
738
+ writeFileSync(this._configPath, dump(rawObject, { lineWidth: 120 }), "utf-8");
739
+ } catch (err) {
740
+ const msg = err instanceof Error ? err.message : String(err);
741
+ this._logger.warn({
742
+ err: msg,
743
+ configPath: this._configPath
744
+ }, "Failed to write updated routing config");
745
+ return;
746
+ }
747
+ const tuneEntry = {
748
+ id: crypto.randomUUID(),
749
+ runId,
750
+ phase: topRec.phase,
751
+ oldModel: topRec.currentModel,
752
+ newModel: topRec.suggestedModel,
753
+ estimatedSavingsPct: topRec.estimatedSavingsPct,
754
+ appliedAt: new Date().toISOString()
755
+ };
756
+ await this._appendTuneLog(tuneEntry);
757
+ this._eventEmitter.emit("routing:auto-tuned", {
758
+ runId,
759
+ phase: topRec.phase,
760
+ oldModel: topRec.currentModel,
761
+ newModel: topRec.suggestedModel,
762
+ estimatedSavingsPct: topRec.estimatedSavingsPct
763
+ });
764
+ this._logger.info({
765
+ runId,
766
+ phase: topRec.phase,
767
+ oldModel: topRec.currentModel,
768
+ newModel: topRec.suggestedModel
769
+ }, "Auto-tuned routing config — applied downgrade");
770
+ }
771
+ /**
772
+ * Register a run ID in the stored run index so future calls can discover
773
+ * all historical breakdowns without a separate run listing endpoint.
774
+ */
775
+ async _registerRunId(runId) {
776
+ const existing = await this._stateStore.getMetric("__global__", RUN_INDEX_KEY);
777
+ const runIds = Array.isArray(existing) ? existing : [];
778
+ if (!runIds.includes(runId)) {
779
+ runIds.push(runId);
780
+ await this._stateStore.setMetric("__global__", RUN_INDEX_KEY, runIds);
781
+ }
782
+ }
783
+ /**
784
+ * Load the most recent `lookback` PhaseTokenBreakdown records from the StateStore.
785
+ *
786
+ * Each breakdown is stored by RoutingTokenAccumulator under the key
787
+ * `'phase_token_breakdown'` scoped to the run ID. The run IDs themselves are
788
+ * tracked in a global index stored under `('__global__', RUN_INDEX_KEY)`.
789
+ *
790
+ * @param lookback - Maximum number of recent runs to inspect
791
+ */
792
+ async _loadRecentBreakdowns(lookback) {
793
+ const existing = await this._stateStore.getMetric("__global__", RUN_INDEX_KEY);
794
+ const allRunIds = Array.isArray(existing) ? existing : [];
795
+ const recentRunIds = allRunIds.slice(-lookback);
796
+ const breakdowns = [];
797
+ for (const runId of recentRunIds) try {
798
+ const raw = await this._stateStore.getMetric(runId, "phase_token_breakdown");
799
+ if (raw !== void 0 && raw !== null) {
800
+ const parsed = typeof raw === "string" ? JSON.parse(raw) : raw;
801
+ breakdowns.push(parsed);
802
+ }
803
+ } catch (err) {
804
+ const msg = err instanceof Error ? err.message : String(err);
805
+ this._logger.debug({
806
+ runId,
807
+ err: msg
808
+ }, "Failed to load breakdown for run — skipping");
809
+ }
810
+ return breakdowns;
811
+ }
812
+ /**
813
+ * Append a TuneLogEntry to the persisted tune log in the StateStore.
814
+ *
815
+ * NOTE: This uses `'__global__'` as the scope key (codebase convention) rather
816
+ * than the literal `'global'` mentioned in AC6. The tune log is stored as a raw
817
+ * array (not a JSON-stringified string) for internal consistency with how other
818
+ * array values are stored in this StateStore. Story 28-9's
819
+ * `substrate routing --history` command MUST use the same `'__global__'` scope
820
+ * key and `'routing_tune_log'` metric key when reading this log.
821
+ */
822
+ async _appendTuneLog(entry) {
823
+ const existing = await this._stateStore.getMetric("__global__", TUNE_LOG_KEY);
824
+ const log = Array.isArray(existing) ? existing : [];
825
+ log.push(entry);
826
+ await this._stateStore.setMetric("__global__", TUNE_LOG_KEY, log);
827
+ }
828
+ };
829
+
830
+ //#endregion
831
+ export { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, TASK_TYPE_PHASE_MAP, getModelTier, loadModelRoutingConfig };
832
+ //# sourceMappingURL=routing-BUE9pIxW.js.map
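The new `model-tier.ts` and `routing-tuner.ts` regions above gate auto-tuning on downgrades that move exactly one tier. A minimal sketch of that guard, assuming a simplified `Recommendation` shape (the real objects come from `RoutingRecommender.analyze`):

```ts
type Tier = 1 | 2 | 3;

// Same ordering and substring matching as the shared tier table above.
const TIER_KEYWORDS: Array<{ keyword: string; tier: Tier }> = [
  { keyword: "haiku", tier: 1 },
  { keyword: "sonnet", tier: 2 },
  { keyword: "opus", tier: 3 },
];

function getModelTier(model: string): Tier {
  const lower = model.toLowerCase();
  for (const { keyword, tier } of TIER_KEYWORDS) {
    if (lower.includes(keyword)) return tier;
  }
  return 2; // unrecognized models are treated as sonnet-tier
}

interface Recommendation {
  direction: "downgrade" | "upgrade";
  currentModel: string;
  suggestedModel: string;
  confidence: number;
}

// Keep only downgrades that move exactly one tier (e.g. opus to sonnet),
// then pick the highest-confidence one, as maybeAutoTune does.
function pickSafeDowngrade(recs: Recommendation[]): Recommendation | undefined {
  return recs
    .filter((r) => r.direction === "downgrade"
      && Math.abs(getModelTier(r.currentModel) - getModelTier(r.suggestedModel)) === 1)
    .sort((a, b) => b.confidence - a.confidence)[0];
}

// Example: an opus-to-haiku suggestion (two tiers) is rejected, an
// opus-to-sonnet suggestion is accepted.
pickSafeDowngrade([
  { direction: "downgrade", currentModel: "claude-opus-4", suggestedModel: "claude-haiku-4-5", confidence: 0.9 },
  { direction: "downgrade", currentModel: "claude-opus-4", suggestedModel: "claude-sonnet-4-5", confidence: 0.7 },
]);
```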
@@ -0,0 +1,4 @@
1
+ import "./logger-D2fS2ccL.js";
2
+ import { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, TASK_TYPE_PHASE_MAP, getModelTier, loadModelRoutingConfig } from "./routing-BUE9pIxW.js";
3
+
4
+ export { loadModelRoutingConfig };
@@ -1,8 +1,8 @@
1
- import { registerRunCommand, runRunAction } from "./run-CkYqARL5.js";
1
+ import { registerRunCommand, runRunAction } from "./run-CvQCHfTV.js";
2
2
  import "./logger-D2fS2ccL.js";
3
3
  import "./config-migrator-DtZW1maj.js";
4
4
  import "./helpers-BihqWgVe.js";
5
- import "./routing-CZfJB3y9.js";
5
+ import "./routing-BUE9pIxW.js";
6
6
  import "./decisions-Db8GTbH2.js";
7
7
  import "./operational-C0_y8DAs.js";
8
8
 
@@ -1,7 +1,7 @@
1
1
  import { createLogger, deepMask } from "./logger-D2fS2ccL.js";
2
2
  import { CURRENT_CONFIG_FORMAT_VERSION, PartialSubstrateConfigSchema, SUPPORTED_CONFIG_FORMAT_VERSIONS, SubstrateConfigSchema, defaultConfigMigrator } from "./config-migrator-DtZW1maj.js";
3
3
  import { ConfigError, ConfigIncompatibleFormatError, createEventBus, createTuiApp, isTuiCapable, printNonTtyWarning, sleep } from "./helpers-BihqWgVe.js";
4
- import { RoutingResolver } from "./routing-CZfJB3y9.js";
4
+ import { RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, loadModelRoutingConfig } from "./routing-BUE9pIxW.js";
5
5
  import { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision } from "./decisions-Db8GTbH2.js";
6
6
  import { ADVISORY_NOTES, ESCALATION_DIAGNOSIS, OPERATIONAL_FINDING, STORY_METRICS, STORY_OUTCOME, TEST_EXPANSION_FINDING, TEST_PLAN, aggregateTokenUsageForRun, aggregateTokenUsageForStory, getStoryMetricsForRun, writeRunMetrics, writeStoryMetrics } from "./operational-C0_y8DAs.js";
7
7
  import { createRequire } from "module";
@@ -3832,10 +3832,12 @@ var DoltSymbolRepository = class {
3832
3832
  this._logger.debug({ filePath }, "upsertFileSymbols: cleared symbols for deleted/empty file");
3833
3833
  return;
3834
3834
  }
3835
- const placeholders = symbols.map(() => "(?, ?, ?, ?, ?, ?, ?)").join(", ");
3835
+ const deps = symbols.filter((s) => s.kind === "import").map((s) => s.name);
3836
+ const depsJson = JSON.stringify(deps);
3837
+ const placeholders = symbols.map(() => "(?, ?, ?, ?, ?, ?, ?, ?)").join(", ");
3836
3838
  const params = [];
3837
- for (const sym of symbols) params.push(filePath, sym.name, sym.kind, sym.signature ?? "", sym.lineNumber, sym.exported ? 1 : 0, fileHash);
3838
- await this._client.query(`INSERT INTO repo_map_symbols (file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash) VALUES ${placeholders}`, params);
3839
+ for (const sym of symbols) params.push(filePath, sym.name, sym.kind, sym.signature ?? "", sym.lineNumber, sym.exported ? 1 : 0, fileHash, depsJson);
3840
+ await this._client.query(`INSERT INTO repo_map_symbols (file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash, dependencies) VALUES ${placeholders}`, params);
3839
3841
  this._logger.debug({
3840
3842
  filePath,
3841
3843
  count: symbols.length
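The hunk above extends `upsertFileSymbols` so that import-kind symbols of a file become one JSON array that is stored on every row for that file. A readable sketch of the batched insert, assuming simplified `SqlClient` and `ParsedSymbol` shapes in place of `DoltClient` and the parser output:

```ts
interface ParsedSymbol {
  name: string;
  kind: string;
  signature?: string;
  lineNumber: number;
  exported: boolean;
}

interface SqlClient {
  query(sql: string, params?: unknown[]): Promise<unknown[]>;
}

async function upsertFileSymbols(
  client: SqlClient,
  filePath: string,
  symbols: ParsedSymbol[],
  fileHash: string
): Promise<void> {
  if (symbols.length === 0) return;
  // One dependencies payload per file, repeated on every row of that file.
  const deps = symbols.filter((s) => s.kind === "import").map((s) => s.name);
  const depsJson = JSON.stringify(deps);
  const placeholders = symbols.map(() => "(?, ?, ?, ?, ?, ?, ?, ?)").join(", ");
  const params: unknown[] = [];
  for (const sym of symbols) {
    params.push(filePath, sym.name, sym.kind, sym.signature ?? "",
      sym.lineNumber, sym.exported ? 1 : 0, fileHash, depsJson);
  }
  await client.query(
    `INSERT INTO repo_map_symbols
       (file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash, dependencies)
     VALUES ${placeholders}`,
    params
  );
}
```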
@@ -3899,7 +3901,7 @@ var DoltSymbolRepository = class {
3899
3901
  if (filePaths.length === 0) return [];
3900
3902
  try {
3901
3903
  const placeholders = filePaths.map(() => "?").join(", ");
3902
- const rows = await this._client.query(`SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash FROM repo_map_symbols WHERE file_path IN (${placeholders})`, filePaths);
3904
+ const rows = await this._client.query(`SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash, dependencies FROM repo_map_symbols WHERE file_path IN (${placeholders})`, filePaths);
3903
3905
  return rows.map((r) => this._rowToRepoMapSymbol(r));
3904
3906
  } catch (err) {
3905
3907
  const detail = err instanceof Error ? err.message : String(err);
@@ -3910,7 +3912,7 @@ var DoltSymbolRepository = class {
3910
3912
  if (names.length === 0) return [];
3911
3913
  try {
3912
3914
  const placeholders = names.map(() => "?").join(", ");
3913
- const rows = await this._client.query(`SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash FROM repo_map_symbols WHERE symbol_name IN (${placeholders})`, names);
3915
+ const rows = await this._client.query(`SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash, dependencies FROM repo_map_symbols WHERE symbol_name IN (${placeholders})`, names);
3914
3916
  return rows.map((r) => this._rowToRepoMapSymbol(r));
3915
3917
  } catch (err) {
3916
3918
  const detail = err instanceof Error ? err.message : String(err);
@@ -3921,7 +3923,7 @@ var DoltSymbolRepository = class {
3921
3923
  if (types$1.length === 0) return [];
3922
3924
  try {
3923
3925
  const placeholders = types$1.map(() => "?").join(", ");
3924
- const rows = await this._client.query(`SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash FROM repo_map_symbols WHERE symbol_kind IN (${placeholders})`, types$1);
3926
+ const rows = await this._client.query(`SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash, dependencies FROM repo_map_symbols WHERE symbol_kind IN (${placeholders})`, types$1);
3925
3927
  return rows.map((r) => this._rowToRepoMapSymbol(r));
3926
3928
  } catch (err) {
3927
3929
  const detail = err instanceof Error ? err.message : String(err);
@@ -3929,16 +3931,20 @@ var DoltSymbolRepository = class {
3929
3931
  }
3930
3932
  }
3931
3933
  /**
3932
- * Returns symbols from files whose dependencies contain the given symbol name.
3933
- * Currently returns [] — the repo_map_symbols schema does not yet include a
3934
- * `dependencies` JSON column. A future migration will add it.
3934
+ * Returns symbols from files whose dependencies array contains symbolName.
3935
3935
  */
3936
- async findByDependedBy(_symbolName) {
3937
- return [];
3936
+ async findByDependedBy(symbolName) {
3937
+ try {
3938
+ const rows = await this._client.query(`SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash, dependencies FROM repo_map_symbols WHERE JSON_CONTAINS(dependencies, JSON_QUOTE(?), '$')`, [symbolName]);
3939
+ return rows.map((r) => this._rowToRepoMapSymbol(r));
3940
+ } catch (err) {
3941
+ const detail = err instanceof Error ? err.message : String(err);
3942
+ throw new AppError(ERR_REPO_MAP_STORAGE_READ, 2, `findByDependedBy failed: ${detail}`);
3943
+ }
3938
3944
  }
3939
3945
  async findAll() {
3940
3946
  try {
3941
- const rows = await this._client.query("SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash FROM repo_map_symbols");
3947
+ const rows = await this._client.query("SELECT file_path, symbol_name, symbol_kind, signature, line_number, exported, file_hash, dependencies FROM repo_map_symbols");
3942
3948
  return rows.map((r) => this._rowToRepoMapSymbol(r));
3943
3949
  } catch (err) {
3944
3950
  const detail = err instanceof Error ? err.message : String(err);
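The rewritten `findByDependedBy` above performs the reverse-dependency lookup with `JSON_CONTAINS`, which checks whether the `dependencies` JSON array holds the quoted symbol name. A minimal sketch of the query, assuming a simplified `SqlClient` interface in place of `DoltClient`:

```ts
interface SqlClient {
  query(sql: string, params?: unknown[]): Promise<Array<Record<string, unknown>>>;
}

// Rows whose dependencies column contains the given name, e.g.
// findFilesDependingOn(client, "createLogger") matches ["createLogger", ...].
async function findFilesDependingOn(client: SqlClient, symbolName: string) {
  return client.query(
    `SELECT file_path, symbol_name, dependencies
       FROM repo_map_symbols
      WHERE JSON_CONTAINS(dependencies, JSON_QUOTE(?), '$')`,
    [symbolName]
  );
}
```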
@@ -3946,13 +3952,18 @@ var DoltSymbolRepository = class {
3946
3952
  }
3947
3953
  }
3948
3954
  _rowToRepoMapSymbol(row) {
3955
+ let deps = [];
3956
+ if (row.dependencies) try {
3957
+ const parsed = typeof row.dependencies === "string" ? JSON.parse(row.dependencies) : row.dependencies;
3958
+ if (Array.isArray(parsed)) deps = parsed;
3959
+ } catch {}
3949
3960
  return {
3950
3961
  filePath: row.file_path,
3951
3962
  symbolName: row.symbol_name,
3952
3963
  symbolType: row.symbol_kind,
3953
3964
  signature: row.signature ?? void 0,
3954
3965
  lineNumber: row.line_number,
3955
- dependencies: [],
3966
+ dependencies: deps,
3956
3967
  fileHash: row.file_hash
3957
3968
  };
3958
3969
  }
@@ -3978,7 +3989,7 @@ var DoltRepoMapMetaRepository = class {
3978
3989
  updated_at = VALUES(updated_at),
3979
3990
  file_count = VALUES(file_count)`, [
3980
3991
  meta.commitSha,
3981
- meta.updatedAt,
3992
+ meta.updatedAt.toISOString(),
3982
3993
  meta.fileCount
3983
3994
  ]);
3984
3995
  } catch (err) {
@@ -4050,15 +4061,18 @@ var RepoMapStorage = class {
4050
4061
  const changedFiles = await this._gitClient.getChangedFiles(projectRoot, meta.commitSha);
4051
4062
  this._logger.debug({ count: changedFiles.length }, "incrementalUpdate: changed files");
4052
4063
  const supported = changedFiles.filter((f) => SUPPORTED_EXTENSIONS.has(extname(f)));
4064
+ let parsedCount = 0;
4053
4065
  for (const filePath of supported) try {
4054
4066
  const exists = await fileExists(filePath);
4055
4067
  if (!exists) {
4056
4068
  await this._symbolRepo.upsertFileSymbols(filePath, [], "");
4069
+ parsedCount++;
4057
4070
  continue;
4058
4071
  }
4059
4072
  const symbols = await parser.parseFile(filePath);
4060
4073
  const hash = await computeFileHash(filePath);
4061
4074
  await this._symbolRepo.upsertFileSymbols(filePath, symbols, hash);
4075
+ parsedCount++;
4062
4076
  } catch (err) {
4063
4077
  this._logger.warn({
4064
4078
  filePath,
@@ -4069,7 +4083,7 @@ var RepoMapStorage = class {
4069
4083
  await this._metaRepo.updateMeta({
4070
4084
  commitSha: currentSha,
4071
4085
  updatedAt: new Date(),
4072
- fileCount: supported.length
4086
+ fileCount: parsedCount
4073
4087
  });
4074
4088
  }
4075
4089
  /**
@@ -5699,6 +5713,36 @@ var RepoMapFormatter = class {
5699
5713
  }
5700
5714
  };
5701
5715
 
5716
+ //#endregion
5717
+ //#region src/modules/repo-map/repo-map-telemetry.ts
5718
+ /**
5719
+ * Emits `repo_map.query` OTEL spans via a TelemetryPersistence instance.
5720
+ *
5721
+ * Constructed with an `ITelemetryPersistence` and a logger. Pass an instance
5722
+ * to the `RepoMapQueryEngine` constructor to enable query telemetry; omit it to
5723
+ * skip telemetry without changing existing query behaviour.
5724
+ */
5725
+ var RepoMapTelemetry = class {
5726
+ _telemetry;
5727
+ _logger;
5728
+ constructor(telemetry, logger$27) {
5729
+ this._telemetry = telemetry;
5730
+ this._logger = logger$27;
5731
+ }
5732
+ /**
5733
+ * Emit a `repo_map.query` span.
5734
+ *
5735
+ * @param attrs - query telemetry attributes
5736
+ */
5737
+ recordQuery(attrs) {
5738
+ this._telemetry.recordSpan({
5739
+ name: "repo_map.query",
5740
+ attributes: attrs
5741
+ });
5742
+ this._logger.debug(attrs, "repo_map.query span emitted");
5743
+ }
5744
+ };
5745
+
5702
5746
  //#endregion
5703
5747
  //#region src/modules/repo-map/RepoMapModule.ts
5704
5748
  /**
@@ -7061,6 +7105,21 @@ var DoltStateStore = class DoltStateStore {
7061
7105
  )`
7062
7106
  ];
7063
7107
  for (const sql of ddl) await this._client.query(sql);
7108
+ try {
7109
+ const colRows = await this._client.query(`SHOW COLUMNS FROM repo_map_symbols LIKE 'dependencies'`);
7110
+ if (colRows.length === 0) {
7111
+ await this._client.query(`ALTER TABLE repo_map_symbols ADD COLUMN dependencies JSON`);
7112
+ await this._client.query(`INSERT IGNORE INTO _schema_version (version, description) VALUES (6, 'Add dependencies JSON column to repo_map_symbols (Epic 28-3)')`);
7113
+ log$1.info({
7114
+ component: "dolt-state",
7115
+ migration: "v5-to-v6",
7116
+ column: "dependencies",
7117
+ table: "repo_map_symbols"
7118
+ }, "Applied migration v5-to-v6: added dependencies column to repo_map_symbols");
7119
+ }
7120
+ } catch {
7121
+ log$1.debug("Skipping repo_map_symbols migration: table not yet created");
7122
+ }
7064
7123
  log$1.debug("Schema migrations applied");
7065
7124
  }
7066
7125
  /**
@@ -12632,6 +12691,7 @@ var Categorizer = class {
12632
12691
  if (lower.includes("conversation") || lower.includes("history") || lower.includes("chat")) return "conversation_history";
12633
12692
  if (lower.includes("user") || lower.includes("human")) return "user_prompts";
12634
12693
  if (toolName !== void 0 && toolName.length > 0) return "tool_outputs";
12694
+ if (lower === "log_turn") return "conversation_history";
12635
12695
  return "other";
12636
12696
  }
12637
12697
  /**
@@ -12665,6 +12725,75 @@ var Categorizer = class {
12665
12725
  return "stable";
12666
12726
  }
12667
12727
  /**
12728
+ * Compute per-category token statistics from TurnAnalysis data (not raw spans).
12729
+ *
12730
+ * All six SemanticCategory values are always present in the result (zero-token
12731
+ * categories are included with totalTokens: 0). Results are sorted by
12732
+ * totalTokens descending.
12733
+ *
12734
+ * Trend is computed by comparing first-half vs second-half turn token attribution
12735
+ * for each category, using the same 1.2×/0.8× thresholds as computeTrend().
12736
+ *
12737
+ * @param turns - TurnAnalysis[] for the story
12738
+ */
12739
+ computeCategoryStatsFromTurns(turns) {
12740
+ if (turns.length === 0) return ALL_CATEGORIES.map((category) => ({
12741
+ category,
12742
+ totalTokens: 0,
12743
+ percentage: 0,
12744
+ eventCount: 0,
12745
+ avgTokensPerEvent: 0,
12746
+ trend: "stable"
12747
+ }));
12748
+ const grandTotal = turns.reduce((sum, t) => sum + t.inputTokens + t.outputTokens, 0);
12749
+ const buckets = new Map();
12750
+ for (const cat of ALL_CATEGORIES) buckets.set(cat, {
12751
+ total: 0,
12752
+ count: 0,
12753
+ first: 0,
12754
+ second: 0
12755
+ });
12756
+ const half = Math.floor(turns.length / 2);
12757
+ for (let i = 0; i < turns.length; i++) {
12758
+ const turn = turns[i];
12759
+ const cat = this.classify(turn.name, turn.toolName);
12760
+ const bucket = buckets.get(cat);
12761
+ const tokens = turn.inputTokens + turn.outputTokens;
12762
+ bucket.total += tokens;
12763
+ bucket.count += 1;
12764
+ if (i < half) bucket.first += tokens;
12765
+ else bucket.second += tokens;
12766
+ }
12767
+ const results = ALL_CATEGORIES.map((category) => {
12768
+ const bucket = buckets.get(category);
12769
+ const totalTokens = bucket.total;
12770
+ const eventCount = bucket.count;
12771
+ const percentage = grandTotal > 0 ? Math.round(totalTokens / grandTotal * 100 * 1e3) / 1e3 : 0;
12772
+ const avgTokensPerEvent = eventCount > 0 ? totalTokens / eventCount : 0;
12773
+ let trend = "stable";
12774
+ if (turns.length >= 2) {
12775
+ const { first, second } = bucket;
12776
+ if (first === 0 && second === 0) trend = "stable";
12777
+ else if (first === 0) trend = "growing";
12778
+ else if (second > 1.2 * first) trend = "growing";
12779
+ else if (second < .8 * first) trend = "shrinking";
12780
+ }
12781
+ return {
12782
+ category,
12783
+ totalTokens,
12784
+ percentage,
12785
+ eventCount,
12786
+ avgTokensPerEvent,
12787
+ trend
12788
+ };
12789
+ });
12790
+ this._logger.debug({
12791
+ categories: results.length,
12792
+ grandTotal
12793
+ }, "Computed category stats from turns");
12794
+ return results.sort((a, b) => b.totalTokens - a.totalTokens);
12795
+ }
12796
+ /**
12668
12797
  * Compute per-category token statistics for a complete set of spans.
12669
12798
  *
12670
12799
  * All six SemanticCategory values are always present in the result (zero-token
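The `computeCategoryStatsFromTurns` method added above classifies each category's trend by comparing the first and second halves of the turn sequence against the 1.2x and 0.8x thresholds. A minimal sketch of that rule, with illustrative names:

```ts
type Trend = "growing" | "shrinking" | "stable";

function computeTrend(firstHalfTokens: number, secondHalfTokens: number, turnCount: number): Trend {
  if (turnCount < 2) return "stable";                        // not enough data
  if (firstHalfTokens === 0 && secondHalfTokens === 0) return "stable";
  if (firstHalfTokens === 0) return "growing";               // category appeared only later
  if (secondHalfTokens > 1.2 * firstHalfTokens) return "growing";
  if (secondHalfTokens < 0.8 * firstHalfTokens) return "shrinking";
  return "stable";
}

// Example: 1000 tokens in the first half vs 1300 in the second half of 8 turns
// is classified as "growing".
computeTrend(1000, 1300, 8);
```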
@@ -12794,6 +12923,54 @@ var ConsumerAnalyzer = class {
12794
12923
  return results.sort((a, b) => b.totalTokens - a.totalTokens);
12795
12924
  }
12796
12925
  /**
12926
+ * Group turns by consumer key (model|toolName), rank by totalTokens descending,
12927
+ * and return ConsumerStats for each non-zero-token group.
12928
+ *
12929
+ * @param turns - All TurnAnalysis records for the story
12930
+ */
12931
+ analyzeFromTurns(turns) {
12932
+ if (turns.length === 0) return [];
12933
+ const grandTotal = turns.reduce((sum, t) => sum + t.inputTokens + t.outputTokens, 0);
12934
+ const groups = new Map();
12935
+ for (const turn of turns) {
12936
+ const key = this._buildConsumerKeyFromTurn(turn);
12937
+ const existing = groups.get(key);
12938
+ if (existing !== void 0) existing.push(turn);
12939
+ else groups.set(key, [turn]);
12940
+ }
12941
+ const results = [];
12942
+ for (const [consumerKey, groupTurns] of groups) {
12943
+ const totalTokens = groupTurns.reduce((sum, t) => sum + t.inputTokens + t.outputTokens, 0);
12944
+ if (totalTokens === 0) continue;
12945
+ const percentage = grandTotal > 0 ? Math.round(totalTokens / grandTotal * 100 * 1e3) / 1e3 : 0;
12946
+ const eventCount = groupTurns.length;
12947
+ const firstTurn = groupTurns[0];
12948
+ const category = this._categorizer.classify(firstTurn.name, firstTurn.toolName);
12949
+ const sorted = groupTurns.slice().sort((a, b) => b.inputTokens + b.outputTokens - (a.inputTokens + a.outputTokens));
12950
+ const topInvocations = sorted.slice(0, 20).map((t) => ({
12951
+ spanId: t.spanId,
12952
+ name: t.name,
12953
+ toolName: t.toolName,
12954
+ totalTokens: t.inputTokens + t.outputTokens,
12955
+ inputTokens: t.inputTokens,
12956
+ outputTokens: t.outputTokens
12957
+ }));
12958
+ results.push({
12959
+ consumerKey,
12960
+ category,
12961
+ totalTokens,
12962
+ percentage,
12963
+ eventCount,
12964
+ topInvocations
12965
+ });
12966
+ }
12967
+ this._logger.debug({
12968
+ consumers: results.length,
12969
+ grandTotal
12970
+ }, "Computed consumer stats from turns");
12971
+ return results.sort((a, b) => b.totalTokens - a.totalTokens);
12972
+ }
12973
+ /**
12797
12974
  * Build a stable, collision-resistant consumer key from a span.
12798
12975
  * Format: `operationName|toolName` (tool part is empty string if absent).
12799
12976
  */
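`analyzeFromTurns` above groups turns by a `model|toolName` consumer key, sums their tokens, and ranks the groups. A minimal sketch of that grouping, assuming a `TurnLike` shape reduced to the fields the grouping needs:

```ts
interface TurnLike {
  model?: string;
  toolName?: string;
  inputTokens: number;
  outputTokens: number;
}

function rankConsumers(turns: TurnLike[]): Array<[string, number]> {
  const totals = new Map<string, number>();
  for (const t of turns) {
    // Same key format as _buildConsumerKeyFromTurn: model part, pipe, tool part.
    const key = `${(t.model ?? "unknown").slice(0, 200)}|${(t.toolName ?? "").slice(0, 100)}`;
    totals.set(key, (totals.get(key) ?? 0) + t.inputTokens + t.outputTokens);
  }
  return [...totals.entries()]
    .filter(([, tokens]) => tokens > 0)           // zero-token groups are dropped
    .sort((a, b) => b[1] - a[1]);                 // rank by total tokens descending
}
```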
@@ -12803,6 +12980,15 @@ var ConsumerAnalyzer = class {
12803
12980
  return `${operationPart}|${toolPart}`;
12804
12981
  }
12805
12982
  /**
12983
+ * Build a stable consumer key from a turn.
12984
+ * Format: `model|toolName` (tool part is empty string if absent).
12985
+ */
12986
+ _buildConsumerKeyFromTurn(turn) {
12987
+ const modelPart = (turn.model ?? "unknown").slice(0, 200);
12988
+ const toolPart = (turn.toolName ?? "").slice(0, 100);
12989
+ return `${modelPart}|${toolPart}`;
12990
+ }
12991
+ /**
12806
12992
  * Extract a tool name from span attributes, checking three known attribute keys
12807
12993
  * in priority order.
12808
12994
  */
@@ -13200,6 +13386,99 @@ var TurnAnalyzer = class {
13200
13386
  }
13201
13387
  };
13202
13388
 
13389
+ //#endregion
13390
+ //#region src/modules/telemetry/log-turn-analyzer.ts
13391
+ var LogTurnAnalyzer = class {
13392
+ _logger;
13393
+ constructor(logger$27) {
13394
+ this._logger = logger$27;
13395
+ }
13396
+ /**
13397
+ * Analyze a list of NormalizedLog records and produce TurnAnalysis[].
13398
+ *
13399
+ * Returns an empty array immediately when logs is empty or on any error.
13400
+ *
13401
+ * @param logs - All log records for a story
13402
+ */
13403
+ analyze(logs) {
13404
+ try {
13405
+ if (!Array.isArray(logs) || logs.length === 0) return [];
13406
+ const validLogs = logs.filter((log$2) => log$2 != null && typeof log$2 === "object" && ((log$2.inputTokens ?? 0) > 0 || (log$2.outputTokens ?? 0) > 0));
13407
+ if (validLogs.length === 0) {
13408
+ this._logger.debug("LogTurnAnalyzer: no LLM logs with tokens to analyze");
13409
+ return [];
13410
+ }
13411
+ const grouped = new Map();
13412
+ for (const log$2 of validLogs) {
13413
+ const key = log$2.traceId != null && log$2.spanId != null ? `${log$2.traceId}:${log$2.spanId}` : log$2.logId;
13414
+ const group = grouped.get(key) ?? [];
13415
+ group.push(log$2);
13416
+ grouped.set(key, group);
13417
+ }
13418
+ const merged = [];
13419
+ for (const group of grouped.values()) {
13420
+ const sorted = [...group].sort((a, b) => a.timestamp - b.timestamp);
13421
+ const representative = sorted[0];
13422
+ let inputTokens = 0;
13423
+ let outputTokens = 0;
13424
+ let cacheReadTokens = 0;
13425
+ let costUsd = 0;
13426
+ for (const log$2 of group) {
13427
+ inputTokens += log$2.inputTokens ?? 0;
13428
+ outputTokens += log$2.outputTokens ?? 0;
13429
+ cacheReadTokens += log$2.cacheReadTokens ?? 0;
13430
+ costUsd += log$2.costUsd ?? 0;
13431
+ }
13432
+ merged.push({
13433
+ representative,
13434
+ inputTokens,
13435
+ outputTokens,
13436
+ cacheReadTokens,
13437
+ costUsd
13438
+ });
13439
+ }
13440
+ merged.sort((a, b) => a.representative.timestamp - b.representative.timestamp);
13441
+ let runningContext = 0;
13442
+ const turns = merged.map(({ representative: log$2, inputTokens, outputTokens, cacheReadTokens, costUsd }, idx) => {
13443
+ const prevContext = runningContext;
13444
+ runningContext += inputTokens;
13445
+ const freshTokens = inputTokens - cacheReadTokens;
13446
+ const cacheHitRate = inputTokens > 0 ? cacheReadTokens / inputTokens : 0;
13447
+ return {
13448
+ spanId: log$2.spanId ?? log$2.logId,
13449
+ turnNumber: idx + 1,
13450
+ name: log$2.eventName ?? "log_turn",
13451
+ timestamp: log$2.timestamp,
13452
+ source: "claude-code",
13453
+ model: log$2.model,
13454
+ inputTokens,
13455
+ outputTokens,
13456
+ cacheReadTokens,
13457
+ freshTokens,
13458
+ cacheHitRate,
13459
+ costUsd,
13460
+ durationMs: 0,
13461
+ contextSize: runningContext,
13462
+ contextDelta: runningContext - prevContext,
13463
+ toolName: log$2.toolName,
13464
+ isContextSpike: false,
13465
+ childSpans: []
13466
+ };
13467
+ });
13468
+ const avg = turns.reduce((sum, t) => sum + t.inputTokens, 0) / turns.length;
13469
+ for (const turn of turns) turn.isContextSpike = avg > 0 && turn.inputTokens > 2 * avg;
13470
+ this._logger.debug({
13471
+ turnCount: turns.length,
13472
+ avg
13473
+ }, "LogTurnAnalyzer.analyze complete");
13474
+ return turns;
13475
+ } catch (err) {
13476
+ this._logger.warn({ err }, "LogTurnAnalyzer.analyze failed — returning empty array");
13477
+ return [];
13478
+ }
13479
+ }
13480
+ };
13481
+
13203
13482
  //#endregion
13204
13483
  //#region src/modules/telemetry/cost-table.ts
13205
13484
  /**
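The `LogTurnAnalyzer` added above merges log records that share a `traceId:spanId` pair into a single turn by summing their token counts, falling back to the record's `logId` when span identity is missing. A minimal sketch of that grouping step, assuming a `NormalizedLogLike` shape covering only the fields used here:

```ts
interface NormalizedLogLike {
  logId: string;
  traceId?: string;
  spanId?: string;
  timestamp: number;
  inputTokens?: number;
  outputTokens?: number;
}

function groupLogsIntoTurns(logs: NormalizedLogLike[]) {
  const grouped = new Map<string, NormalizedLogLike[]>();
  for (const record of logs) {
    const key = record.traceId != null && record.spanId != null
      ? `${record.traceId}:${record.spanId}`   // one turn per span
      : record.logId;                          // orphan records stand alone
    grouped.set(key, [...(grouped.get(key) ?? []), record]);
  }
  return [...grouped.values()]
    .map((group) => {
      const sorted = [...group].sort((a, b) => a.timestamp - b.timestamp);
      return {
        representative: sorted[0],             // earliest record names the turn
        inputTokens: group.reduce((sum, l) => sum + (l.inputTokens ?? 0), 0),
        outputTokens: group.reduce((sum, l) => sum + (l.outputTokens ?? 0), 0),
      };
    })
    .sort((a, b) => a.representative.timestamp - b.representative.timestamp);
}
```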
@@ -13764,6 +14043,7 @@ const logger$6 = createLogger("telemetry:pipeline");
13764
14043
  var TelemetryPipeline = class {
13765
14044
  _normalizer;
13766
14045
  _turnAnalyzer;
14046
+ _logTurnAnalyzer;
13767
14047
  _categorizer;
13768
14048
  _consumerAnalyzer;
13769
14049
  _efficiencyScorer;
@@ -13772,6 +14052,7 @@ var TelemetryPipeline = class {
13772
14052
  constructor(deps) {
13773
14053
  this._normalizer = deps.normalizer;
13774
14054
  this._turnAnalyzer = deps.turnAnalyzer;
14055
+ this._logTurnAnalyzer = deps.logTurnAnalyzer;
13775
14056
  this._categorizer = deps.categorizer;
13776
14057
  this._consumerAnalyzer = deps.consumerAnalyzer;
13777
14058
  this._efficiencyScorer = deps.efficiencyScorer;
@@ -13781,8 +14062,14 @@ var TelemetryPipeline = class {
13781
14062
  /**
13782
14063
  * Process a batch of raw OTLP payloads through the full analysis pipeline.
13783
14064
  *
13784
- * Each payload is normalized independently. Spans are then grouped by storyKey
13785
- * for per-story analysis. Items that fail normalization are skipped with a warning.
14065
+ * Each payload is normalized independently. Spans and logs are grouped by
14066
+ * storyKey for per-story analysis. Items that fail normalization are skipped
14067
+ * with a warning.
14068
+ *
14069
+ * Dual-track analysis (Story 27-15):
14070
+ * - Span-derived turns via TurnAnalyzer
14071
+ * - Log-derived turns via LogTurnAnalyzer
14072
+ * - Merged (deduplicated by spanId) before downstream analysis
13786
14073
  */
13787
14074
  async processBatch(items) {
13788
14075
  if (items.length === 0) return;
@@ -13807,25 +14094,46 @@ var TelemetryPipeline = class {
13807
14094
  spans: allSpans.length,
13808
14095
  logs: allLogs.length
13809
14096
  }, "TelemetryPipeline: normalized batch");
13810
- if (allSpans.length === 0) {
13811
- logger$6.debug("TelemetryPipeline: no spans normalized from batch");
14097
+ if (allSpans.length === 0 && allLogs.length === 0) {
14098
+ logger$6.debug("TelemetryPipeline: no spans or logs normalized from batch");
13812
14099
  return;
13813
14100
  }
13814
- const spansByStory = new Map();
13815
14101
  const unknownStoryKey = "__unknown__";
14102
+ const spansByStory = new Map();
13816
14103
  for (const span of allSpans) {
13817
14104
  const key = span.storyKey ?? unknownStoryKey;
13818
14105
  const existing = spansByStory.get(key);
13819
14106
  if (existing !== void 0) existing.push(span);
13820
14107
  else spansByStory.set(key, [span]);
13821
14108
  }
13822
- for (const [storyKey, spans] of spansByStory) {
14109
+ const logsByStory = new Map();
14110
+ for (const log$2 of allLogs) {
14111
+ const key = log$2.storyKey ?? unknownStoryKey;
14112
+ const existing = logsByStory.get(key);
14113
+ if (existing !== void 0) existing.push(log$2);
14114
+ else logsByStory.set(key, [log$2]);
14115
+ }
14116
+ const allStoryKeys = new Set();
14117
+ for (const key of spansByStory.keys()) allStoryKeys.add(key);
14118
+ for (const key of logsByStory.keys()) allStoryKeys.add(key);
14119
+ for (const storyKey of allStoryKeys) {
13823
14120
  if (storyKey === unknownStoryKey) {
13824
- logger$6.debug({ spanCount: spans.length }, "TelemetryPipeline: spans without storyKey — skipping analysis");
14121
+ const spanCount = spansByStory.get(unknownStoryKey)?.length ?? 0;
14122
+ const logCount = logsByStory.get(unknownStoryKey)?.length ?? 0;
14123
+ logger$6.debug({
14124
+ spanCount,
14125
+ logCount
14126
+ }, "TelemetryPipeline: data without storyKey — skipping analysis");
13825
14127
  continue;
13826
14128
  }
13827
14129
  try {
13828
- await this._processStory(storyKey, spans);
14130
+ const spans = spansByStory.get(storyKey) ?? [];
14131
+ const logs = logsByStory.get(storyKey) ?? [];
14132
+ const spanTurns = spans.length > 0 ? this._turnAnalyzer.analyze(spans) : [];
14133
+ const logTurns = logs.length > 0 ? this._logTurnAnalyzer.analyze(logs) : [];
14134
+ const mergedTurns = this._mergeTurns(spanTurns, logTurns);
14135
+ if (spans.length > 0) await this._processStory(storyKey, spans, mergedTurns);
14136
+ else await this._processStoryFromTurns(storyKey, mergedTurns);
13829
14137
  } catch (err) {
13830
14138
  logger$6.warn({
13831
14139
  err,
@@ -13833,10 +14141,29 @@ var TelemetryPipeline = class {
13833
14141
  }, "TelemetryPipeline: story processing failed — skipping");
13834
14142
  }
13835
14143
  }
13836
- logger$6.debug({ storyCount: spansByStory.size }, "TelemetryPipeline.processBatch complete");
14144
+ logger$6.debug({ storyCount: allStoryKeys.size }, "TelemetryPipeline.processBatch complete");
13837
14145
  }
13838
- async _processStory(storyKey, spans) {
13839
- const turns = this._turnAnalyzer.analyze(spans);
14146
+ /**
14147
+ * Merge span-derived and log-derived turns, deduplicating by spanId.
14148
+ * When a span and a log share the same spanId, the span-derived turn is preferred
14149
+ * (richer data). The merged result is sorted chronologically and renumbered.
14150
+ */
14151
+ _mergeTurns(spanTurns, logTurns) {
14152
+ if (logTurns.length === 0) return spanTurns;
14153
+ if (spanTurns.length === 0) return logTurns;
14154
+ const spanTurnIds = new Set(spanTurns.map((t) => t.spanId));
14155
+ const uniqueLogTurns = logTurns.filter((t) => !spanTurnIds.has(t.spanId));
14156
+ return [...spanTurns, ...uniqueLogTurns].sort((a, b) => a.timestamp - b.timestamp).map((t, i) => ({
14157
+ ...t,
14158
+ turnNumber: i + 1
14159
+ }));
14160
+ }
14161
+ /**
14162
+ * Full span-based analysis path (unchanged behavior when no logs present — AC4).
14163
+ * When mergedTurns is provided, uses those instead of computing from spans alone.
14164
+ */
14165
+ async _processStory(storyKey, spans, mergedTurns) {
14166
+ const turns = mergedTurns;
13840
14167
  const categories = this._categorizer.computeCategoryStats(spans, turns);
13841
14168
  const consumers = this._consumerAnalyzer.analyze(spans);
13842
14169
  const efficiencyScore = this._efficiencyScorer.score(storyKey, turns);
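`_mergeTurns` above deduplicates by spanId, preferring span-derived turns over log-derived ones, then re-sorts and renumbers the merged list. A minimal sketch, assuming a `TurnRecord` shape reduced to the fields the merge relies on:

```ts
interface TurnRecord {
  spanId: string;
  timestamp: number;
  turnNumber: number;
}

function mergeTurns(spanTurns: TurnRecord[], logTurns: TurnRecord[]): TurnRecord[] {
  if (logTurns.length === 0) return spanTurns;
  if (spanTurns.length === 0) return logTurns;
  const spanIds = new Set(spanTurns.map((t) => t.spanId));
  const uniqueLogTurns = logTurns.filter((t) => !spanIds.has(t.spanId)); // drop duplicates
  return [...spanTurns, ...uniqueLogTurns]
    .sort((a, b) => a.timestamp - b.timestamp)
    .map((t, i) => ({ ...t, turnNumber: i + 1 }));   // renumber chronologically
}
```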
@@ -13880,6 +14207,28 @@ var TelemetryPipeline = class {
13880
14207
  recommendations: recommendations.length
13881
14208
  }, "TelemetryPipeline: story analysis complete");
13882
14209
  }
14210
+ /**
14211
+ * Log-only analysis path (AC3, AC6): processes turns from LogTurnAnalyzer
14212
+ * through efficiency scoring and persistence.
14213
+ *
14214
+ * Categorizer and consumer analyzer remain span-only for now (story 27-16).
14215
+ */
14216
+ async _processStoryFromTurns(storyKey, turns) {
14217
+ if (turns.length === 0) return;
14218
+ const efficiencyScore = this._efficiencyScorer.score(storyKey, turns);
14219
+ await Promise.all([this._persistence.storeTurnAnalysis(storyKey, turns).catch((err) => logger$6.warn({
14220
+ err,
14221
+ storyKey
14222
+ }, "Failed to store turn analysis")), this._persistence.storeEfficiencyScore(efficiencyScore).catch((err) => logger$6.warn({
14223
+ err,
14224
+ storyKey
14225
+ }, "Failed to store efficiency score"))]);
14226
+ logger$6.info({
14227
+ storyKey,
14228
+ turns: turns.length,
14229
+ compositeScore: efficiencyScore.compositeScore
14230
+ }, "TelemetryPipeline: story analysis from turns complete");
14231
+ }
13883
14232
  };
13884
14233
 
13885
14234
  //#endregion
@@ -15130,13 +15479,12 @@ function createImplementationOrchestrator(deps) {
15130
15479
  }
15131
15480
  if (telemetryPersistence !== void 0) try {
15132
15481
  const turns = await telemetryPersistence.getTurnAnalysis(storyKey);
15133
- const spans = [];
15134
- if (spans.length === 0) logger$27.debug({ storyKey }, "No spans for telemetry categorization — skipping");
15482
+ if (turns.length === 0) logger$27.debug({ storyKey }, "No turn analysis data for telemetry categorization — skipping");
15135
15483
  else {
15136
15484
  const categorizer = new Categorizer(logger$27);
15137
15485
  const consumerAnalyzer = new ConsumerAnalyzer(categorizer, logger$27);
15138
- const categoryStats = categorizer.computeCategoryStats(spans, turns);
15139
- const consumerStats = consumerAnalyzer.analyze(spans);
15486
+ const categoryStats = categorizer.computeCategoryStatsFromTurns(turns);
15487
+ const consumerStats = consumerAnalyzer.analyzeFromTurns(turns);
15140
15488
  await telemetryPersistence.storeCategoryStats(storyKey, categoryStats);
15141
15489
  await telemetryPersistence.storeConsumerStats(storyKey, consumerStats);
15142
15490
  const growingCount = categoryStats.filter((c) => c.trend === "growing").length;
@@ -15617,6 +15965,7 @@ function createImplementationOrchestrator(deps) {
15617
15965
  const telemetryPipeline = new TelemetryPipeline({
15618
15966
  normalizer: new TelemetryNormalizer(pipelineLogger),
15619
15967
  turnAnalyzer: new TurnAnalyzer(pipelineLogger),
15968
+ logTurnAnalyzer: new LogTurnAnalyzer(pipelineLogger),
15620
15969
  categorizer: new Categorizer(pipelineLogger),
15621
15970
  consumerAnalyzer: new ConsumerAnalyzer(new Categorizer(pipelineLogger), pipelineLogger),
15622
15971
  efficiencyScorer: new EfficiencyScorer(pipelineLogger),
@@ -20188,6 +20537,7 @@ async function runRunAction(options) {
20188
20537
  return 1;
20189
20538
  }
20190
20539
  const db = dbWrapper.db;
20540
+ const telemetryPersistence = telemetryEnabled ? new TelemetryPersistence(db) : void 0;
20191
20541
  const packLoader = createPackLoader();
20192
20542
  let pack;
20193
20543
  try {
@@ -20257,6 +20607,33 @@ async function runRunAction(options) {
20257
20607
  if (!injectedRegistry) throw new Error("AdapterRegistry is required — must be initialized at CLI startup");
20258
20608
  const routingConfigPath = join(projectRoot, "substrate.routing.yml");
20259
20609
  const routingResolver = RoutingResolver.createWithFallback(routingConfigPath, logger);
20610
+ let routingTokenAccumulator;
20611
+ let routingConfig;
20612
+ try {
20613
+ routingConfig = loadModelRoutingConfig(routingConfigPath);
20614
+ } catch {
20615
+ logger.debug("Routing config not loadable — RoutingTokenAccumulator skipped");
20616
+ }
20617
+ let routingTuner;
20618
+ if (routingConfig !== void 0) {
20619
+ const kvStateStore = new FileStateStore({ basePath: join(dbRoot, ".substrate") });
20620
+ routingTokenAccumulator = new RoutingTokenAccumulator(routingConfig, kvStateStore, logger);
20621
+ eventBus.on("routing:model-selected", (payload) => {
20622
+ routingTokenAccumulator.onRoutingSelected({
20623
+ dispatchId: payload.dispatchId,
20624
+ phase: payload.phase,
20625
+ model: payload.model
20626
+ });
20627
+ });
20628
+ eventBus.on("agent:completed", (payload) => {
20629
+ routingTokenAccumulator.onAgentCompleted({
20630
+ dispatchId: payload.dispatchId,
20631
+ inputTokens: payload.inputTokens ?? 0,
20632
+ outputTokens: payload.outputTokens ?? 0
20633
+ });
20634
+ });
20635
+ if (routingConfig.auto_tune === true) routingTuner = new RoutingTuner(kvStateStore, new RoutingRecommender(logger), eventBus, routingConfigPath, logger);
20636
+ }
20260
20637
  const statePath = join(dbRoot, ".substrate", "state");
20261
20638
  const isDoltAvailable = existsSync(join(statePath, ".dolt"));
20262
20639
  let repoMapInjector;
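The hunk above wires the token accumulator into the run command's event bus and, when `auto_tune` is enabled, constructs a `RoutingTuner`; a later hunk flushes the accumulator and invokes the tuner after `orchestrator.run`, both best-effort. A minimal sketch of that wiring, assuming simplified interfaces reduced to the calls shown in these hunks:

```ts
interface EventBus {
  on(event: string, handler: (payload: any) => void): void;
}
interface Accumulator {
  onRoutingSelected(e: { dispatchId: string; phase: string; model: string }): void;
  onAgentCompleted(e: { dispatchId: string; inputTokens: number; outputTokens: number }): void;
  flush(runId: string): Promise<void>;
}
interface Tuner {
  maybeAutoTune(runId: string, config: unknown): Promise<void>;
}

// Subscribe the accumulator to the routing and agent events emitted during a run.
function wireRoutingAccounting(eventBus: EventBus, accumulator: Accumulator): void {
  eventBus.on("routing:model-selected", (p) =>
    accumulator.onRoutingSelected({ dispatchId: p.dispatchId, phase: p.phase, model: p.model }));
  eventBus.on("agent:completed", (p) =>
    accumulator.onAgentCompleted({
      dispatchId: p.dispatchId,
      inputTokens: p.inputTokens ?? 0,
      outputTokens: p.outputTokens ?? 0,
    }));
}

// After the orchestrator finishes: both steps are best-effort and never fail the run.
async function finishRoutingAccounting(
  runId: string,
  accumulator: Accumulator,
  tuner: Tuner | undefined,
  config: unknown,
  warn: (msg: string) => void
): Promise<void> {
  try { await accumulator.flush(runId); } catch { warn("flush failed (best-effort)"); }
  if (tuner !== undefined) {
    try { await tuner.maybeAutoTune(runId, config); } catch { warn("auto-tune failed (best-effort)"); }
  }
}
```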
@@ -20266,7 +20643,8 @@ async function runRunAction(options) {
20266
20643
  const doltClient = new DoltClient({ repoPath: statePath });
20267
20644
  const symbolRepo = new DoltSymbolRepository(doltClient, logger);
20268
20645
  const metaRepo = new DoltRepoMapMetaRepository(doltClient);
20269
- const queryEngine = new RepoMapQueryEngine(symbolRepo, logger);
20646
+ const repoMapTelemetry = telemetryPersistence !== void 0 ? new RepoMapTelemetry(telemetryPersistence, logger) : void 0;
20647
+ const queryEngine = new RepoMapQueryEngine(symbolRepo, logger, repoMapTelemetry);
20270
20648
  repoMapInjector = new RepoMapInjector(queryEngine, logger);
20271
20649
  repoMapModule = new RepoMapModule(metaRepo, logger);
20272
20650
  logger.debug("repo-map injector constructed (Dolt backend detected)");
@@ -20520,6 +20898,17 @@ async function runRunAction(options) {
20520
20898
  });
20521
20899
  }
20522
20900
  });
20901
+ eventBus.on("routing:model-selected", (payload) => {
20902
+ ndjsonEmitter.emit({
20903
+ type: "routing:model-selected",
20904
+ ts: new Date().toISOString(),
20905
+ dispatch_id: payload.dispatchId,
20906
+ task_type: payload.taskType,
20907
+ phase: payload.phase,
20908
+ model: payload.model,
20909
+ source: payload.source
20910
+ });
20911
+ });
20523
20912
  eventBus.on("orchestrator:story-complete", (payload) => {
20524
20913
  ndjsonEmitter.emit({
20525
20914
  type: "story:done",
@@ -20654,7 +21043,19 @@ async function runRunAction(options) {
20654
21043
  });
20655
21044
  }
20656
21045
  const ingestionServer = telemetryEnabled ? new IngestionServer({ port: telemetryPort }) : void 0;
20657
- const telemetryPersistence = telemetryEnabled ? new TelemetryPersistence(db) : void 0;
21046
+ if (telemetryPersistence !== void 0) {
21047
+ const routingTelemetry = new RoutingTelemetry(telemetryPersistence, logger);
21048
+ eventBus.on("routing:model-selected", (payload) => {
21049
+ routingTelemetry.recordModelResolved({
21050
+ dispatchId: payload.dispatchId,
21051
+ taskType: payload.taskType,
21052
+ phase: payload.phase,
21053
+ model: payload.model,
21054
+ source: payload.source,
21055
+ latencyMs: 0
21056
+ });
21057
+ });
21058
+ }
20658
21059
  if (repoMapModule !== void 0) try {
20659
21060
  const stale = await repoMapModule.checkStaleness();
20660
21061
  if (stale !== null) {
@@ -20692,6 +21093,17 @@ async function runRunAction(options) {
20692
21093
  process.stdout.write(`Stories: ${storyKeys.join(", ")}\n`);
20693
21094
  }
20694
21095
  const status = await orchestrator.run(storyKeys);
21096
+ if (routingTokenAccumulator !== void 0) try {
21097
+ await routingTokenAccumulator.flush(pipelineRun.id);
21098
+ logger.debug({ runId: pipelineRun.id }, "Phase token breakdown flushed");
21099
+ } catch (flushErr) {
21100
+ logger.warn({ err: flushErr }, "Failed to flush phase token breakdown (best-effort)");
21101
+ }
21102
+ if (routingTuner !== void 0 && routingConfig !== void 0) try {
21103
+ await routingTuner.maybeAutoTune(pipelineRun.id, routingConfig);
21104
+ } catch (tuneErr) {
21105
+ logger.warn({ err: tuneErr }, "RoutingTuner.maybeAutoTune failed (best-effort)");
21106
+ }
20695
21107
  const succeededKeys = [];
20696
21108
  const failedKeys = [];
20697
21109
  const escalatedKeys = [];
@@ -21135,5 +21547,5 @@ function registerRunCommand(program, _version = "0.0.0", projectRoot = process.c
21135
21547
  }
21136
21548
 
21137
21549
  //#endregion
21138
- export { DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
21139
- //# sourceMappingURL=run-CkYqARL5.js.map
21550
+ export { AppError, DEFAULT_CONFIG, DEFAULT_ROUTING_POLICY, DatabaseWrapper, DoltClient, DoltNotInstalled, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, FileStateStore, GitClient, GrammarLoader, IngestionServer, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SUBSTRATE_OWNED_SETTINGS_KEYS, SymbolParser, TelemetryPersistence, VALID_PHASES, buildPipelineStatusOutput, checkDoltInstalled, createConfigSystem, createContextCompiler, createDispatcher, createDoltClient, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStateStore, createStopAfterGate, findPackageRoot, formatOutput, formatPhaseCompletionSummary, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, initializeDolt, parseDbTimestampAsUtc, registerHealthCommand, registerRunCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveStoryKeys, runAnalysisPhase, runMigrations, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
21551
+ //# sourceMappingURL=run-CvQCHfTV.js.map
package/dist/schema.sql CHANGED
@@ -215,7 +215,8 @@ CREATE TABLE IF NOT EXISTS repo_map_symbols (
215
215
  signature TEXT,
216
216
  line_number INT NOT NULL DEFAULT 0,
217
217
  exported TINYINT(1) NOT NULL DEFAULT 0,
218
- file_hash VARCHAR(64) NOT NULL,
218
+ file_hash VARCHAR(64) NOT NULL,
219
+ dependencies JSON,
219
220
  PRIMARY KEY (id)
220
221
  );
221
222
 
@@ -234,3 +235,4 @@ CREATE TABLE IF NOT EXISTS repo_map_meta (
234
235
  );
235
236
 
236
237
  INSERT IGNORE INTO _schema_version (version, description) VALUES (5, 'Add repo_map_symbols and repo_map_meta tables (Epic 28-2)');
238
+ INSERT IGNORE INTO _schema_version (version, description) VALUES (6, 'Add dependencies JSON column to repo_map_symbols (Epic 28-3)');
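The schema change above pairs with the runtime guard added in `DoltStateStore` and the repo-map command: the column is added only when `SHOW COLUMNS` finds nothing, and the version row is recorded with `INSERT IGNORE` so re-runs are no-ops. A minimal sketch of that idempotent migration, assuming a simplified `SqlClient` interface in place of `DoltClient`:

```ts
interface SqlClient {
  query(sql: string, params?: unknown[]): Promise<unknown[]>;
}

async function ensureDependenciesColumn(client: SqlClient): Promise<void> {
  const cols = await client.query(
    `SHOW COLUMNS FROM repo_map_symbols LIKE 'dependencies'`
  );
  if (cols.length > 0) return; // already migrated
  await client.query(`ALTER TABLE repo_map_symbols ADD COLUMN dependencies JSON`);
  await client.query(
    `INSERT IGNORE INTO _schema_version (version, description)
     VALUES (6, 'Add dependencies JSON column to repo_map_symbols (Epic 28-3)')`
  );
}
```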
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "substrate-ai",
3
- "version": "0.4.6",
3
+ "version": "0.4.8",
4
4
  "description": "Substrate — multi-agent orchestration daemon for AI coding agents",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -75,9 +75,9 @@
75
75
  },
76
76
  "optionalDependencies": {
77
77
  "tree-sitter": "^0.21.1",
78
- "tree-sitter-typescript": "^0.21.2",
79
78
  "tree-sitter-javascript": "^0.21.4",
80
- "tree-sitter-python": "^0.21.0"
79
+ "tree-sitter-python": "^0.21.0",
80
+ "tree-sitter-typescript": "^0.21.2"
81
81
  },
82
82
  "devDependencies": {
83
83
  "@eslint/js": "^9.17.0",
@@ -1,4 +0,0 @@
1
- import "./logger-D2fS2ccL.js";
2
- import { ModelRoutingConfigSchema, ProviderPolicySchema, RoutingConfigError, RoutingRecommender, RoutingResolver, TASK_TYPE_PHASE_MAP, loadModelRoutingConfig } from "./routing-CZfJB3y9.js";
3
-
4
- export { loadModelRoutingConfig };