substrate-ai 0.19.0 → 0.19.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore,
|
|
2
|
+
import { FileStateStore, SUBSTRATE_OWNED_SETTINGS_KEYS, VALID_PHASES, WorkGraphRepository, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, parseDbTimestampAsUtc, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot } from "../health-Cx2ZhRNT.js";
|
|
3
3
|
import { createLogger } from "../logger-KeHncl-f.js";
|
|
4
4
|
import { createEventBus } from "../helpers-CElYrONe.js";
|
|
5
5
|
import { AdapterRegistry, BudgetConfigSchema, CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, ConfigError, CostTrackerConfigSchema, DEFAULT_CONFIG, DoltClient, DoltNotInstalled, EXPERIMENT_RESULT, GlobalSettingsSchema, IngestionServer, MonitorDatabaseImpl, OPERATIONAL_FINDING, PartialGlobalSettingsSchema, PartialProviderConfigSchema, ProvidersSchema, RoutingRecommender, STORY_METRICS, TelemetryConfigSchema, addTokenUsage, aggregateTokenUsageForRun, checkDoltInstalled, compareRunMetrics, createAmendmentRun, createConfigSystem, createDecision, createDoltClient, createPipelineRun, getActiveDecisions, getAllCostEntriesFiltered, getBaselineRunMetrics, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestCompletedRun, getLatestRun, getPipelineRunById, getPlanningCostTotal, getRetryableEscalations, getRunMetrics, getSessionCostSummary, getSessionCostSummaryFiltered, getStoryMetricsForRun, getTokenUsageSummary, incrementRunRestarts, initSchema, initializeDolt, listRequirements, listRunMetrics, loadParentRunDecisions, supersedeDecision, tagRunAsBaseline, updatePipelineRun } from "../dist-Bm0qSZer.js";
|
|
6
6
|
import "../adapter-registry-DXLMTmfD.js";
|
|
7
|
-
import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-
|
|
7
|
+
import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GitClient, GrammarLoader, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runSolutioningPhase, validateStopAfterFromConflict } from "../run-BOhSIujp.js";
|
|
8
8
|
import "../errors-BSpu7pIv.js";
|
|
9
9
|
import "../routing-CcBOCuC9.js";
|
|
10
10
|
import "../decisions-C0pz9Clx.js";
|
|
@@ -1135,6 +1135,17 @@ const TokenCeilingsSchema = z.object({
|
|
|
1135
1135
|
"test-plan": z.number().int().positive("test-plan token ceiling must be a positive integer").optional(),
|
|
1136
1136
|
"test-expansion": z.number().int().positive("test-expansion token ceiling must be a positive integer").optional()
|
|
1137
1137
|
});
|
|
1138
|
+
/**
|
|
1139
|
+
* Per-task-type dispatch timeout overrides (milliseconds).
|
|
1140
|
+
* Keys match task types in DEFAULT_TIMEOUTS. Values override defaults.
|
|
1141
|
+
*/
|
|
1142
|
+
const DispatchTimeoutsSchema = z.object({
|
|
1143
|
+
"create-story": z.number().int().positive().optional(),
|
|
1144
|
+
"dev-story": z.number().int().positive().optional(),
|
|
1145
|
+
"code-review": z.number().int().positive().optional(),
|
|
1146
|
+
"minor-fixes": z.number().int().positive().optional(),
|
|
1147
|
+
"major-rework": z.number().int().positive().optional()
|
|
1148
|
+
});
|
|
1138
1149
|
const SubstrateConfigSchema = z.object({
|
|
1139
1150
|
config_format_version: z.enum(["1"]),
|
|
1140
1151
|
task_graph_version: z.enum(["1"]).optional(),
|
|
@@ -1143,6 +1154,7 @@ const SubstrateConfigSchema = z.object({
|
|
|
1143
1154
|
cost_tracker: CostTrackerConfigSchema.optional(),
|
|
1144
1155
|
budget: BudgetConfigSchema.optional(),
|
|
1145
1156
|
token_ceilings: TokenCeilingsSchema.optional(),
|
|
1157
|
+
dispatch_timeouts: DispatchTimeoutsSchema.optional(),
|
|
1146
1158
|
telemetry: TelemetryConfigSchema.optional()
|
|
1147
1159
|
}).strict();
|
|
1148
1160
|
const PartialSubstrateConfigSchema = z.object({
|
|
@@ -1157,6 +1169,7 @@ const PartialSubstrateConfigSchema = z.object({
|
|
|
1157
1169
|
cost_tracker: CostTrackerConfigSchema.partial().optional(),
|
|
1158
1170
|
budget: BudgetConfigSchema.partial().optional(),
|
|
1159
1171
|
token_ceilings: TokenCeilingsSchema.optional(),
|
|
1172
|
+
dispatch_timeouts: DispatchTimeoutsSchema.optional(),
|
|
1160
1173
|
telemetry: TelemetryConfigSchema.partial().optional()
|
|
1161
1174
|
}).strict();
|
|
1162
1175
|
|
|
@@ -2621,26 +2634,6 @@ function registerConfigCommand(program, _version) {
|
|
|
2621
2634
|
});
|
|
2622
2635
|
}
|
|
2623
2636
|
|
|
2624
|
-
//#endregion
|
|
2625
|
-
//#region src/modules/work-graph/errors.ts
|
|
2626
|
-
/**
|
|
2627
|
-
* Work-graph error types.
|
|
2628
|
-
*
|
|
2629
|
-
* Story 31-7: Cycle Detection in Work Graph
|
|
2630
|
-
*/
|
|
2631
|
-
/**
|
|
2632
|
-
* Thrown by `EpicIngester.ingest()` when the provided dependency list
|
|
2633
|
-
* contains a cycle. The `cycle` field contains the path of story keys
|
|
2634
|
-
* that form the cycle (first and last element are the same).
|
|
2635
|
-
*/
|
|
2636
|
-
var CyclicDependencyError = class extends Error {
|
|
2637
|
-
constructor(cycle) {
|
|
2638
|
-
super(`Cyclic dependency detected: ${cycle.join(" → ")}`);
|
|
2639
|
-
this.cycle = cycle;
|
|
2640
|
-
this.name = "CyclicDependencyError";
|
|
2641
|
-
}
|
|
2642
|
-
};
|
|
2643
|
-
|
|
2644
2637
|
//#endregion
|
|
2645
2638
|
//#region src/cli/commands/resume.ts
|
|
2646
2639
|
const logger$13 = createLogger("resume-cmd");
|
|
@@ -4529,7 +4522,7 @@ async function runSupervisorAction(options, deps = {}) {
|
|
|
4529
4522
|
await initSchema(expAdapter);
|
|
4530
4523
|
const { runRunAction: runPipeline } = await import(
|
|
4531
4524
|
/* @vite-ignore */
|
|
4532
|
-
"../run-
|
|
4525
|
+
"../run-CCHb_JMl.js"
|
|
4533
4526
|
);
|
|
4534
4527
|
const runStoryFn = async (opts) => {
|
|
4535
4528
|
const exitCode = await runPipeline({
|
|
@@ -8607,64 +8600,6 @@ var EpicParser = class {
|
|
|
8607
8600
|
}
|
|
8608
8601
|
};
|
|
8609
8602
|
|
|
8610
|
-
//#endregion
|
|
8611
|
-
//#region src/modules/work-graph/epic-ingester.ts
|
|
8612
|
-
var EpicIngester = class {
|
|
8613
|
-
adapter;
|
|
8614
|
-
constructor(adapter) {
|
|
8615
|
-
this.adapter = adapter;
|
|
8616
|
-
}
|
|
8617
|
-
/**
|
|
8618
|
-
* Upsert stories and sync dependencies into the database.
|
|
8619
|
-
*
|
|
8620
|
-
* Both operations are wrapped in a single transaction: if either fails the
|
|
8621
|
-
* entire batch is rolled back.
|
|
8622
|
-
*
|
|
8623
|
-
* @param stories - Parsed story metadata from `EpicParser.parseStories()`.
|
|
8624
|
-
* @param dependencies - Parsed dependency edges from `EpicParser.parseDependencies()`.
|
|
8625
|
-
* @returns `IngestResult` with counts of affected rows.
|
|
8626
|
-
*/
|
|
8627
|
-
async ingest(stories, dependencies) {
|
|
8628
|
-
const cycle = detectCycles(dependencies);
|
|
8629
|
-
if (cycle !== null) throw new CyclicDependencyError(cycle);
|
|
8630
|
-
return this.adapter.transaction(async (tx) => {
|
|
8631
|
-
let storiesUpserted = 0;
|
|
8632
|
-
for (const story of stories) {
|
|
8633
|
-
const existing = await tx.query("SELECT status FROM wg_stories WHERE story_key = ?", [story.story_key]);
|
|
8634
|
-
if (existing.length > 0) await tx.query("UPDATE wg_stories SET title = ?, updated_at = ? WHERE story_key = ?", [
|
|
8635
|
-
story.title,
|
|
8636
|
-
new Date().toISOString(),
|
|
8637
|
-
story.story_key
|
|
8638
|
-
]);
|
|
8639
|
-
else {
|
|
8640
|
-
const now = new Date().toISOString();
|
|
8641
|
-
await tx.query("INSERT INTO wg_stories (story_key, epic, title, status, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", [
|
|
8642
|
-
story.story_key,
|
|
8643
|
-
String(story.epic_num),
|
|
8644
|
-
story.title,
|
|
8645
|
-
"planned",
|
|
8646
|
-
now,
|
|
8647
|
-
now
|
|
8648
|
-
]);
|
|
8649
|
-
storiesUpserted++;
|
|
8650
|
-
}
|
|
8651
|
-
}
|
|
8652
|
-
const epicNum = stories.length > 0 ? stories[0].epic_num : null;
|
|
8653
|
-
if (epicNum !== null) await tx.query(`DELETE FROM story_dependencies WHERE source = 'explicit' AND story_key LIKE ?`, [`${epicNum}-%`]);
|
|
8654
|
-
for (const dep of dependencies) await tx.query("INSERT INTO story_dependencies (story_key, depends_on, dependency_type, source) VALUES (?, ?, ?, ?)", [
|
|
8655
|
-
dep.story_key,
|
|
8656
|
-
dep.depends_on,
|
|
8657
|
-
dep.dependency_type,
|
|
8658
|
-
dep.source
|
|
8659
|
-
]);
|
|
8660
|
-
return {
|
|
8661
|
-
storiesUpserted,
|
|
8662
|
-
dependenciesReplaced: dependencies.length
|
|
8663
|
-
};
|
|
8664
|
-
});
|
|
8665
|
-
}
|
|
8666
|
-
};
|
|
8667
|
-
|
|
8668
8603
|
//#endregion
|
|
8669
8604
|
//#region src/cli/commands/ingest-epic.ts
|
|
8670
8605
|
function registerIngestEpicCommand(program) {
|
package/dist/index.d.ts
CHANGED
|
@@ -1417,6 +1417,7 @@ declare function withRetry<T>(fn: () => Promise<T>, maxRetries?: number, baseDel
|
|
|
1417
1417
|
//#endregion
|
|
1418
1418
|
//#region src/modules/config/config-schema.d.ts
|
|
1419
1419
|
//# sourceMappingURL=helpers.d.ts.map
|
|
1420
|
+
|
|
1420
1421
|
declare const SubstrateConfigSchema: z.ZodObject<{
|
|
1421
1422
|
config_format_version: z.ZodEnum<{
|
|
1422
1423
|
1: "1";
|
|
@@ -1514,6 +1515,13 @@ declare const SubstrateConfigSchema: z.ZodObject<{
|
|
|
1514
1515
|
'test-plan': z.ZodOptional<z.ZodNumber>;
|
|
1515
1516
|
'test-expansion': z.ZodOptional<z.ZodNumber>;
|
|
1516
1517
|
}, z.core.$strip>>;
|
|
1518
|
+
dispatch_timeouts: z.ZodOptional<z.ZodObject<{
|
|
1519
|
+
'create-story': z.ZodOptional<z.ZodNumber>;
|
|
1520
|
+
'dev-story': z.ZodOptional<z.ZodNumber>;
|
|
1521
|
+
'code-review': z.ZodOptional<z.ZodNumber>;
|
|
1522
|
+
'minor-fixes': z.ZodOptional<z.ZodNumber>;
|
|
1523
|
+
'major-rework': z.ZodOptional<z.ZodNumber>;
|
|
1524
|
+
}, z.core.$strip>>;
|
|
1517
1525
|
telemetry: z.ZodOptional<z.ZodObject<{
|
|
1518
1526
|
enabled: z.ZodDefault<z.ZodBoolean>;
|
|
1519
1527
|
port: z.ZodDefault<z.ZodNumber>;
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { BMAD_BASELINE_TOKENS_FULL, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, buildPipelineStatusOutput, createDatabaseAdapter, formatOutput, formatPipelineSummary, formatTokenTelemetry, inspectProcessTree, parseDbTimestampAsUtc, resolveMainRepoRoot, validateStoryKey } from "./health-Cx2ZhRNT.js";
|
|
1
|
+
import { BMAD_BASELINE_TOKENS_FULL, DoltMergeConflict, FileStateStore, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, buildPipelineStatusOutput, createDatabaseAdapter, detectCycles, formatOutput, formatPipelineSummary, formatTokenTelemetry, inspectProcessTree, parseDbTimestampAsUtc, resolveMainRepoRoot, validateStoryKey } from "./health-Cx2ZhRNT.js";
|
|
2
2
|
import { createLogger } from "./logger-KeHncl-f.js";
|
|
3
3
|
import { TypedEventBusImpl, createEventBus, createTuiApp, isTuiCapable, printNonTtyWarning, sleep } from "./helpers-CElYrONe.js";
|
|
4
4
|
import { ADVISORY_NOTES, Categorizer, ConsumerAnalyzer, DEFAULT_GLOBAL_SETTINGS, DispatcherImpl, DoltClient, ESCALATION_DIAGNOSIS, EfficiencyScorer, IngestionServer, LogTurnAnalyzer, OPERATIONAL_FINDING, Recommender, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, STORY_METRICS, STORY_OUTCOME, SubstrateConfigSchema, TEST_EXPANSION_FINDING, TEST_PLAN, TelemetryNormalizer, TelemetryPipeline, TurnAnalyzer, addTokenUsage, aggregateTokenUsageForRun, aggregateTokenUsageForStory, callLLM, createConfigSystem, createDatabaseAdapter$1, createDecision, createPipelineRun, createRequirement, detectInterfaceChanges, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getPipelineRunById, getRunningPipelineRuns, getStoryMetricsForRun, getTokenUsageSummary, initSchema, loadModelRoutingConfig, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision, writeRunMetrics, writeStoryMetrics } from "./dist-Bm0qSZer.js";
|
|
@@ -6009,6 +6009,26 @@ function countFilesInLayout(content) {
|
|
|
6009
6009
|
return count;
|
|
6010
6010
|
}
|
|
6011
6011
|
|
|
6012
|
+
//#endregion
|
|
6013
|
+
//#region src/modules/work-graph/errors.ts
|
|
6014
|
+
/**
|
|
6015
|
+
* Work-graph error types.
|
|
6016
|
+
*
|
|
6017
|
+
* Story 31-7: Cycle Detection in Work Graph
|
|
6018
|
+
*/
|
|
6019
|
+
/**
|
|
6020
|
+
* Thrown by `EpicIngester.ingest()` when the provided dependency list
|
|
6021
|
+
* contains a cycle. The `cycle` field contains the path of story keys
|
|
6022
|
+
* that form the cycle (first and last element are the same).
|
|
6023
|
+
*/
|
|
6024
|
+
var CyclicDependencyError = class extends Error {
|
|
6025
|
+
constructor(cycle) {
|
|
6026
|
+
super(`Cyclic dependency detected: ${cycle.join(" → ")}`);
|
|
6027
|
+
this.cycle = cycle;
|
|
6028
|
+
this.name = "CyclicDependencyError";
|
|
6029
|
+
}
|
|
6030
|
+
};
|
|
6031
|
+
|
|
6012
6032
|
//#endregion
|
|
6013
6033
|
//#region src/modules/work-graph/spec-migrator.ts
|
|
6014
6034
|
/**
|
|
@@ -6671,6 +6691,43 @@ function defaultFailResult(error, tokenUsage) {
|
|
|
6671
6691
|
};
|
|
6672
6692
|
}
|
|
6673
6693
|
/**
|
|
6694
|
+
* Count test cases (`it(`, `test(`, `it.each(`, `test.each(`) in modified
|
|
6695
|
+
* test files. Returns a structured summary the reviewer can use as ground
|
|
6696
|
+
* truth instead of manually estimating test counts from code inspection.
|
|
6697
|
+
*/
|
|
6698
|
+
async function countTestMetrics(filesModified, cwd) {
|
|
6699
|
+
if (!filesModified || filesModified.length === 0) return "";
|
|
6700
|
+
const testFiles = filesModified.filter((f$1) => f$1.includes(".test.") || f$1.includes(".spec.") || f$1.includes("__tests__"));
|
|
6701
|
+
if (testFiles.length === 0) return "";
|
|
6702
|
+
const results = [];
|
|
6703
|
+
let totalCount = 0;
|
|
6704
|
+
for (const file of testFiles) try {
|
|
6705
|
+
const out = execSync(`grep -cE "^\\s*(it|test|it\\.each|test\\.each)\\s*\\(" "${file}" 2>/dev/null || echo 0`, {
|
|
6706
|
+
cwd,
|
|
6707
|
+
encoding: "utf-8",
|
|
6708
|
+
timeout: 5e3
|
|
6709
|
+
}).trim();
|
|
6710
|
+
const count = parseInt(out, 10) || 0;
|
|
6711
|
+
if (count > 0) {
|
|
6712
|
+
results.push({
|
|
6713
|
+
file: file.split("/").pop(),
|
|
6714
|
+
count
|
|
6715
|
+
});
|
|
6716
|
+
totalCount += count;
|
|
6717
|
+
}
|
|
6718
|
+
} catch {}
|
|
6719
|
+
if (totalCount === 0) return "";
|
|
6720
|
+
const lines = [
|
|
6721
|
+
`VERIFIED TEST COUNT (automated — use as ground truth):`,
|
|
6722
|
+
`Total test cases: ${totalCount} across ${results.length} test file(s)`,
|
|
6723
|
+
...results.map((r) => ` ${r.file}: ${r.count} test(s)`),
|
|
6724
|
+
"",
|
|
6725
|
+
"IMPORTANT: Use this verified count when evaluating AC test coverage thresholds.",
|
|
6726
|
+
"Do NOT manually estimate test counts — use the numbers above."
|
|
6727
|
+
];
|
|
6728
|
+
return lines.join("\n");
|
|
6729
|
+
}
|
|
6730
|
+
/**
|
|
6674
6731
|
* Execute the compiled code-review workflow.
|
|
6675
6732
|
*
|
|
6676
6733
|
* Steps:
|
|
@@ -6808,6 +6865,8 @@ async function runCodeReview(deps, params) {
|
|
|
6808
6865
|
}, "Injecting prior findings into code-review prompt");
|
|
6809
6866
|
}
|
|
6810
6867
|
} catch {}
|
|
6868
|
+
const testMetricsContent = await countTestMetrics(filesModified, cwd);
|
|
6869
|
+
if (testMetricsContent) logger$12.debug({ storyKey }, "Injecting verified test-count metrics into code-review context");
|
|
6811
6870
|
const sections = [
|
|
6812
6871
|
{
|
|
6813
6872
|
name: "story_content",
|
|
@@ -6819,6 +6878,11 @@ async function runCodeReview(deps, params) {
|
|
|
6819
6878
|
content: gitDiffContent,
|
|
6820
6879
|
priority: "important"
|
|
6821
6880
|
},
|
|
6881
|
+
{
|
|
6882
|
+
name: "test_metrics",
|
|
6883
|
+
content: testMetricsContent,
|
|
6884
|
+
priority: "important"
|
|
6885
|
+
},
|
|
6822
6886
|
{
|
|
6823
6887
|
name: "previous_findings",
|
|
6824
6888
|
content: previousFindingsContent,
|
|
@@ -9617,6 +9681,446 @@ function createTelemetryAdvisor(deps) {
|
|
|
9617
9681
|
return new TelemetryAdvisor(deps);
|
|
9618
9682
|
}
|
|
9619
9683
|
|
|
9684
|
+
//#endregion
|
|
9685
|
+
//#region src/modules/work-graph/epic-ingester.ts
|
|
9686
|
+
var EpicIngester = class {
|
|
9687
|
+
adapter;
|
|
9688
|
+
constructor(adapter) {
|
|
9689
|
+
this.adapter = adapter;
|
|
9690
|
+
}
|
|
9691
|
+
/**
|
|
9692
|
+
* Upsert stories and sync dependencies into the database.
|
|
9693
|
+
*
|
|
9694
|
+
* Both operations are wrapped in a single transaction: if either fails the
|
|
9695
|
+
* entire batch is rolled back.
|
|
9696
|
+
*
|
|
9697
|
+
* @param stories - Parsed story metadata from `EpicParser.parseStories()`.
|
|
9698
|
+
* @param dependencies - Parsed dependency edges from `EpicParser.parseDependencies()`.
|
|
9699
|
+
* @returns `IngestResult` with counts of affected rows.
|
|
9700
|
+
*/
|
|
9701
|
+
async ingest(stories, dependencies) {
|
|
9702
|
+
const cycle = detectCycles(dependencies);
|
|
9703
|
+
if (cycle !== null) throw new CyclicDependencyError(cycle);
|
|
9704
|
+
return this.adapter.transaction(async (tx) => {
|
|
9705
|
+
let storiesUpserted = 0;
|
|
9706
|
+
for (const story of stories) {
|
|
9707
|
+
const existing = await tx.query("SELECT status FROM wg_stories WHERE story_key = ?", [story.story_key]);
|
|
9708
|
+
if (existing.length > 0) await tx.query("UPDATE wg_stories SET title = ?, updated_at = ? WHERE story_key = ?", [
|
|
9709
|
+
story.title,
|
|
9710
|
+
new Date().toISOString(),
|
|
9711
|
+
story.story_key
|
|
9712
|
+
]);
|
|
9713
|
+
else {
|
|
9714
|
+
const now = new Date().toISOString();
|
|
9715
|
+
await tx.query("INSERT INTO wg_stories (story_key, epic, title, status, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", [
|
|
9716
|
+
story.story_key,
|
|
9717
|
+
String(story.epic_num),
|
|
9718
|
+
story.title,
|
|
9719
|
+
"planned",
|
|
9720
|
+
now,
|
|
9721
|
+
now
|
|
9722
|
+
]);
|
|
9723
|
+
storiesUpserted++;
|
|
9724
|
+
}
|
|
9725
|
+
}
|
|
9726
|
+
const epicNum = stories.length > 0 ? stories[0].epic_num : null;
|
|
9727
|
+
if (epicNum !== null) await tx.query(`DELETE FROM story_dependencies WHERE source = 'explicit' AND story_key LIKE ?`, [`${epicNum}-%`]);
|
|
9728
|
+
for (const dep of dependencies) await tx.query("INSERT INTO story_dependencies (story_key, depends_on, dependency_type, source) VALUES (?, ?, ?, ?)", [
|
|
9729
|
+
dep.story_key,
|
|
9730
|
+
dep.depends_on,
|
|
9731
|
+
dep.dependency_type,
|
|
9732
|
+
dep.source
|
|
9733
|
+
]);
|
|
9734
|
+
return {
|
|
9735
|
+
storiesUpserted,
|
|
9736
|
+
dependenciesReplaced: dependencies.length
|
|
9737
|
+
};
|
|
9738
|
+
});
|
|
9739
|
+
}
|
|
9740
|
+
};
|
|
9741
|
+
|
|
9742
|
+
//#endregion
|
|
9743
|
+
//#region src/modules/implementation-orchestrator/story-discovery.ts
|
|
9744
|
+
/**
|
|
9745
|
+
* Unified story key resolution with a 5-level fallback chain.
|
|
9746
|
+
*
|
|
9747
|
+
* 1. Explicit keys (from --stories flag) — returned as-is
|
|
9748
|
+
* 1.5. ready_stories SQL view — when work graph is populated (story 31-3)
|
|
9749
|
+
* 2. Decisions table (category='stories', phase='solutioning')
|
|
9750
|
+
* 3. Epic shard decisions (category='epic-shard') — parsed with parseStoryKeysFromEpics
|
|
9751
|
+
* 4. epics.md file on disk (via discoverPendingStoryKeys)
|
|
9752
|
+
*
|
|
9753
|
+
* Optionally filters out completed stories when filterCompleted is set.
|
|
9754
|
+
*
|
|
9755
|
+
* @returns Sorted, deduplicated array of story keys in "N-M" format
|
|
9756
|
+
*/
|
|
9757
|
+
async function resolveStoryKeys(db, projectRoot, opts) {
|
|
9758
|
+
if (opts?.explicit !== void 0 && opts.explicit.length > 0) return topologicalSortByDependencies(opts.explicit, projectRoot);
|
|
9759
|
+
let keys = [];
|
|
9760
|
+
const readyKeys = await db.queryReadyStories();
|
|
9761
|
+
if (readyKeys.length > 0) {
|
|
9762
|
+
let filteredKeys = readyKeys;
|
|
9763
|
+
if (opts?.epicNumber !== void 0) {
|
|
9764
|
+
const prefix = `${opts.epicNumber}-`;
|
|
9765
|
+
filteredKeys = filteredKeys.filter((k) => k.startsWith(prefix));
|
|
9766
|
+
}
|
|
9767
|
+
if (opts?.filterCompleted === true && filteredKeys.length > 0) {
|
|
9768
|
+
const completedKeys = await getCompletedStoryKeys(db);
|
|
9769
|
+
filteredKeys = filteredKeys.filter((k) => !completedKeys.has(k));
|
|
9770
|
+
}
|
|
9771
|
+
const existingArtifacts = collectExistingStoryKeys(projectRoot);
|
|
9772
|
+
const alreadyDone = filteredKeys.filter((k) => existingArtifacts.has(k));
|
|
9773
|
+
if (alreadyDone.length > 0) {
|
|
9774
|
+
filteredKeys = filteredKeys.filter((k) => !existingArtifacts.has(k));
|
|
9775
|
+
for (const key of alreadyDone) db.query(`UPDATE wg_stories SET status = 'complete', completed_at = ? WHERE story_key = ? AND status <> 'complete'`, [new Date().toISOString(), key]).catch(() => {});
|
|
9776
|
+
}
|
|
9777
|
+
return sortStoryKeys([...new Set(filteredKeys)]);
|
|
9778
|
+
}
|
|
9779
|
+
try {
|
|
9780
|
+
const sql = opts?.pipelineRunId !== void 0 ? `SELECT key FROM decisions WHERE phase = 'solutioning' AND category = 'stories' AND pipeline_run_id = ? ORDER BY created_at ASC` : `SELECT key FROM decisions WHERE phase = 'solutioning' AND category = 'stories' ORDER BY created_at ASC`;
|
|
9781
|
+
const params = opts?.pipelineRunId !== void 0 ? [opts.pipelineRunId] : [];
|
|
9782
|
+
const rows = await db.query(sql, params);
|
|
9783
|
+
for (const row of rows) if (/^\d+-\d+/.test(row.key)) {
|
|
9784
|
+
const match$1 = /^(\d+-\d+)/.exec(row.key);
|
|
9785
|
+
if (match$1 !== null) keys.push(match$1[1]);
|
|
9786
|
+
}
|
|
9787
|
+
} catch {}
|
|
9788
|
+
if (keys.length === 0) try {
|
|
9789
|
+
const sql = opts?.pipelineRunId !== void 0 ? `SELECT value FROM decisions WHERE category = 'epic-shard' AND pipeline_run_id = ? ORDER BY created_at ASC` : `SELECT value FROM decisions WHERE category = 'epic-shard' ORDER BY created_at ASC`;
|
|
9790
|
+
const params = opts?.pipelineRunId !== void 0 ? [opts.pipelineRunId] : [];
|
|
9791
|
+
const shardRows = await db.query(sql, params);
|
|
9792
|
+
const allContent = shardRows.map((r) => r.value).join("\n");
|
|
9793
|
+
if (allContent.length > 0) keys = parseStoryKeysFromEpics(allContent);
|
|
9794
|
+
} catch {}
|
|
9795
|
+
if (keys.length === 0) keys = discoverPendingStoryKeys(projectRoot, opts?.epicNumber);
|
|
9796
|
+
if (opts?.epicNumber !== void 0 && keys.length > 0) {
|
|
9797
|
+
const prefix = `${opts.epicNumber}-`;
|
|
9798
|
+
keys = keys.filter((k) => k.startsWith(prefix));
|
|
9799
|
+
}
|
|
9800
|
+
if (opts?.filterCompleted === true && keys.length > 0) {
|
|
9801
|
+
const completedKeys = await getCompletedStoryKeys(db);
|
|
9802
|
+
keys = keys.filter((k) => !completedKeys.has(k));
|
|
9803
|
+
}
|
|
9804
|
+
if (keys.length > 0) {
|
|
9805
|
+
const existingArtifacts = collectExistingStoryKeys(projectRoot);
|
|
9806
|
+
keys = keys.filter((k) => !existingArtifacts.has(k));
|
|
9807
|
+
}
|
|
9808
|
+
return sortStoryKeys([...new Set(keys)]);
|
|
9809
|
+
}
|
|
9810
|
+
/**
|
|
9811
|
+
* Extract all story keys (N-M format) from epics.md content.
|
|
9812
|
+
*
|
|
9813
|
+
* Supports three extraction patterns found in real epics.md files:
|
|
9814
|
+
* 1. Explicit key lines: **Story key:** `7-2-human-turn-loop` → extracts "7-2"
|
|
9815
|
+
* 2. Story headings: ### Story 7.2: Human Turn Loop → extracts "7-2"
|
|
9816
|
+
* 3. File path refs: _bmad-output/implementation-artifacts/7-2-human-turn-loop.md → extracts "7-2"
|
|
9817
|
+
*
|
|
9818
|
+
* Keys are deduplicated and sorted numerically (epic number primary, story number secondary).
|
|
9819
|
+
*
|
|
9820
|
+
* @param content - Raw string content of epics.md
|
|
9821
|
+
* @returns Sorted, deduplicated array of story key strings in "N-M" format
|
|
9822
|
+
*/
|
|
9823
|
+
function parseStoryKeysFromEpics(content) {
|
|
9824
|
+
if (content.length === 0) return [];
|
|
9825
|
+
const keys = new Set();
|
|
9826
|
+
const explicitKeyPattern = /\*\*Story key:\*\*\s*`?([A-Za-z0-9]+-[A-Za-z0-9]+)(?:-[^`\s]*)?`?/g;
|
|
9827
|
+
let match$1;
|
|
9828
|
+
while ((match$1 = explicitKeyPattern.exec(content)) !== null) if (match$1[1] !== void 0) keys.add(match$1[1]);
|
|
9829
|
+
const headingPattern = /^###\s+Story\s+([A-Za-z0-9]+)[.\-]([A-Za-z0-9]+)/gm;
|
|
9830
|
+
while ((match$1 = headingPattern.exec(content)) !== null) if (match$1[1] !== void 0 && match$1[2] !== void 0) keys.add(`${match$1[1]}-${match$1[2]}`);
|
|
9831
|
+
const inlineStoryPattern = /Story\s+([A-Za-z0-9]+)-([A-Za-z0-9]+)[:\s]/g;
|
|
9832
|
+
while ((match$1 = inlineStoryPattern.exec(content)) !== null) if (match$1[1] !== void 0 && match$1[2] !== void 0) keys.add(`${match$1[1]}-${match$1[2]}`);
|
|
9833
|
+
const filePathPattern = /_bmad-output\/implementation-artifacts\/([A-Za-z0-9]+-[A-Za-z0-9]+)-/g;
|
|
9834
|
+
while ((match$1 = filePathPattern.exec(content)) !== null) if (match$1[1] !== void 0) keys.add(match$1[1]);
|
|
9835
|
+
return sortStoryKeys(Array.from(keys));
|
|
9836
|
+
}
|
|
9837
|
+
/**
|
|
9838
|
+
* Discover pending story keys by diffing epics.md against existing story files.
|
|
9839
|
+
*
|
|
9840
|
+
* Algorithm:
|
|
9841
|
+
* 1. Read _bmad-output/planning-artifacts/epics.md (falls back to _bmad-output/epics.md)
|
|
9842
|
+
* 2. Extract all story keys from epics.md
|
|
9843
|
+
* 3. Glob _bmad-output/implementation-artifacts/ for N-M-*.md files
|
|
9844
|
+
* 4. Return keys from step 2 that are NOT in step 3 (pending work)
|
|
9845
|
+
*
|
|
9846
|
+
* Returns an empty array (without error) if epics.md does not exist.
|
|
9847
|
+
*
|
|
9848
|
+
* @param projectRoot - Absolute path to the project root directory
|
|
9849
|
+
* @returns Sorted array of pending story keys in "N-M" format
|
|
9850
|
+
*/
|
|
9851
|
+
function discoverPendingStoryKeys(projectRoot, epicNumber) {
|
|
9852
|
+
let allKeys = [];
|
|
9853
|
+
if (epicNumber !== void 0) {
|
|
9854
|
+
const epicFiles = findEpicFiles(projectRoot);
|
|
9855
|
+
const targetPattern = new RegExp(`^epic-${epicNumber}[^0-9]`);
|
|
9856
|
+
const matched = epicFiles.filter((f$1) => targetPattern.test(f$1.split("/").pop()));
|
|
9857
|
+
for (const epicFile of matched) try {
|
|
9858
|
+
const content = readFileSync(epicFile, "utf-8");
|
|
9859
|
+
const keys = parseStoryKeysFromEpics(content);
|
|
9860
|
+
allKeys.push(...keys);
|
|
9861
|
+
} catch {}
|
|
9862
|
+
allKeys = sortStoryKeys([...new Set(allKeys)]);
|
|
9863
|
+
} else {
|
|
9864
|
+
const epicsPath = findEpicsFile(projectRoot);
|
|
9865
|
+
if (epicsPath !== void 0) try {
|
|
9866
|
+
const content = readFileSync(epicsPath, "utf-8");
|
|
9867
|
+
allKeys = parseStoryKeysFromEpics(content);
|
|
9868
|
+
} catch {}
|
|
9869
|
+
if (allKeys.length === 0) {
|
|
9870
|
+
const epicFiles = findEpicFiles(projectRoot);
|
|
9871
|
+
for (const epicFile of epicFiles) try {
|
|
9872
|
+
const content = readFileSync(epicFile, "utf-8");
|
|
9873
|
+
const keys = parseStoryKeysFromEpics(content);
|
|
9874
|
+
allKeys.push(...keys);
|
|
9875
|
+
} catch {}
|
|
9876
|
+
allKeys = sortStoryKeys([...new Set(allKeys)]);
|
|
9877
|
+
}
|
|
9878
|
+
}
|
|
9879
|
+
const sprintKeys = parseStoryKeysFromSprintStatus(projectRoot);
|
|
9880
|
+
if (sprintKeys.length > 0) {
|
|
9881
|
+
const merged = new Set(allKeys);
|
|
9882
|
+
for (const k of sprintKeys) merged.add(k);
|
|
9883
|
+
allKeys = sortStoryKeys([...merged]);
|
|
9884
|
+
}
|
|
9885
|
+
if (allKeys.length === 0) return [];
|
|
9886
|
+
const existingKeys = collectExistingStoryKeys(projectRoot);
|
|
9887
|
+
return allKeys.filter((k) => !existingKeys.has(k));
|
|
9888
|
+
}
|
|
9889
|
+
/**
|
|
9890
|
+
* Find epic files from known candidate paths relative to projectRoot.
|
|
9891
|
+
*
|
|
9892
|
+
* Checks for:
|
|
9893
|
+
* 1. epics.md (consolidated epic file)
|
|
9894
|
+
* 2. Individual epic-*.md files in planning-artifacts/
|
|
9895
|
+
*
|
|
9896
|
+
* Returns a single path for epics.md, or undefined if not found.
|
|
9897
|
+
* For individual epic files, use findEpicFiles() instead.
|
|
9898
|
+
*/
|
|
9899
|
+
function findEpicsFile(projectRoot) {
|
|
9900
|
+
const candidates = ["_bmad-output/planning-artifacts/epics.md", "_bmad-output/epics.md"];
|
|
9901
|
+
for (const candidate of candidates) {
|
|
9902
|
+
const fullPath = join$1(projectRoot, candidate);
|
|
9903
|
+
if (existsSync(fullPath)) return fullPath;
|
|
9904
|
+
}
|
|
9905
|
+
const planningDir = join$1(projectRoot, "_bmad-output", "planning-artifacts");
|
|
9906
|
+
if (existsSync(planningDir)) try {
|
|
9907
|
+
const entries = readdirSync(planningDir, { encoding: "utf-8" });
|
|
9908
|
+
const match$1 = entries.filter((e) => /^epics[-.].*\.md$/i.test(e) && !/^epic-\d+/.test(e)).sort();
|
|
9909
|
+
if (match$1.length > 0) return join$1(planningDir, match$1[0]);
|
|
9910
|
+
} catch {}
|
|
9911
|
+
return void 0;
|
|
9912
|
+
}
|
|
9913
|
+
/**
|
|
9914
|
+
* Find individual epic-*.md files in the planning artifacts directory.
|
|
9915
|
+
* Returns paths sorted alphabetically.
|
|
9916
|
+
*/
|
|
9917
|
+
function findEpicFiles(projectRoot) {
|
|
9918
|
+
const planningDir = join$1(projectRoot, "_bmad-output", "planning-artifacts");
|
|
9919
|
+
if (!existsSync(planningDir)) return [];
|
|
9920
|
+
try {
|
|
9921
|
+
const entries = readdirSync(planningDir, { encoding: "utf-8" });
|
|
9922
|
+
return entries.filter((e) => /^epic-\d+.*\.md$/.test(e)).sort().map((e) => join$1(planningDir, e));
|
|
9923
|
+
} catch {
|
|
9924
|
+
return [];
|
|
9925
|
+
}
|
|
9926
|
+
}
|
|
9927
|
+
/**
|
|
9928
|
+
* Collect story keys that already have implementation artifact files.
|
|
9929
|
+
* Scans _bmad-output/implementation-artifacts/ for files matching N-M-*.md.
|
|
9930
|
+
*/
|
|
9931
|
+
function collectExistingStoryKeys(projectRoot) {
|
|
9932
|
+
const existing = new Set();
|
|
9933
|
+
const artifactsDir = join$1(projectRoot, "_bmad-output", "implementation-artifacts");
|
|
9934
|
+
if (!existsSync(artifactsDir)) return existing;
|
|
9935
|
+
let entries;
|
|
9936
|
+
try {
|
|
9937
|
+
entries = readdirSync(artifactsDir, { encoding: "utf-8" });
|
|
9938
|
+
} catch {
|
|
9939
|
+
return existing;
|
|
9940
|
+
}
|
|
9941
|
+
const filePattern = /^([A-Za-z0-9]+-[A-Za-z0-9]+)-/;
|
|
9942
|
+
for (const entry of entries) {
|
|
9943
|
+
if (!entry.endsWith(".md")) continue;
|
|
9944
|
+
const m = filePattern.exec(entry);
|
|
9945
|
+
if (m !== null && m[1] !== void 0) existing.add(m[1]);
|
|
9946
|
+
}
|
|
9947
|
+
return existing;
|
|
9948
|
+
}
|
|
9949
|
+
/**
|
|
9950
|
+
* Parse story keys from sprint-status.yaml.
|
|
9951
|
+
* Reads the development_status map and extracts keys that match the
|
|
9952
|
+
* alphanumeric story key pattern (e.g., 1-1a, NEW-26, E5-accessibility).
|
|
9953
|
+
* Filters out epic status entries (epic-N) and retrospective entries.
|
|
9954
|
+
*/
|
|
9955
|
+
function parseStoryKeysFromSprintStatus(projectRoot) {
|
|
9956
|
+
const candidates = [join$1(projectRoot, "_bmad-output", "implementation-artifacts", "sprint-status.yaml"), join$1(projectRoot, "_bmad-output", "sprint-status.yaml")];
|
|
9957
|
+
const statusPath = candidates.find((p) => existsSync(p));
|
|
9958
|
+
if (!statusPath) return [];
|
|
9959
|
+
try {
|
|
9960
|
+
const content = readFileSync(statusPath, "utf-8");
|
|
9961
|
+
const keys = [];
|
|
9962
|
+
const linePattern = /^\s{2}([A-Za-z0-9]+-[A-Za-z0-9]+(?:-[A-Za-z0-9-]*)?)\s*:/gm;
|
|
9963
|
+
let match$1;
|
|
9964
|
+
while ((match$1 = linePattern.exec(content)) !== null) {
|
|
9965
|
+
const fullKey = match$1[1];
|
|
9966
|
+
if (/^epic-\d+$/.test(fullKey)) continue;
|
|
9967
|
+
if (fullKey.includes("retrospective")) continue;
|
|
9968
|
+
const segments = fullKey.split("-");
|
|
9969
|
+
if (segments.length >= 2) keys.push(`${segments[0]}-${segments[1]}`);
|
|
9970
|
+
}
|
|
9971
|
+
return [...new Set(keys)];
|
|
9972
|
+
} catch {
|
|
9973
|
+
return [];
|
|
9974
|
+
}
|
|
9975
|
+
}
|
|
9976
|
+
/**
|
|
9977
|
+
* Collect story keys already completed in previous pipeline runs.
|
|
9978
|
+
* Scans pipeline_runs with status='completed' and extracts story keys
|
|
9979
|
+
* with phase='COMPLETE' from their token_usage_json state.
|
|
9980
|
+
*/
|
|
9981
|
+
async function getCompletedStoryKeys(db) {
|
|
9982
|
+
const completed = new Set();
|
|
9983
|
+
try {
|
|
9984
|
+
const rows = await db.query(`SELECT token_usage_json FROM pipeline_runs WHERE status = 'completed' AND token_usage_json IS NOT NULL`);
|
|
9985
|
+
for (const row of rows) try {
|
|
9986
|
+
const state = JSON.parse(row.token_usage_json);
|
|
9987
|
+
if (state.stories !== void 0) {
|
|
9988
|
+
for (const [key, s$1] of Object.entries(state.stories)) if (s$1.phase === "COMPLETE") completed.add(key);
|
|
9989
|
+
}
|
|
9990
|
+
} catch {}
|
|
9991
|
+
} catch {}
|
|
9992
|
+
return completed;
|
|
9993
|
+
}
|
|
9994
|
+
/**
|
|
9995
|
+
* Sort story keys: numeric keys first (by epic then story number),
|
|
9996
|
+
* then alphabetic-prefix keys (NEW-*, E-*) sorted lexicographically.
|
|
9997
|
+
* E.g. ["10-1", "1-2a", "1-2", "NEW-26", "E5-acc"] → ["1-2", "1-2a", "10-1", "E5-acc", "NEW-26"]
|
|
9998
|
+
*/
|
|
9999
|
+
function sortStoryKeys(keys) {
|
|
10000
|
+
return keys.slice().sort((a, b) => {
|
|
10001
|
+
const aParts = a.split("-");
|
|
10002
|
+
const bParts = b.split("-");
|
|
10003
|
+
const aNum = Number(aParts[0]);
|
|
10004
|
+
const bNum = Number(bParts[0]);
|
|
10005
|
+
if (!isNaN(aNum) && !isNaN(bNum)) {
|
|
10006
|
+
if (aNum !== bNum) return aNum - bNum;
|
|
10007
|
+
const aStory = Number(aParts[1]);
|
|
10008
|
+
const bStory = Number(bParts[1]);
|
|
10009
|
+
if (!isNaN(aStory) && !isNaN(bStory) && aStory !== bStory) return aStory - bStory;
|
|
10010
|
+
return (aParts[1] ?? "").localeCompare(bParts[1] ?? "");
|
|
10011
|
+
}
|
|
10012
|
+
if (!isNaN(aNum)) return -1;
|
|
10013
|
+
if (!isNaN(bNum)) return 1;
|
|
10014
|
+
return a.localeCompare(b);
|
|
10015
|
+
});
|
|
10016
|
+
}
|
|
10017
|
+
/**
|
|
10018
|
+
* Parse inter-story dependencies from the consolidated epics document.
|
|
10019
|
+
*
|
|
10020
|
+
* Scans for patterns like:
|
|
10021
|
+
* ### Story 50-2: Title
|
|
10022
|
+
* **Dependencies:** 50-1
|
|
10023
|
+
*
|
|
10024
|
+
* Returns a Map where key=storyKey, value=Set of dependency keys.
|
|
10025
|
+
* Only returns dependencies that are within the provided storyKeys set
|
|
10026
|
+
* (external dependencies to other epics are ignored for ordering purposes).
|
|
10027
|
+
*/
|
|
10028
|
+
function parseEpicsDependencies(projectRoot, storyKeys) {
|
|
10029
|
+
const deps = new Map();
|
|
10030
|
+
const epicsPath = findEpicsFile(projectRoot);
|
|
10031
|
+
if (epicsPath === void 0) return deps;
|
|
10032
|
+
let content;
|
|
10033
|
+
try {
|
|
10034
|
+
content = readFileSync(epicsPath, "utf-8");
|
|
10035
|
+
} catch {
|
|
10036
|
+
return deps;
|
|
10037
|
+
}
|
|
10038
|
+
const storyPattern = /^###\s+Story\s+(\d+)-(\d+)[:\s]/gm;
|
|
10039
|
+
const depPattern = /^\*\*Dependencies:\*\*\s*(.+)$/gm;
|
|
10040
|
+
const storyPositions = [];
|
|
10041
|
+
let match$1;
|
|
10042
|
+
while ((match$1 = storyPattern.exec(content)) !== null) storyPositions.push({
|
|
10043
|
+
key: `${match$1[1]}-${match$1[2]}`,
|
|
10044
|
+
pos: match$1.index
|
|
10045
|
+
});
|
|
10046
|
+
for (let i = 0; i < storyPositions.length; i++) {
|
|
10047
|
+
const story = storyPositions[i];
|
|
10048
|
+
const nextStoryPos = i + 1 < storyPositions.length ? storyPositions[i + 1].pos : content.length;
|
|
10049
|
+
const section = content.slice(story.pos, nextStoryPos);
|
|
10050
|
+
depPattern.lastIndex = 0;
|
|
10051
|
+
const depMatch = depPattern.exec(section);
|
|
10052
|
+
if (depMatch === null || /^none$/i.test(depMatch[1].trim())) continue;
|
|
10053
|
+
const depText = depMatch[1];
|
|
10054
|
+
const storyDeps = new Set();
|
|
10055
|
+
const rangeMatch = /(\d+)-(\d+)\s+through\s+\1-(\d+)/i.exec(depText);
|
|
10056
|
+
if (rangeMatch !== null) {
|
|
10057
|
+
const epic = rangeMatch[1];
|
|
10058
|
+
const start = Number(rangeMatch[2]);
|
|
10059
|
+
const end = Number(rangeMatch[3]);
|
|
10060
|
+
for (let n$1 = start; n$1 <= end; n$1++) {
|
|
10061
|
+
const depKey = `${epic}-${n$1}`;
|
|
10062
|
+
if (storyKeys.has(depKey)) storyDeps.add(depKey);
|
|
10063
|
+
}
|
|
10064
|
+
} else {
|
|
10065
|
+
const keyPattern = /(\d+-\d+[a-z]?)/g;
|
|
10066
|
+
let km;
|
|
10067
|
+
while ((km = keyPattern.exec(depText)) !== null) {
|
|
10068
|
+
const depKey = km[1];
|
|
10069
|
+
if (storyKeys.has(depKey)) storyDeps.add(depKey);
|
|
10070
|
+
}
|
|
10071
|
+
}
|
|
10072
|
+
if (storyDeps.size > 0) deps.set(story.key, storyDeps);
|
|
10073
|
+
}
|
|
10074
|
+
return deps;
|
|
10075
|
+
}
|
|
10076
|
+
/**
|
|
10077
|
+
* Topologically sort explicit story keys by inter-story dependencies.
|
|
10078
|
+
*
|
|
10079
|
+
* Parses the consolidated epics document for dependency metadata, builds
|
|
10080
|
+
* a DAG, and returns keys in dependency-first order using Kahn's algorithm.
|
|
10081
|
+
* Stories with no dependencies come first; stories that depend on others
|
|
10082
|
+
* are placed after their prerequisites.
|
|
10083
|
+
*
|
|
10084
|
+
* Falls back to numeric sort if no epics document exists or no
|
|
10085
|
+
* dependencies are found among the provided keys.
|
|
10086
|
+
*/
|
|
10087
|
+
function topologicalSortByDependencies(keys, projectRoot) {
|
|
10088
|
+
if (keys.length <= 1) return keys;
|
|
10089
|
+
const keySet = new Set(keys);
|
|
10090
|
+
const deps = parseEpicsDependencies(projectRoot, keySet);
|
|
10091
|
+
if (deps.size === 0) return sortStoryKeys(keys);
|
|
10092
|
+
const inDegree = new Map();
|
|
10093
|
+
const successors = new Map();
|
|
10094
|
+
for (const key of keys) {
|
|
10095
|
+
inDegree.set(key, 0);
|
|
10096
|
+
successors.set(key, new Set());
|
|
10097
|
+
}
|
|
10098
|
+
for (const [dependent, depSet] of deps) {
|
|
10099
|
+
if (!keySet.has(dependent)) continue;
|
|
10100
|
+
for (const dep of depSet) {
|
|
10101
|
+
if (!keySet.has(dep)) continue;
|
|
10102
|
+
successors.get(dep).add(dependent);
|
|
10103
|
+
inDegree.set(dependent, (inDegree.get(dependent) ?? 0) + 1);
|
|
10104
|
+
}
|
|
10105
|
+
}
|
|
10106
|
+
const result = [];
|
|
10107
|
+
const processed = new Set();
|
|
10108
|
+
while (processed.size < keys.length) {
|
|
10109
|
+
const wave = [];
|
|
10110
|
+
for (const key of keys) if (!processed.has(key) && (inDegree.get(key) ?? 0) === 0) wave.push(key);
|
|
10111
|
+
if (wave.length === 0) {
|
|
10112
|
+
for (const key of sortStoryKeys(keys)) if (!processed.has(key)) result.push(key);
|
|
10113
|
+
break;
|
|
10114
|
+
}
|
|
10115
|
+
for (const key of sortStoryKeys(wave)) {
|
|
10116
|
+
result.push(key);
|
|
10117
|
+
processed.add(key);
|
|
10118
|
+
for (const succ of successors.get(key) ?? []) inDegree.set(succ, (inDegree.get(succ) ?? 0) - 1);
|
|
10119
|
+
}
|
|
10120
|
+
}
|
|
10121
|
+
return result;
|
|
10122
|
+
}
|
|
10123
|
+
|
|
9620
10124
|
//#endregion
|
|
9621
10125
|
//#region src/modules/implementation-orchestrator/orchestrator-impl.ts
|
|
9622
10126
|
function estimateDispatchCost(input, output) {
|
|
@@ -9715,6 +10219,134 @@ function extractExpectedStoryTitle(shardContent, storyKey) {
|
|
|
9715
10219
|
}
|
|
9716
10220
|
return null;
|
|
9717
10221
|
}
|
|
10222
|
+
/**
|
|
10223
|
+
* Check whether a story's expected NEW files already exist in the working tree,
|
|
10224
|
+
* indicating the story was implicitly implemented by adjacent stories.
|
|
10225
|
+
*
|
|
10226
|
+
* Parses the consolidated epics document for the story's "Files likely touched"
|
|
10227
|
+
* section and checks for files marked as "(new)". If all expected new files
|
|
10228
|
+
* already exist, the story is considered implicitly covered.
|
|
10229
|
+
*
|
|
10230
|
+
* Returns `true` if the story appears already covered, `false` otherwise.
|
|
10231
|
+
*/
|
|
10232
|
+
function isImplicitlyCovered(storyKey, projectRoot) {
|
|
10233
|
+
const planningDir = join$1(projectRoot, "_bmad-output", "planning-artifacts");
|
|
10234
|
+
if (!existsSync(planningDir)) return false;
|
|
10235
|
+
let epicsPath;
|
|
10236
|
+
try {
|
|
10237
|
+
const entries = readdirSync(planningDir, { encoding: "utf-8" });
|
|
10238
|
+
const match$1 = entries.find((e) => /^epics[-.].*\.md$/i.test(e) && !/^epic-\d+/.test(e));
|
|
10239
|
+
if (match$1) epicsPath = join$1(planningDir, match$1);
|
|
10240
|
+
} catch {
|
|
10241
|
+
return false;
|
|
10242
|
+
}
|
|
10243
|
+
if (!epicsPath) return false;
|
|
10244
|
+
let content;
|
|
10245
|
+
try {
|
|
10246
|
+
content = readFileSync(epicsPath, "utf-8");
|
|
10247
|
+
} catch {
|
|
10248
|
+
return false;
|
|
10249
|
+
}
|
|
10250
|
+
const escapedKey = storyKey.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
10251
|
+
const storyHeading = new RegExp(`^###\\s+Story\\s+${escapedKey}[:\\s]`, "m");
|
|
10252
|
+
const headingMatch = storyHeading.exec(content);
|
|
10253
|
+
if (!headingMatch) return false;
|
|
10254
|
+
const sectionStart = headingMatch.index;
|
|
10255
|
+
const nextHeading = content.indexOf("\n### Story ", sectionStart + 1);
|
|
10256
|
+
const section = nextHeading > 0 ? content.slice(sectionStart, nextHeading) : content.slice(sectionStart);
|
|
10257
|
+
const filesIdx = section.indexOf("Files likely touched:");
|
|
10258
|
+
if (filesIdx < 0) return false;
|
|
10259
|
+
const filesBlock = section.slice(filesIdx);
|
|
10260
|
+
const newFilePattern = /^-\s*`([^`]+)`\s*\(new\)/gm;
|
|
10261
|
+
const expectedNewFiles = [];
|
|
10262
|
+
let fm;
|
|
10263
|
+
while ((fm = newFilePattern.exec(filesBlock)) !== null) if (fm[1]) expectedNewFiles.push(fm[1]);
|
|
10264
|
+
if (expectedNewFiles.length === 0) return false;
|
|
10265
|
+
const existCount = expectedNewFiles.filter((f$1) => existsSync(join$1(projectRoot, f$1))).length;
|
|
10266
|
+
return existCount === expectedNewFiles.length;
|
|
10267
|
+
}
|
|
10268
|
+
/**
|
|
10269
|
+
* Auto-ingest stories and inter-story dependencies from the consolidated
|
|
10270
|
+
* epics document into the work graph (`wg_stories` + `story_dependencies`).
|
|
10271
|
+
*
|
|
10272
|
+
* This bridges the gap between Level 4 discovery (file-based, no dependency
|
|
10273
|
+
* gating) and Level 1.5 discovery (`ready_stories` view, dependency-aware).
|
|
10274
|
+
*
|
|
10275
|
+
* Idempotent: existing stories preserve their status; dependencies are
|
|
10276
|
+
* replaced per-epic (EpicIngester's delete-and-reinsert pattern).
|
|
10277
|
+
*/
|
|
10278
|
+
async function autoIngestEpicsDependencies(db, projectRoot) {
|
|
10279
|
+
const epicsPath = findEpicsFile(projectRoot);
|
|
10280
|
+
if (!epicsPath) return {
|
|
10281
|
+
storiesIngested: 0,
|
|
10282
|
+
dependenciesIngested: 0
|
|
10283
|
+
};
|
|
10284
|
+
let content;
|
|
10285
|
+
try {
|
|
10286
|
+
content = readFileSync(epicsPath, "utf-8");
|
|
10287
|
+
} catch {
|
|
10288
|
+
return {
|
|
10289
|
+
storiesIngested: 0,
|
|
10290
|
+
dependenciesIngested: 0
|
|
10291
|
+
};
|
|
10292
|
+
}
|
|
10293
|
+
const storyPattern = /^###\s+Story\s+(\d+)-(\d+):\s+(.+)$/gm;
|
|
10294
|
+
const stories = [];
|
|
10295
|
+
let match$1;
|
|
10296
|
+
while ((match$1 = storyPattern.exec(content)) !== null) {
|
|
10297
|
+
const epicNum = parseInt(match$1[1], 10);
|
|
10298
|
+
const storyNum = parseInt(match$1[2], 10);
|
|
10299
|
+
stories.push({
|
|
10300
|
+
story_key: `${epicNum}-${storyNum}`,
|
|
10301
|
+
epic_num: epicNum,
|
|
10302
|
+
story_num: storyNum,
|
|
10303
|
+
title: match$1[3].trim(),
|
|
10304
|
+
priority: "P0",
|
|
10305
|
+
size: "Medium",
|
|
10306
|
+
sprint: 0
|
|
10307
|
+
});
|
|
10308
|
+
}
|
|
10309
|
+
if (stories.length === 0) return {
|
|
10310
|
+
storiesIngested: 0,
|
|
10311
|
+
dependenciesIngested: 0
|
|
10312
|
+
};
|
|
10313
|
+
const allKeys = new Set(stories.map((s$1) => s$1.story_key));
|
|
10314
|
+
const depMap = parseEpicsDependencies(projectRoot, allKeys);
|
|
10315
|
+
const dependencies = [];
|
|
10316
|
+
for (const [dependent, depSet] of depMap) for (const dep of depSet) dependencies.push({
|
|
10317
|
+
story_key: dependent,
|
|
10318
|
+
depends_on: dep,
|
|
10319
|
+
dependency_type: "blocks",
|
|
10320
|
+
source: "explicit"
|
|
10321
|
+
});
|
|
10322
|
+
try {
|
|
10323
|
+
await db.query(`CREATE TABLE IF NOT EXISTS wg_stories (
|
|
10324
|
+
story_key VARCHAR(20) NOT NULL,
|
|
10325
|
+
epic VARCHAR(20) NOT NULL,
|
|
10326
|
+
title VARCHAR(255),
|
|
10327
|
+
status VARCHAR(30) NOT NULL DEFAULT 'planned',
|
|
10328
|
+
spec_path VARCHAR(500),
|
|
10329
|
+
created_at DATETIME,
|
|
10330
|
+
updated_at DATETIME,
|
|
10331
|
+
completed_at DATETIME,
|
|
10332
|
+
PRIMARY KEY (story_key)
|
|
10333
|
+
)`);
|
|
10334
|
+
await db.query(`CREATE TABLE IF NOT EXISTS story_dependencies (
|
|
10335
|
+
story_key VARCHAR(50) NOT NULL,
|
|
10336
|
+
depends_on VARCHAR(50) NOT NULL,
|
|
10337
|
+
dependency_type VARCHAR(50) NOT NULL DEFAULT 'blocks',
|
|
10338
|
+
source VARCHAR(50) NOT NULL DEFAULT 'explicit',
|
|
10339
|
+
created_at DATETIME,
|
|
10340
|
+
PRIMARY KEY (story_key, depends_on)
|
|
10341
|
+
)`);
|
|
10342
|
+
} catch {}
|
|
10343
|
+
const ingester = new EpicIngester(db);
|
|
10344
|
+
const result = await ingester.ingest(stories, dependencies);
|
|
10345
|
+
return {
|
|
10346
|
+
storiesIngested: result.storiesUpserted,
|
|
10347
|
+
dependenciesIngested: result.dependenciesReplaced
|
|
10348
|
+
};
|
|
10349
|
+
}
|
|
9718
10350
|
const TITLE_OVERLAP_WARNING_THRESHOLD = .3;
|
|
9719
10351
|
/**
|
|
9720
10352
|
* Map a StoryPhase to the corresponding WgStoryStatus for wg_stories writes.
|
|
@@ -10318,6 +10950,25 @@ function createImplementationOrchestrator(deps) {
|
|
|
10318
10950
|
}
|
|
10319
10951
|
}
|
|
10320
10952
|
} catch {}
|
|
10953
|
+
if (storyFilePath === void 0 && projectRoot && isImplicitlyCovered(storyKey, projectRoot)) {
|
|
10954
|
+
logger$21.info({ storyKey }, `Story ${storyKey} appears implicitly covered — all expected new files already exist. Skipping create-story.`);
|
|
10955
|
+
endPhase(storyKey, "create-story");
|
|
10956
|
+
eventBus.emit("orchestrator:story-phase-complete", {
|
|
10957
|
+
storyKey,
|
|
10958
|
+
phase: "IN_STORY_CREATION",
|
|
10959
|
+
result: {
|
|
10960
|
+
result: "success",
|
|
10961
|
+
story_key: storyKey,
|
|
10962
|
+
implicitlyCovered: true
|
|
10963
|
+
}
|
|
10964
|
+
});
|
|
10965
|
+
updateStory(storyKey, {
|
|
10966
|
+
phase: "COMPLETE",
|
|
10967
|
+
completedAt: new Date().toISOString()
|
|
10968
|
+
});
|
|
10969
|
+
await persistState();
|
|
10970
|
+
return;
|
|
10971
|
+
}
|
|
10321
10972
|
if (storyFilePath === void 0) try {
|
|
10322
10973
|
incrementDispatches(storyKey);
|
|
10323
10974
|
const createResult = await runCreateStory({
|
|
@@ -11966,6 +12617,18 @@ function createImplementationOrchestrator(deps) {
|
|
|
11966
12617
|
durationMs: _startupTimings.seedMethodologyMs
|
|
11967
12618
|
}, "Methodology context seeded from planning artifacts");
|
|
11968
12619
|
}
|
|
12620
|
+
if (projectRoot !== void 0) {
|
|
12621
|
+
const ingestStart = Date.now();
|
|
12622
|
+
try {
|
|
12623
|
+
const ingestResult = await autoIngestEpicsDependencies(db, projectRoot);
|
|
12624
|
+
if (ingestResult.storiesIngested > 0 || ingestResult.dependenciesIngested > 0) logger$21.info({
|
|
12625
|
+
...ingestResult,
|
|
12626
|
+
durationMs: Date.now() - ingestStart
|
|
12627
|
+
}, "Auto-ingested stories and dependencies from epics document");
|
|
12628
|
+
} catch (err) {
|
|
12629
|
+
logger$21.debug({ err }, "Auto-ingest from epics document skipped — work graph may be unavailable");
|
|
12630
|
+
}
|
|
12631
|
+
}
|
|
11969
12632
|
try {
|
|
11970
12633
|
if (stateStore !== void 0) {
|
|
11971
12634
|
const stateStoreInitStart = Date.now();
|
|
@@ -12243,388 +12906,6 @@ function createImplementationOrchestrator(deps) {
|
|
|
12243
12906
|
};
|
|
12244
12907
|
}
|
|
12245
12908
|
|
|
12246
|
-
//#endregion
|
|
12247
|
-
//#region src/modules/implementation-orchestrator/story-discovery.ts
|
|
12248
|
-
/**
|
|
12249
|
-
* Unified story key resolution with a 5-level fallback chain.
|
|
12250
|
-
*
|
|
12251
|
-
* 1. Explicit keys (from --stories flag) — returned as-is
|
|
12252
|
-
* 1.5. ready_stories SQL view — when work graph is populated (story 31-3)
|
|
12253
|
-
* 2. Decisions table (category='stories', phase='solutioning')
|
|
12254
|
-
* 3. Epic shard decisions (category='epic-shard') — parsed with parseStoryKeysFromEpics
|
|
12255
|
-
* 4. epics.md file on disk (via discoverPendingStoryKeys)
|
|
12256
|
-
*
|
|
12257
|
-
* Optionally filters out completed stories when filterCompleted is set.
|
|
12258
|
-
*
|
|
12259
|
-
* @returns Sorted, deduplicated array of story keys in "N-M" format
|
|
12260
|
-
*/
|
|
12261
|
-
async function resolveStoryKeys(db, projectRoot, opts) {
|
|
12262
|
-
if (opts?.explicit !== void 0 && opts.explicit.length > 0) return topologicalSortByDependencies(opts.explicit, projectRoot);
|
|
12263
|
-
let keys = [];
|
|
12264
|
-
const readyKeys = await db.queryReadyStories();
|
|
12265
|
-
if (readyKeys.length > 0) {
|
|
12266
|
-
let filteredKeys = readyKeys;
|
|
12267
|
-
if (opts?.epicNumber !== void 0) {
|
|
12268
|
-
const prefix = `${opts.epicNumber}-`;
|
|
12269
|
-
filteredKeys = filteredKeys.filter((k) => k.startsWith(prefix));
|
|
12270
|
-
}
|
|
12271
|
-
if (opts?.filterCompleted === true && filteredKeys.length > 0) {
|
|
12272
|
-
const completedKeys = await getCompletedStoryKeys(db);
|
|
12273
|
-
filteredKeys = filteredKeys.filter((k) => !completedKeys.has(k));
|
|
12274
|
-
}
|
|
12275
|
-
const existingArtifacts = collectExistingStoryKeys(projectRoot);
|
|
12276
|
-
const alreadyDone = filteredKeys.filter((k) => existingArtifacts.has(k));
|
|
12277
|
-
if (alreadyDone.length > 0) {
|
|
12278
|
-
filteredKeys = filteredKeys.filter((k) => !existingArtifacts.has(k));
|
|
12279
|
-
for (const key of alreadyDone) db.query(`UPDATE wg_stories SET status = 'complete', completed_at = ? WHERE story_key = ? AND status <> 'complete'`, [new Date().toISOString(), key]).catch(() => {});
|
|
12280
|
-
}
|
|
12281
|
-
return sortStoryKeys([...new Set(filteredKeys)]);
|
|
12282
|
-
}
|
|
12283
|
-
try {
|
|
12284
|
-
const sql = opts?.pipelineRunId !== void 0 ? `SELECT key FROM decisions WHERE phase = 'solutioning' AND category = 'stories' AND pipeline_run_id = ? ORDER BY created_at ASC` : `SELECT key FROM decisions WHERE phase = 'solutioning' AND category = 'stories' ORDER BY created_at ASC`;
|
|
12285
|
-
const params = opts?.pipelineRunId !== void 0 ? [opts.pipelineRunId] : [];
|
|
12286
|
-
const rows = await db.query(sql, params);
|
|
12287
|
-
for (const row of rows) if (/^\d+-\d+/.test(row.key)) {
|
|
12288
|
-
const match$1 = /^(\d+-\d+)/.exec(row.key);
|
|
12289
|
-
if (match$1 !== null) keys.push(match$1[1]);
|
|
12290
|
-
}
|
|
12291
|
-
} catch {}
|
|
12292
|
-
if (keys.length === 0) try {
|
|
12293
|
-
const sql = opts?.pipelineRunId !== void 0 ? `SELECT value FROM decisions WHERE category = 'epic-shard' AND pipeline_run_id = ? ORDER BY created_at ASC` : `SELECT value FROM decisions WHERE category = 'epic-shard' ORDER BY created_at ASC`;
|
|
12294
|
-
const params = opts?.pipelineRunId !== void 0 ? [opts.pipelineRunId] : [];
|
|
12295
|
-
const shardRows = await db.query(sql, params);
|
|
12296
|
-
const allContent = shardRows.map((r) => r.value).join("\n");
|
|
12297
|
-
if (allContent.length > 0) keys = parseStoryKeysFromEpics(allContent);
|
|
12298
|
-
} catch {}
|
|
12299
|
-
if (keys.length === 0) keys = discoverPendingStoryKeys(projectRoot, opts?.epicNumber);
|
|
12300
|
-
if (opts?.epicNumber !== void 0 && keys.length > 0) {
|
|
12301
|
-
const prefix = `${opts.epicNumber}-`;
|
|
12302
|
-
keys = keys.filter((k) => k.startsWith(prefix));
|
|
12303
|
-
}
|
|
12304
|
-
if (opts?.filterCompleted === true && keys.length > 0) {
|
|
12305
|
-
const completedKeys = await getCompletedStoryKeys(db);
|
|
12306
|
-
keys = keys.filter((k) => !completedKeys.has(k));
|
|
12307
|
-
}
|
|
12308
|
-
if (keys.length > 0) {
|
|
12309
|
-
const existingArtifacts = collectExistingStoryKeys(projectRoot);
|
|
12310
|
-
keys = keys.filter((k) => !existingArtifacts.has(k));
|
|
12311
|
-
}
|
|
12312
|
-
return sortStoryKeys([...new Set(keys)]);
|
|
12313
|
-
}
|
|
12314
|
-
/**
|
|
12315
|
-
* Extract all story keys (N-M format) from epics.md content.
|
|
12316
|
-
*
|
|
12317
|
-
* Supports three extraction patterns found in real epics.md files:
|
|
12318
|
-
* 1. Explicit key lines: **Story key:** `7-2-human-turn-loop` → extracts "7-2"
|
|
12319
|
-
* 2. Story headings: ### Story 7.2: Human Turn Loop → extracts "7-2"
|
|
12320
|
-
* 3. File path refs: _bmad-output/implementation-artifacts/7-2-human-turn-loop.md → extracts "7-2"
|
|
12321
|
-
*
|
|
12322
|
-
* Keys are deduplicated and sorted numerically (epic number primary, story number secondary).
|
|
12323
|
-
*
|
|
12324
|
-
* @param content - Raw string content of epics.md
|
|
12325
|
-
* @returns Sorted, deduplicated array of story key strings in "N-M" format
|
|
12326
|
-
*/
|
|
12327
|
-
function parseStoryKeysFromEpics(content) {
|
|
12328
|
-
if (content.length === 0) return [];
|
|
12329
|
-
const keys = new Set();
|
|
12330
|
-
const explicitKeyPattern = /\*\*Story key:\*\*\s*`?([A-Za-z0-9]+-[A-Za-z0-9]+)(?:-[^`\s]*)?`?/g;
|
|
12331
|
-
let match$1;
|
|
12332
|
-
while ((match$1 = explicitKeyPattern.exec(content)) !== null) if (match$1[1] !== void 0) keys.add(match$1[1]);
|
|
12333
|
-
const headingPattern = /^###\s+Story\s+([A-Za-z0-9]+)[.\-]([A-Za-z0-9]+)/gm;
|
|
12334
|
-
while ((match$1 = headingPattern.exec(content)) !== null) if (match$1[1] !== void 0 && match$1[2] !== void 0) keys.add(`${match$1[1]}-${match$1[2]}`);
|
|
12335
|
-
const inlineStoryPattern = /Story\s+([A-Za-z0-9]+)-([A-Za-z0-9]+)[:\s]/g;
|
|
12336
|
-
while ((match$1 = inlineStoryPattern.exec(content)) !== null) if (match$1[1] !== void 0 && match$1[2] !== void 0) keys.add(`${match$1[1]}-${match$1[2]}`);
|
|
12337
|
-
const filePathPattern = /_bmad-output\/implementation-artifacts\/([A-Za-z0-9]+-[A-Za-z0-9]+)-/g;
|
|
12338
|
-
while ((match$1 = filePathPattern.exec(content)) !== null) if (match$1[1] !== void 0) keys.add(match$1[1]);
|
|
12339
|
-
return sortStoryKeys(Array.from(keys));
|
|
12340
|
-
}
|
|
12341
|
-
/**
|
|
12342
|
-
* Discover pending story keys by diffing epics.md against existing story files.
|
|
12343
|
-
*
|
|
12344
|
-
* Algorithm:
|
|
12345
|
-
* 1. Read _bmad-output/planning-artifacts/epics.md (falls back to _bmad-output/epics.md)
|
|
12346
|
-
* 2. Extract all story keys from epics.md
|
|
12347
|
-
* 3. Glob _bmad-output/implementation-artifacts/ for N-M-*.md files
|
|
12348
|
-
* 4. Return keys from step 2 that are NOT in step 3 (pending work)
|
|
12349
|
-
*
|
|
12350
|
-
* Returns an empty array (without error) if epics.md does not exist.
|
|
12351
|
-
*
|
|
12352
|
-
* @param projectRoot - Absolute path to the project root directory
|
|
12353
|
-
* @returns Sorted array of pending story keys in "N-M" format
|
|
12354
|
-
*/
|
|
12355
|
-
function discoverPendingStoryKeys(projectRoot, epicNumber) {
|
|
12356
|
-
let allKeys = [];
|
|
12357
|
-
if (epicNumber !== void 0) {
|
|
12358
|
-
const epicFiles = findEpicFiles(projectRoot);
|
|
12359
|
-
const targetPattern = new RegExp(`^epic-${epicNumber}[^0-9]`);
|
|
12360
|
-
const matched = epicFiles.filter((f$1) => targetPattern.test(f$1.split("/").pop()));
|
|
12361
|
-
for (const epicFile of matched) try {
|
|
12362
|
-
const content = readFileSync(epicFile, "utf-8");
|
|
12363
|
-
const keys = parseStoryKeysFromEpics(content);
|
|
12364
|
-
allKeys.push(...keys);
|
|
12365
|
-
} catch {}
|
|
12366
|
-
allKeys = sortStoryKeys([...new Set(allKeys)]);
|
|
12367
|
-
} else {
|
|
12368
|
-
const epicsPath = findEpicsFile(projectRoot);
|
|
12369
|
-
if (epicsPath !== void 0) try {
|
|
12370
|
-
const content = readFileSync(epicsPath, "utf-8");
|
|
12371
|
-
allKeys = parseStoryKeysFromEpics(content);
|
|
12372
|
-
} catch {}
|
|
12373
|
-
if (allKeys.length === 0) {
|
|
12374
|
-
const epicFiles = findEpicFiles(projectRoot);
|
|
12375
|
-
for (const epicFile of epicFiles) try {
|
|
12376
|
-
const content = readFileSync(epicFile, "utf-8");
|
|
12377
|
-
const keys = parseStoryKeysFromEpics(content);
|
|
12378
|
-
allKeys.push(...keys);
|
|
12379
|
-
} catch {}
|
|
12380
|
-
allKeys = sortStoryKeys([...new Set(allKeys)]);
|
|
12381
|
-
}
|
|
12382
|
-
}
|
|
12383
|
-
const sprintKeys = parseStoryKeysFromSprintStatus(projectRoot);
|
|
12384
|
-
if (sprintKeys.length > 0) {
|
|
12385
|
-
const merged = new Set(allKeys);
|
|
12386
|
-
for (const k of sprintKeys) merged.add(k);
|
|
12387
|
-
allKeys = sortStoryKeys([...merged]);
|
|
12388
|
-
}
|
|
12389
|
-
if (allKeys.length === 0) return [];
|
|
12390
|
-
const existingKeys = collectExistingStoryKeys(projectRoot);
|
|
12391
|
-
return allKeys.filter((k) => !existingKeys.has(k));
|
|
12392
|
-
}
|
|
12393
|
-
/**
|
|
12394
|
-
* Find epic files from known candidate paths relative to projectRoot.
|
|
12395
|
-
*
|
|
12396
|
-
* Checks for:
|
|
12397
|
-
* 1. epics.md (consolidated epic file)
|
|
12398
|
-
* 2. Individual epic-*.md files in planning-artifacts/
|
|
12399
|
-
*
|
|
12400
|
-
* Returns a single path for epics.md, or undefined if not found.
|
|
12401
|
-
* For individual epic files, use findEpicFiles() instead.
|
|
12402
|
-
*/
|
|
12403
|
-
function findEpicsFile(projectRoot) {
|
|
12404
|
-
const candidates = ["_bmad-output/planning-artifacts/epics.md", "_bmad-output/epics.md"];
|
|
12405
|
-
for (const candidate of candidates) {
|
|
12406
|
-
const fullPath = join$1(projectRoot, candidate);
|
|
12407
|
-
if (existsSync(fullPath)) return fullPath;
|
|
12408
|
-
}
|
|
12409
|
-
const planningDir = join$1(projectRoot, "_bmad-output", "planning-artifacts");
|
|
12410
|
-
if (existsSync(planningDir)) try {
|
|
12411
|
-
const entries = readdirSync(planningDir, { encoding: "utf-8" });
|
|
12412
|
-
const match$1 = entries.filter((e) => /^epics[-.].*\.md$/i.test(e) && !/^epic-\d+/.test(e)).sort();
|
|
12413
|
-
if (match$1.length > 0) return join$1(planningDir, match$1[0]);
|
|
12414
|
-
} catch {}
|
|
12415
|
-
return void 0;
|
|
12416
|
-
}
|
|
12417
|
-
/**
|
|
12418
|
-
* Find individual epic-*.md files in the planning artifacts directory.
|
|
12419
|
-
* Returns paths sorted alphabetically.
|
|
12420
|
-
*/
|
|
12421
|
-
function findEpicFiles(projectRoot) {
|
|
12422
|
-
const planningDir = join$1(projectRoot, "_bmad-output", "planning-artifacts");
|
|
12423
|
-
if (!existsSync(planningDir)) return [];
|
|
12424
|
-
try {
|
|
12425
|
-
const entries = readdirSync(planningDir, { encoding: "utf-8" });
|
|
12426
|
-
return entries.filter((e) => /^epic-\d+.*\.md$/.test(e)).sort().map((e) => join$1(planningDir, e));
|
|
12427
|
-
} catch {
|
|
12428
|
-
return [];
|
|
12429
|
-
}
|
|
12430
|
-
}
|
|
12431
|
-
/**
|
|
12432
|
-
* Collect story keys that already have implementation artifact files.
|
|
12433
|
-
* Scans _bmad-output/implementation-artifacts/ for files matching N-M-*.md.
|
|
12434
|
-
*/
|
|
12435
|
-
function collectExistingStoryKeys(projectRoot) {
|
|
12436
|
-
const existing = new Set();
|
|
12437
|
-
const artifactsDir = join$1(projectRoot, "_bmad-output", "implementation-artifacts");
|
|
12438
|
-
if (!existsSync(artifactsDir)) return existing;
|
|
12439
|
-
let entries;
|
|
12440
|
-
try {
|
|
12441
|
-
entries = readdirSync(artifactsDir, { encoding: "utf-8" });
|
|
12442
|
-
} catch {
|
|
12443
|
-
return existing;
|
|
12444
|
-
}
|
|
12445
|
-
const filePattern = /^([A-Za-z0-9]+-[A-Za-z0-9]+)-/;
|
|
12446
|
-
for (const entry of entries) {
|
|
12447
|
-
if (!entry.endsWith(".md")) continue;
|
|
12448
|
-
const m = filePattern.exec(entry);
|
|
12449
|
-
if (m !== null && m[1] !== void 0) existing.add(m[1]);
|
|
12450
|
-
}
|
|
12451
|
-
return existing;
|
|
12452
|
-
}
|
|
12453
|
-
/**
|
|
12454
|
-
* Parse story keys from sprint-status.yaml.
|
|
12455
|
-
* Reads the development_status map and extracts keys that match the
|
|
12456
|
-
* alphanumeric story key pattern (e.g., 1-1a, NEW-26, E5-accessibility).
|
|
12457
|
-
* Filters out epic status entries (epic-N) and retrospective entries.
|
|
12458
|
-
*/
|
|
12459
|
-
function parseStoryKeysFromSprintStatus(projectRoot) {
|
|
12460
|
-
const candidates = [join$1(projectRoot, "_bmad-output", "implementation-artifacts", "sprint-status.yaml"), join$1(projectRoot, "_bmad-output", "sprint-status.yaml")];
|
|
12461
|
-
const statusPath = candidates.find((p) => existsSync(p));
|
|
12462
|
-
if (!statusPath) return [];
|
|
12463
|
-
try {
|
|
12464
|
-
const content = readFileSync(statusPath, "utf-8");
|
|
12465
|
-
const keys = [];
|
|
12466
|
-
const linePattern = /^\s{2}([A-Za-z0-9]+-[A-Za-z0-9]+(?:-[A-Za-z0-9-]*)?)\s*:/gm;
|
|
12467
|
-
let match$1;
|
|
12468
|
-
while ((match$1 = linePattern.exec(content)) !== null) {
|
|
12469
|
-
const fullKey = match$1[1];
|
|
12470
|
-
if (/^epic-\d+$/.test(fullKey)) continue;
|
|
12471
|
-
if (fullKey.includes("retrospective")) continue;
|
|
12472
|
-
const segments = fullKey.split("-");
|
|
12473
|
-
if (segments.length >= 2) keys.push(`${segments[0]}-${segments[1]}`);
|
|
12474
|
-
}
|
|
12475
|
-
return [...new Set(keys)];
|
|
12476
|
-
} catch {
|
|
12477
|
-
return [];
|
|
12478
|
-
}
|
|
12479
|
-
}
|
|
12480
|
-
/**
 * Collect story keys already completed in previous pipeline runs.
 * Scans pipeline_runs with status='completed' and extracts story keys
 * with phase='COMPLETE' from their token_usage_json state.
 *
 * Best-effort: a failing query or a malformed JSON row is ignored so a
 * missing or corrupt history never blocks a new run.
 */
async function getCompletedStoryKeys(db) {
	const doneKeys = new Set();
	let rows;
	try {
		rows = await db.query(`SELECT token_usage_json FROM pipeline_runs WHERE status = 'completed' AND token_usage_json IS NOT NULL`);
	} catch {
		return doneKeys;
	}
	for (const record of rows) {
		try {
			const runState = JSON.parse(record.token_usage_json);
			const stories = runState.stories;
			if (stories === undefined) continue;
			for (const [storyKey, storyState] of Object.entries(stories)) {
				if (storyState.phase === "COMPLETE") doneKeys.add(storyKey);
			}
		} catch {}
	}
	return doneKeys;
}
|
|
12498
|
-
/**
 * Sort story keys: numeric keys first (by epic then story number),
 * then alphabetic-prefix keys (NEW-*, E-*) sorted lexicographically.
 * E.g. ["10-1", "1-2a", "1-2", "NEW-26", "E5-acc"] → ["1-2", "1-2a", "10-1", "E5-acc", "NEW-26"]
 * Returns a new array; the input is not mutated.
 */
function sortStoryKeys(keys) {
	const compareKeys = (left, right) => {
		const [leftEpic, leftStory] = left.split("-");
		const [rightEpic, rightStory] = right.split("-");
		const leftEpicNum = Number(leftEpic);
		const rightEpicNum = Number(rightEpic);
		const leftIsNumeric = !Number.isNaN(leftEpicNum);
		const rightIsNumeric = !Number.isNaN(rightEpicNum);
		if (leftIsNumeric && rightIsNumeric) {
			if (leftEpicNum !== rightEpicNum) return leftEpicNum - rightEpicNum;
			const leftStoryNum = Number(leftStory);
			const rightStoryNum = Number(rightStory);
			// Numeric story suffixes compare numerically; suffixed ones (e.g. "2a") fall back to text order.
			if (!Number.isNaN(leftStoryNum) && !Number.isNaN(rightStoryNum) && leftStoryNum !== rightStoryNum) return leftStoryNum - rightStoryNum;
			return (leftStory ?? "").localeCompare(rightStory ?? "");
		}
		// Numeric-epic keys always sort ahead of alphabetic-prefix keys.
		if (leftIsNumeric) return -1;
		if (rightIsNumeric) return 1;
		return left.localeCompare(right);
	};
	return [...keys].sort(compareKeys);
}
|
|
12521
|
-
/**
 * Parse inter-story dependencies from the consolidated epics document.
 *
 * Scans for patterns like:
 *   ### Story 50-2: Title
 *   **Dependencies:** 50-1
 *
 * Returns a Map where key=storyKey, value=Set of dependency keys.
 * Only dependencies within the provided storyKeys set are kept
 * (external dependencies to other epics are ignored for ordering
 * purposes). Returns an empty Map when no epics document exists or
 * it cannot be read.
 */
function parseEpicsDependencies(projectRoot, storyKeys) {
	const dependencyMap = new Map();
	const epicsPath = findEpicsFile(projectRoot);
	if (epicsPath === undefined) return dependencyMap;
	let epicsText;
	try {
		epicsText = readFileSync(epicsPath, "utf-8");
	} catch {
		return dependencyMap;
	}
	// Locate every story heading so each story's section can be sliced out.
	const headings = [...epicsText.matchAll(/^###\s+Story\s+(\d+)-(\d+)[:\s]/gm)].map((m) => ({
		key: `${m[1]}-${m[2]}`,
		pos: m.index
	}));
	for (const [index, heading] of headings.entries()) {
		const sectionEnd = index + 1 < headings.length ? headings[index + 1].pos : epicsText.length;
		const section = epicsText.slice(heading.pos, sectionEnd);
		// Fresh literal per section: only the first Dependencies line of the slice matters.
		const depMatch = /^\*\*Dependencies:\*\*\s*(.+)$/gm.exec(section);
		if (depMatch === null) continue;
		const depText = depMatch[1];
		if (/^none$/i.test(depText.trim())) continue;
		const resolved = new Set();
		// "N-a through N-b" ranges expand to every story number in between.
		const rangeMatch = /(\d+)-(\d+)\s+through\s+\1-(\d+)/i.exec(depText);
		if (rangeMatch === null) {
			for (const keyMatch of depText.matchAll(/(\d+-\d+[a-z]?)/g)) {
				if (storyKeys.has(keyMatch[1])) resolved.add(keyMatch[1]);
			}
		} else {
			const [, epicNum, rangeStart, rangeEnd] = rangeMatch;
			for (let storyNum = Number(rangeStart); storyNum <= Number(rangeEnd); storyNum++) {
				const candidate = `${epicNum}-${storyNum}`;
				if (storyKeys.has(candidate)) resolved.add(candidate);
			}
		}
		if (resolved.size > 0) dependencyMap.set(heading.key, resolved);
	}
	return dependencyMap;
}
|
|
12580
|
-
/**
 * Topologically sort explicit story keys by inter-story dependencies.
 *
 * Parses the consolidated epics document for dependency metadata, builds
 * a DAG, and returns keys in dependency-first order using Kahn's algorithm.
 * Stories with no dependencies come first; stories that depend on others
 * are placed after their prerequisites. Within each wave, keys are emitted
 * in sortStoryKeys order for stable output.
 *
 * Falls back to numeric sort if no epics document exists or no
 * dependencies are found among the provided keys. If a dependency cycle
 * is detected, the remaining keys are appended in plain sorted order.
 */
function topologicalSortByDependencies(keys, projectRoot) {
	if (keys.length <= 1) return keys;
	const known = new Set(keys);
	const dependencyMap = parseEpicsDependencies(projectRoot, known);
	if (dependencyMap.size === 0) return sortStoryKeys(keys);
	// Build the DAG: remaining[k] = unmet prerequisite count, dependents[k] = stories unblocked by k.
	const remaining = new Map(keys.map((key) => [key, 0]));
	const dependents = new Map(keys.map((key) => [key, new Set()]));
	for (const [story, prereqs] of dependencyMap) {
		if (!known.has(story)) continue;
		for (const prereq of prereqs) {
			if (!known.has(prereq)) continue;
			dependents.get(prereq).add(story);
			remaining.set(story, (remaining.get(story) ?? 0) + 1);
		}
	}
	// Kahn's algorithm, draining one zero-in-degree wave per iteration.
	const ordered = [];
	const emitted = new Set();
	while (emitted.size < keys.length) {
		const ready = keys.filter((key) => !emitted.has(key) && (remaining.get(key) ?? 0) === 0);
		if (ready.length === 0) {
			// Cycle detected: append whatever is left in plain sorted order.
			for (const key of sortStoryKeys(keys)) if (!emitted.has(key)) ordered.push(key);
			break;
		}
		for (const key of sortStoryKeys(ready)) {
			ordered.push(key);
			emitted.add(key);
			for (const unblocked of dependents.get(key) ?? []) remaining.set(unblocked, (remaining.get(unblocked) ?? 0) - 1);
		}
	}
	return ordered;
}
|
|
12627
|
-
|
|
12628
12909
|
//#endregion
|
|
12629
12910
|
//#region src/modules/phase-orchestrator/phase-detection.ts
|
|
12630
12911
|
const PHASE_ARTIFACTS = [
|
|
@@ -39338,6 +39619,7 @@ async function runRunAction(options) {
|
|
|
39338
39619
|
});
|
|
39339
39620
|
} catch {}
|
|
39340
39621
|
let tokenCeilings;
|
|
39622
|
+
let dispatchTimeouts;
|
|
39341
39623
|
let telemetryEnabled = false;
|
|
39342
39624
|
let telemetryPort = 4318;
|
|
39343
39625
|
try {
|
|
@@ -39345,6 +39627,10 @@ async function runRunAction(options) {
|
|
|
39345
39627
|
await configSystem.load();
|
|
39346
39628
|
const cfg = configSystem.getConfig();
|
|
39347
39629
|
tokenCeilings = cfg.token_ceilings;
|
|
39630
|
+
if (cfg.dispatch_timeouts) {
|
|
39631
|
+
dispatchTimeouts = Object.fromEntries(Object.entries(cfg.dispatch_timeouts).filter(([, v]) => v !== void 0));
|
|
39632
|
+
logger.info({ dispatchTimeouts }, "Loaded dispatch timeout overrides from config");
|
|
39633
|
+
}
|
|
39348
39634
|
if (cfg.telemetry?.enabled === true) {
|
|
39349
39635
|
telemetryEnabled = true;
|
|
39350
39636
|
telemetryPort = cfg.telemetry.port ?? 4318;
|
|
@@ -39631,7 +39917,10 @@ async function runRunAction(options) {
|
|
|
39631
39917
|
const dispatcher = createDispatcher({
|
|
39632
39918
|
eventBus,
|
|
39633
39919
|
adapterRegistry: injectedRegistry,
|
|
39634
|
-
config: {
|
|
39920
|
+
config: {
|
|
39921
|
+
routingResolver,
|
|
39922
|
+
...dispatchTimeouts ? { defaultTimeouts: dispatchTimeouts } : {}
|
|
39923
|
+
}
|
|
39635
39924
|
});
|
|
39636
39925
|
eventBus.on("orchestrator:story-phase-complete", (payload) => {
|
|
39637
39926
|
try {
|
|
@@ -40497,5 +40786,5 @@ function registerRunCommand(program, _version = "0.0.0", projectRoot = process.c
|
|
|
40497
40786
|
}
|
|
40498
40787
|
|
|
40499
40788
|
//#endregion
|
|
40500
|
-
export { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, GitClient, GrammarLoader, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, normalizeGraphSummaryToStatus, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
|
|
40501
|
-
//# sourceMappingURL=run-
|
|
40789
|
+
export { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GitClient, GrammarLoader, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, normalizeGraphSummaryToStatus, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runRunAction, runSolutioningPhase, validateStopAfterFromConflict };
|
|
40790
|
+
//# sourceMappingURL=run-BOhSIujp.js.map
|
|
@@ -2,7 +2,7 @@ import "./health-Cx2ZhRNT.js";
|
|
|
2
2
|
import "./logger-KeHncl-f.js";
|
|
3
3
|
import "./helpers-CElYrONe.js";
|
|
4
4
|
import "./dist-Bm0qSZer.js";
|
|
5
|
-
import { normalizeGraphSummaryToStatus, registerRunCommand, runRunAction } from "./run-
|
|
5
|
+
import { normalizeGraphSummaryToStatus, registerRunCommand, runRunAction } from "./run-BOhSIujp.js";
|
|
6
6
|
import "./routing-CcBOCuC9.js";
|
|
7
7
|
import "./decisions-C0pz9Clx.js";
|
|
8
8
|
|