@hyperdrive.bot/bmad-workflow 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +1017 -0
- package/bin/dev +5 -0
- package/bin/dev.cmd +3 -0
- package/bin/dev.js +5 -0
- package/bin/run +5 -0
- package/bin/run.cmd +3 -0
- package/bin/run.js +5 -0
- package/dist/commands/config/show.d.ts +34 -0
- package/dist/commands/config/show.js +108 -0
- package/dist/commands/config/validate.d.ts +29 -0
- package/dist/commands/config/validate.js +131 -0
- package/dist/commands/decompose.d.ts +79 -0
- package/dist/commands/decompose.js +327 -0
- package/dist/commands/demo.d.ts +18 -0
- package/dist/commands/demo.js +107 -0
- package/dist/commands/epics/create.d.ts +123 -0
- package/dist/commands/epics/create.js +459 -0
- package/dist/commands/epics/list.d.ts +120 -0
- package/dist/commands/epics/list.js +280 -0
- package/dist/commands/hello/index.d.ts +12 -0
- package/dist/commands/hello/index.js +34 -0
- package/dist/commands/hello/world.d.ts +8 -0
- package/dist/commands/hello/world.js +24 -0
- package/dist/commands/prd/fix.d.ts +39 -0
- package/dist/commands/prd/fix.js +140 -0
- package/dist/commands/prd/validate.d.ts +112 -0
- package/dist/commands/prd/validate.js +302 -0
- package/dist/commands/stories/create.d.ts +95 -0
- package/dist/commands/stories/create.js +431 -0
- package/dist/commands/stories/develop.d.ts +91 -0
- package/dist/commands/stories/develop.js +460 -0
- package/dist/commands/stories/list.d.ts +84 -0
- package/dist/commands/stories/list.js +291 -0
- package/dist/commands/stories/move.d.ts +66 -0
- package/dist/commands/stories/move.js +273 -0
- package/dist/commands/stories/qa.d.ts +99 -0
- package/dist/commands/stories/qa.js +530 -0
- package/dist/commands/workflow.d.ts +97 -0
- package/dist/commands/workflow.js +390 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1 -0
- package/dist/models/agent-options.d.ts +50 -0
- package/dist/models/agent-options.js +1 -0
- package/dist/models/agent-result.d.ts +29 -0
- package/dist/models/agent-result.js +1 -0
- package/dist/models/index.d.ts +10 -0
- package/dist/models/index.js +10 -0
- package/dist/models/phase-result.d.ts +65 -0
- package/dist/models/phase-result.js +7 -0
- package/dist/models/provider.d.ts +28 -0
- package/dist/models/provider.js +18 -0
- package/dist/models/story.d.ts +154 -0
- package/dist/models/story.js +18 -0
- package/dist/models/workflow-config.d.ts +148 -0
- package/dist/models/workflow-config.js +1 -0
- package/dist/models/workflow-result.d.ts +164 -0
- package/dist/models/workflow-result.js +7 -0
- package/dist/services/agents/agent-runner-factory.d.ts +31 -0
- package/dist/services/agents/agent-runner-factory.js +44 -0
- package/dist/services/agents/agent-runner.d.ts +46 -0
- package/dist/services/agents/agent-runner.js +29 -0
- package/dist/services/agents/claude-agent-runner.d.ts +81 -0
- package/dist/services/agents/claude-agent-runner.js +332 -0
- package/dist/services/agents/gemini-agent-runner.d.ts +82 -0
- package/dist/services/agents/gemini-agent-runner.js +350 -0
- package/dist/services/agents/index.d.ts +7 -0
- package/dist/services/agents/index.js +7 -0
- package/dist/services/file-system/file-manager.d.ts +110 -0
- package/dist/services/file-system/file-manager.js +223 -0
- package/dist/services/file-system/glob-matcher.d.ts +75 -0
- package/dist/services/file-system/glob-matcher.js +126 -0
- package/dist/services/file-system/path-resolver.d.ts +183 -0
- package/dist/services/file-system/path-resolver.js +400 -0
- package/dist/services/logging/workflow-logger.d.ts +232 -0
- package/dist/services/logging/workflow-logger.js +552 -0
- package/dist/services/orchestration/batch-processor.d.ts +113 -0
- package/dist/services/orchestration/batch-processor.js +187 -0
- package/dist/services/orchestration/dependency-graph-executor.d.ts +60 -0
- package/dist/services/orchestration/dependency-graph-executor.js +447 -0
- package/dist/services/orchestration/index.d.ts +10 -0
- package/dist/services/orchestration/index.js +8 -0
- package/dist/services/orchestration/input-detector.d.ts +125 -0
- package/dist/services/orchestration/input-detector.js +381 -0
- package/dist/services/orchestration/story-queue.d.ts +94 -0
- package/dist/services/orchestration/story-queue.js +170 -0
- package/dist/services/orchestration/story-type-detector.d.ts +80 -0
- package/dist/services/orchestration/story-type-detector.js +258 -0
- package/dist/services/orchestration/task-decomposition-service.d.ts +67 -0
- package/dist/services/orchestration/task-decomposition-service.js +607 -0
- package/dist/services/orchestration/workflow-orchestrator.d.ts +659 -0
- package/dist/services/orchestration/workflow-orchestrator.js +2201 -0
- package/dist/services/parsers/epic-parser.d.ts +117 -0
- package/dist/services/parsers/epic-parser.js +264 -0
- package/dist/services/parsers/prd-fixer.d.ts +86 -0
- package/dist/services/parsers/prd-fixer.js +194 -0
- package/dist/services/parsers/prd-parser.d.ts +123 -0
- package/dist/services/parsers/prd-parser.js +286 -0
- package/dist/services/parsers/standalone-story-parser.d.ts +114 -0
- package/dist/services/parsers/standalone-story-parser.js +255 -0
- package/dist/services/parsers/story-parser-factory.d.ts +81 -0
- package/dist/services/parsers/story-parser-factory.js +108 -0
- package/dist/services/parsers/story-parser.d.ts +122 -0
- package/dist/services/parsers/story-parser.js +262 -0
- package/dist/services/scaffolding/decompose-session-scaffolder.d.ts +74 -0
- package/dist/services/scaffolding/decompose-session-scaffolder.js +315 -0
- package/dist/services/scaffolding/file-scaffolder.d.ts +94 -0
- package/dist/services/scaffolding/file-scaffolder.js +314 -0
- package/dist/services/validation/config-validator.d.ts +88 -0
- package/dist/services/validation/config-validator.js +167 -0
- package/dist/types/task-graph.d.ts +142 -0
- package/dist/types/task-graph.js +5 -0
- package/dist/utils/colors.d.ts +49 -0
- package/dist/utils/colors.js +50 -0
- package/dist/utils/error-formatter.d.ts +64 -0
- package/dist/utils/error-formatter.js +279 -0
- package/dist/utils/errors.d.ts +170 -0
- package/dist/utils/errors.js +233 -0
- package/dist/utils/formatters.d.ts +84 -0
- package/dist/utils/formatters.js +162 -0
- package/dist/utils/logger.d.ts +63 -0
- package/dist/utils/logger.js +78 -0
- package/dist/utils/progress.d.ts +104 -0
- package/dist/utils/progress.js +161 -0
- package/dist/utils/retry.d.ts +114 -0
- package/dist/utils/retry.js +160 -0
- package/dist/utils/shared-flags.d.ts +28 -0
- package/dist/utils/shared-flags.js +43 -0
- package/package.json +119 -0
package/dist/services/orchestration/workflow-orchestrator.js
@@ -0,0 +1,2201 @@
/**
 * WorkflowOrchestrator Service
 *
 * Coordinates the complete PRD → Epic → Story → Dev workflow with automatic
 * input detection, conditional phase execution, and comprehensive result tracking.
 *
 * Responsibilities:
 * - Detect input type (PRD, epic, or story pattern)
 * - Execute phases conditionally based on skip flags and input type
 * - Coordinate between PrdParser, EpicParser, and ClaudeAgentRunner
 * - Track phase results with success/failure counts and durations
 * - Handle cross-phase errors gracefully (fail early if critical phase fails)
 * - Aggregate results across all phases into WorkflowResult
 *
 * @example
 * ```typescript
 * const logger = createLogger({ namespace: 'orchestrator' })
 * const orchestrator = new WorkflowOrchestrator({
 *   inputDetector,
 *   prdParser,
 *   epicParser,
 *   agentRunner: claudeRunner,
 *   batchProcessor,
 *   fileManager,
 *   pathResolver,
 *   storyTypeDetector,
 *   logger,
 * })
 *
 * const result = await orchestrator.execute({
 *   input: 'docs/PRD-feature.md',
 *   prdInterval: 60,
 *   epicInterval: 60,
 *   storyInterval: 60,
 *   parallel: 3,
 *   prefix: 'PROJ-123',
 *   references: [],
 *   skipEpics: false,
 *   skipStories: false,
 *   skipDev: false,
 *   dryRun: false,
 *   verbose: false
 * })
 * ```
 */
import { isEpicStory } from '../../models/story.js';
import { ParserError, ValidationError } from '../../utils/errors.js';
import { PrdFixer } from '../parsers/prd-fixer.js';
import { FileScaffolder } from '../scaffolding/file-scaffolder.js';
import { BatchProcessor } from './batch-processor.js';
import { StoryQueue } from './story-queue.js';
/**
 * WorkflowOrchestrator service for coordinating multi-phase workflows
 *
 * Orchestrates the complete workflow from PRD to development, executing
 * phases conditionally based on input type and skip flags. Tracks results
 * and handles cross-phase errors gracefully.
 */
export class WorkflowOrchestrator {
    agentRunner;
    batchProcessor;
    epicParser;
    fileManager;
    fileScaffolder;
    inputDetector;
    logger;
    pathResolver;
    prdFixer;
    prdParser;
    storyTypeDetector;
    workflowLogger;
    /**
     * Create a new WorkflowOrchestrator instance
     *
     * @param config - Configuration object containing all service dependencies
     */
    constructor(config) {
        this.inputDetector = config.inputDetector;
        this.prdParser = config.prdParser;
        this.epicParser = config.epicParser;
        this.agentRunner = config.agentRunner;
        this.batchProcessor = config.batchProcessor;
        this.fileManager = config.fileManager;
        this.pathResolver = config.pathResolver;
        this.storyTypeDetector = config.storyTypeDetector;
        this.logger = config.logger;
        this.workflowLogger = config.workflowLogger;
        this.fileScaffolder = new FileScaffolder(config.logger);
        this.prdFixer = new PrdFixer(config.agentRunner, config.fileManager, config.logger);
        this.logger.debug('WorkflowOrchestrator initialized');
    }
    /**
     * Execute the complete workflow
     *
     * Detects input type, executes phases conditionally based on skip flags
     * and input type, tracks results, and handles cross-phase errors.
     *
     * @param config - Workflow configuration
     * @returns WorkflowResult with all phase results and aggregate metrics
     * @throws {ValidationError} If configuration is invalid
     */
    async execute(config) {
        const startTime = Date.now();
        this.logWorkflowStart(config);
        this.validateConfig(config);
        const detection = await this.detectInput(config.input);
        const phaseFlags = this.determinePhaseExecution(detection, config);
        const epicPhase = await this.executeEpicPhaseIfNeeded(config, detection, phaseFlags.shouldExecuteEpicPhase);
        // Early return if epic phase failed completely
        if (this.shouldAbortAfterEpicFailure(epicPhase)) {
            return this.buildFailureResult(startTime, epicPhase);
        }
        const { devPhase, storyPhase } = await this.executeStoryAndDevPhases(config, detection, epicPhase, phaseFlags, startTime);
        const qaPhase = await this.executeQaPhaseIfNeeded(config, devPhase, phaseFlags.shouldExecuteQaPhase);
        return this.buildSuccessResult(startTime, epicPhase, storyPhase, devPhase, qaPhase);
    }
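    // Illustrative caller sketch (hypothetical, not part of the published file):
    // the WorkflowResult returned by execute() carries the aggregate fields
    // assembled by buildSuccessResult()/buildFailureResult() below, so a caller
    // might report failures like this:
    //
    //   const result = await orchestrator.execute(config);
    //   if (!result.overallSuccess) {
    //     console.error(`${result.totalFailures} failure(s) in ${result.totalDuration} ms`);
    //     for (const failure of result.devPhase.failures) {
    //       console.error(failure.identifier, failure.error);
    //     }
    //   }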
    /**
     * Build prompt for epic creation
     *
     * @param epic - Epic to create
     * @param options - Prompt configuration options
     * @returns Agent prompt
     * @private
     */
    buildEpicPrompt(epic, options) {
        const { cwd, outputPath, prdPath, references } = options;
        const referencesText = references.length > 0 ? `\nReferences: ${references.join(', ')}` : '';
        const cwdText = cwd ? `\n\nWorking directory: ${cwd}` : '';
        return `@.bmad-core/agents/sm.md${cwdText}

Create epic '${epic.number}: ${epic.title}' for PRD '${prdPath}'.${referencesText}.

IMPORTANT: The file at '${outputPath}' has been pre-scaffolded with structure and metadata.
- DO NOT modify the Epic Header section (Epic ID, Status: Draft, Created date are already set)
- DO NOT change the document structure or section headers
- ONLY populate the empty content sections marked with [AI Agent will populate]
- Follow the template structure at @.bmad-core/templates/epic-tmpl.yaml for content guidance

Write output to: '${outputPath}'`;
    }
    /**
     * Build failure result when epic phase fails completely
     *
     * @param startTime - Workflow start time
     * @param epicPhase - Failed epic phase result
     * @returns WorkflowResult
     * @private
     */
    buildFailureResult(startTime, epicPhase) {
        this.logger.error('Epic phase failed completely, skipping downstream phases');
        const totalDuration = Date.now() - startTime;
        return {
            devPhase: this.createSkippedPhaseResult('dev'),
            epicPhase,
            overallSuccess: false,
            qaPhase: this.createSkippedPhaseResult('qa'),
            storyPhase: this.createSkippedPhaseResult('story'),
            totalDuration,
            totalFailures: epicPhase.failures.length,
            totalFilesProcessed: 0,
        };
    }
    /**
     * Build prompt for story creation
     *
     * @param story - Story to create
     * @param options - Prompt configuration options
     * @returns Agent prompt
     * @private
     */
    buildStoryPrompt(story, options) {
        const { cwd, epicPath, outputPath, references } = options;
        const referencesText = references.length > 0 ? `\nReferences: ${references.join(', ')}` : '';
        const cwdText = cwd ? `\n\nWorking directory: ${cwd}` : '';
        return `@.bmad-core/agents/sm.md${cwdText}

Create story '${story.fullNumber}: ${story.title}' for epic '${epicPath}'. ${referencesText}

IMPORTANT: The file at '${outputPath}' has been pre-scaffolded with structure and metadata.
- DO NOT modify the Status section (already set to Draft)
- DO NOT modify the Created date in Change Log
- DO NOT change the document structure or section headers
- ONLY populate the empty content sections marked with [AI Agent will populate]
- Follow the template structure at @.bmad-core/templates/story-tmpl.yaml for content guidance

Write output to: ${outputPath}`;
    }
    /**
     * Build success result with all phase results
     *
     * @param startTime - Workflow start time
     * @param epicPhase - Epic phase result
     * @param storyPhase - Story phase result
     * @param devPhase - Dev phase result
     * @param qaPhase - QA phase result
     * @returns WorkflowResult
     * @private
     */
    buildSuccessResult(startTime, epicPhase, storyPhase, devPhase, qaPhase) {
        const totalDuration = Date.now() - startTime;
        const totalFilesProcessed = (epicPhase?.success ?? 0) + (storyPhase?.success ?? 0) + (devPhase?.success ?? 0) + (qaPhase?.success ?? 0);
        const totalFailures = (epicPhase?.failures.length ?? 0) +
            (storyPhase?.failures.length ?? 0) +
            (devPhase?.failures.length ?? 0) +
            (qaPhase?.failures.length ?? 0);
        const overallSuccess = totalFailures === 0;
        this.logger.info({
            overallSuccess,
            totalDuration,
            totalFailures,
            totalFilesProcessed,
        }, 'Workflow execution completed');
        return {
            devPhase,
            epicPhase,
            overallSuccess,
            qaPhase,
            storyPhase,
            totalDuration,
            totalFailures,
            totalFilesProcessed,
        };
    }
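    // Worked example (hypothetical numbers) for the aggregation above: with
    // epicPhase.success = 2, storyPhase.success = 6, devPhase.success = 6 and a
    // skipped qaPhase (success = 0), totalFilesProcessed = 2 + 6 + 6 + 0 = 14,
    // and overallSuccess is true only when every phase's failures array is empty,
    // since it is derived from totalFailures === 0.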
    /**
     * Check which files already exist in a directory
     *
     * @param directory - Directory to check
     * @param fileNames - List of file names to check
     * @returns List of existing file names
     * @private
     */
    async checkExistingFiles(directory, fileNames) {
        // Parallelize file existence checks for better performance
        const existenceChecks = await Promise.all(fileNames.map(async (fileName) => {
            const filePath = `${directory}/${fileName}`;
            const exists = await this.fileManager.fileExists(filePath);
            return { exists, fileName };
        }));
        // Filter to only existing files
        return existenceChecks.filter((result) => result.exists).map((result) => result.fileName);
    }
    /**
     * Check which story files already exist across all story directories
     *
     * Checks docs/stories, docs/qa/stories, and docs/done/stories to prevent
     * recreating stories that have been moved for QA or marked as done.
     *
     * Stories found in docs/qa/stories or docs/done/stories are considered completed
     * and are skipped, allowing the workflow to continue with the next story number
     * in docs/stories.
     *
     * @param fileNames - List of story file names to check
     * @returns List of existing file names that should be skipped
     * @private
     */
    async checkExistingStoryFiles(fileNames) {
        const allStoryDirs = this.pathResolver.getAllStoryDirs();
        const storyDir = this.pathResolver.getStoryDir();
        // Parallelize story file existence checks across all directories
        const existenceChecks = await Promise.all(fileNames.map(async (fileName) => {
            // Check all directories in parallel for this file
            const dirChecks = await Promise.all(allStoryDirs.map(async (dir) => {
                const filePath = `${dir}/${fileName}`;
                const exists = await this.fileManager.fileExists(filePath);
                return { dir, exists, filePath };
            }));
            // Find first directory where file exists
            const foundIn = dirChecks.find((check) => check.exists);
            if (foundIn) {
                // Determine if story is in active development or completed
                const isInActiveDevelopment = foundIn.dir === storyDir;
                const status = isInActiveDevelopment ? 'in active development' : 'completed (in QA/done)';
                this.logger.info({ directory: foundIn.dir, fileName, filePath: foundIn.filePath, status }, `Story file already exists - ${status}`);
                return { exists: true, fileName };
            }
            return { exists: false, fileName };
        }));
        // Filter to only existing files
        return existenceChecks.filter((result) => result.exists).map((result) => result.fileName);
    }
    /**
     * Check if epic files are properly populated (not just scaffolded)
     *
     * @param directory - Directory containing epic files
     * @param fileNames - List of file names to check
     * @returns List of properly populated file names
     * @private
     */
    async checkProperlyPopulatedEpics(directory, fileNames) {
        // Parallelize epic file checks for better performance
        const populationChecks = await Promise.all(fileNames.map(async (fileName) => {
            const filePath = `${directory}/${fileName}`;
            const exists = await this.fileManager.fileExists(filePath);
            if (exists) {
                // Check if file is properly populated (doesn't contain placeholder text)
                const content = await this.fileManager.readFile(filePath);
                const isScaffolded = content.includes('_[AI Agent will populate');
                if (isScaffolded) {
                    this.logger.warn({
                        fileName,
                        filePath,
                    }, 'Epic file exists but is only scaffolded (will be re-populated)');
                    return { fileName, properlyPopulated: false };
                }
                return { fileName, properlyPopulated: true };
            }
            return { fileName, properlyPopulated: false };
        }));
        // Filter to only properly populated files
        return populationChecks.filter((result) => result.properlyPopulated).map((result) => result.fileName);
    }
    /**
     * Create a skipped phase result
     *
     * @param phaseName - Name of the phase
     * @returns PhaseResult marked as skipped
     * @private
     */
    createSkippedPhaseResult(phaseName) {
        return {
            duration: 0,
            failures: [],
            phaseName,
            skipped: true,
            success: 0,
        };
    }
    /**
     * Detect input type using InputDetector
     *
     * @param input - Input file path or pattern
     * @returns Input detection result
     * @throws {ValidationError} If input is invalid or cannot be detected
     * @private
     */
    async detectInput(input) {
        this.logger.info({ input }, 'Detecting input type');
        try {
            const result = await this.inputDetector.detect(input);
            this.logger.info({
                filePath: result.filePath,
                inputType: result.type,
                metadata: result.metadata,
            }, 'Input detected successfully');
            return result;
        }
        catch (error) {
            this.logger.error({ error: error.message, input }, 'Failed to detect input type');
            throw new ValidationError(`Failed to detect input type: ${error.message}`, {
                input,
                originalError: error.message,
            });
        }
    }
    /**
     * Determine which phases should execute based on input type and skip flags
     *
     * @param detection - Input detection result
     * @param config - Workflow configuration
     * @returns Object with boolean flags for each phase
     * @private
     */
    determinePhaseExecution(detection, config) {
        const shouldExecuteEpicPhase = detection.type === 'prd' && !config.skipEpics;
        const shouldExecuteStoryPhase = !config.skipStories && detection.type !== 'story-pattern';
        const shouldExecuteDevPhase = !config.skipDev;
        const shouldExecuteQaPhase = config.qa === true && shouldExecuteDevPhase;
        return {
            shouldExecuteDevPhase,
            shouldExecuteEpicPhase,
            shouldExecuteQaPhase,
            shouldExecuteStoryPhase,
        };
    }
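    // Example (hypothetical input, not from the package): for a detection of
    // type 'prd' with skipEpics/skipStories/skipDev all false and qa: true,
    // determinePhaseExecution() yields
    //   { shouldExecuteEpicPhase: true, shouldExecuteStoryPhase: true,
    //     shouldExecuteDevPhase: true, shouldExecuteQaPhase: true }
    // whereas a 'story-pattern' input disables both the epic and story phases.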
    /**
     * Individual worker function for processing stories from the queue
     *
     * Continuously dequeues stories and processes them until the queue is closed and empty.
     * Handles errors gracefully without crashing the worker pool.
     *
     * @param workerId - Unique identifier for this worker (for logging)
     * @param queue - StoryQueue to dequeue stories from
     * @param config - Workflow configuration
     * @returns Worker result with success count and failures
     * @private
     */
    /**
     * Dev worker that consumes stories from the queue and processes them concurrently.
     *
     * Worker loop algorithm:
     * 1. Dequeue next story (blocks/waits if queue empty)
     * 2. If null returned, queue is closed and empty → terminate worker
     * 3. For each story:
     *    - Check if already in QA folder (skip if developed)
     *    - Update story status to InProgress
     *    - Execute dev agent with context
     *    - On success: Update status to Done, move to QA
     *    - On failure: Log error, continue to next story
     *    - Respect story interval delay
     * 4. Return aggregate success count and failures
     *
     * @param workerId - Worker identifier for logging and tracking (0-based index)
     * @param queue - StoryQueue to dequeue stories from
     * @param config - Workflow configuration with intervals and references
     * @returns Promise resolving to object with success count and failure array
     * @private
     */
    async devWorker(workerId, queue, config) {
        const workerLogger = this.logger.child({ workerId });
        let successCount = 0;
        const failures = [];
        workerLogger.info('Worker started');
        try {
            const storyDir = await this.pathResolver.getStoryDir();
            const qaStoryDir = await this.pathResolver.getQaStoryDir();
            while (true) {
                // Dequeue next story (waits if queue is empty)
                const story = await queue.dequeue();
                // Queue closed and empty - terminate worker
                if (!story) {
                    workerLogger.info('Queue closed and empty, worker terminating');
                    break;
                }
                workerLogger.info({
                    storyNumber: story.fullNumber,
                    storyTitle: story.title,
                }, 'Worker processing story');
                try {
                    // Use the actual file path from the story object
                    const storyFilePath = story.filePath || `${storyDir}/STORY-${story.fullNumber}.md`;
                    // Extract just the filename for QA folder (preserve the same filename)
                    const storyFileName = storyFilePath.split('/').pop() || `STORY-${story.fullNumber}.md`;
                    const qaFilePath = `${qaStoryDir}/${storyFileName}`;
                    // Check if story is already in QA folder (already developed)
                    const alreadyInQa = await this.fileManager.fileExists(qaFilePath);
                    if (alreadyInQa) {
                        workerLogger.info({
                            qaFilePath,
                            storyNumber: story.fullNumber,
                        }, 'Story already in QA folder, skipping development');
                        successCount++;
                        continue;
                    }
                    const storyExists = await this.fileManager.fileExists(storyFilePath);
                    if (!storyExists) {
                        workerLogger.warn({ storyNumber: story.fullNumber }, `Story file not found ${storyFilePath}, skipping`);
                        failures.push({
                            error: 'Story file not found',
                            identifier: story.fullNumber,
                        });
                        continue;
                    }
                    // Update story status to InProgress
                    await this.updateStoryStatus(storyFilePath, 'InProgress');
                    // Log story developing in pipeline mode
                    const storyStartTime = Date.now();
                    if (this.workflowLogger) {
                        await this.workflowLogger.logStoryDeveloping(story.fullNumber, workerId);
                        // Log verbose transition if enabled
                        if (config.verbose) {
                            this.workflowLogger.logVerboseTransition(story.fullNumber, story.title, 'DEVELOPING', {
                                workerId,
                            });
                        }
                    }
                    // Read story content for type detection
                    const storyContent = await this.fileManager.readFile(storyFilePath);
                    // Detect story type and get auto-documentation references
                    const detection = this.storyTypeDetector.detectStoryType(storyContent);
                    const autoReferences = this.storyTypeDetector.getDocumentationReferences(detection.type);
                    workerLogger.info({
                        autoReferencesCount: autoReferences.length,
                        confidence: detection.confidence,
                        storyNumber: story.fullNumber,
                        storyType: detection.type,
                    }, 'Story type detected, auto-including documentation references');
                    // Build prompt with auto-detected references
                    let prompt = `@.bmad-core/agents/dev.md\n\n`;
                    // Add working directory instruction if specified
                    if (config.cwd) {
                        prompt += `Working directory: ${config.cwd}\n\n`;
                    }
                    prompt += `*develop-story ${storyFilePath}\n\n`;
                    // Combine auto-detected references with user-provided references
                    const allReferences = [...autoReferences, ...(config.references || [])];
                    if (allReferences.length > 0) {
                        prompt += 'References:\n';
                        for (const ref of allReferences) {
                            prompt += `@${ref}\n`;
                        }
                        prompt += '\n';
                    }
                    prompt += '*yolo mode*\n';
                    // Execute dev agent
                    const result = await this.agentRunner.runAgent(prompt, {
                        agentType: 'dev',
                        references: config.references,
                        timeout: 1_800_000, // 30 minutes
                    });
                    if (result.success) {
                        // Update story status to Done
                        await this.updateStoryStatus(storyFilePath, 'Done');
                        // Move to QA folder
                        await this.fileManager.moveFile(storyFilePath, qaFilePath);
                        successCount++;
                        // Log story completed in pipeline mode
                        const storyDuration = Date.now() - storyStartTime;
                        if (this.workflowLogger) {
                            await this.workflowLogger.logStoryCompleted(story.fullNumber, storyDuration, true);
                            // Log verbose transition if enabled
                            if (config.verbose) {
                                this.workflowLogger.logVerboseTransition(story.fullNumber, story.title, 'COMPLETED', {
                                    duration: storyDuration,
                                });
                            }
                        }
                        workerLogger.info({ storyNumber: story.fullNumber }, 'Story development completed successfully');
                    }
                    else {
                        // Log story failed in pipeline mode
                        const storyDuration = Date.now() - storyStartTime;
                        if (this.workflowLogger) {
                            await this.workflowLogger.logStoryCompleted(story.fullNumber, storyDuration, false);
                            // Log verbose transition if enabled
                            if (config.verbose) {
                                this.workflowLogger.logVerboseTransition(story.fullNumber, story.title, 'FAILED', {
                                    duration: storyDuration,
                                });
                            }
                        }
                        workerLogger.error({
                            error: result.errors,
                            storyNumber: story.fullNumber,
                        }, 'Story development failed');
                        failures.push({
                            error: result.errors,
                            identifier: story.fullNumber,
                        });
                    }
                }
                catch (error) {
                    // Catch errors for individual stories to prevent worker crash
                    workerLogger.error({
                        error: error.message,
                        storyNumber: story.fullNumber,
                    }, 'Error processing story, continuing with next story');
                    failures.push({
                        error: error.message,
                        identifier: story.fullNumber,
                    });
                }
                // Wait for configured interval before next story
                if (config.storyInterval > 0) {
                    workerLogger.debug({ interval: config.storyInterval }, 'Waiting before next story');
                    await this.sleep(config.storyInterval * 1000);
                }
            }
            workerLogger.info({
                failureCount: failures.length,
                successCount,
            }, 'Worker completed');
            return { failures, success: successCount };
        }
        catch (error) {
            workerLogger.error({ error: error.message }, 'Worker failed with unhandled error');
            // Return partial results even on worker failure
            return { failures, success: successCount };
        }
    }
    /**
     * Enqueue existing stories sequentially for pipeline mode
     *
     * This method MUST process stories sequentially (not in parallel) to preserve
     * story order in the queue. The development phase processes stories in order,
     * so we must maintain that order when enqueuing existing stories.
     *
     * @param stories - List of stories to enqueue
     * @param storyDir - Story directory path
     * @param prefix - File name prefix
     * @param onStoryComplete - Callback to enqueue each story
     * @private
     */
    async enqueueExistingStoriesSequentially(stories, storyDir, prefix, onStoryComplete) {
        this.logger.info({ storyCount: stories.length }, 'Enqueuing existing stories for development');
        // MUST be sequential - do not parallelize this loop
        // Story order must be preserved for correct queue processing
        for (const story of stories) {
            const storyFileName = this.generateStoryFileName(prefix, story.fullNumber);
            const storyFilePath = `${storyDir}/${storyFileName}`;
            const metadata = {
                epicNumber: story.epicNumber,
                filePath: storyFilePath,
                id: story.fullNumber,
                number: story.fullNumber,
                status: 'Ready',
                storyNumber: story.number,
                title: story.title,
                type: 'epic-based',
            };
            try {
                await onStoryComplete(metadata);
            }
            catch (error) {
                this.logger.error({
                    error: error.message,
                    storyNumber: story.fullNumber,
                }, 'Failed to enqueue existing story, continuing');
            }
        }
    }
    /**
     * Execute development phase
     *
     * Executes story development sequentially using ClaudeAgentRunner.
     * Updates story status and moves completed stories to QA folder.
     *
     * @param config - Workflow configuration
     * @param stories - List of stories to develop
     * @returns PhaseResult with success count, failures, and duration
     * @private
     */
    async executeDevelopmentPhase(config, stories) {
        const startTime = Date.now();
        const failures = [];
        let successCount = 0;
        this.logger.info({
            interval: config.storyInterval,
            storyCount: stories.length,
        }, 'Starting development phase');
        try {
            const storyDir = await this.pathResolver.getStoryDir();
            const qaStoryDir = await this.pathResolver.getQaStoryDir();
            // Execute stories sequentially (NOT parallel - development must be sequential)
            for (const story of stories) {
                // Use the actual file path from the story object (already includes correct prefix)
                const storyFilePath = story.filePath || `${storyDir}/STORY-${story.fullNumber}.md`;
                // Extract just the filename for QA folder (preserve the same filename)
                const storyFileName = storyFilePath.split('/').pop() || `STORY-${story.fullNumber}.md`;
                const qaFilePath = `${qaStoryDir}/${storyFileName}`;
                // Check if story is already in QA folder (already developed)
                const alreadyInQa = await this.fileManager.fileExists(qaFilePath);
                if (alreadyInQa) {
                    this.logger.info({
                        qaFilePath,
                        storyNumber: story.fullNumber,
                    }, 'Story already in QA folder, skipping development');
                    successCount++;
                    continue;
                }
                const storyExists = await this.fileManager.fileExists(storyFilePath);
                if (!storyExists) {
                    this.logger.warn({ storyNumber: story.fullNumber }, `Story file not found ${storyFilePath}, skipping`);
                    failures.push({
                        error: 'Story file not found',
                        identifier: story.fullNumber,
                    });
                    continue;
                }
                this.logger.info({ storyNumber: story.fullNumber }, 'Developing story');
                // Update story status to InProgress
                await this.updateStoryStatus(storyFilePath, 'InProgress');
                // Read story content for type detection
                const storyContent = await this.fileManager.readFile(storyFilePath);
                // Detect story type and get auto-documentation references
                const detection = this.storyTypeDetector.detectStoryType(storyContent);
                const autoReferences = this.storyTypeDetector.getDocumentationReferences(detection.type);
                this.logger.info({
                    autoReferencesCount: autoReferences.length,
                    confidence: detection.confidence,
                    matchedKeywordsCount: detection.matchedKeywords.length,
                    storyNumber: story.fullNumber,
                    storyType: detection.type,
                }, 'Story type detected, auto-including documentation references');
                // Build prompt with auto-detected references
                let prompt = `@.bmad-core/agents/dev.md\n\n`;
                // Add working directory instruction if specified
                if (config.cwd) {
                    prompt += `Working directory: ${config.cwd}\n\n`;
                }
                prompt += `Implement story: ${storyFilePath}\n\n`;
                // Combine auto-detected references with user-provided references
                const allReferences = [...autoReferences, ...(config.references || [])];
                if (allReferences.length > 0) {
                    prompt += 'References:\n';
                    for (const ref of allReferences) {
                        prompt += `@${ref}\n`;
                    }
                    prompt += '\n';
                }
                prompt += '*yolo mode*\n';
                const result = await this.agentRunner.runAgent(prompt, {
                    agentType: 'dev',
                    references: config.references,
                    timeout: 1_800_000, // 30 minutes
                });
                if (result.success) {
                    // Update story status to Done
                    await this.updateStoryStatus(storyFilePath, 'Done');
                    // Move to QA folder (qaFilePath already defined above with correct filename)
                    await this.fileManager.moveFile(storyFilePath, qaFilePath);
                    successCount++;
                    this.logger.info({ storyNumber: story.fullNumber }, 'Story development completed successfully');
                }
                else {
                    this.logger.error({
                        error: result.errors,
                        storyNumber: story.fullNumber,
                    }, 'Story development failed');
                    failures.push({
                        error: result.errors,
                        identifier: story.fullNumber,
                    });
                }
                // Wait for configured interval before next story
                if (config.storyInterval > 0 && stories.indexOf(story) < stories.length - 1) {
                    this.logger.debug({ interval: config.storyInterval }, 'Waiting before next story');
                    await this.sleep(config.storyInterval * 1000);
                }
            }
            const duration = Date.now() - startTime;
            this.logger.info({
                duration,
                failures: failures.length,
                success: successCount,
            }, 'Development phase completed');
            return {
                duration,
                failures,
                phaseName: 'dev',
                skipped: false,
                success: successCount,
            };
        }
        catch (error) {
            this.logger.error({ error: error.message }, 'Development phase failed');
            return {
                duration: Date.now() - startTime,
                failures: [
                    ...failures,
                    {
                        error: error.message,
                        identifier: 'dev-phase',
                    },
                ],
                phaseName: 'dev',
                skipped: false,
                success: successCount,
            };
        }
    }
    /**
     * Execute epic creation phase
     *
     * Parses PRD file, extracts epics, and creates epic markdown files
     * using ClaudeAgentRunner and BatchProcessor.
     *
     * @param config - Workflow configuration
     * @param prdFilePath - Path to PRD file
     * @returns PhaseResult with success count, failures, and duration
     * @private
     */
    async executeEpicPhase(config, prdFilePath) {
        const startTime = Date.now();
        const failures = [];
        let successCount = 0;
        this.logger.info({
            interval: config.prdInterval,
            parallel: config.parallel,
            prdFile: prdFilePath,
        }, 'Starting epic creation phase');
        try {
            // Read PRD file
            let prdContent = await this.fileManager.readFile(prdFilePath);
            // Generate prefix from PRD filename if not provided
            const prefix = config.prefix || this.generatePrefixFromPrdPath(prdFilePath);
            if (!config.prefix) {
                this.logger.info({ generatedPrefix: prefix, prdPath: prdFilePath }, 'No prefix provided, generated from PRD filename');
            }
            // Parse epics from PRD (with auto-fix support)
            let epics;
            try {
                epics = this.prdParser.parseEpics(prdContent, prdFilePath);
            }
            catch (parseError) {
                // If parsing fails and auto-fix is enabled, attempt to fix the PRD
                if (config.autoFix && parseError instanceof ParserError) {
                    this.logger.warn({ error: parseError.message, prdPath: prdFilePath }, 'PRD parsing failed, attempting auto-fix');
                    const fixResult = await this.prdFixer.fixPrd(prdFilePath, prdContent, config.references);
                    if (fixResult.fixed) {
                        this.logger.info({ prdPath: prdFilePath }, 'PRD auto-fixed successfully, retrying parse');
                        prdContent = fixResult.content;
                        // Retry parsing with fixed content
                        epics = this.prdParser.parseEpics(prdContent, prdFilePath);
                    }
                    else {
                        this.logger.error({ error: fixResult.error, prdPath: prdFilePath }, 'PRD auto-fix failed');
                        throw parseError;
                    }
                }
                else {
                    throw parseError;
                }
            }
            this.logger.info({ epicCount: epics.length }, 'Epics extracted from PRD');
            if (epics.length === 0) {
                this.logger.warn('No epics found in PRD file');
                return {
                    duration: Date.now() - startTime,
                    failures: [],
                    phaseName: 'epic',
                    skipped: false,
                    success: 0,
                };
            }
            // Get epic directory
            const epicDir = await this.pathResolver.getEpicDir();
            // Check which epic files are properly populated (not just scaffolded)
            const properlyPopulatedEpics = await this.checkProperlyPopulatedEpics(epicDir, epics.map((e) => this.generateEpicFileName(prefix, e.number)));
            // Filter out properly populated epics (exclude scaffolded-only files)
            const epicsToCreate = epics.filter((epic) => !properlyPopulatedEpics.includes(this.generateEpicFileName(prefix, epic.number)));
            if (epicsToCreate.length === 0) {
                this.logger.info('All epics already exist and are properly populated, skipping creation');
                return {
                    duration: Date.now() - startTime,
                    failures: [],
                    phaseName: 'epic',
                    skipped: false,
                    success: epics.length,
                };
            }
            this.logger.info({
                epicsProperlyPopulated: properlyPopulatedEpics.length,
                epicsToCreate: epicsToCreate.length,
            }, 'Creating/re-populating epic files');
            // Create epic files using BatchProcessor and ClaudeAgentRunner
            const results = await this.batchProcessor.processBatch(epicsToCreate, async (epic) => {
                // Generate epic file path with prefix
                const epicFileName = this.generateEpicFileName(prefix, epic.number);
                const epicFilePath = `${epicDir}/${epicFileName}`;
                // Check if file already exists and is properly populated
                const fileExists = await this.fileManager.fileExists(epicFilePath);
                if (fileExists) {
                    // Check if the file is properly populated (has actual story entries, not placeholder text)
                    const existingContent = await this.fileManager.readFile(epicFilePath);
                    const isScaffolded = existingContent.includes('_[AI Agent will populate');
                    if (!isScaffolded) {
                        // File is properly populated, skip recreation
                        this.logger.info({
                            epicNumber: epic.number,
                            epicTitle: epic.title,
                            filePath: epicFilePath,
                        }, 'Epic file already exists and is properly populated, skipping creation');
                        return epicFilePath;
                    }
                    // File exists but is only scaffolded (incomplete) - will re-populate it
                    this.logger.warn({
                        epicNumber: epic.number,
                        epicTitle: epic.title,
                        filePath: epicFilePath,
                    }, 'Epic file exists but is only scaffolded (incomplete), re-populating with Claude agent');
                }
                // Step 1: Create scaffolded file with structured sections and populated metadata (if not exists)
                const scaffoldedContent = this.fileScaffolder.scaffoldEpic({
                    epicNumber: epic.number,
                    epicTitle: epic.title,
                    prefix,
                });
                if (fileExists) {
                    this.logger.info({
                        epicNumber: epic.number,
                        epicTitle: epic.title,
                        filePath: epicFilePath,
                    }, 'Using existing scaffolded epic file');
                }
                else {
                    await this.fileManager.writeFile(epicFilePath, scaffoldedContent);
                    this.logger.info({
                        epicNumber: epic.number,
                        epicTitle: epic.title,
                        filePath: epicFilePath,
                    }, 'Epic scaffolded file created');
                }
                // Step 2: Build Claude prompt to populate the scaffolded file
                const prompt = this.buildEpicPrompt(epic, {
                    cwd: config.cwd,
                    outputPath: epicFilePath,
                    prdPath: prdFilePath,
                    prefix,
                    references: config.references,
                });
                // Log prompt if verbose
                if (config.verbose) {
                    this.logger.info({
                        epicNumber: epic.number,
                        epicTitle: epic.title,
                        outputPath: epicFilePath,
                        prompt,
                    }, 'Claude Prompt (Epic)');
                }
                // Step 3: Run Claude agent to populate content sections
                const result = await this.agentRunner.runAgent(prompt, {
                    agentType: 'architect',
                    references: config.references,
                    timeout: 1_800_000, // 30 minutes
                });
                // Log output if verbose
                if (config.verbose) {
                    this.logger.info({
                        duration: result.duration,
                        epicNumber: epic.number,
                        errors: result.errors,
                        output: result.output,
                        outputLength: result.output.length,
                        success: result.success,
                    }, 'Claude Response (Epic)');
                }
                if (!result.success) {
                    throw new Error(result.errors);
                }
                // Step 4: Verify file was updated by Claude
                const updatedContent = await this.fileManager.readFile(epicFilePath);
                if (updatedContent === scaffoldedContent) {
                    throw new Error(`Claude did not update the epic file at ${epicFilePath}`);
                }
                return epicFilePath;
            }, (info) => {
                this.logger.info({
                    completedItems: info.completedItems,
                    currentBatch: info.currentBatch,
                    totalBatches: info.totalBatches,
                    totalItems: info.totalItems,
                }, 'Epic creation progress');
            });
            // Track successes and failures
            for (const [i, result] of results.entries()) {
                if (result.success) {
                    successCount++;
                }
                else {
                    failures.push({
                        error: result.error?.message ?? 'Unknown error',
                        identifier: `${epicsToCreate[i].number}`,
                    });
                }
            }
            // Add properly populated epics to success count
            successCount += properlyPopulatedEpics.length;
            const duration = Date.now() - startTime;
            this.logger.info({
                duration,
                failures: failures.length,
                success: successCount,
            }, 'Epic creation phase completed');
            return {
                duration,
                failures,
                phaseName: 'epic',
                skipped: false,
                success: successCount,
            };
        }
        catch (error) {
            this.logger.error({ error: error.message }, 'Epic phase failed');
            return {
                duration: Date.now() - startTime,
                failures: [
                    ...failures,
                    {
                        error: error.message,
                        identifier: 'epic-phase',
                    },
                ],
                phaseName: 'epic',
                skipped: false,
                success: successCount,
            };
        }
    }
    /**
     * Execute epic phase if needed
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @param shouldExecute - Whether to execute epic phase
     * @returns Epic phase result or skipped result
     * @private
     */
    async executeEpicPhaseIfNeeded(config, detection, shouldExecute) {
        if (!shouldExecute) {
            return this.createSkippedPhaseResult('epic');
        }
        if (config.dryRun) {
            this.logger.info('[DRY RUN] Would execute epic creation phase');
            return this.createSkippedPhaseResult('epic');
        }
        return this.executeEpicPhase(config, detection.filePath);
    }
    /**
     * Execute pipelined development phase
     *
     * Executes story development using concurrent workers that consume from a StoryQueue.
     * Each worker dequeues stories, executes dev work, and updates story status independently.
     * Workers terminate when the queue is closed and empty.
     *
     * @param queue - StoryQueue to consume stories from
     * @param config - Workflow configuration
     * @returns PhaseResult with aggregated success count, failures, and duration
     * @private
     */
    /**
     * Execute pipelined development phase with worker pool.
     *
     * Pipeline coordination algorithm:
     * 1. Create worker pool of size config.parallel (default: 3 workers)
     * 2. Each worker independently dequeues from StoryQueue
     * 3. Workers run concurrently using Promise.all
     * 4. Natural load balancing via FIFO queue
     * 5. Workers terminate when queue closed and empty
     * 6. Aggregate results from all workers
     *
     * Performance characteristics:
     * - Concurrent story development (up to N workers processing simultaneously)
     * - Queue-based coordination for thread-safe handoff
     * - No worker starvation due to FIFO fairness
     * - Graceful completion when story phase closes queue
     *
     * @param queue - StoryQueue for consuming completed stories
     * @param config - Workflow configuration with worker count and intervals
     * @returns PhaseResult with aggregate success/failure counts and duration
     * @private
     */
    async executePipelinedDevPhase(queue, config) {
        const startTime = Date.now();
        // PIPELINE MODE: Use single worker for sequential story processing
        // Stories are processed in order, one at a time as they become available
        const workerCount = 1;
        this.logger.info({
            interval: config.storyInterval,
            mode: 'sequential',
            workerCount,
        }, 'Starting pipelined development phase (sequential processing)');
        try {
            // Create single worker for sequential processing
            const workers = Array.from({ length: workerCount }, (_, i) => this.devWorker(i, queue, config));
            // Wait for worker to complete
            const workerResults = await Promise.all(workers);
            // Aggregate results from all workers
            let totalSuccess = 0;
            const allFailures = [];
            for (const result of workerResults) {
                totalSuccess += result.success;
                allFailures.push(...result.failures);
            }
            const duration = Date.now() - startTime;
            this.logger.info({
                duration,
                failures: allFailures.length,
                success: totalSuccess,
                workerCount,
            }, 'Pipelined development phase completed');
            return {
                duration,
                failures: allFailures,
                phaseName: 'dev',
                skipped: false,
                success: totalSuccess,
            };
        }
        catch (error) {
            this.logger.error({ error: error.message }, 'Pipelined development phase failed');
            return {
                duration: Date.now() - startTime,
                failures: [
                    {
                        error: error.message,
                        identifier: 'dev-phase',
                    },
                ],
                phaseName: 'dev',
                skipped: false,
                success: 0,
            };
        }
    }
    /**
     * Execute pipelined workflow (story and dev phases in parallel)
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @returns Story and dev phase results
     * @private
     */
    async executePipelinedWorkflow(config, detection) {
        this.logger.info('Executing pipelined workflow (sequential dev starts as stories are created)');
        const queue = new StoryQueue(this.logger);
        try {
            const storyPhasePromise = this.executeStoryPhaseWithQueue(config, detection, queue);
            const devPhasePromise = this.executePipelinedDevPhase(queue, config);
            const [storyResult, devResult] = await Promise.allSettled([storyPhasePromise, devPhasePromise]);
            return {
                devPhase: this.handlePhaseResult(devResult, 'dev'),
                storyPhase: this.handlePhaseResult(storyResult, 'story'),
            };
        }
        catch (error) {
            this.logger.error({ error: error.message }, 'Pipelined execution failed');
            return {
                devPhase: this.createSkippedPhaseResult('dev'),
                storyPhase: this.createSkippedPhaseResult('story'),
            };
        }
    }
    /**
     * Execute QA phase
     *
     * Runs QA workflow on all stories in the QA folder.
     * Dynamically imports and delegates to StoriesQaCommand.
     *
     * @param config - Workflow configuration
     * @returns PhaseResult with success count, failures, and duration
     * @private
     */
    async executeQaPhase(config) {
        const startTime = Date.now();
        const failures = [];
        let successCount = 0;
        this.logger.info({
            qaRetries: config.qaRetries ?? 2,
        }, 'Starting QA phase');
        try {
            // Get QA story directory
            const qaStoryDir = await this.pathResolver.getQaStoryDir();
            // Find all stories in QA folder
            const storyPattern = '*.md';
            const storyFiles = await this.fileManager.listFiles(qaStoryDir, storyPattern);
            if (storyFiles.length === 0) {
                this.logger.info('No stories found in QA folder, skipping QA phase');
                return {
                    duration: Date.now() - startTime,
                    failures: [],
                    phaseName: 'qa',
                    skipped: true,
                    success: 0,
                };
            }
            this.logger.info({ storyCount: storyFiles.length }, 'Found stories for QA phase');
            // Dynamically import QA command to avoid circular dependencies
            const { default: StoriesQaCommand } = await import('../../commands/stories/qa.js');
            // Process each story through QA
            for (const storyFile of storyFiles) {
                const storyPath = `${qaStoryDir}/${storyFile}`;
                this.logger.info({ storyPath }, 'Running QA workflow for story');
                try {
                    // Build args for QA command
                    const qaArgs = [storyPath];
                    // Build flags
                    const qaFlags = [
                        `--max-retries=${config.qaRetries ?? 2}`,
                        `--interval=${config.storyInterval}`,
                        `--provider=${config.provider ?? 'claude'}`,
                    ];
                    if (config.qaPrompt) {
                        qaFlags.push(`--qa-prompt=${config.qaPrompt}`);
                    }
                    if (config.references && config.references.length > 0) {
                        for (const ref of config.references) {
                            qaFlags.push(`--reference=${ref}`);
                        }
                    }
                    // Run QA command
                    await StoriesQaCommand.run([...qaArgs, ...qaFlags]);
                    successCount++;
                    this.logger.info({ storyPath }, 'QA workflow completed successfully for story');
                }
                catch (error) {
                    const err = error;
                    this.logger.error({ error: err, storyPath }, 'QA workflow failed for story');
                    failures.push({
                        error: err.message,
                        identifier: storyFile,
                    });
                }
            }
            const duration = Date.now() - startTime;
            this.logger.info({
                duration,
                failures: failures.length,
                success: successCount,
            }, 'QA phase completed');
            return {
                duration,
                failures,
                phaseName: 'qa',
                skipped: false,
                success: successCount,
            };
        }
        catch (error) {
            this.logger.error({ error: error.message }, 'QA phase failed');
            return {
                duration: Date.now() - startTime,
                failures: [
                    ...failures,
                    {
                        error: error.message,
                        identifier: 'qa-phase',
                    },
                ],
                phaseName: 'qa',
                skipped: false,
                success: successCount,
            };
        }
    }
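    // Illustrative example (not part of this file): for a hypothetical story file at
    // docs/stories/qa/USER-GROUPS-story-1.001.md with qaRetries=2, storyInterval=5000
    // and the default 'claude' provider, the call above receives an argv like:
    //
    //     await StoriesQaCommand.run([
    //         'docs/stories/qa/USER-GROUPS-story-1.001.md',
    //         '--max-retries=2',
    //         '--interval=5000',
    //         '--provider=claude',
    //     ]);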
    /**
     * Execute QA phase if needed
     *
     * @param config - Workflow configuration
     * @param devPhase - Dev phase result
     * @param shouldExecute - Whether to execute QA phase
     * @returns QA phase result
     * @private
     */
    async executeQaPhaseIfNeeded(config, devPhase, shouldExecute) {
        if (!shouldExecute || !devPhase || devPhase.success === 0) {
            return this.createSkippedPhaseResult('qa');
        }
        if (config.dryRun) {
            this.logger.info('[DRY RUN] Would execute QA phase');
            return this.createSkippedPhaseResult('qa');
        }
        return this.executeQaPhase(config);
    }
    /**
     * Execute dev phase in sequential mode
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @param shouldExecute - Whether to execute dev phase
     * @returns Dev phase result
     * @private
     */
    async executeSequentialDevPhase(config, detection, shouldExecute) {
        if (!shouldExecute) {
            return this.createSkippedPhaseResult('dev');
        }
        if (config.dryRun) {
            this.logger.info('[DRY RUN] Would execute development phase');
            return this.createSkippedPhaseResult('dev');
        }
        const stories = await this.getStoriesForDevPhase(config, detection);
        return this.executeDevelopmentPhase(config, stories);
    }
    /**
     * Execute story phase in sequential mode
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @param epicPhase - Epic phase result
     * @param startTime - Workflow start time
     * @returns Story phase result
     * @private
     */
    async executeSequentialStoryPhase(config, detection, _epicPhase, _startTime) {
        if (config.skipStories || detection.type === 'story-pattern') {
            return this.createSkippedPhaseResult('story');
        }
        if (config.dryRun) {
            this.logger.info('[DRY RUN] Would execute story creation phase');
            return this.createSkippedPhaseResult('story');
        }
        if (detection.type === 'epic') {
            return this.executeStoryPhaseFromEpicFile(config, detection.filePath);
        }
        const epics = await this.getEpicsForStoryPhase(config, detection);
        return this.executeStoryPhase(config, epics);
    }
    /**
     * Execute sequential workflow (phases one after another)
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @param epicPhase - Epic phase result
     * @param phaseFlags - Phase execution flags
     * @param startTime - Workflow start time
     * @returns Story and dev phase results
     * @private
     */
    async executeSequentialWorkflow(config, detection, epicPhase, phaseFlags, startTime) {
        this.logger.info('Executing sequential workflow (backward compatibility mode)');
        const storyPhase = await this.executeSequentialStoryPhase(config, detection, epicPhase, startTime);
        // If story phase failed completely, skip dev phase
        if (storyPhase && this.shouldAbortAfterStoryFailure(storyPhase)) {
            this.logger.warn('Story phase failed completely, skipping dev phase');
            return { devPhase: this.createSkippedPhaseResult('dev'), storyPhase };
        }
        const devPhase = await this.executeSequentialDevPhase(config, detection, phaseFlags.shouldExecuteDevPhase);
        return { devPhase, storyPhase };
    }
    /**
     * Execute story and dev phases (pipelined or sequential)
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @param epicPhase - Epic phase result
     * @param phaseFlags - Phase execution flags
     * @param startTime - Workflow start time
     * @returns Story and dev phase results
     * @private
     */
    async executeStoryAndDevPhases(config, detection, epicPhase, phaseFlags, startTime) {
        const shouldPipeline = config.pipeline && phaseFlags.shouldExecuteStoryPhase && phaseFlags.shouldExecuteDevPhase && !config.dryRun;
        if (shouldPipeline) {
            return this.executePipelinedWorkflow(config, detection);
        }
        return this.executeSequentialWorkflow(config, detection, epicPhase, phaseFlags, startTime);
    }
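    // Illustrative example (not part of this file): with a config of
    // { pipeline: true, dryRun: false } and phaseFlags of
    // { shouldExecuteStoryPhase: true, shouldExecuteDevPhase: true },
    // shouldPipeline evaluates to true and the pipelined path is taken; enabling
    // dryRun or skipping either phase falls back to the sequential workflow.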
    /**
     * Execute story creation phase
     *
     * Parses epic files, extracts stories, and creates story markdown files
     * using ClaudeAgentRunner and BatchProcessor.
     *
     * @param config - Workflow configuration
     * @param epics - List of epics to extract stories from
     * @param onStoryComplete - Optional callback invoked after each story is successfully created
     * @returns PhaseResult with success count, failures, and duration
     * @private
     */
    async executeStoryPhase(config, epics, onStoryComplete) {
        const startTime = Date.now();
        const failures = [];
        let successCount = 0;
        // In pipeline mode, stories must be created sequentially (one at a time)
        // to ensure they are queued in the correct order for development
        const isPipelineMode = Boolean(onStoryComplete);
        const effectiveParallel = isPipelineMode ? 1 : config.parallel;
        this.logger.info({
            epicCount: epics.length,
            interval: config.epicInterval,
            mode: isPipelineMode ? 'sequential (pipeline)' : 'parallel',
            parallel: effectiveParallel,
        }, 'Starting story creation phase');
        try {
            const allStories = [];
            // Get epic directory
            const epicDir = await this.pathResolver.getEpicDir();
            // Generate prefix from PRD filename if not provided (same as epic creation)
            let { prefix } = config;
            if (!prefix) {
                prefix = this.generatePrefixFromPrdPath(config.input);
                this.logger.info({ generatedPrefix: prefix, prdPath: config.input }, 'No prefix provided, generated from PRD filename');
            }
            // Parse stories from each epic in parallel for better performance
            const epicStories = await Promise.all(epics.map(async (epic) => {
                const epicFileName = this.generateEpicFileName(prefix, epic.number);
                const epicFilePath = `${epicDir}/${epicFileName}`;
                const epicExists = await this.fileManager.fileExists(epicFilePath);
                if (!epicExists) {
                    this.logger.warn({ epicNumber: epic.number }, 'Epic file not found, skipping');
                    return [];
                }
                const epicContent = await this.fileManager.readFile(epicFilePath);
                const stories = this.epicParser.parseStories(epicContent, epicFilePath);
                return stories;
            }));
            // Flatten the array of story arrays
            allStories.push(...epicStories.flat());
            this.logger.info({ storyCount: allStories.length }, 'Stories extracted from epics');
            if (allStories.length === 0) {
                this.logger.warn('No stories found in epic files');
                return {
                    duration: Date.now() - startTime,
                    failures: [],
                    phaseName: 'story',
                    skipped: false,
                    success: 0,
                };
            }
            // Get story directory
            const storyDir = await this.pathResolver.getStoryDir();
            // Check existing story files across all story directories
            const storyFileNames = allStories.map((s) => this.generateStoryFileName(prefix, s.fullNumber));
            const existingStories = await this.checkExistingStoryFiles(storyFileNames);
            // Filter out existing stories
            const storiesToCreate = allStories.filter((story) => !existingStories.includes(this.generateStoryFileName(prefix, story.fullNumber)));
            if (storiesToCreate.length === 0) {
                this.logger.info('All stories already exist, skipping creation');
                // In pipeline mode, enqueue existing stories for development
                if (onStoryComplete) {
                    await this.enqueueExistingStoriesSequentially(allStories, storyDir, prefix, onStoryComplete);
                }
                return {
                    duration: Date.now() - startTime,
                    failures: [],
                    phaseName: 'story',
                    skipped: false,
                    success: allStories.length,
                };
            }
            this.logger.info({
                mode: isPipelineMode ? 'sequential (pipeline)' : 'parallel',
                parallel: effectiveParallel,
                storiesSkipped: existingStories.length,
                storiesToCreate: storiesToCreate.length,
            }, 'Creating story files');
            // Create appropriate batch processor based on mode
            // Pipeline mode requires sequential processing to maintain story order in queue
            const storyBatchProcessor = isPipelineMode
                ? new BatchProcessor(effectiveParallel, 0, this.logger)
                : this.batchProcessor;
            // Create story files using BatchProcessor and ClaudeAgentRunner
            const results = await storyBatchProcessor.processBatch(storiesToCreate, async (story) => {
                // Generate story file path with prefix
                const storyFileName = this.generateStoryFileName(prefix, story.fullNumber);
                const storyFilePath = `${storyDir}/${storyFileName}`;
                // Check if file already exists (might have been created by parallel process)
                const fileExists = await this.fileManager.fileExists(storyFilePath);
                if (fileExists) {
                    this.logger.info({
                        filePath: storyFilePath,
                        storyNumber: story.fullNumber,
                        storyTitle: story.title,
                    }, 'Story file already exists, skipping creation');
                    return storyFilePath;
                }
                // Step 1: Create scaffolded file with structured sections and populated metadata
                const scaffoldedContent = this.fileScaffolder.scaffoldStory({
                    epicNumber: story.epicNumber,
                    storyNumber: story.number,
                    storyTitle: story.title,
                });
                await this.fileManager.writeFile(storyFilePath, scaffoldedContent);
                this.logger.info({
                    filePath: storyFilePath,
                    storyNumber: story.fullNumber,
                    storyTitle: story.title,
                }, 'Story scaffolded file created');
                // Step 2: Generate epic file path for this story
                const epicFileName = this.generateEpicFileName(prefix, story.epicNumber);
                const epicFilePath = `${epicDir}/${epicFileName}`;
                // Step 3: Build Claude prompt to populate the scaffolded file
                const prompt = this.buildStoryPrompt(story, {
                    cwd: config.cwd,
                    epicPath: epicFilePath,
                    outputPath: storyFilePath,
                    prefix,
                    references: config.references,
                });
                // Log prompt if verbose
                if (config.verbose) {
                    this.logger.info({
                        outputPath: storyFilePath,
                        prompt,
                        storyNumber: story.fullNumber,
                        storyTitle: story.title,
                    }, 'Claude Prompt (Story)');
                }
                // Step 4: Run Claude agent to populate content sections
                const result = await this.agentRunner.runAgent(prompt, {
                    agentType: 'sm',
                    references: config.references,
                    timeout: 1_800_000, // 30 minutes
                });
                // Log output if verbose
                if (config.verbose) {
                    this.logger.info({
                        duration: result.duration,
                        errors: result.errors,
                        output: result.output,
                        outputLength: result.output.length,
                        storyNumber: story.fullNumber,
                        success: result.success,
                    }, 'Claude Response (Story)');
                }
                if (!result.success) {
                    throw new Error(result.errors);
                }
                // Step 5: Verify file was updated by Claude
                const updatedContent = await this.fileManager.readFile(storyFilePath);
                if (updatedContent === scaffoldedContent) {
                    throw new Error(`Claude did not update the story file at ${storyFilePath}`);
                }
                // Invoke callback after successful story creation
                if (onStoryComplete) {
                    try {
                        const metadata = {
                            epicNumber: story.epicNumber,
                            filePath: storyFilePath,
                            id: story.fullNumber,
                            number: story.fullNumber,
                            status: 'Draft',
                            storyNumber: story.number,
                            title: story.title,
                            type: 'epic-based',
                        };
                        this.logger.debug({
                            metadata,
                            storyNumber: story.fullNumber,
                        }, 'Invoking story completion callback');
                        await onStoryComplete(metadata);
                    }
                    catch (error) {
                        this.logger.error({
                            error: error.message,
                            storyNumber: story.fullNumber,
                        }, 'Story completion callback failed, continuing story creation');
                    }
                }
                return storyFilePath;
            }, (info) => {
                this.logger.info({
                    completedItems: info.completedItems,
                    currentBatch: info.currentBatch,
                    totalBatches: info.totalBatches,
                    totalItems: info.totalItems,
                }, 'Story creation progress');
            });
            // Track successes and failures
            for (const [i, result] of results.entries()) {
                if (result.success) {
                    successCount++;
                }
                else {
                    failures.push({
                        error: result.error?.message ?? 'Unknown error',
                        identifier: storiesToCreate[i].fullNumber,
                    });
                }
            }
            // Add existing stories to success count
            successCount += existingStories.length;
            const duration = Date.now() - startTime;
            this.logger.info({
                duration,
                failures: failures.length,
                success: successCount,
            }, 'Story creation phase completed');
            return {
                duration,
                failures,
                phaseName: 'story',
                skipped: false,
                success: successCount,
            };
        }
        catch (error) {
            this.logger.error({ error: error.message }, 'Story phase failed');
            return {
                duration: Date.now() - startTime,
                failures: [
                    ...failures,
                    {
                        error: error.message,
                        identifier: 'story-phase',
                    },
                ],
                phaseName: 'story',
                skipped: false,
                success: successCount,
            };
        }
    }
    /**
     * Execute story phase from a single epic file
     *
     * Parses stories from a specific epic file and creates story files.
     *
     * @param config - Workflow configuration
     * @param epicFilePath - Path to the epic file
     * @param onStoryComplete - Optional callback invoked after each story is successfully created
     * @returns PhaseResult with success count, failures, and duration
     * @private
     */
    async executeStoryPhaseFromEpicFile(config, epicFilePath, onStoryComplete) {
        const startTime = Date.now();
        const failures = [];
        let successCount = 0;
        // In pipeline mode, stories must be created sequentially (one at a time)
        // to ensure they are queued in the correct order for development
        const isPipelineMode = Boolean(onStoryComplete);
        const effectiveParallel = isPipelineMode ? 1 : config.parallel;
        this.logger.info({
            epicFilePath,
            mode: isPipelineMode ? 'sequential (pipeline)' : 'parallel',
            parallel: effectiveParallel,
        }, 'Starting story creation from epic file');
        try {
            // Read and parse stories from epic file
            const epicContent = await this.fileManager.readFile(epicFilePath);
            const allStories = this.epicParser.parseStories(epicContent, epicFilePath);
            // Generate prefix from epic filename if not provided
            const prefix = config.prefix || this.generatePrefixFromEpicPath(epicFilePath);
            if (!config.prefix) {
                this.logger.info({ epicPath: epicFilePath, generatedPrefix: prefix }, 'No prefix provided, generated from epic filename');
            }
            this.logger.info({ storyCount: allStories.length }, 'Stories extracted from epic');
            if (allStories.length === 0) {
                this.logger.warn('No stories found in epic file');
                return {
                    duration: Date.now() - startTime,
                    failures: [],
                    phaseName: 'story',
                    skipped: false,
                    success: 0,
                };
            }
            // Get story directory
            const storyDir = await this.pathResolver.getStoryDir();
            // Check existing story files across all story directories
            const storyFileNames = allStories.map((s) => this.generateStoryFileName(prefix, s.fullNumber));
            const existingStories = await this.checkExistingStoryFiles(storyFileNames);
            // Filter out existing stories
            const storiesToCreate = allStories.filter((story) => !existingStories.includes(this.generateStoryFileName(prefix, story.fullNumber)));
            if (storiesToCreate.length === 0) {
                this.logger.info('All stories already exist, skipping creation');
                // In pipeline mode, enqueue existing stories for development
                if (onStoryComplete) {
                    await this.enqueueExistingStoriesSequentially(allStories, storyDir, prefix, onStoryComplete);
                }
                return {
                    duration: Date.now() - startTime,
                    failures: [],
                    phaseName: 'story',
                    skipped: false,
                    success: allStories.length,
                };
            }
            this.logger.info({
                mode: isPipelineMode ? 'sequential (pipeline)' : 'parallel',
                parallel: effectiveParallel,
                storiesSkipped: existingStories.length,
                storiesToCreate: storiesToCreate.length,
            }, 'Creating story files');
            // Create appropriate batch processor based on mode
            // Pipeline mode requires sequential processing to maintain story order in queue
            const storyBatchProcessor = isPipelineMode
                ? new BatchProcessor(effectiveParallel, 0, this.logger)
                : this.batchProcessor;
            // Create story files using BatchProcessor and ClaudeAgentRunner
            const results = await storyBatchProcessor.processBatch(storiesToCreate, async (story) => {
                // Generate story file path with prefix
                const storyFileName = this.generateStoryFileName(prefix, story.fullNumber);
                const storyFilePath = `${storyDir}/${storyFileName}`;
                // Check if file already exists (might have been created by parallel process)
                const fileExists = await this.fileManager.fileExists(storyFilePath);
                if (fileExists) {
                    this.logger.info({
                        filePath: storyFilePath,
                        storyNumber: story.fullNumber,
                        storyTitle: story.title,
                    }, 'Story file already exists, skipping creation');
                    return storyFilePath;
                }
                // Step 1: Create scaffolded file with structured sections and populated metadata
                const scaffoldedContent = this.fileScaffolder.scaffoldStory({
                    epicNumber: story.epicNumber,
                    storyNumber: story.number,
                    storyTitle: story.title,
                });
                await this.fileManager.writeFile(storyFilePath, scaffoldedContent);
                this.logger.info({
                    filePath: storyFilePath,
                    storyNumber: story.fullNumber,
                    storyTitle: story.title,
                }, 'Story scaffolded file created');
                // Step 2: Use the epic file path that was passed to this method
                // Step 3: Build Claude prompt to populate the scaffolded file
                const prompt = this.buildStoryPrompt(story, {
                    cwd: config.cwd,
                    epicPath: epicFilePath,
                    outputPath: storyFilePath,
                    prefix,
                    references: config.references,
                });
                // Log prompt if verbose
                if (config.verbose) {
                    this.logger.info({
                        epicPath: epicFilePath,
                        outputPath: storyFilePath,
                        prompt,
                        storyNumber: story.fullNumber,
                        storyTitle: story.title,
                    }, 'Claude Prompt (Story)');
                }
                // Step 4: Run Claude agent to populate content sections
                const result = await this.agentRunner.runAgent(prompt, {
                    agentType: 'sm',
                    references: config.references,
                    timeout: 1_800_000, // 30 minutes
                });
                // Log output if verbose
                if (config.verbose) {
                    this.logger.info({
                        duration: result.duration,
                        errors: result.errors,
                        output: result.output,
                        outputLength: result.output.length,
                        storyNumber: story.fullNumber,
                        success: result.success,
                    }, 'Claude Response (Story)');
                }
                if (!result.success) {
                    throw new Error(result.errors);
                }
                // Step 5: Verify file was updated by Claude
                const updatedContent = await this.fileManager.readFile(storyFilePath);
                if (updatedContent === scaffoldedContent) {
                    throw new Error(`Claude did not update the story file at ${storyFilePath}`);
                }
                // Invoke callback after successful story creation
                if (onStoryComplete) {
                    try {
                        const metadata = {
                            epicNumber: story.epicNumber,
                            filePath: storyFilePath,
                            id: story.fullNumber,
                            number: story.fullNumber,
                            status: 'Draft',
                            storyNumber: story.number,
                            title: story.title,
                            type: 'epic-based',
                        };
                        this.logger.debug({
                            metadata,
                            storyNumber: story.fullNumber,
                        }, 'Invoking story completion callback');
                        await onStoryComplete(metadata);
                    }
                    catch (error) {
                        this.logger.error({
                            error: error.message,
                            storyNumber: story.fullNumber,
                        }, 'Story completion callback failed, continuing story creation');
                    }
                }
                return storyFilePath;
            }, (info) => {
                this.logger.info({
                    completedItems: info.completedItems,
                    currentBatch: info.currentBatch,
                    totalBatches: info.totalBatches,
                    totalItems: info.totalItems,
                }, 'Story creation progress');
            });
            // Track successes and failures
            for (const [i, result] of results.entries()) {
                if (result.success) {
                    successCount++;
                }
                else {
                    failures.push({
                        error: result.error?.message ?? 'Unknown error',
                        identifier: storiesToCreate[i].fullNumber,
                    });
                }
            }
            // Add existing stories to success count
            successCount += existingStories.length;
            const duration = Date.now() - startTime;
            this.logger.info({
                duration,
                failures: failures.length,
                success: successCount,
            }, 'Story creation from epic file completed');
            return {
                duration,
                failures,
                phaseName: 'story',
                skipped: false,
                success: successCount,
            };
        }
        catch (error) {
            this.logger.error({ error: error.message }, 'Story phase from epic file failed');
            return {
                duration: Date.now() - startTime,
                failures: [
                    ...failures,
                    {
                        error: error.message,
                        identifier: 'story-phase',
                    },
                ],
                phaseName: 'story',
                skipped: false,
                success: successCount,
            };
        }
    }
    /**
     * Execute story creation phase with queue integration for pipeline mode.
     *
     * Producer phase in pipelined architecture:
     * 1. Parse epics from input (PRD or epic file)
     * 2. Create stories using BatchProcessor with parallel execution
     * 3. For each completed story, invoke onStoryComplete callback
     * 4. onStoryComplete enqueues story to StoryQueue for dev workers
     * 5. After all stories created, close queue to signal completion
     * 6. Dev workers (running concurrently) consume from queue
     *
     * Queue closure semantics:
     * - Queue.close() signals "no more stories will be added"
     * - Dev workers terminate when queue returns null (closed + empty)
     * - Ensures graceful pipeline completion
     *
     * Error handling:
     * - Individual story creation failures are logged but don't stop the phase
     * - Queue enqueueing failures are logged but don't stop creation
     * - Queue is always closed, on success or failure, to prevent worker deadlock
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result (PRD or epic file)
     * @param queue - StoryQueue for enqueuing completed stories
     * @returns PhaseResult with story creation metrics
     * @private
     */
    async executeStoryPhaseWithQueue(config, detection, queue) {
        try {
            // Create callback to enqueue completed stories
            const onStoryComplete = async (metadata) => {
                try {
                    // Only epic-based stories can be enqueued (standalone stories don't have epic numbering)
                    if (!isEpicStory(metadata)) {
                        this.logger.warn({ storyId: metadata.id }, 'Skipping standalone story - not supported in queue mode');
                        return;
                    }
                    // Convert StoryMetadata to Story object for queue
                    const story = {
                        epicNumber: metadata.epicNumber,
                        filePath: metadata.filePath,
                        fullNumber: metadata.number,
                        number: metadata.storyNumber,
                        title: metadata.title,
                    };
                    // Log story created and queued in pipeline mode
                    if (this.workflowLogger) {
                        await this.workflowLogger.logStoryCreated(story.fullNumber);
                        await this.workflowLogger.logStoryQueued(story.fullNumber, queue.getPendingCount());
                        // Log verbose transitions if enabled
                        if (config.verbose) {
                            this.workflowLogger.logVerboseTransition(story.fullNumber, story.title, 'CREATING');
                            this.workflowLogger.logVerboseTransition(story.fullNumber, story.title, 'QUEUED', {
                                position: queue.getPendingCount(),
                            });
                        }
                    }
                    this.logger.debug({
                        storyNumber: story.fullNumber,
                        storyTitle: story.title,
                    }, 'Enqueuing completed story');
                    queue.enqueue(story);
                }
                catch (error) {
                    this.logger.error({
                        error: error.message,
                        storyId: metadata.id,
                    }, 'Failed to enqueue story, continuing story creation');
                }
            };
            // Execute story phase with callback
            let storyPhase;
            if (detection.type === 'epic') {
                // Single epic file - parse stories directly
                storyPhase = await this.executeStoryPhaseFromEpicFile(config, detection.filePath, onStoryComplete);
            }
            else {
                // Get epics for story phase
                let epics = [];
                if (detection.type === 'prd') {
                    // Parse epics from PRD (with auto-fix support)
                    let prdContent = await this.fileManager.readFile(detection.filePath);
                    try {
                        epics = this.prdParser.parseEpics(prdContent, detection.filePath);
                    }
                    catch (parseError) {
                        // If parsing fails and auto-fix is enabled, attempt to fix the PRD
                        if (config.autoFix && parseError instanceof ParserError) {
                            this.logger.warn({ error: parseError.message, prdPath: detection.filePath }, 'PRD parsing failed in pipelined story phase, attempting auto-fix');
                            const fixResult = await this.prdFixer.fixPrd(detection.filePath, prdContent, config.references);
                            if (fixResult.fixed) {
                                this.logger.info({ prdPath: detection.filePath }, 'PRD auto-fixed successfully, retrying parse');
                                prdContent = fixResult.content;
                                epics = this.prdParser.parseEpics(prdContent, detection.filePath);
                            }
                            else {
                                throw parseError;
                            }
                        }
                        else {
                            throw parseError;
                        }
                    }
                }
                storyPhase = await this.executeStoryPhase(config, epics, onStoryComplete);
            }
            // Close queue to signal dev phase that no more stories will be added
            this.logger.info('Story phase completed, closing queue');
            queue.close();
            return storyPhase;
        }
        catch (error) {
            // Ensure queue is closed even if story phase fails
            this.logger.error({ error: error.message }, 'Story phase failed, closing queue');
            queue.close();
            throw error;
        }
    }
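    // Illustrative sketch (not part of this file): the StoryQueue contract assumed by the
    // producer above and by the dev workers is roughly the following; only enqueue(),
    // close() and getPendingCount() appear in this file, the consumer method name and
    // its null-on-close behaviour are taken from the doc comment above.
    //
    //     queue.enqueue(story);                 // producer adds a completed story
    //     queue.getPendingCount();              // number of stories waiting in the queue
    //     queue.close();                        // producer signals "no more stories"
    //     const story = await queue.dequeue();  // consumer waits for the next story;
    //                                           // resolves to null once closed and empty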
    /**
     * Extract epic number from file path
     *
     * @param filePath - Epic file path (e.g., 'docs/epics/epic-1.md')
     * @returns Epic number
     * @private
     */
    extractEpicNumber(filePath) {
        const match = filePath.match(/epic-(\d+)\.md/);
        return match ? Number.parseInt(match[1], 10) : 1;
    }
    /**
     * Extract story from file path
     *
     * @param filePath - Story file path (e.g., 'docs/stories/USER-GROUPS-story-1.001.md')
     * @returns Story object
     * @private
     */
    extractStoryFromFilePath(filePath) {
        // Match story pattern: <PREFIX>-story-<epicNum>.<storyNum>.md
        // Example: USER-GROUPS-story-1.001.md or STORY-1.2.md (legacy format)
        const match = filePath.match(/-story-(\d+)\.(\d+)\.md/i);
        if (!match) {
            this.logger.warn({ filePath }, 'Failed to parse story number from file path');
            return { epicNumber: 1, filePath, fullNumber: '1.1', number: 1, title: '' };
        }
        const epicNumber = Number.parseInt(match[1], 10);
        const storyNumber = Number.parseInt(match[2], 10);
        return {
            epicNumber,
            filePath,
            fullNumber: `${epicNumber}.${storyNumber}`,
            number: storyNumber,
            title: '',
        };
    }
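    // Illustrative example (not part of this file), based on the pattern documented above:
    //
    //     this.extractStoryFromFilePath('docs/stories/USER-GROUPS-story-1.001.md');
    //     // -> { epicNumber: 1, filePath: 'docs/stories/USER-GROUPS-story-1.001.md',
    //     //      fullNumber: '1.1', number: 1, title: '' }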
    /**
     * Generate epic file name with prefix and padded number
     *
     * @param prefix - File name prefix
     * @param epicNumber - Epic number
     * @returns Formatted file name (e.g., "USER-GROUPS-epic-001.md")
     * @private
     */
    generateEpicFileName(prefix, epicNumber) {
        const paddedNumber = String(epicNumber).padStart(3, '0');
        return `${prefix}-epic-${paddedNumber}.md`;
    }
    /**
     * Generate prefix from epic file path
     *
     * Extracts meaningful name from epic filename and converts to uppercase
     * Example: "docs/epics/epic-1-user-authentication.md" -> "USER-AUTHENTICATION"
     *
     * @param epicPath - Path to epic file
     * @returns Generated prefix
     * @private
     */
    generatePrefixFromEpicPath(epicPath) {
        // Get filename without extension
        const filename = epicPath
            .split('/')
            .pop()
            ?.replace(/\.(md|markdown)$/i, '') || 'EPIC';
        // Remove epic-N- prefix if present and convert to uppercase
        const prefix = filename
            .replaceAll(/^epic-\d+-?/gi, '')
            .toUpperCase()
            .replaceAll(/\s+/g, '-');
        // If no meaningful name remains, return EPIC
        return prefix || 'EPIC';
    }
    /**
     * Generate prefix from PRD file path
     *
     * Extracts the PRD name from the file path and converts it to uppercase
     * Example: "docs/PRD-user-groups.md" -> "USER-GROUPS"
     *
     * @param prdPath - Path to PRD file
     * @returns Generated prefix
     * @private
     */
    generatePrefixFromPrdPath(prdPath) {
        // Get filename without extension
        const filename = prdPath
            .split('/')
            .pop()
            ?.replace(/\.(md|markdown)$/i, '') || 'PRD';
        // Remove PRD- prefix if present and convert to uppercase
        const prefix = filename.replaceAll(/^PRD-/gi, '').toUpperCase().replaceAll(/\s+/g, '-');
        return prefix;
    }
    /**
     * Generate story file name with prefix
     *
     * @param prefix - File name prefix
     * @param storyNumber - Story number (e.g., "1.1")
     * @returns Formatted file name (e.g., "USER-GROUPS-story-1.001.md")
     * @private
     */
    generateStoryFileName(prefix, storyNumber) {
        // Parse epic number and story number from format "1.1"
        const [epicNum, storyNum] = storyNumber.split('.');
        // Zero-pad the story number to 3 digits
        const paddedStoryNum = String(storyNum).padStart(3, '0');
        return `${prefix}-story-${epicNum}.${paddedStoryNum}.md`;
    }
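    // Illustrative examples (not part of this file), following the JSDoc examples above:
    //
    //     this.generateEpicFileName('USER-GROUPS', 1);                // 'USER-GROUPS-epic-001.md'
    //     this.generatePrefixFromPrdPath('docs/PRD-user-groups.md');  // 'USER-GROUPS'
    //     this.generateStoryFileName('USER-GROUPS', '1.1');           // 'USER-GROUPS-story-1.001.md'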
    /**
     * Get epics for story phase execution
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @returns Array of epics
     * @private
     */
    async getEpicsForStoryPhase(config, detection) {
        if (detection.type !== 'prd') {
            return [];
        }
        let prdContent = await this.fileManager.readFile(detection.filePath);
        try {
            return this.prdParser.parseEpics(prdContent, detection.filePath);
        }
        catch (parseError) {
            if (config.autoFix && parseError instanceof ParserError) {
                this.logger.warn({ error: parseError.message, prdPath: detection.filePath }, 'PRD parsing failed in story phase, attempting auto-fix');
                const fixResult = await this.prdFixer.fixPrd(detection.filePath, prdContent, config.references);
                if (fixResult.fixed) {
                    this.logger.info({ prdPath: detection.filePath }, 'PRD auto-fixed successfully, retrying parse');
                    prdContent = fixResult.content;
                    return this.prdParser.parseEpics(prdContent, detection.filePath);
                }
            }
            throw parseError;
        }
    }
    /**
     * Get stories for dev phase execution
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @returns Array of stories
     * @private
     */
    async getStoriesForDevPhase(config, detection) {
        const prefix = this.resolvePrefix(config, detection);
        const storyPattern = `${prefix}-story-*.md`;
        this.logger.info({ inputType: detection.type, prefix, storyPattern }, 'Looking for story files with pattern');
        const storyDir = await this.pathResolver.getStoryDir();
        const storyFiles = await this.fileManager.listFiles(storyDir, storyPattern);
        return storyFiles.map((file) => this.extractStoryFromFilePath(file));
    }
    /**
     * Handle phase result from Promise.allSettled
     *
     * @param result - PromiseSettledResult
     * @param phaseName - Phase name for error reporting
     * @returns PhaseResult
     * @private
     */
    handlePhaseResult(result, phaseName) {
        if (result.status === 'fulfilled') {
            return result.value;
        }
        this.logger.error({ error: result.reason }, `${phaseName} phase failed`);
        return {
            duration: 0,
            failures: [
                {
                    error: result.reason.message,
                    identifier: `${phaseName}-phase`,
                },
            ],
            phaseName,
            skipped: false,
            success: 0,
        };
    }
    /**
     * Log workflow start information
     *
     * @param config - Workflow configuration
     * @private
     */
    logWorkflowStart(config) {
        this.logger.info({
            dryRun: config.dryRun,
            input: config.input,
            parallel: config.parallel,
            pipelineMode: config.pipeline ? 'enabled' : 'disabled',
            skipDev: config.skipDev,
            skipEpics: config.skipEpics,
            skipStories: config.skipStories,
        }, 'Starting workflow execution');
    }
    /**
     * Resolve prefix for story file pattern
     *
     * @param config - Workflow configuration
     * @param detection - Input detection result
     * @returns Prefix string
     * @private
     */
    resolvePrefix(config, detection) {
        if (config.prefix) {
            return config.prefix;
        }
        if (detection.type === 'epic') {
            return this.generatePrefixFromEpicPath(detection.filePath);
        }
        if (detection.type === 'prd') {
            return this.generatePrefixFromPrdPath(detection.filePath);
        }
        return this.generatePrefixFromPrdPath(config.input);
    }
    /**
     * Check if workflow should abort after epic phase failure
     *
     * @param epicPhase - Epic phase result
     * @returns True if should abort
     * @private
     */
    shouldAbortAfterEpicFailure(epicPhase) {
        return epicPhase !== undefined && epicPhase.success === 0 && epicPhase.failures.length > 0;
    }
    /**
     * Check if workflow should abort after story phase failure
     *
     * @param storyPhase - Story phase result
     * @returns True if should abort
     * @private
     */
    shouldAbortAfterStoryFailure(storyPhase) {
        return storyPhase.success === 0 && storyPhase.failures.length > 0;
    }
    /**
     * Sleep for specified milliseconds
     *
     * @param ms - Milliseconds to sleep
     * @private
     */
    async sleep(ms) {
        await new Promise((resolve) => setTimeout(resolve, ms));
    }
    /**
     * Update story status in file
     *
     * @param storyFilePath - Path to story file
     * @param status - New status
     * @private
     */
    async updateStoryStatus(storyFilePath, status) {
        const content = await this.fileManager.readFile(storyFilePath);
        const updatedContent = content.replace(/^## Status\s*\n\n(.+?)$/m, `## Status\n\n${status}`);
        await this.fileManager.writeFile(storyFilePath, updatedContent);
    }
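    // Illustrative example (not part of this file): updateStoryStatus replaces only the line
    // directly under the '## Status' heading, so a section reading
    //
    //     ## Status
    //
    //     Draft
    //
    // becomes, after updateStoryStatus(path, 'In Progress'):
    //
    //     ## Status
    //
    //     In Progress
    //
    // ('In Progress' is just a hypothetical status value here; the replacement assumes a
    // blank line between the heading and the status value, as the regex above expects.)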
    /**
     * Validate workflow configuration
     *
     * @param config - Workflow configuration to validate
     * @throws {ValidationError} If configuration is invalid
     * @private
     */
    validateConfig(config) {
        if (!config.input || config.input.trim().length === 0) {
            throw new ValidationError('Input is required', { field: 'input' });
        }
        if (config.prdInterval < 0) {
            throw new ValidationError('prdInterval must be >= 0', { field: 'prdInterval' });
        }
        if (config.epicInterval < 0) {
            throw new ValidationError('epicInterval must be >= 0', { field: 'epicInterval' });
        }
        if (config.storyInterval < 0) {
            throw new ValidationError('storyInterval must be >= 0', { field: 'storyInterval' });
        }
        if (config.parallel < 1) {
            throw new ValidationError('parallel must be >= 1', { field: 'parallel' });
        }
    }
}