@nathapp/nax 0.38.0 → 0.38.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/dist/nax.js +3294 -2907
  2. package/package.json +2 -2
  3. package/src/agents/claude-complete.ts +72 -0
  4. package/src/agents/claude-execution.ts +189 -0
  5. package/src/agents/claude-interactive.ts +77 -0
  6. package/src/agents/claude-plan.ts +23 -8
  7. package/src/agents/claude.ts +64 -349
  8. package/src/analyze/classifier.ts +2 -1
  9. package/src/cli/config-descriptions.ts +206 -0
  10. package/src/cli/config-diff.ts +103 -0
  11. package/src/cli/config-display.ts +285 -0
  12. package/src/cli/config-get.ts +55 -0
  13. package/src/cli/config.ts +7 -618
  14. package/src/cli/plugins.ts +15 -4
  15. package/src/cli/prompts-export.ts +58 -0
  16. package/src/cli/prompts-init.ts +200 -0
  17. package/src/cli/prompts-main.ts +237 -0
  18. package/src/cli/prompts-tdd.ts +78 -0
  19. package/src/cli/prompts.ts +10 -541
  20. package/src/commands/logs-formatter.ts +201 -0
  21. package/src/commands/logs-reader.ts +171 -0
  22. package/src/commands/logs.ts +11 -362
  23. package/src/config/loader.ts +4 -15
  24. package/src/config/runtime-types.ts +451 -0
  25. package/src/config/schema-types.ts +53 -0
  26. package/src/config/schemas.ts +2 -0
  27. package/src/config/types.ts +49 -486
  28. package/src/context/auto-detect.ts +2 -1
  29. package/src/context/builder.ts +3 -2
  30. package/src/execution/crash-heartbeat.ts +77 -0
  31. package/src/execution/crash-recovery.ts +23 -365
  32. package/src/execution/crash-signals.ts +149 -0
  33. package/src/execution/crash-writer.ts +154 -0
  34. package/src/execution/lifecycle/run-setup.ts +7 -1
  35. package/src/execution/parallel-coordinator.ts +278 -0
  36. package/src/execution/parallel-executor-rectification-pass.ts +117 -0
  37. package/src/execution/parallel-executor-rectify.ts +135 -0
  38. package/src/execution/parallel-executor.ts +19 -211
  39. package/src/execution/parallel-worker.ts +148 -0
  40. package/src/execution/parallel.ts +5 -404
  41. package/src/execution/pid-registry.ts +3 -8
  42. package/src/execution/runner-completion.ts +160 -0
  43. package/src/execution/runner-execution.ts +221 -0
  44. package/src/execution/runner-setup.ts +82 -0
  45. package/src/execution/runner.ts +53 -202
  46. package/src/execution/timeout-handler.ts +100 -0
  47. package/src/hooks/runner.ts +11 -21
  48. package/src/metrics/tracker.ts +7 -30
  49. package/src/pipeline/runner.ts +2 -1
  50. package/src/pipeline/stages/completion.ts +0 -1
  51. package/src/pipeline/stages/context.ts +2 -1
  52. package/src/plugins/extensions.ts +225 -0
  53. package/src/plugins/loader.ts +40 -4
  54. package/src/plugins/types.ts +18 -221
  55. package/src/prd/index.ts +2 -1
  56. package/src/prd/validate.ts +41 -0
  57. package/src/precheck/checks-blockers.ts +15 -419
  58. package/src/precheck/checks-cli.ts +68 -0
  59. package/src/precheck/checks-config.ts +102 -0
  60. package/src/precheck/checks-git.ts +87 -0
  61. package/src/precheck/checks-system.ts +163 -0
  62. package/src/review/orchestrator.ts +19 -6
  63. package/src/review/runner.ts +17 -5
  64. package/src/routing/chain.ts +2 -1
  65. package/src/routing/loader.ts +2 -5
  66. package/src/tdd/orchestrator.ts +2 -1
  67. package/src/tdd/verdict-reader.ts +266 -0
  68. package/src/tdd/verdict.ts +6 -271
  69. package/src/utils/errors.ts +12 -0
  70. package/src/utils/git.ts +12 -5
  71. package/src/utils/json-file.ts +72 -0
  72. package/src/verification/executor.ts +2 -1
  73. package/src/verification/smart-runner.ts +23 -3
  74. package/src/worktree/manager.ts +9 -3
  75. package/src/worktree/merge.ts +3 -2
@@ -1,412 +1,13 @@
1
1
  /**
2
- * Parallel Execution — Worktree-based concurrent story execution
2
+ * Parallel Execution — Hub file
3
3
  *
4
4
  * Orchestrates parallel story execution using git worktrees: groups stories
5
5
  * by dependencies, creates worktrees, dispatches concurrent pipelines,
6
6
  * merges in dependency order, and cleans up worktrees.
7
- */
8
-
9
- import os from "node:os";
10
- import { join } from "node:path";
11
- import type { NaxConfig } from "../config";
12
- import type { LoadedHooksConfig } from "../hooks";
13
- import { getSafeLogger } from "../logger";
14
- import type { PipelineEventEmitter } from "../pipeline/events";
15
- import { runPipeline } from "../pipeline/runner";
16
- import { defaultPipeline } from "../pipeline/stages";
17
- import type { PipelineContext, RoutingResult } from "../pipeline/types";
18
- import type { PluginRegistry } from "../plugins/registry";
19
- import type { PRD, UserStory } from "../prd";
20
- import { markStoryFailed, markStoryPassed, savePRD } from "../prd";
21
- import { routeTask, tryLlmBatchRoute } from "../routing";
22
- import { WorktreeManager } from "../worktree/manager";
23
- import { MergeEngine, type StoryDependencies } from "../worktree/merge";
24
-
25
- /**
26
- * Result from parallel execution of a batch of stories
27
- */
28
- export interface ParallelBatchResult {
29
- /** Stories that passed the TDD pipeline (pre-merge) */
30
- pipelinePassed: UserStory[];
31
- /** Stories that were actually merged to the base branch */
32
- merged: UserStory[];
33
- /** Stories that failed the pipeline */
34
- failed: Array<{ story: UserStory; error: string }>;
35
- /** Total cost accumulated */
36
- totalCost: number;
37
- /** Stories with merge conflicts (includes per-story original cost for rectification) */
38
- mergeConflicts: Array<{ storyId: string; conflictFiles: string[]; originalCost: number }>;
39
- /** Per-story execution costs for successful stories */
40
- storyCosts: Map<string, number>;
41
- }
42
-
43
- /**
44
- * Group stories into dependency batches; stories in each batch can run in parallel.
45
- */
46
- function groupStoriesByDependencies(stories: UserStory[]): UserStory[][] {
47
- const batches: UserStory[][] = [];
48
- const processed = new Set<string>();
49
- const storyMap = new Map(stories.map((s) => [s.id, s]));
50
-
51
- // Keep processing until all stories are batched
52
- while (processed.size < stories.length) {
53
- const batch: UserStory[] = [];
54
-
55
- for (const story of stories) {
56
- if (processed.has(story.id)) continue;
57
-
58
- // Check if all dependencies are satisfied
59
- const depsCompleted = story.dependencies.every((dep) => processed.has(dep) || !storyMap.has(dep));
60
-
61
- if (depsCompleted) {
62
- batch.push(story);
63
- }
64
- }
65
-
66
- if (batch.length === 0) {
67
- // No stories ready — circular dependency or missing dep
68
- const remaining = stories.filter((s) => !processed.has(s.id));
69
- const logger = getSafeLogger();
70
- logger?.error("parallel", "Cannot resolve story dependencies", {
71
- remainingStories: remaining.map((s) => s.id),
72
- });
73
- throw new Error("Circular dependency or missing dependency detected");
74
- }
75
-
76
- // Mark batch stories as processed
77
- for (const story of batch) {
78
- processed.add(story.id);
79
- }
80
-
81
- batches.push(batch);
82
- }
83
-
84
- return batches;
85
- }
86
-
87
- /**
88
- * Build dependency map for merge engine
89
- */
90
- function buildDependencyMap(stories: UserStory[]): StoryDependencies {
91
- const deps: StoryDependencies = {};
92
- for (const story of stories) {
93
- deps[story.id] = story.dependencies;
94
- }
95
- return deps;
96
- }
97
-
98
- /**
99
- * Execute a single story in its worktree
100
- */
101
- async function executeStoryInWorktree(
102
- story: UserStory,
103
- worktreePath: string,
104
- context: Omit<PipelineContext, "story" | "stories" | "workdir" | "routing">,
105
- routing: RoutingResult,
106
- eventEmitter?: PipelineEventEmitter,
107
- ): Promise<{ success: boolean; cost: number; error?: string }> {
108
- const logger = getSafeLogger();
109
-
110
- try {
111
- const pipelineContext: PipelineContext = {
112
- ...context,
113
- story,
114
- stories: [story],
115
- workdir: worktreePath,
116
- routing,
117
- };
118
-
119
- logger?.debug("parallel", "Executing story in worktree", {
120
- storyId: story.id,
121
- worktreePath,
122
- });
123
-
124
- const result = await runPipeline(defaultPipeline, pipelineContext, eventEmitter);
125
-
126
- return {
127
- success: result.success,
128
- cost: result.context.agentResult?.estimatedCost || 0,
129
- error: result.success ? undefined : result.reason,
130
- };
131
- } catch (error) {
132
- return {
133
- success: false,
134
- cost: 0,
135
- error: error instanceof Error ? error.message : String(error),
136
- };
137
- }
138
- }
139
-
140
- /**
141
- * Execute a batch of independent stories in parallel
142
- */
143
- async function executeParallelBatch(
144
- stories: UserStory[],
145
- projectRoot: string,
146
- config: NaxConfig,
147
- prd: PRD,
148
- context: Omit<PipelineContext, "story" | "stories" | "workdir" | "routing">,
149
- maxConcurrency: number,
150
- eventEmitter?: PipelineEventEmitter,
151
- ): Promise<ParallelBatchResult> {
152
- const logger = getSafeLogger();
153
- const worktreeManager = new WorktreeManager();
154
- const results: ParallelBatchResult = {
155
- pipelinePassed: [],
156
- merged: [],
157
- failed: [],
158
- totalCost: 0,
159
- mergeConflicts: [],
160
- storyCosts: new Map(),
161
- };
162
-
163
- // Create worktrees for all stories in batch
164
- const worktreeSetup: Array<{ story: UserStory; worktreePath: string }> = [];
165
-
166
- for (const story of stories) {
167
- const worktreePath = join(projectRoot, ".nax-wt", story.id);
168
- try {
169
- await worktreeManager.create(projectRoot, story.id);
170
- worktreeSetup.push({ story, worktreePath });
171
-
172
- logger?.info("parallel", "Created worktree for story", {
173
- storyId: story.id,
174
- worktreePath,
175
- });
176
- } catch (error) {
177
- results.failed.push({
178
- story,
179
- error: `Failed to create worktree: ${error instanceof Error ? error.message : String(error)}`,
180
- });
181
- logger?.error("parallel", "Failed to create worktree", {
182
- storyId: story.id,
183
- error: error instanceof Error ? error.message : String(error),
184
- });
185
- }
186
- }
187
-
188
- // Execute stories in parallel with concurrency limit
189
- const executing = new Set<Promise<void>>();
190
-
191
- for (const { story, worktreePath } of worktreeSetup) {
192
- const routing = routeTask(story.title, story.description, story.acceptanceCriteria, story.tags, config);
193
-
194
- const executePromise = executeStoryInWorktree(story, worktreePath, context, routing as RoutingResult, eventEmitter)
195
- .then((result) => {
196
- results.totalCost += result.cost;
197
- results.storyCosts.set(story.id, result.cost);
198
-
199
- if (result.success) {
200
- results.pipelinePassed.push(story);
201
- logger?.info("parallel", "Story execution succeeded", {
202
- storyId: story.id,
203
- cost: result.cost,
204
- });
205
- } else {
206
- results.failed.push({ story, error: result.error || "Unknown error" });
207
- logger?.error("parallel", "Story execution failed", {
208
- storyId: story.id,
209
- error: result.error,
210
- });
211
- }
212
- })
213
- .finally(() => {
214
- executing.delete(executePromise);
215
- });
216
-
217
- executing.add(executePromise);
218
-
219
- // Wait if we've hit the concurrency limit
220
- if (executing.size >= maxConcurrency) {
221
- await Promise.race(executing);
222
- }
223
- }
224
-
225
- // Wait for all remaining executions
226
- await Promise.all(executing);
227
-
228
- return results;
229
- }
230
-
231
- /**
232
- * Determine max concurrency from parallel option
233
- * - undefined: sequential mode (should not call this function)
234
- * - 0: auto-detect (use CPU count)
235
- * - N > 0: use N
236
- */
237
- function resolveMaxConcurrency(parallel: number): number {
238
- if (parallel === 0) {
239
- return os.cpus().length;
240
- }
241
- return Math.max(1, parallel);
242
- }
243
-
244
- /**
245
- * Execute stories in parallel using worktree pipeline
246
7
  *
247
- * High-level flow:
248
- * 1. Group stories by dependencies into batches
249
- * 2. For each batch:
250
- * a. Create worktrees for all stories
251
- * b. Execute pipeline in parallel (respecting maxConcurrency)
252
- * c. Merge successful branches in topological order
253
- * d. Clean up worktrees on success, preserve on failure
254
- * 3. Update PRD with results
8
+ * Re-exports coordinator and worker modules for backward compatibility.
255
9
  */
256
- export async function executeParallel(
257
- stories: UserStory[],
258
- prdPath: string,
259
- projectRoot: string,
260
- config: NaxConfig,
261
- hooks: LoadedHooksConfig,
262
- plugins: PluginRegistry,
263
- prd: PRD,
264
- featureDir: string | undefined,
265
- parallel: number,
266
- eventEmitter?: PipelineEventEmitter,
267
- ): Promise<{
268
- storiesCompleted: number;
269
- totalCost: number;
270
- updatedPrd: PRD;
271
- mergeConflicts: Array<{ storyId: string; conflictFiles: string[]; originalCost: number }>;
272
- }> {
273
- const logger = getSafeLogger();
274
- const maxConcurrency = resolveMaxConcurrency(parallel);
275
- const worktreeManager = new WorktreeManager();
276
- const mergeEngine = new MergeEngine(worktreeManager);
277
-
278
- logger?.info("parallel", "Starting parallel execution", {
279
- totalStories: stories.length,
280
- maxConcurrency,
281
- });
282
-
283
- // Group stories by dependencies
284
- const batches = groupStoriesByDependencies(stories);
285
- logger?.info("parallel", "Grouped stories into batches", {
286
- batchCount: batches.length,
287
- batches: batches.map((b, i) => ({ index: i, storyCount: b.length, storyIds: b.map((s) => s.id) })),
288
- });
289
-
290
- let storiesCompleted = 0;
291
- let totalCost = 0;
292
- const currentPrd = prd;
293
- const allMergeConflicts: Array<{ storyId: string; conflictFiles: string[]; originalCost: number }> = [];
294
-
295
- // Execute each batch sequentially (stories within each batch run in parallel)
296
- for (let batchIndex = 0; batchIndex < batches.length; batchIndex++) {
297
- const batch = batches[batchIndex];
298
- logger?.info("parallel", `Executing batch ${batchIndex + 1}/${batches.length}`, {
299
- storyCount: batch.length,
300
- storyIds: batch.map((s) => s.id),
301
- });
302
-
303
- // Build context for this batch (shared across all stories in batch)
304
- const baseContext = {
305
- config,
306
- prd: currentPrd,
307
- featureDir,
308
- hooks,
309
- plugins,
310
- storyStartTime: new Date().toISOString(),
311
- };
312
-
313
- // Execute batch in parallel
314
- const batchResult = await executeParallelBatch(
315
- batch,
316
- projectRoot,
317
- config,
318
- currentPrd,
319
- baseContext,
320
- maxConcurrency,
321
- eventEmitter,
322
- );
323
-
324
- totalCost += batchResult.totalCost;
325
-
326
- // Merge successful stories in topological order
327
- if (batchResult.pipelinePassed.length > 0) {
328
- const successfulIds = batchResult.pipelinePassed.map((s) => s.id);
329
- const deps = buildDependencyMap(batch);
330
-
331
- logger?.info("parallel", "Merging successful stories", {
332
- storyIds: successfulIds,
333
- });
334
-
335
- const mergeResults = await mergeEngine.mergeAll(projectRoot, successfulIds, deps);
336
-
337
- // Process merge results
338
- for (const mergeResult of mergeResults) {
339
- if (mergeResult.success) {
340
- // Update PRD: mark story as passed
341
- markStoryPassed(currentPrd, mergeResult.storyId);
342
- storiesCompleted++;
343
- const mergedStory = batchResult.pipelinePassed.find((s) => s.id === mergeResult.storyId);
344
- if (mergedStory) batchResult.merged.push(mergedStory);
345
-
346
- logger?.info("parallel", "Story merged successfully", {
347
- storyId: mergeResult.storyId,
348
- retryCount: mergeResult.retryCount,
349
- });
350
- } else {
351
- // Merge conflict — mark story as failed
352
- markStoryFailed(currentPrd, mergeResult.storyId);
353
- batchResult.mergeConflicts.push({
354
- storyId: mergeResult.storyId,
355
- conflictFiles: mergeResult.conflictFiles || [],
356
- originalCost: batchResult.storyCosts.get(mergeResult.storyId) ?? 0,
357
- });
358
-
359
- logger?.error("parallel", "Merge conflict", {
360
- storyId: mergeResult.storyId,
361
- conflictFiles: mergeResult.conflictFiles,
362
- });
363
-
364
- // Keep worktree for manual resolution
365
- logger?.warn("parallel", "Worktree preserved for manual conflict resolution", {
366
- storyId: mergeResult.storyId,
367
- worktreePath: join(projectRoot, ".nax-wt", mergeResult.storyId),
368
- });
369
- }
370
- }
371
- }
372
-
373
- // Mark failed stories in PRD and clean up their worktrees
374
- for (const { story, error } of batchResult.failed) {
375
- markStoryFailed(currentPrd, story.id);
376
-
377
- logger?.error("parallel", "Cleaning up failed story worktree", {
378
- storyId: story.id,
379
- error,
380
- });
381
-
382
- try {
383
- await worktreeManager.remove(projectRoot, story.id);
384
- } catch (cleanupError) {
385
- logger?.warn("parallel", "Failed to clean up worktree", {
386
- storyId: story.id,
387
- error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError),
388
- });
389
- }
390
- }
391
-
392
- // Save PRD after each batch
393
- await savePRD(currentPrd, prdPath);
394
-
395
- allMergeConflicts.push(...batchResult.mergeConflicts);
396
-
397
- logger?.info("parallel", `Batch ${batchIndex + 1} complete`, {
398
- pipelinePassed: batchResult.pipelinePassed.length,
399
- merged: batchResult.merged.length,
400
- failed: batchResult.failed.length,
401
- mergeConflicts: batchResult.mergeConflicts.length,
402
- batchCost: batchResult.totalCost,
403
- });
404
- }
405
-
406
- logger?.info("parallel", "Parallel execution complete", {
407
- storiesCompleted,
408
- totalCost,
409
- });
410
10
 
411
- return { storiesCompleted, totalCost, updatedPrd: currentPrd, mergeConflicts: allMergeConflicts };
412
- }
11
+ // Re-export for backward compatibility
12
+ export { executeParallel } from "./parallel-coordinator";
13
+ export type { ParallelBatchResult } from "./parallel-worker";
@@ -10,6 +10,7 @@
10
10
  */
11
11
 
12
12
  import { existsSync } from "node:fs";
13
+ import { appendFile } from "node:fs/promises";
13
14
  import { getSafeLogger } from "../logger";
14
15
 
15
16
  /**
@@ -77,15 +78,9 @@ export class PidRegistry {
77
78
  };
78
79
 
79
80
  try {
80
- // Read existing content or create empty file
81
- let existingContent = "";
82
- if (existsSync(this.pidsFilePath)) {
83
- existingContent = await Bun.file(this.pidsFilePath).text();
84
- }
85
-
86
- // Append to .nax-pids file (one JSON entry per line)
81
+ // Atomically append to .nax-pids file (one JSON entry per line)
87
82
  const line = `${JSON.stringify(entry)}\n`;
88
- await Bun.write(this.pidsFilePath, existingContent + line);
83
+ await appendFile(this.pidsFilePath, line);
89
84
  logger?.debug("pid-registry", `Registered PID ${pid}`, { pid });
90
85
  } catch (err) {
91
86
  logger?.warn("pid-registry", `Failed to write PID ${pid} to registry`, {
@@ -0,0 +1,160 @@
1
+ /**
2
+ * Runner Completion Phase
3
+ *
4
+ * Handles post-execution steps: acceptance loop, hooks, metrics, and cleanup.
5
+ * Extracted from runner.ts for better code organization.
6
+ */
7
+
8
+ import type { NaxConfig } from "../config";
9
+ import type { LoadedHooksConfig } from "../hooks";
10
+ import { fireHook } from "../hooks";
11
+ import { getSafeLogger } from "../logger";
12
+ import type { StoryMetrics } from "../metrics";
13
+ import type { PipelineEventEmitter } from "../pipeline/events";
14
+ import type { PluginRegistry } from "../plugins/registry";
15
+ import { isComplete } from "../prd";
16
+ import type { PRD } from "../prd";
17
+ import { stopHeartbeat, writeExitSummary } from "./crash-recovery";
18
+ import { hookCtx } from "./story-context";
19
+
20
+ /**
21
+ * Options for the completion phase.
22
+ */
23
+ export interface RunnerCompletionOptions {
24
+ config: NaxConfig;
25
+ hooks: LoadedHooksConfig;
26
+ feature: string;
27
+ workdir: string;
28
+ statusFile: string;
29
+ logFilePath?: string;
30
+ runId: string;
31
+ startedAt: string;
32
+ startTime: number;
33
+ formatterMode: "quiet" | "normal" | "verbose" | "json";
34
+ headless: boolean;
35
+ featureDir?: string;
36
+ prd: PRD;
37
+ allStoryMetrics: StoryMetrics[];
38
+ totalCost: number;
39
+ storiesCompleted: number;
40
+ iterations: number;
41
+ // biome-ignore lint/suspicious/noExplicitAny: StatusWriter interface varies by platform
42
+ statusWriter: any;
43
+ pluginRegistry: PluginRegistry;
44
+ eventEmitter?: PipelineEventEmitter;
45
+ }
46
+
47
+ /**
48
+ * Result from the completion phase.
49
+ */
50
+ export interface RunnerCompletionResult {
51
+ durationMs: number;
52
+ runCompletedAt: string;
53
+ }
54
+
55
+ /**
56
+ * Execute the completion phase of the run.
57
+ *
58
+ * @param options - Completion options
59
+ * @returns Completion result
60
+ */
61
+ export async function runCompletionPhase(options: RunnerCompletionOptions): Promise<RunnerCompletionResult> {
62
+ const logger = getSafeLogger();
63
+
64
+ // Check if we need acceptance retry loop
65
+ if (options.config.acceptance.enabled && isComplete(options.prd)) {
66
+ const { runAcceptanceLoop } = await import("./lifecycle/acceptance-loop");
67
+ const acceptanceResult = await runAcceptanceLoop({
68
+ config: options.config,
69
+ prd: options.prd,
70
+ prdPath: "", // Not needed for this extraction
71
+ workdir: options.workdir,
72
+ featureDir: options.featureDir,
73
+ hooks: options.hooks,
74
+ feature: options.feature,
75
+ totalCost: options.totalCost,
76
+ iterations: options.iterations,
77
+ storiesCompleted: options.storiesCompleted,
78
+ allStoryMetrics: options.allStoryMetrics,
79
+ pluginRegistry: options.pluginRegistry,
80
+ eventEmitter: options.eventEmitter,
81
+ statusWriter: options.statusWriter,
82
+ });
83
+
84
+ Object.assign(options, {
85
+ prd: acceptanceResult.prd,
86
+ totalCost: acceptanceResult.totalCost,
87
+ iterations: acceptanceResult.iterations,
88
+ storiesCompleted: acceptanceResult.storiesCompleted,
89
+ });
90
+ }
91
+
92
+ // Fire on-all-stories-complete before regression gate (RL-001)
93
+ if (isComplete(options.prd)) {
94
+ await fireHook(
95
+ options.hooks,
96
+ "on-all-stories-complete",
97
+ hookCtx(options.feature, { status: "passed", cost: options.totalCost }),
98
+ options.workdir,
99
+ );
100
+ }
101
+
102
+ // Handle run completion: save metrics, log summary, update status
103
+ const { handleRunCompletion } = await import("./lifecycle/run-completion");
104
+ const completionResult = await handleRunCompletion({
105
+ runId: options.runId,
106
+ feature: options.feature,
107
+ startedAt: options.startedAt,
108
+ prd: options.prd,
109
+ allStoryMetrics: options.allStoryMetrics,
110
+ totalCost: options.totalCost,
111
+ storiesCompleted: options.storiesCompleted,
112
+ iterations: options.iterations,
113
+ startTime: options.startTime,
114
+ workdir: options.workdir,
115
+ statusWriter: options.statusWriter,
116
+ config: options.config,
117
+ });
118
+
119
+ const { durationMs, runCompletedAt, finalCounts } = completionResult;
120
+
121
+ // Write feature-level status (SFC-002)
122
+ if (options.featureDir) {
123
+ const finalStatus = isComplete(options.prd) ? "completed" : "failed";
124
+ options.statusWriter.setRunStatus(finalStatus);
125
+ await options.statusWriter.writeFeatureStatus(options.featureDir, options.totalCost, options.iterations);
126
+ }
127
+
128
+ // Output run footer in headless mode
129
+ if (options.headless && options.formatterMode !== "json") {
130
+ const { outputRunFooter } = await import("./lifecycle/headless-formatter");
131
+ outputRunFooter({
132
+ finalCounts: {
133
+ total: finalCounts.total,
134
+ passed: finalCounts.passed,
135
+ failed: finalCounts.failed,
136
+ skipped: finalCounts.skipped,
137
+ },
138
+ durationMs,
139
+ totalCost: options.totalCost,
140
+ startedAt: options.startedAt,
141
+ completedAt: runCompletedAt,
142
+ formatterMode: options.formatterMode,
143
+ });
144
+ }
145
+
146
+ // Stop heartbeat and write exit summary (US-007)
147
+ stopHeartbeat();
148
+ await writeExitSummary(
149
+ options.logFilePath,
150
+ options.totalCost,
151
+ options.iterations,
152
+ options.storiesCompleted,
153
+ durationMs,
154
+ );
155
+
156
+ return {
157
+ durationMs,
158
+ runCompletedAt,
159
+ };
160
+ }