@nathapp/nax 0.38.0 → 0.38.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/dist/nax.js +3294 -2907
  2. package/package.json +2 -2
  3. package/src/agents/claude-complete.ts +72 -0
  4. package/src/agents/claude-execution.ts +189 -0
  5. package/src/agents/claude-interactive.ts +77 -0
  6. package/src/agents/claude-plan.ts +23 -8
  7. package/src/agents/claude.ts +64 -349
  8. package/src/analyze/classifier.ts +2 -1
  9. package/src/cli/config-descriptions.ts +206 -0
  10. package/src/cli/config-diff.ts +103 -0
  11. package/src/cli/config-display.ts +285 -0
  12. package/src/cli/config-get.ts +55 -0
  13. package/src/cli/config.ts +7 -618
  14. package/src/cli/plugins.ts +15 -4
  15. package/src/cli/prompts-export.ts +58 -0
  16. package/src/cli/prompts-init.ts +200 -0
  17. package/src/cli/prompts-main.ts +237 -0
  18. package/src/cli/prompts-tdd.ts +78 -0
  19. package/src/cli/prompts.ts +10 -541
  20. package/src/commands/logs-formatter.ts +201 -0
  21. package/src/commands/logs-reader.ts +171 -0
  22. package/src/commands/logs.ts +11 -362
  23. package/src/config/loader.ts +4 -15
  24. package/src/config/runtime-types.ts +451 -0
  25. package/src/config/schema-types.ts +53 -0
  26. package/src/config/schemas.ts +2 -0
  27. package/src/config/types.ts +49 -486
  28. package/src/context/auto-detect.ts +2 -1
  29. package/src/context/builder.ts +3 -2
  30. package/src/execution/crash-heartbeat.ts +77 -0
  31. package/src/execution/crash-recovery.ts +23 -365
  32. package/src/execution/crash-signals.ts +149 -0
  33. package/src/execution/crash-writer.ts +154 -0
  34. package/src/execution/lifecycle/run-setup.ts +7 -1
  35. package/src/execution/parallel-coordinator.ts +278 -0
  36. package/src/execution/parallel-executor-rectification-pass.ts +117 -0
  37. package/src/execution/parallel-executor-rectify.ts +135 -0
  38. package/src/execution/parallel-executor.ts +19 -211
  39. package/src/execution/parallel-worker.ts +148 -0
  40. package/src/execution/parallel.ts +5 -404
  41. package/src/execution/pid-registry.ts +3 -8
  42. package/src/execution/runner-completion.ts +160 -0
  43. package/src/execution/runner-execution.ts +221 -0
  44. package/src/execution/runner-setup.ts +82 -0
  45. package/src/execution/runner.ts +53 -202
  46. package/src/execution/timeout-handler.ts +100 -0
  47. package/src/hooks/runner.ts +11 -21
  48. package/src/metrics/tracker.ts +7 -30
  49. package/src/pipeline/runner.ts +2 -1
  50. package/src/pipeline/stages/completion.ts +0 -1
  51. package/src/pipeline/stages/context.ts +2 -1
  52. package/src/plugins/extensions.ts +225 -0
  53. package/src/plugins/loader.ts +40 -4
  54. package/src/plugins/types.ts +18 -221
  55. package/src/prd/index.ts +2 -1
  56. package/src/prd/validate.ts +41 -0
  57. package/src/precheck/checks-blockers.ts +15 -419
  58. package/src/precheck/checks-cli.ts +68 -0
  59. package/src/precheck/checks-config.ts +102 -0
  60. package/src/precheck/checks-git.ts +87 -0
  61. package/src/precheck/checks-system.ts +163 -0
  62. package/src/review/orchestrator.ts +19 -6
  63. package/src/review/runner.ts +17 -5
  64. package/src/routing/chain.ts +2 -1
  65. package/src/routing/loader.ts +2 -5
  66. package/src/tdd/orchestrator.ts +2 -1
  67. package/src/tdd/verdict-reader.ts +266 -0
  68. package/src/tdd/verdict.ts +6 -271
  69. package/src/utils/errors.ts +12 -0
  70. package/src/utils/git.ts +12 -5
  71. package/src/utils/json-file.ts +72 -0
  72. package/src/verification/executor.ts +2 -1
  73. package/src/verification/smart-runner.ts +23 -3
  74. package/src/worktree/manager.ts +9 -3
  75. package/src/worktree/merge.ts +3 -2
@@ -0,0 +1,154 @@
1
+ /**
2
+ * Crash data serialization — write logs and event summaries
3
+ */
4
+
5
+ import { appendFileSync } from "node:fs";
6
+ import { getSafeLogger } from "../logger";
7
+ import type { StatusWriter } from "./status-writer";
8
+
9
+ /**
10
+ * Write fatal log entry to JSONL file
11
+ */
12
+ export async function writeFatalLog(jsonlFilePath: string | undefined, signal: string, error?: Error): Promise<void> {
13
+ if (!jsonlFilePath) return;
14
+
15
+ try {
16
+ const fatalEntry = {
17
+ timestamp: new Date().toISOString(),
18
+ level: "error",
19
+ stage: "crash-recovery",
20
+ message: error ? `Uncaught exception: ${error.message}` : `Process terminated by ${signal}`,
21
+ data: {
22
+ signal,
23
+ ...(error && {
24
+ stack: error.stack,
25
+ name: error.name,
26
+ }),
27
+ },
28
+ };
29
+
30
+ const line = `${JSON.stringify(fatalEntry)}\n`;
31
+ appendFileSync(jsonlFilePath, line);
32
+ } catch (err) {
33
+ console.error("[crash-recovery] Failed to write fatal log:", err);
34
+ }
35
+ }
36
+
37
+ /**
38
+ * Write run.complete event to JSONL file
39
+ * Called on SIGTERM to emit final run summary before exit
40
+ */
41
+ export interface RunCompleteContext {
42
+ jsonlFilePath?: string;
43
+ runId?: string;
44
+ feature?: string;
45
+ getTotalCost: () => number;
46
+ getIterations: () => number;
47
+ getStartTime?: () => number;
48
+ getTotalStories?: () => number;
49
+ getStoriesCompleted?: () => number;
50
+ }
51
+
52
+ export async function writeRunComplete(ctx: RunCompleteContext, exitReason: string): Promise<void> {
53
+ if (!ctx.jsonlFilePath || !ctx.runId || !ctx.feature) return;
54
+
55
+ const logger = getSafeLogger();
56
+
57
+ try {
58
+ const totalCost = ctx.getTotalCost();
59
+ const iterations = ctx.getIterations();
60
+ const startTime = ctx.getStartTime?.() ?? Date.now();
61
+ const durationMs = Date.now() - startTime;
62
+ const totalStories = ctx.getTotalStories?.() ?? 0;
63
+ const storiesCompleted = ctx.getStoriesCompleted?.() ?? 0;
64
+
65
+ const runCompleteEntry = {
66
+ timestamp: new Date().toISOString(),
67
+ level: "info",
68
+ stage: "run.complete",
69
+ message: "Feature execution terminated",
70
+ data: {
71
+ runId: ctx.runId,
72
+ feature: ctx.feature,
73
+ success: false,
74
+ exitReason,
75
+ totalCost,
76
+ iterations,
77
+ totalStories,
78
+ storiesCompleted,
79
+ durationMs,
80
+ },
81
+ };
82
+
83
+ const line = `${JSON.stringify(runCompleteEntry)}\n`;
84
+ appendFileSync(ctx.jsonlFilePath, line);
85
+ logger?.debug("crash-recovery", "run.complete event written", { exitReason });
86
+ } catch (err) {
87
+ console.error("[crash-recovery] Failed to write run.complete event:", err);
88
+ }
89
+ }
90
+
91
+ /**
92
+ * Update status.json to "crashed" state (both project-level and feature-level)
93
+ */
94
+ export async function updateStatusToCrashed(
95
+ statusWriter: StatusWriter,
96
+ totalCost: number,
97
+ iterations: number,
98
+ signal: string,
99
+ featureDir?: string,
100
+ ): Promise<void> {
101
+ try {
102
+ statusWriter.setRunStatus("crashed");
103
+ await statusWriter.update(totalCost, iterations, {
104
+ crashedAt: new Date().toISOString(),
105
+ crashSignal: signal,
106
+ });
107
+
108
+ if (featureDir) {
109
+ await statusWriter.writeFeatureStatus(featureDir, totalCost, iterations, {
110
+ crashedAt: new Date().toISOString(),
111
+ crashSignal: signal,
112
+ });
113
+ }
114
+ } catch (err) {
115
+ console.error("[crash-recovery] Failed to update status.json:", err);
116
+ }
117
+ }
118
+
119
+ /**
120
+ * Write exit summary entry to JSONL
121
+ */
122
+ export async function writeExitSummary(
123
+ jsonlFilePath: string | undefined,
124
+ totalCost: number,
125
+ iterations: number,
126
+ storiesCompleted: number,
127
+ durationMs: number,
128
+ ): Promise<void> {
129
+ if (!jsonlFilePath) return;
130
+
131
+ const logger = getSafeLogger();
132
+
133
+ try {
134
+ const summaryEntry = {
135
+ timestamp: new Date().toISOString(),
136
+ level: "info",
137
+ stage: "exit-summary",
138
+ message: "Run completed",
139
+ data: {
140
+ totalCost,
141
+ iterations,
142
+ storiesCompleted,
143
+ durationMs,
144
+ exitedCleanly: true,
145
+ },
146
+ };
147
+
148
+ const line = `${JSON.stringify(summaryEntry)}\n`;
149
+ appendFileSync(jsonlFilePath, line);
150
+ logger?.debug("crash-recovery", "Exit summary written");
151
+ } catch (err) {
152
+ logger?.warn("crash-recovery", "Failed to write exit summary", { error: (err as Error).message });
153
+ }
154
+ }
@@ -171,7 +171,13 @@ export async function setupRun(options: RunSetupOptions): Promise<RunSetupResult
171
171
  const globalPluginsDir = path.join(os.homedir(), ".nax", "plugins");
172
172
  const projectPluginsDir = path.join(workdir, "nax", "plugins");
173
173
  const configPlugins = config.plugins || [];
174
- const pluginRegistry = await loadPlugins(globalPluginsDir, projectPluginsDir, configPlugins, workdir);
174
+ const pluginRegistry = await loadPlugins(
175
+ globalPluginsDir,
176
+ projectPluginsDir,
177
+ configPlugins,
178
+ workdir,
179
+ config.disabledPlugins,
180
+ );
175
181
 
176
182
  // Log plugins loaded
177
183
  logger?.info("plugins", `Loaded ${pluginRegistry.plugins.length} plugins`, {
@@ -0,0 +1,278 @@
1
+ /**
2
+ * Parallel coordinator — Orchestrates parallel story execution
3
+ */
4
+
5
+ import os from "node:os";
6
+ import { join } from "node:path";
7
+ import type { NaxConfig } from "../config";
8
+ import type { LoadedHooksConfig } from "../hooks";
9
+ import { getSafeLogger } from "../logger";
10
+ import type { PipelineEventEmitter } from "../pipeline/events";
11
+ import type { PipelineContext } from "../pipeline/types";
12
+ import type { PluginRegistry } from "../plugins/registry";
13
+ import type { PRD, UserStory } from "../prd";
14
+ import { markStoryFailed, markStoryPassed, savePRD } from "../prd";
15
+ import { errorMessage } from "../utils/errors";
16
+ import { WorktreeManager } from "../worktree/manager";
17
+ import { MergeEngine, type StoryDependencies } from "../worktree/merge";
18
+ import { executeParallelBatch } from "./parallel-worker";
19
+
20
+ /**
21
+ * Group stories into dependency batches; stories in each batch can run in parallel.
22
+ */
23
+ function groupStoriesByDependencies(stories: UserStory[]): UserStory[][] {
24
+ const batches: UserStory[][] = [];
25
+ const processed = new Set<string>();
26
+ const storyMap = new Map(stories.map((s) => [s.id, s]));
27
+
28
+ // Keep processing until all stories are batched
29
+ while (processed.size < stories.length) {
30
+ const batch: UserStory[] = [];
31
+
32
+ for (const story of stories) {
33
+ if (processed.has(story.id)) continue;
34
+
35
+ // Check if all dependencies are satisfied
36
+ const depsCompleted = story.dependencies.every((dep) => processed.has(dep) || !storyMap.has(dep));
37
+
38
+ if (depsCompleted) {
39
+ batch.push(story);
40
+ }
41
+ }
42
+
43
+ if (batch.length === 0) {
44
+ // No stories ready — circular dependency or missing dep
45
+ const remaining = stories.filter((s) => !processed.has(s.id));
46
+ const logger = getSafeLogger();
47
+ logger?.error("parallel", "Cannot resolve story dependencies", {
48
+ remainingStories: remaining.map((s) => s.id),
49
+ });
50
+ throw new Error("Circular dependency or missing dependency detected");
51
+ }
52
+
53
+ // Mark batch stories as processed
54
+ for (const story of batch) {
55
+ processed.add(story.id);
56
+ }
57
+
58
+ batches.push(batch);
59
+ }
60
+
61
+ return batches;
62
+ }
63
+
64
+ /**
65
+ * Build dependency map for merge engine
66
+ */
67
+ function buildDependencyMap(stories: UserStory[]): StoryDependencies {
68
+ const deps: StoryDependencies = {};
69
+ for (const story of stories) {
70
+ deps[story.id] = story.dependencies;
71
+ }
72
+ return deps;
73
+ }
74
+
75
+ /**
76
+ * Determine max concurrency from parallel option
77
+ * - undefined: sequential mode (should not call this function)
78
+ * - 0: auto-detect (use CPU count)
79
+ * - N > 0: use N
80
+ */
81
+ function resolveMaxConcurrency(parallel: number): number {
82
+ if (parallel === 0) {
83
+ return os.cpus().length;
84
+ }
85
+ return Math.max(1, parallel);
86
+ }
87
+
88
/**
 * Execute stories in parallel using the worktree pipeline.
 *
 * High-level flow:
 * 1. Group stories by dependencies into batches.
 * 2. For each batch:
 *    a. Create worktrees for all stories.
 *    b. Execute the pipeline in parallel (respecting maxConcurrency).
 *    c. Merge successful branches in topological order.
 *    d. Clean up worktrees for failed stories; preserve worktrees of
 *       merge-conflicted stories for manual resolution.
 * 3. Save the PRD after every batch so progress is persisted incrementally.
 *
 * @param stories - Stories to execute; batching is derived from each story's
 *   `dependencies` list.
 * @param prdPath - Path the updated PRD is saved to after each batch.
 * @param projectRoot - Repository root; worktrees live under
 *   `<projectRoot>/.nax-wt/<storyId>`.
 * @param parallel - Concurrency option: 0 = auto-detect CPU count, N > 0 = N.
 * @param eventEmitter - Optional pipeline event sink, forwarded to workers.
 * @returns Completed-story count, accumulated cost, the updated PRD, and any
 *   merge conflicts left unresolved after this pass.
 * @throws When story dependencies are circular or unresolvable (raised by
 *   groupStoriesByDependencies before any execution starts).
 */
export async function executeParallel(
  stories: UserStory[],
  prdPath: string,
  projectRoot: string,
  config: NaxConfig,
  hooks: LoadedHooksConfig,
  plugins: PluginRegistry,
  prd: PRD,
  featureDir: string | undefined,
  parallel: number,
  eventEmitter?: PipelineEventEmitter,
): Promise<{
  storiesCompleted: number;
  totalCost: number;
  updatedPrd: PRD;
  mergeConflicts: Array<{ storyId: string; conflictFiles: string[]; originalCost: number }>;
}> {
  const logger = getSafeLogger();
  const maxConcurrency = resolveMaxConcurrency(parallel);
  const worktreeManager = new WorktreeManager();
  const mergeEngine = new MergeEngine(worktreeManager);

  logger?.info("parallel", "Starting parallel execution", {
    totalStories: stories.length,
    maxConcurrency,
  });

  // Group stories by dependencies (throws on circular deps).
  const batches = groupStoriesByDependencies(stories);
  logger?.info("parallel", "Grouped stories into batches", {
    batchCount: batches.length,
    batches: batches.map((b, i) => ({ index: i, storyCount: b.length, storyIds: b.map((s) => s.id) })),
  });

  let storiesCompleted = 0;
  let totalCost = 0;
  // PRD is mutated in place throughout; `currentPrd` is an alias for clarity.
  const currentPrd = prd;
  const allMergeConflicts: Array<{ storyId: string; conflictFiles: string[]; originalCost: number }> = [];

  // Execute each batch sequentially (stories within each batch run in parallel).
  for (let batchIndex = 0; batchIndex < batches.length; batchIndex++) {
    const batch = batches[batchIndex];
    logger?.info("parallel", `Executing batch ${batchIndex + 1}/${batches.length}`, {
      storyCount: batch.length,
      storyIds: batch.map((s) => s.id),
    });

    // Build context for this batch (shared across all stories in batch).
    const baseContext = {
      config,
      prd: currentPrd,
      featureDir,
      hooks,
      plugins,
      storyStartTime: new Date().toISOString(),
    };

    // Create worktrees for all stories in batch; storyId → worktree path.
    const worktreePaths = new Map<string, string>();

    for (const story of batch) {
      const worktreePath = join(projectRoot, ".nax-wt", story.id);
      try {
        await worktreeManager.create(projectRoot, story.id);
        worktreePaths.set(story.id, worktreePath);

        logger?.info("parallel", "Created worktree for story", {
          storyId: story.id,
          worktreePath,
        });
      } catch (error) {
        // Worktree creation failed: mark the story failed in the PRD.
        markStoryFailed(currentPrd, story.id);
        logger?.error("parallel", "Failed to create worktree", {
          storyId: story.id,
          error: errorMessage(error),
        });
      }
    }

    // Execute batch in parallel.
    // NOTE(review): `batch` still contains stories whose worktree creation
    // failed above (they were only marked failed in the PRD); presumably
    // executeParallelBatch skips ids absent from worktreePaths — confirm.
    const batchResult = await executeParallelBatch(
      batch,
      projectRoot,
      config,
      baseContext,
      worktreePaths,
      maxConcurrency,
      eventEmitter,
    );

    totalCost += batchResult.totalCost;

    // Merge successful stories in topological order.
    if (batchResult.pipelinePassed.length > 0) {
      const successfulIds = batchResult.pipelinePassed.map((s) => s.id);
      const deps = buildDependencyMap(batch);

      logger?.info("parallel", "Merging successful stories", {
        storyIds: successfulIds,
      });

      const mergeResults = await mergeEngine.mergeAll(projectRoot, successfulIds, deps);

      // Process merge results: passed stories are recorded in the PRD,
      // conflicted ones are collected for a later rectification pass.
      for (const mergeResult of mergeResults) {
        if (mergeResult.success) {
          // Update PRD: mark story as passed.
          markStoryPassed(currentPrd, mergeResult.storyId);
          storiesCompleted++;
          // Track the merged story object so batch telemetry reflects it.
          const mergedStory = batchResult.pipelinePassed.find((s) => s.id === mergeResult.storyId);
          if (mergedStory) batchResult.merged.push(mergedStory);

          logger?.info("parallel", "Story merged successfully", {
            storyId: mergeResult.storyId,
            retryCount: mergeResult.retryCount,
          });
        } else {
          // Merge conflict — mark story as failed and record the conflict
          // (with the cost already spent) for potential rectification.
          markStoryFailed(currentPrd, mergeResult.storyId);
          batchResult.mergeConflicts.push({
            storyId: mergeResult.storyId,
            conflictFiles: mergeResult.conflictFiles || [],
            originalCost: batchResult.storyCosts.get(mergeResult.storyId) ?? 0,
          });

          logger?.error("parallel", "Merge conflict", {
            storyId: mergeResult.storyId,
            conflictFiles: mergeResult.conflictFiles,
          });

          // Keep worktree for manual resolution.
          logger?.warn("parallel", "Worktree preserved for manual conflict resolution", {
            storyId: mergeResult.storyId,
            worktreePath: join(projectRoot, ".nax-wt", mergeResult.storyId),
          });
        }
      }
    }

    // Mark failed stories in PRD and clean up their worktrees.
    for (const { story, error } of batchResult.failed) {
      markStoryFailed(currentPrd, story.id);

      logger?.error("parallel", "Cleaning up failed story worktree", {
        storyId: story.id,
        error,
      });

      try {
        await worktreeManager.remove(projectRoot, story.id);
      } catch (cleanupError) {
        // Best-effort cleanup; a stale worktree is logged, not fatal.
        logger?.warn("parallel", "Failed to clean up worktree", {
          storyId: story.id,
          error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError),
        });
      }
    }

    // Save PRD after each batch so progress is persisted incrementally.
    await savePRD(currentPrd, prdPath);

    allMergeConflicts.push(...batchResult.mergeConflicts);

    logger?.info("parallel", `Batch ${batchIndex + 1} complete`, {
      pipelinePassed: batchResult.pipelinePassed.length,
      merged: batchResult.merged.length,
      failed: batchResult.failed.length,
      mergeConflicts: batchResult.mergeConflicts.length,
      batchCost: batchResult.totalCost,
    });
  }

  logger?.info("parallel", "Parallel execution complete", {
    storiesCompleted,
    totalCost,
  });

  return { storiesCompleted, totalCost, updatedPrd: currentPrd, mergeConflicts: allMergeConflicts };
}
@@ -0,0 +1,117 @@
1
+ /**
2
+ * Rectification Pass — Re-run conflicted stories sequentially
3
+ *
4
+ * After the initial parallel merge pass, handle any conflicts
5
+ * by re-running each conflicted story on the updated base (MFX-005).
6
+ */
7
+
8
+ import { getSafeLogger } from "../logger";
9
+ import type { StoryMetrics } from "../metrics";
10
+ import type { PRD } from "../prd";
11
+ import { markStoryPassed } from "../prd";
12
+ import type { ParallelExecutorOptions } from "./parallel-executor";
13
+ import type {
14
+ ConflictedStoryInfo,
15
+ RectificationResult,
16
+ RectifyConflictedStoryOptions,
17
+ } from "./parallel-executor-rectify";
18
+
19
/** Metrics for stories completed via rectification */
export interface ParallelStoryMetrics extends StoryMetrics {
  // How the story was completed: normal parallel pass, sequential fallback,
  // or the post-conflict rectification pass.
  source: "parallel" | "sequential" | "rectification";
  // True when the story first hit a merge conflict and was re-run.
  rectifiedFromConflict?: boolean;
  // Cost of the original (conflicted) attempt, when applicable.
  originalCost?: number;
  // Cost of the rectification re-run, when applicable.
  rectificationCost?: number;
}
26
+
27
+ /**
28
+ * Run the rectification pass: sequentially re-run each conflicted story on
29
+ * the updated base (which already includes all clean merges from the first pass).
30
+ *
31
+ * Note: rectifyConflictedStory must be passed as a parameter for proper test mocking.
32
+ */
33
+ export async function runRectificationPass(
34
+ conflictedStories: ConflictedStoryInfo[],
35
+ options: ParallelExecutorOptions,
36
+ prd: PRD,
37
+ rectifyConflictedStory?: (opts: RectifyConflictedStoryOptions) => Promise<RectificationResult>,
38
+ ): Promise<{
39
+ rectifiedCount: number;
40
+ stillConflictingCount: number;
41
+ additionalCost: number;
42
+ updatedPrd: PRD;
43
+ rectificationMetrics: ParallelStoryMetrics[];
44
+ }> {
45
+ const logger = getSafeLogger();
46
+ const { workdir, config, hooks, pluginRegistry, eventEmitter } = options;
47
+
48
+ // Use provided function or import default
49
+ const rectify =
50
+ rectifyConflictedStory ||
51
+ (async (opts: RectifyConflictedStoryOptions) => {
52
+ const { rectifyConflictedStory: importedRectify } = await import("./parallel-executor-rectify");
53
+ return importedRectify(opts);
54
+ });
55
+
56
+ const rectificationMetrics: ParallelStoryMetrics[] = [];
57
+ let rectifiedCount = 0;
58
+ let stillConflictingCount = 0;
59
+ let additionalCost = 0;
60
+
61
+ logger?.info("parallel", "Starting merge conflict rectification", {
62
+ stories: conflictedStories.map((s) => s.storyId),
63
+ totalConflicts: conflictedStories.length,
64
+ });
65
+
66
+ // Sequential — each story sees all previously rectified stories in the base
67
+ for (const conflictInfo of conflictedStories) {
68
+ const result = await rectify({
69
+ ...conflictInfo,
70
+ workdir,
71
+ config,
72
+ hooks,
73
+ pluginRegistry,
74
+ prd,
75
+ eventEmitter,
76
+ });
77
+
78
+ additionalCost += result.cost;
79
+
80
+ if (result.success) {
81
+ markStoryPassed(prd, result.storyId);
82
+ rectifiedCount++;
83
+
84
+ rectificationMetrics.push({
85
+ storyId: result.storyId,
86
+ complexity: "unknown",
87
+ modelTier: "parallel",
88
+ modelUsed: "parallel",
89
+ attempts: 1,
90
+ finalTier: "parallel",
91
+ success: true,
92
+ cost: result.cost,
93
+ durationMs: 0,
94
+ firstPassSuccess: false,
95
+ startedAt: new Date().toISOString(),
96
+ completedAt: new Date().toISOString(),
97
+ source: "rectification" as const,
98
+ rectifiedFromConflict: true,
99
+ originalCost: conflictInfo.originalCost,
100
+ rectificationCost: result.cost,
101
+ });
102
+ } else {
103
+ const isFinalConflict = result.finalConflict === true;
104
+ if (isFinalConflict) {
105
+ stillConflictingCount++;
106
+ }
107
+ // pipelineFailure — not counted as structural conflict, story remains failed
108
+ }
109
+ }
110
+
111
+ logger?.info("parallel", "Rectification complete", {
112
+ rectified: rectifiedCount,
113
+ stillConflicting: stillConflictingCount,
114
+ });
115
+
116
+ return { rectifiedCount, stillConflictingCount, additionalCost, updatedPrd: prd, rectificationMetrics };
117
+ }
@@ -0,0 +1,135 @@
1
+ /**
2
+ * Conflict Rectification Logic
3
+ *
4
+ * Handles re-running a single conflicted story on the updated base branch
5
+ * so it sees all previously merged stories (MFX-005).
6
+ */
7
+
8
+ import path from "node:path";
9
+ import type { NaxConfig } from "../config";
10
+ import type { LoadedHooksConfig } from "../hooks";
11
+ import { getSafeLogger } from "../logger";
12
+ import type { PipelineEventEmitter } from "../pipeline/events";
13
+ import type { PluginRegistry } from "../plugins/registry";
14
+ import type { PRD } from "../prd";
15
+ import { errorMessage } from "../utils/errors";
16
+
17
+ /** A story that conflicted during the initial parallel merge pass */
18
+ export interface ConflictedStoryInfo {
19
+ storyId: string;
20
+ conflictFiles: string[];
21
+ originalCost: number;
22
+ }
23
+
24
+ /** Result from attempting to rectify a single conflicted story */
25
+ export type RectificationResult =
26
+ | { success: true; storyId: string; cost: number }
27
+ | {
28
+ success: false;
29
+ storyId: string;
30
+ cost: number;
31
+ finalConflict: boolean;
32
+ pipelineFailure?: boolean;
33
+ conflictFiles?: string[];
34
+ };
35
+
36
+ /** Options passed to rectifyConflictedStory */
37
+ export interface RectifyConflictedStoryOptions extends ConflictedStoryInfo {
38
+ workdir: string;
39
+ config: NaxConfig;
40
+ hooks: LoadedHooksConfig;
41
+ pluginRegistry: PluginRegistry;
42
+ prd: PRD;
43
+ eventEmitter?: PipelineEventEmitter;
44
+ }
45
+
46
+ /**
47
+ * Actual implementation of rectifyConflictedStory.
48
+ *
49
+ * Steps:
50
+ * 1. Remove the old worktree
51
+ * 2. Create a fresh worktree from current HEAD (post-merge state)
52
+ * 3. Re-run the full story pipeline
53
+ * 4. Attempt merge on the updated base
54
+ * 5. Return success/finalConflict
55
+ */
56
+ export async function rectifyConflictedStory(options: RectifyConflictedStoryOptions): Promise<RectificationResult> {
57
+ const { storyId, workdir, config, hooks, pluginRegistry, prd, eventEmitter } = options;
58
+ const logger = getSafeLogger();
59
+
60
+ logger?.info("parallel", "Rectifying story on updated base", { storyId, attempt: "rectification" });
61
+
62
+ try {
63
+ const { WorktreeManager } = await import("../worktree/manager");
64
+ const { MergeEngine } = await import("../worktree/merge");
65
+ const { runPipeline } = await import("../pipeline/runner");
66
+ const { defaultPipeline } = await import("../pipeline/stages");
67
+ const { routeTask } = await import("../routing");
68
+
69
+ const worktreeManager = new WorktreeManager();
70
+ const mergeEngine = new MergeEngine(worktreeManager);
71
+
72
+ // Step 1: Remove old worktree
73
+ try {
74
+ await worktreeManager.remove(workdir, storyId);
75
+ } catch {
76
+ // Ignore — worktree may have already been removed
77
+ }
78
+
79
+ // Step 2: Create fresh worktree from current HEAD
80
+ await worktreeManager.create(workdir, storyId);
81
+ const worktreePath = path.join(workdir, ".nax-wt", storyId);
82
+
83
+ // Step 3: Re-run the story pipeline
84
+ const story = prd.userStories.find((s) => s.id === storyId);
85
+ if (!story) {
86
+ return { success: false, storyId, cost: 0, finalConflict: false, pipelineFailure: true };
87
+ }
88
+
89
+ const routing = routeTask(story.title, story.description, story.acceptanceCriteria, story.tags, config);
90
+
91
+ const pipelineContext = {
92
+ config,
93
+ prd,
94
+ story,
95
+ stories: [story],
96
+ workdir: worktreePath,
97
+ featureDir: undefined,
98
+ hooks,
99
+ plugins: pluginRegistry,
100
+ storyStartTime: new Date().toISOString(),
101
+ routing: routing as import("../pipeline/types").RoutingResult,
102
+ };
103
+
104
+ const pipelineResult = await runPipeline(defaultPipeline, pipelineContext, eventEmitter);
105
+ const cost = pipelineResult.context.agentResult?.estimatedCost ?? 0;
106
+
107
+ if (!pipelineResult.success) {
108
+ logger?.info("parallel", "Rectification failed - preserving worktree", { storyId });
109
+ return { success: false, storyId, cost, finalConflict: false, pipelineFailure: true };
110
+ }
111
+
112
+ // Step 4: Attempt merge on updated base
113
+ const mergeResults = await mergeEngine.mergeAll(workdir, [storyId], { [storyId]: [] });
114
+ const mergeResult = mergeResults[0];
115
+
116
+ if (!mergeResult || !mergeResult.success) {
117
+ const conflictFiles = mergeResult?.conflictFiles ?? [];
118
+ logger?.info("parallel", "Rectification failed - preserving worktree", { storyId });
119
+ return { success: false, storyId, cost, finalConflict: true, conflictFiles };
120
+ }
121
+
122
+ logger?.info("parallel", "Rectification succeeded - story merged", {
123
+ storyId,
124
+ originalCost: options.originalCost,
125
+ rectificationCost: cost,
126
+ });
127
+ return { success: true, storyId, cost };
128
+ } catch (error) {
129
+ logger?.error("parallel", "Rectification failed - preserving worktree", {
130
+ storyId,
131
+ error: errorMessage(error),
132
+ });
133
+ return { success: false, storyId, cost: 0, finalConflict: false, pipelineFailure: true };
134
+ }
135
+ }