@nathapp/nax 0.22.1 → 0.22.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CLAUDE.md CHANGED
@@ -75,6 +75,23 @@ Runner.run() [src/execution/runner.ts — thin orchestrator only]
75
75
 
76
76
  Detailed coding standards, test architecture, and forbidden patterns are in `.claude/rules/`. Claude Code loads these automatically.
77
77
 
78
+
79
+ ## Code Intelligence (Solograph MCP)
80
+
81
+ Use **solograph** MCP tools on-demand for code understanding. Do not use web_search, kb_search, or source_* tools.
82
+
83
+ | Tool | When to use |
84
+ |:-----|:------------|
85
+ | `project_code_search` | Find existing patterns, symbols, or implementations before writing new code |
86
+ | `codegraph_explain` | Get architecture overview of nax before tackling unfamiliar areas |
87
+ | `codegraph_query` | Cypher queries — dependency analysis, impact analysis, hub files |
88
+ | `codegraph_stats` | Quick graph stats (file/symbol counts) |
89
+ | `codegraph_shared` | Find packages shared across projects |
90
+ | `session_search` | Search prior Claude Code session history for relevant context |
91
+ | `project_info` | Project registry info |
92
+ | `project_code_reindex` | Reindex after creating or deleting source files, or major refactors |
93
+
94
+ Single source of truth: VPS solograph instance (Mac01 tunnels to VPS — same data either way).
78
95
  ## IMPORTANT
79
96
 
80
97
  - Do NOT push to remote — let the human review and push.
package/bin/nax.ts CHANGED
@@ -39,7 +39,7 @@
39
39
 
40
40
  import { existsSync, mkdirSync } from "node:fs";
41
41
  import { homedir } from "node:os";
42
- import { join, resolve } from "node:path";
42
+ import { join } from "node:path";
43
43
  import chalk from "chalk";
44
44
  import { Command } from "commander";
45
45
 
@@ -217,7 +217,6 @@ program
217
217
  .option("--silent", "Silent mode (errors only)", false)
218
218
  .option("--json", "JSON mode (raw JSONL output to stdout)", false)
219
219
  .option("-d, --dir <path>", "Working directory", process.cwd())
220
- .option("--status-file <path>", "Write machine-readable JSON status file (updated during run)")
221
220
  .option("--skip-precheck", "Skip precheck validations (advanced users only)", false)
222
221
  .action(async (options) => {
223
222
  // Validate directory path
@@ -331,8 +330,8 @@ program
331
330
  console.log(chalk.dim(" [Headless mode — pipe output]"));
332
331
  }
333
332
 
334
- // Resolve --status-file relative to cwd (absolute paths unchanged)
335
- const statusFilePath = options.statusFile ? resolve(process.cwd(), options.statusFile) : undefined;
333
+ // Compute status file path: <workdir>/nax/status.json
334
+ const statusFilePath = join(workdir, "nax", "status.json");
336
335
 
337
336
  // Parse --parallel option
338
337
  let parallel: number | undefined;
package/docs/ROADMAP.md CHANGED
@@ -6,10 +6,10 @@
6
6
 
7
7
  ---
8
8
 
9
- ## Next: v0.18.0 — Orchestration Quality
9
+ ## v0.18.0 — Orchestration Quality
10
10
 
11
11
  **Theme:** Fix execution bugs and improve orchestration reliability
12
- **Status:** 🔲 Planned
12
+ **Status:** Shipped (2026-03-03)
13
13
 
14
14
  ### Bugfixes (Priority)
15
15
  - [x] ~~**BUG-016:** Hardcoded 120s timeout in verify stage → read from config~~
@@ -64,19 +64,8 @@
64
64
  - [x] ~~Result: verify drops from ~125s to ~10-20s for typical single-file fixes~~
65
65
 
66
66
  ### Bun PTY Migration (BUN-001)
67
- - [ ] Replace `node-pty` (native addon, requires python/make/g++ to build) with `Bun.Terminal` API (v1.3.5+)
68
- - [ ] Update `src/agents/claude.ts` `runInteractive()` — replace `nodePty.spawn()` with `Bun.Terminal`
69
- - [ ] Update `src/tui/hooks/usePty.ts` — replace `IPty` interface with Bun equivalent
70
- - [ ] Remove `node-pty` from `dependencies` in `package.json`
71
- - [ ] Remove `--ignore-scripts` workaround from `.gitlab-ci.yml`
72
- - [ ] Benefit: no native build, no gyp/python/gcc in CI, cleaner alpine support
73
-
74
- ### CI Memory Optimization (CI-001)
75
- - [ ] Investigate splitting test suite into parallel jobs (unit / integration / ui) to reduce per-job peak memory
76
- - [ ] Evaluate `bun test --shard` when stable (currently experimental)
77
- - [ ] Target: make test suite pass on 1GB runners (currently requires 8GB shared runner)
78
- - [ ] Known constraints: 2008 tests across 125 files, ~75s on local VPS (3.8GB), OOMs even with `--smol --concurrency 1`
79
- - [ ] Current workaround: use `saas-linux-small-amd64` (8GB) shared runner
67
+ - [x] ~~Replace `node-pty` with `Bun.spawn` (piped stdio) — shipped in v0.18.5~~
68
+
80
69
 
81
70
  ---
82
71
 
@@ -125,12 +114,35 @@
125
114
  **Spec:** [docs/specs/bun-pty-migration.md](specs/bun-pty-migration.md)
126
115
 
127
116
  ### BUN-001: Replace node-pty with Bun.spawn
128
- - [x] Research gate: verify `Bun.Terminal` availability on Bun 1.3.9; confirm Claude Code works with piped stdio
129
- - [ ] `src/agents/claude.ts` `runInteractive()` — replace `nodePty.spawn()` with `Bun.spawn` (piped stdio)
130
- - [ ] `src/tui/hooks/usePty.ts` — replace `pty.IPty` state with `Bun.Subprocess`
131
- - [ ] `src/agents/types.ts` — remove `IPty` dependency from `PtyHandle` interface
132
- - [ ] `package.json`remove `node-pty` from dependencies
133
- - [ ] `.gitlab-ci.yml` — remove `python3 make g++` from `apk add`; remove `--ignore-scripts` from `bun install`
117
+ - [x] ~~All sub-items complete — `claude.ts` + `usePty.ts` migrated to `Bun.spawn`, `node-pty` removed from `package.json`, CI cleaned up~~
118
+
119
+ ---
120
+
121
+ ## v0.23.0 — Status File Consolidation
122
+
123
+ **Theme:** Auto-write status.json to well-known paths, align readers, remove dead options
124
+ **Status:** 🔄 In Progress (self-dev running, SFC-001 ✅)
125
+ **Spec:** [docs/specs/status-file-consolidation.md](specs/status-file-consolidation.md)
126
+ **Pre-requisite for:** v0.24.0 (Central Run Registry)
127
+
128
+ ### Stories
129
+ - [x] ~~**SFC-001:** Auto-write project-level status — remove `--status-file` flag, always write to `<workdir>/nax/status.json`~~
130
+ - [ ] **SFC-002:** Write feature-level status on run end — copy final snapshot to `<workdir>/nax/features/<feature>/status.json`
131
+ - [ ] **SFC-003:** Align status readers — `nax status` + `nax diagnose` read from correct paths
132
+ - [ ] **SFC-004:** Clean up dead code — remove `--status-file` option, `.nax-status.json` references
133
+
134
+ ---
135
+
136
+ ## v0.24.0 — Central Run Registry
137
+
138
+ **Theme:** Global run index across all projects — single source of truth for all nax run history
139
+ **Status:** 🔲 Planned
140
+ **Spec:** [docs/specs/central-run-registry.md](specs/central-run-registry.md)
141
+
142
+ ### Stories
143
+ - [ ] **CRR-001:** `src/pipeline/subscribers/registry.ts` — `wireRegistry()` subscriber, listens to `run:started`, writes `~/.nax/runs/<project>-<feature>-<runId>/meta.json` (path pointers only — no data duplication, no symlinks)
144
+ - [ ] **CRR-002:** `src/commands/runs.ts` — `nax runs` CLI, reads `meta.json` → resolves live `status.json` from `statusPath`, displays table (project, feature, status, stories, duration, date). Filters: `--project`, `--last`, `--status`
145
+ - [ ] **CRR-003:** `nax logs --run <runId>` — resolve run from global registry via `eventsDir`, stream logs from any directory
134
146
 
135
147
  ---
136
148
 
@@ -148,25 +160,25 @@
148
160
  - [x] ~~**BUG-041:**~~ Won't fix — superseded by FEAT-010
149
161
  - [x] ~~**FEAT-012:**~~ Won't fix — balanced tier sufficient for test-writer
150
162
 
151
- ### → v0.22.0 Pipeline Re-Architecture (in progress)
163
+ ### → v0.22.1 Pipeline Re-Architecture ✅ Shipped (2026-03-07)
152
164
  **ADR:** [docs/adr/ADR-005-pipeline-re-architecture.md](adr/ADR-005-pipeline-re-architecture.md)
153
165
  **Plan:** [docs/adr/ADR-005-implementation-plan.md](adr/ADR-005-implementation-plan.md)
154
- **Branch:** `feat/re-architecture`
155
166
 
156
- **Theme:** Eliminate ad-hoc orchestration, consolidate 4 scattered verification paths into single orchestrator, add event-bus-driven hooks/plugins/interaction, new stages (rectify, autofix, regression).
167
+ **Theme:** Eliminate ad-hoc orchestration, consolidate 4 scattered verification paths into single orchestrator, add event-bus-driven hooks/plugins/interaction, new stages (rectify, autofix, regression), post-run pipeline SSOT.
157
168
 
158
- - [ ] **Phase 0:** Test suite preparation reorganize, split monsters, RE-ARCH tags, coverage gaps
159
- - [ ] **Phase 1:** VerificationOrchestrator + Pipeline Event Bus (additive, no behavior change)
160
- - [ ] **Phase 2:** New stages `rectify`, `autofix`, `regression` + `retry` stage action
161
- - [ ] **Phase 3:** Event-bus subscribers for hooks, reporters, interaction (replace 20+ scattered call sites)
162
- - [ ] **Phase 4:** Delete deprecated files, simplify sequential executor to ~80 lines
169
+ - [x] **Phase 1:** VerificationOrchestrator + Pipeline Event Bus (additive, no behavior change)
170
+ - [x] **Phase 2:** New stages `rectify`, `autofix`, `regression` + `retry` stage action
171
+ - [x] **Phase 3:** Event-bus subscribers for hooks, reporters, interaction (replace 20+ scattered call sites)
172
+ - [x] **Phase 5:** Post-run pipeline SSOT — `deferred-regression` stage, tier escalation into `iteration-runner`, `runAcceptanceLoop` → `runPipeline(postRunPipeline)`
163
173
 
164
- **Resolves in this version:**
165
- - [ ] **BUG-040:** Lint/typecheck auto-repair → `autofix` stage + `quality.commands.lintFix/formatFix`
166
- - [ ] **BUG-042:** Verifier failure capture → unified `VerifyResult` with `failures[]` always populated
167
- - [ ] **FEAT-014:** Heartbeat observability → Pipeline Event Bus with typed events
168
- - [ ] **BUG-026:** Regression gate triggers full retry → targeted `rectify` stage with `retry` action
169
- - [ ] **BUG-028:** Routing cache ignores escalation tier → cache key includes tier
174
+ **Resolved:**
175
+ - [x] **BUG-040:** Lint/typecheck auto-repair → `autofix` stage + `quality.commands.lintFix/formatFix`
176
+ - [x] **BUG-042:** Verifier failure capture → unified `VerifyResult` with `failures[]` always populated
177
+ - [x] **FEAT-014:** Heartbeat observability → Pipeline Event Bus with typed events
178
+ - [x] **BUG-026:** Regression gate triggers full retry → targeted `rectify` stage with `retry` action
179
+ - [x] **BUG-028:** Routing cache ignores escalation tier → cache key includes tier
180
+
181
+ **Test results:** 2264 pass, 12 skip, 1 fail (pre-existing disk space flaky)
170
182
 
171
183
  ---
172
184
 
@@ -202,9 +214,6 @@
202
214
  - [x] `priorFailures` injected into escalated agent prompts via `context/builder.ts`
203
215
  - [x] Reverse file mapping for regression attribution
204
216
 
205
- ### Central Run Registry (carried forward)
206
- - [ ] `~/.nax/runs/<project>-<feature>-<runId>/` with status.json + events.jsonl symlink
207
-
208
217
  ---
209
218
 
210
219
  ## Shipped
@@ -212,6 +221,8 @@
212
221
  | Version | Theme | Date | Details |
213
222
  |:---|:---|:---|:---|
214
223
  | v0.18.1 | Type Safety + CI Pipeline | 2026-03-03 | 60 TS errors + 12 lint errors fixed, GitLab CI green (1952/56/0) |
224
+ | v0.22.2 | Routing Stability + SFC-001 | 2026-03-07 | BUG-040 floating outputPromise crash on LLM timeout retry; SFC-001 auto-write status.json |
225
+ | v0.22.1 | Pipeline Re-Architecture | 2026-03-07 | VerificationOrchestrator, EventBus, new stages (rectify/autofix/regression/deferred-regression), post-run SSOT. 2264 pass |
215
226
  | v0.20.0 | Verification Architecture v2 | 2026-03-06 | Deferred regression gate, remove duplicate tests, BUG-037 |
216
227
  | v0.19.0 | Hardening & Compliance | 2026-03-04 | SEC-1 to SEC-5, BUG-1, Node.js API removal, _deps rollout |
217
228
  | v0.18.5 | Bun PTY Migration | 2026-03-04 | BUN-001: node-pty → Bun.spawn, CI cleanup, flaky test fix |
@@ -271,16 +282,17 @@
271
282
  - [x] **BUG-032:** Routing stage overrides escalated `modelTier` with complexity-derived tier. `src/pipeline/stages/routing.ts:43` always runs `complexityToModelTier(routing.complexity, config)` even when `story.routing.modelTier` was explicitly set by `handleTierEscalation()`. BUG-026 was escalated to `balanced` (logged in iteration header), but `Task classified` shows `modelTier=fast` because `complexityToModelTier("simple", config)` → `"fast"`. Related to BUG-013 (escalation routing not applied) which was marked fixed, but the fix in `applyCachedRouting()` in `pipeline-result-handler.ts:295-310` runs **after** the routing stage — too late. **Location:** `src/pipeline/stages/routing.ts:43`. **Fix:** When `story.routing.modelTier` is explicitly set (by escalation), skip `complexityToModelTier()` and use the cached tier directly. Only derive from complexity when `story.routing.modelTier` is absent.
272
283
  - [x] **BUG-033:** LLM routing has no retry on timeout — single attempt with hardcoded 15s default. All 5 LLM routing attempts in the v0.18.3 run timed out at 15s, forcing keyword fallback every time. `src/routing/strategies/llm.ts:63` reads `llmConfig?.timeoutMs ?? 15000` but there's no retry logic — one timeout = immediate fallback. **Location:** `src/routing/strategies/llm.ts:callLlm()`. **Fix:** Add `routing.llm.retries` config (default: 1) with backoff. Also surface `routing.llm.timeoutMs` in `nax config --explain` and consider raising default to 30s for batch routing which processes multiple stories.
273
284
 
274
- - [ ] **BUG-037:** Test output summary (verify stage) captures precheck boilerplate instead of actual `bun test` failure. **Symptom:** Logs show successful prechecks (Head) instead of failed tests (Tail). **Fix:** Change `Test output preview` log to tail the last 20 lines of output instead of heading the first 10.
275
- - [ ] **BUG-038:** `smart-runner` over-matching when global defaults change. **Symptom:** Changing `DEFAULT_CONFIG` matches broad integration tests that fail due to environment/precheck side effects, obscuring targeted results. **Fix:** Refine path mapping to prioritize direct unit tests and exclude known heavy integration tests from default smart-runner matches unless explicitly relevant.
285
+ - [x] ~~**BUG-037:** Test output summary (verify stage) captures precheck boilerplate instead of actual `bun test` failure. Fixed: `.slice(-20)` tail shipped in v0.22.1 (re-arch phase 2).~~
286
+ - [x] ~~**BUG-038:** `smart-runner` over-matching when global defaults change. Fixed by FEAT-010 (v0.21.0) per-attempt `storyGitRef` baseRef tracking; `git diff <baseRef>..HEAD` prevents cross-story file pollution.~~
276
287
  ### Features
277
288
  - [x] ~~`nax unlock` command~~
278
289
  - [x] ~~Constitution file support~~
279
290
  - [x] ~~Per-story testStrategy override — v0.18.1~~
280
291
  - [x] ~~Smart Test Runner — v0.18.2~~
281
- - [x] ~~Central Run Registry — v0.19.0~~
282
- - [ ] **BUN-001:** Bun PTY Migration — replace `node-pty` with `Bun.Terminal` API
292
+ - [ ] **Central Run Registry** — moved to v0.24.0
293
+ - [x] ~~**BUN-001:** Bun PTY Migration — replace `node-pty` with `Bun.spawn` (piped stdio). Shipped in v0.18.5.~~
283
294
  - [ ] **CI-001:** CI Memory Optimization — parallel test sharding for 1GB runners
295
+ - [ ] **CI-001:** CI Memory Optimization — parallel test sharding to pass on 1GB runners (currently requires 8GB). Evaluate `bun test --shard` when stable.
284
296
  - [ ] Cost tracking dashboard
285
297
  - [ ] npm publish setup
286
298
  - [ ] `nax diagnose --ai` flag (LLM-assisted, future TBD)
@@ -296,4 +308,4 @@ Sequential canary → stable: `v0.12.0-canary.0` → `canary.N` → `v0.12.0`
296
308
  Canary: `npm publish --tag canary`
297
309
  Stable: `npm publish` (latest)
298
310
 
299
- *Last updated: 2026-03-06 (v0.21.0 shipped; v0.22.0: Pipeline Re-Architecture in progress — ADR-005, 5 phases, resolves BUG-026/028/040/042 + FEAT-014)*
311
+ *Last updated: 2026-03-07 (v0.22.1 shipped — Pipeline Re-Architecture: VerificationOrchestrator, EventBus, new stages, post-run SSOT)*
@@ -0,0 +1,104 @@
1
+ # Central Run Registry — Spec
2
+
3
+ **Version:** v0.24.0
4
+ **Status:** Planned
5
+
6
+ ---
7
+
8
+ ## Problem
9
+
10
+ nax stores run state per-project at `<workdir>/nax/features/<feature>/status.json`. There is no global index — you must `cd` into each project to see its run history. There is no way to answer "what has nax run across all my projects recently?"
11
+
12
+ ## Existing Layout (per-project)
13
+
14
+ ```
15
+ <workdir>/nax/
16
+ config.json
17
+ features/
18
+ <feature>/
19
+ prd.json
20
+ status.json ← live run state (NaxStatusFile, written continuously)
21
+ runs/
22
+ <timestamp>.jsonl ← event log
23
+ ```
24
+
25
+ `status.json` already contains: runId, feature, status (running/completed/failed/crashed), progress counts, cost, current story, startedAt, etc. — everything needed for a global view.
26
+
27
+ ## Goal
28
+
29
+ A global `~/.nax/runs/` registry that indexes every nax run via path references — no data duplication, no symlinks.
30
+
31
+ ---
32
+
33
+ ## Directory Structure
34
+
35
+ ```
36
+ ~/.nax/runs/
37
+ <project>-<feature>-<runId>/
38
+ meta.json ← pointer record only (paths + minimal identifiers)
39
+ ```
40
+
41
+ ### meta.json Schema
42
+
43
+ ```json
44
+ {
45
+ "runId": "run-2026-03-07T05-30-00-000Z",
46
+ "project": "my-app",
47
+ "feature": "auth-system",
48
+ "workdir": "/Users/william/projects/my-app",
49
+ "statusPath": "/Users/william/projects/my-app/nax/features/auth-system/status.json",
50
+ "eventsDir": "/Users/william/projects/my-app/nax/features/auth-system/runs",
51
+ "registeredAt": "2026-03-07T05:30:00.000Z"
52
+ }
53
+ ```
54
+
55
+ - Written **once** on run start — never updated (source of truth stays in `statusPath`)
56
+ - `nax runs` reads `meta.json` to locate `statusPath`, then reads live `status.json` for current state
57
+ - If `statusPath` doesn't exist (project deleted/moved) → show `[unavailable]` gracefully
58
+
59
+ ---
60
+
61
+ ## Implementation
62
+
63
+ ### CRR-001: Registry Writer (new subscriber)
64
+
65
+ - New module: `src/execution/run-registry.ts` — `registerRun(meta)`, `getRunsDir()`
66
+ - On run start: create `~/.nax/runs/<project>-<feature>-<runId>/meta.json`
67
+ - Wire as **event bus subscriber** (`wireRegistry()` in `src/pipeline/subscribers/registry.ts`) — listens to `run:started`
68
+ - Best-effort: never throw/block the main run on registry failure (try/catch + warn log)
69
+ - `~/.nax/runs/` created on first call — no separate init step
70
+
71
+ ### CRR-002: `nax runs` CLI Command
72
+
73
+ ```
74
+ nax runs # All runs, newest first (default: last 20)
75
+ nax runs --project my-app # Filter by project name
76
+ nax runs --last 50 # Show last N runs
77
+ nax runs --status failed # Filter by status (running/completed/failed/crashed)
78
+ ```
79
+
80
+ **Output table:**
81
+ ```
82
+ RUN ID PROJECT FEATURE STATUS STORIES DURATION DATE
83
+ run-2026-03-07T05-30-00-000Z my-app auth-system completed 5/5 45m 2026-03-07 13:30
84
+ run-2026-03-07T04-00-00-000Z nax re-arch failed 3/5 1h 2m 2026-03-07 12:00
85
+ ```
86
+
87
+ - Reads all `~/.nax/runs/*/meta.json`, resolves live `status.json` from `statusPath`
88
+ - Sorts by `registeredAt` desc
89
+ - If `statusPath` missing → status shows `[unavailable]`
90
+ - New command: `src/commands/runs.ts`
91
+
92
+ ### CRR-003: `nax logs` Enhancement
93
+
94
+ - `nax logs --run <runId>` — resolve run from global registry, locate `eventsDir`, stream logs
95
+ - No need to be in the project directory
96
+ - Falls back to current behaviour (local feature context) when `--run` not specified
97
+
98
+ ---
99
+
100
+ ## Out of Scope
101
+
102
+ - Registry cleanup/prune command (future)
103
+ - Remote sync (future)
104
+ - Search by story ID (future)
@@ -0,0 +1,93 @@
1
+ # Status File Consolidation — Spec
2
+
3
+ **Version:** v0.22.2
4
+ **Status:** Planned
5
+ **Pre-requisite for:** v0.24.0 (Central Run Registry)
6
+
7
+ ---
8
+
9
+ ## Problem
10
+
11
+ StatusWriter only writes `status.json` when the `--status-file` CLI flag is explicitly passed. Without it, no status file is written. Additionally, `nax status` and `nax diagnose` read from different (non-existent) paths, creating a three-way disconnect.
12
+
13
+ ### Current State
14
+
15
+ | Component | Path | Exists? |
16
+ |-----------|------|---------|
17
+ | StatusWriter (writer) | `--status-file <path>` (opt-in) | Only if flag passed |
18
+ | `nax status` (reader) | `nax/features/<feature>/status.json` | ❌ Never written |
19
+ | `nax diagnose` (reader) | `<workdir>/.nax-status.json` | ❌ Legacy path |
20
+ | Actual file on disk | `nax/status.json` | Only from manual flag usage |
21
+
22
+ ## Goal
23
+
24
+ Auto-write status files to well-known paths. Zero config, zero flags. Both project-level and feature-level status always available.
25
+
26
+ ### Target State
27
+
28
+ | File | Written | Purpose |
29
+ |------|---------|---------|
30
+ | `<workdir>/nax/status.json` | Continuously during run | Live monitoring: "is nax running? which feature? cost?" |
31
+ | `<workdir>/nax/features/<feature>/status.json` | Once at run end | Historical: "what was the last run result for this feature?" |
32
+
33
+ ---
34
+
35
+ ## Stories
36
+
37
+ ### SFC-001: Auto-write project-level status
38
+
39
+ **What:** Remove `--status-file` CLI option. StatusWriter always writes to `<workdir>/nax/status.json` automatically.
40
+
41
+ **Changes:**
42
+ - `bin/nax.ts` — remove `--status-file` option, compute `statusFile = join(workdir, "nax", "status.json")` automatically
43
+ - `src/execution/runner.ts` — `statusFile` no longer optional in `RunOptions`, always provided
44
+ - `src/execution/status-writer.ts` — remove the `if (!this.statusFile)` guard in `update()` (statusFile is always set)
45
+ - `src/execution/lifecycle/run-setup.ts` — statusFile always provided
46
+
47
+ **Test:** Run nax without `--status-file` flag → verify `nax/status.json` is written with correct schema.
48
+
49
+ ### SFC-002: Write feature-level status on run end
50
+
51
+ **What:** On run complete/fail/crash, copy the final status snapshot to `<workdir>/nax/features/<feature>/status.json`.
52
+
53
+ **Changes:**
54
+ - `src/execution/status-writer.ts` — add `writeFeatureStatus(featureDir: string)` method that writes current snapshot to `<featureDir>/status.json`
55
+ - `src/execution/runner.ts` — call `statusWriter.writeFeatureStatus(featureDir)` in the finally block (after run completes, fails, or crashes)
56
+ - `src/execution/crash-recovery.ts` — also write feature status on crash
57
+
58
+ **Test:** After a completed run, verify `nax/features/<feature>/status.json` exists with `status: "completed"` or `"failed"`.
59
+
60
+ ### SFC-003: Align status readers
61
+
62
+ **What:** Make `nax status` and `nax diagnose` read from the correct paths.
63
+
64
+ **Changes:**
65
+ - `src/cli/status-features.ts` — `loadStatusFile()` already reads from `<featureDir>/status.json` (correct after SFC-002 writes there). No change needed for feature-level.
66
+ - `src/cli/status-features.ts` — add project-level status display: read `nax/status.json` to show "currently running" info at the top of `nax status` output
67
+ - `src/cli/diagnose.ts` — change `.nax-status.json` → `nax/status.json`
68
+
69
+ **Test:** `nax status` shows current run info from project-level status + per-feature historical info. `nax diagnose` correctly detects running/stalled/crashed state.
70
+
71
+ ### SFC-004: Clean up dead code
72
+
73
+ **What:** Remove deprecated paths and dead options.
74
+
75
+ **Changes:**
76
+ - `bin/nax.ts` — remove `--status-file` option definition and `statusFilePath` resolve logic
77
+ - `src/cli/diagnose.ts` — remove `.nax-status.json` path reference
78
+ - `src/execution/runner.ts` — remove `statusFile?` optional from `RunOptions` type (now required, auto-computed)
79
+
80
+ **Test:** Verify no references to `.nax-status.json` or `--status-file` remain in codebase.
81
+
82
+ ---
83
+
84
+ ## Schema (unchanged)
85
+
86
+ The `NaxStatusFile` interface in `src/execution/status-file.ts` is already correct. No schema changes needed — both project-level and feature-level files use the same `NaxStatusFile` type.
87
+
88
+ ---
89
+
90
+ ## Out of Scope
91
+
92
+ - Central Run Registry (`~/.nax/runs/`) — v0.23.0
93
+ - Status file cleanup/rotation — future
@@ -0,0 +1,137 @@
1
+ {
2
+ "project": "nax",
3
+ "branchName": "feat/post-rearch-bugfix",
4
+ "feature": "post-rearch-bugfix",
5
+ "version": "0.22.3",
6
+ "description": "Fix all critical and key high-priority bugs found in post-re-architecture code review. Stream deadlocks, unhandled rejections, signal handler safety, lock file reliability, interaction system, parallel executor race, and error swallowing.",
7
+ "userStories": [
8
+ {
9
+ "id": "FIX-C1",
10
+ "title": "Fix stream deadlock in acceptance and autofix stages",
11
+ "description": "In src/pipeline/stages/acceptance.ts (line ~136) and src/pipeline/stages/autofix.ts (line ~116), the code awaits proc.exited BEFORE reading stdout/stderr. When output exceeds the 64KB OS pipe buffer, the child blocks on write and proc.exited never resolves, causing a silent deadlock. Fix: use Promise.all([proc.exited, new Response(proc.stdout).text(), new Response(proc.stderr).text()]) to read streams concurrently with exit.",
12
+ "complexity": "simple",
13
+ "status": "pending",
14
+ "acceptanceCriteria": [
15
+ "acceptance.ts reads stdout/stderr concurrently with proc.exited using Promise.all",
16
+ "autofix.ts reads stdout/stderr concurrently with proc.exited using Promise.all",
17
+ "No sequential await proc.exited before stream reads in either file",
18
+ "Existing tests pass"
19
+ ]
20
+ },
21
+ {
22
+ "id": "FIX-C2",
23
+ "title": "Fix emitAsync never called for human-in-the-loop interaction",
24
+ "description": "In src/execution/pipeline-result-handler.ts (line ~151), human-review:requested is emitted via fire-and-forget emit() instead of emitAsync(). The emitAsync() method in src/pipeline/subscribers/interaction.ts was specifically designed to wait for human response but is never called anywhere. The pipeline races past without waiting for human input. Fix: use await pipelineEventBus.emitAsync() for human-review events.",
25
+ "complexity": "medium",
26
+ "status": "pending",
27
+ "acceptanceCriteria": [
28
+ "human-review:requested event uses emitAsync instead of emit",
29
+ "Pipeline waits for human response before continuing",
30
+ "emitAsync is properly awaited at the call site",
31
+ "Existing tests pass"
32
+ ]
33
+ },
34
+ {
35
+ "id": "FIX-C5",
36
+ "title": "Fix timeoutPromise unhandled rejection in LLM routing",
37
+ "description": "In src/routing/strategies/llm.ts, the timeoutPromise created in callLlmOnce() uses reject() but if the timer fires between race resolution and clearTimeout, the rejection is unhandled. Add timeoutPromise.catch(() => {}) right after creation, or restructure to use a clearable pattern that does not reject.",
38
+ "complexity": "simple",
39
+ "status": "pending",
40
+ "acceptanceCriteria": [
41
+ "timeoutPromise rejection is always handled (no unhandled rejection possible)",
42
+ "Existing BUG-040 tests still pass",
43
+ "Add test verifying no unhandled rejection when timeout fires after successful completion"
44
+ ]
45
+ },
46
+ {
47
+ "id": "FIX-C3",
48
+ "title": "Fix TDZ crash in signal handler — prd accessed before initialization",
49
+ "description": "In src/execution/runner.ts around line 123, the crash handler closure references prd which is declared later (~line 134). If SIGTERM arrives during setupRun(), accessing prd throws ReferenceError. Fix: declare let prd: PRD | undefined before the crash handler setup, and add a null guard in the getter: () => prd ? countStories(prd).total : 0.",
50
+ "complexity": "simple",
51
+ "status": "pending",
52
+ "acceptanceCriteria": [
53
+ "prd variable declared before crash handler registration",
54
+ "Crash handler getter has null guard for prd",
55
+ "SIGTERM during setupRun does not throw ReferenceError",
56
+ "Existing tests pass"
57
+ ]
58
+ },
59
+ {
60
+ "id": "FIX-C6",
61
+ "title": "Fix parallel executor shared mutable state race condition",
62
+ "description": "In src/execution/parallel.ts around line 191 and 213, results.totalCost += ... and executing.splice(index, 1) are mutated concurrently from parallel promises. The splice inside .finally() can corrupt array indices when two promises resolve in the same microtask batch. Fix: replace executing array with a Set pattern; use executing.delete(p) instead of splice.",
63
+ "complexity": "medium",
64
+ "status": "pending",
65
+ "acceptanceCriteria": [
66
+ "executing collection uses Set instead of Array with splice",
67
+ "totalCost accumulation is safe against concurrent updates",
68
+ "No array index corruption possible when promises resolve simultaneously",
69
+ "Existing tests pass"
70
+ ]
71
+ },
72
+ {
73
+ "id": "FIX-C7",
74
+ "title": "Fix corrupt lock file permanently blocking all runs",
75
+ "description": "In src/execution/lock.ts around line 48-79, if JSON.parse(lockContent) throws on a corrupted lock file, the error propagates to the outer catch which returns false, which the caller interprets as another process currently running. Fix: wrap JSON.parse in its own try-catch; treat unparseable lock files as stale and delete them.",
76
+ "complexity": "simple",
77
+ "status": "pending",
78
+ "acceptanceCriteria": [
79
+ "Corrupt/unparseable lock file is treated as stale and deleted",
80
+ "A warning is logged when a corrupt lock file is found",
81
+ "nax can start normally after encountering a corrupt lock file",
82
+ "Existing tests pass"
83
+ ]
84
+ },
85
+ {
86
+ "id": "FIX-C8",
87
+ "title": "Fix empty catch in drainWithDeadline swallowing all errors",
88
+ "description": "In src/verification/executor.ts around line 36-39, the catch block in drainWithDeadline swallows ALL exceptions including TypeError, OutOfMemoryError etc. Output silently becomes empty string with no diagnostic. Fix: narrow the catch to expected stream-destroyed errors only; log unexpected errors at debug level.",
89
+ "complexity": "simple",
90
+ "status": "pending",
91
+ "acceptanceCriteria": [
92
+ "Expected stream errors (after kill) are still silently handled",
93
+ "Unexpected errors are logged at debug level",
94
+ "Output defaults to empty string on expected stream errors",
95
+ "Existing tests pass"
96
+ ]
97
+ },
98
+ {
99
+ "id": "FIX-H16",
100
+ "title": "Fix lock file not released when setupRun fails",
101
+ "description": "In src/execution/lifecycle/run-setup.ts around line 153-193, the lock is acquired during setupRun but if setupRun fails, it is outside the runner main try block so the lock is never released. Fix: ensure lock release in a finally block within setupRun, or move lock acquisition inside the runner try/finally.",
102
+ "complexity": "medium",
103
+ "status": "pending",
104
+ "acceptanceCriteria": [
105
+ "Lock file is released when setupRun throws an error",
106
+ "Lock file is released on all error paths during setup",
107
+ "Existing tests pass"
108
+ ]
109
+ },
110
+ {
111
+ "id": "FIX-C4",
112
+ "title": "Replace uncancellable Bun.sleep timer in executor",
113
+ "description": "In src/verification/executor.ts around line 97-100, Bun.sleep() is used for the timeout promise but cannot be cancelled. When the process exits quickly, the sleep continues for the full timeoutMs. Fix: replace with a clearable setTimeout-based promise pattern, clearing the timer in the success path.",
114
+ "complexity": "simple",
115
+ "status": "pending",
116
+ "acceptanceCriteria": [
117
+ "Timeout in executeWithTimeout uses clearable setTimeout not Bun.sleep",
118
+ "Timer is cleared when process exits before timeout",
119
+ "No timer leak after successful execution",
120
+ "Existing tests pass"
121
+ ]
122
+ },
123
+ {
124
+ "id": "FIX-H5",
125
+ "title": "Add hard deadline to async signal handlers",
126
+ "description": "In src/execution/crash-recovery.ts around line 149-170, signal handlers contain multiple await operations. If any hangs, process.exit() is never reached. Fix: add a setTimeout hard deadline (e.g. 10s) at the top of each signal handler that calls process.exit() as a fallback.",
127
+ "complexity": "simple",
128
+ "status": "pending",
129
+ "acceptanceCriteria": [
130
+ "SIGTERM handler has a hard deadline timeout (10s) that calls process.exit",
131
+ "SIGINT handler has the same hard deadline",
132
+ "Hard deadline fires even if async operations hang",
133
+ "Existing tests pass"
134
+ ]
135
+ }
136
+ ]
137
+ }
@@ -0,0 +1,61 @@
1
+ {
2
+ "project": "nax",
3
+ "branchName": "feat/status-file-consolidation",
4
+ "feature": "status-file-consolidation",
5
+ "version": "0.22.2",
6
+ "description": "Auto-write status.json to well-known paths (project-level + feature-level). Remove --status-file CLI flag. Align nax status and nax diagnose readers.",
7
+ "userStories": [
8
+ {
9
+ "id": "SFC-001",
10
+ "title": "Auto-write project-level status",
11
+ "description": "Remove --status-file CLI option. StatusWriter always writes to <workdir>/nax/status.json automatically. In bin/nax.ts, remove --status-file option and compute statusFile = join(workdir, 'nax', 'status.json'). In runner.ts, statusFile is no longer optional. In status-writer.ts, remove the if (!this.statusFile) guard in update().",
12
+ "complexity": "medium",
13
+ "status": "passed",
14
+ "acceptanceCriteria": [
15
+ "Running nax without --status-file flag writes nax/status.json automatically",
16
+ "nax/status.json contains valid NaxStatusFile schema with run.id, run.status, progress counts",
17
+ "--status-file CLI option no longer exists",
18
+ "StatusWriter.update() always writes (no no-op guard on missing statusFile)"
19
+ ]
20
+ },
21
+ {
22
+ "id": "SFC-002",
23
+ "title": "Write feature-level status on run end",
24
+ "description": "On run complete/fail/crash, write the final status snapshot to <workdir>/nax/features/<feature>/status.json. Add writeFeatureStatus(featureDir) method to StatusWriter. Call it in runner.ts finally block and in crash-recovery.ts.",
25
+ "complexity": "medium",
26
+ "status": "pending",
27
+ "acceptanceCriteria": [
28
+ "After a completed run, nax/features/<feature>/status.json exists with status 'completed'",
29
+ "After a failed run, nax/features/<feature>/status.json exists with status 'failed'",
30
+ "After a crash, nax/features/<feature>/status.json exists with status 'crashed'",
31
+ "Feature status.json uses the same NaxStatusFile schema as project-level"
32
+ ]
33
+ },
34
+ {
35
+ "id": "SFC-003",
36
+ "title": "Align status readers",
37
+ "description": "Make nax status read project-level status from nax/status.json for currently running info. Make nax diagnose read from nax/status.json instead of .nax-status.json. status-features.ts loadStatusFile() already reads <featureDir>/status.json which SFC-002 now writes \u2014 no change needed for feature-level reads.",
38
+ "complexity": "simple",
39
+ "status": "pending",
40
+ "acceptanceCriteria": [
41
+ "nax status shows current run info from nax/status.json at the top",
42
+ "nax status shows per-feature historical status from nax/features/<feature>/status.json",
43
+ "nax diagnose reads from nax/status.json (not .nax-status.json)",
44
+ "No references to .nax-status.json remain in codebase"
45
+ ]
46
+ },
47
+ {
48
+ "id": "SFC-004",
49
+ "title": "Clean up dead code",
50
+ "description": "Remove --status-file option definition from bin/nax.ts. Remove .nax-status.json path from diagnose.ts. Remove statusFile optional from RunOptions type (now required, auto-computed). Verify no stale references remain.",
51
+ "complexity": "simple",
52
+ "status": "pending",
53
+ "acceptanceCriteria": [
54
+ "No references to --status-file CLI option in codebase",
55
+ "No references to .nax-status.json in codebase",
56
+ "RunOptions.statusFile is required (not optional)",
57
+ "All existing tests pass"
58
+ ]
59
+ }
60
+ ]
61
+ }