qualia-framework 4.0.3 → 4.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -12,6 +12,11 @@ allowed-tools:
12
12
 
13
13
  Generate a concise report of what was done. Committed to git and uploaded to the ERP for clock-out.
14
14
 
15
+ ## Flags
16
+
17
+ - `/qualia-report` — normal flow (generate, commit, push, upload to ERP)
18
+ - `/qualia-report --dry-run` — generate + show payload, SKIP upload and SKIP commit. Useful for debugging or previewing before a real clock-out.
19
+
15
20
  ## Process
16
21
 
17
22
  ```bash
@@ -69,16 +74,33 @@ None. / - {blocker}
69
74
  {list from git log}
70
75
  ```
71
76
 
72
- ### 4. Commit and Push
77
+ ### 4. Obtain Client Report ID (QS-REPORT-NN)
78
+
79
+ Each session report gets a stable, sequential client-side identifier that travels with the report all the way to the ERP. The sequence is per-project, persisted in `tracking.json.report_seq`.
73
80
 
74
81
  ```bash
75
- mkdir -p .planning/reports
76
- git add .planning/reports/report-{date}.md
77
- git commit -m "report: session {YYYY-MM-DD}"
78
- git push
82
+ # --dry-run: peek without incrementing
83
+ if [ "$DRY_RUN" = "true" ]; then
84
+ CLIENT_REPORT_ID=$(node ~/.claude/bin/state.js next-report-id --peek 2>/dev/null | node -e "process.stdout.write(JSON.parse(require('fs').readFileSync(0,'utf8')).report_id||'')")
85
+ else
86
+ CLIENT_REPORT_ID=$(node ~/.claude/bin/state.js next-report-id 2>/dev/null | node -e "process.stdout.write(JSON.parse(require('fs').readFileSync(0,'utf8')).report_id||'')")
87
+ fi
79
88
  ```
80
89
 
81
- ### 5. Upload to ERP (if enabled)
90
+ Example: first report on a fresh project → `QS-REPORT-01`. Next → `QS-REPORT-02`. Etc.
91
+
92
+ ### 5. Commit and Push (SKIP on --dry-run)
93
+
94
+ ```bash
95
+ if [ "$DRY_RUN" != "true" ]; then
96
+ mkdir -p .planning/reports
97
+ git add .planning/reports/report-{date}.md .planning/tracking.json
98
+ git commit -m "report: {CLIENT_REPORT_ID} session {YYYY-MM-DD}"
99
+ git push
100
+ fi
101
+ ```
102
+
103
+ ### 6. Upload to ERP (SKIP on --dry-run)
82
104
 
83
105
  Read `~/.claude/.qualia-config.json` and check the `erp` object:
84
106
  - If `erp.enabled` is `false`, skip this step and print: "ERP upload skipped (disabled in config)."
@@ -94,67 +116,126 @@ REPORT_FILE=".planning/reports/report-{date}.md"
94
116
  SUBMITTED_BY=$(git config user.name)
95
117
  SUBMITTED_AT=$(date -u +%Y-%m-%dT%H:%M:%SZ)
96
118
 
97
- # Only upload if ERP is enabled
119
+ # Build structured JSON payload from tracking.json (matches ERP contract /api/v1/reports)
120
+ # v4: include milestone_name, milestones[], team_id, project_id, git_remote,
121
+ # session_started_at, last_pushed_at, build_count, deploy_count — the ERP
122
+ # uses these to render the project tree (milestone → phases → unphased) correctly.
123
+ # v4.0.4: client_report_id carries the QS-REPORT-NN identifier.
124
+ PAYLOAD=$(node -e "
125
+ const fs = require('fs');
126
+ const t = JSON.parse(fs.readFileSync('.planning/tracking.json', 'utf8'));
127
+ const notes = fs.readFileSync('$REPORT_FILE', 'utf8').substring(0, 60000);
128
+ const commits = [];
129
+ try {
130
+ const { spawnSync } = require('child_process');
131
+ const r = spawnSync('git', ['log', '--oneline', '--since=8 hours ago', '--format=%h'], { encoding: 'utf8', timeout: 3000 });
132
+ if (r.stdout) commits.push(...r.stdout.trim().split('\n').filter(Boolean));
133
+ } catch {}
134
+ console.log(JSON.stringify({
135
+ project: t.project || require('path').basename(process.cwd()),
136
+ project_id: t.project_id || '',
137
+ team_id: t.team_id || '',
138
+ git_remote: t.git_remote || '',
139
+ client: t.client || '',
140
+ client_report_id: '$CLIENT_REPORT_ID',
141
+ milestone: t.milestone || 1,
142
+ milestone_name: t.milestone_name || '',
143
+ milestones: Array.isArray(t.milestones) ? t.milestones : [],
144
+ phase: t.phase,
145
+ phase_name: t.phase_name,
146
+ total_phases: t.total_phases,
147
+ status: t.status,
148
+ tasks_done: t.tasks_done || 0,
149
+ tasks_total: t.tasks_total || 0,
150
+ verification: t.verification || 'pending',
151
+ gap_cycles: (t.gap_cycles || {})[String(t.phase)] || 0,
152
+ build_count: t.build_count || 0,
153
+ deploy_count: t.deploy_count || 0,
154
+ deployed_url: t.deployed_url || '',
155
+ session_started_at: t.session_started_at || '',
156
+ last_pushed_at: t.last_pushed_at || '',
157
+ lifetime: t.lifetime || {},
158
+ commits: commits,
159
+ notes: notes,
160
+ submitted_by: '$SUBMITTED_BY',
161
+ submitted_at: '$SUBMITTED_AT'
162
+ }));
163
+ ")
164
+
165
+ # --dry-run: print payload and stop (no POST, no commit; the counter was never incremented — step 4 used --peek)
166
+ if [ "$DRY_RUN" = "true" ]; then
167
+ echo "--- DRY RUN · payload ---"
168
+ echo "$PAYLOAD" | node -e "const d=JSON.parse(require('fs').readFileSync(0,'utf8'));console.log(JSON.stringify(d,null,2))"
169
+ echo "--- DRY RUN · would POST to: $ERP_URL/api/v1/reports ---"
170
+ echo "--- DRY RUN · client_report_id would be: $CLIENT_REPORT_ID ---"
171
+ exit 0
172
+ fi
173
+
174
+ # Real upload — 3 attempts with exponential backoff (1s, 3s, 9s).
175
+ # The local report file is already committed, so a failed upload doesn't
176
+ # lose data — it just leaves the ERP view stale until the next push or
177
+ # manual retry.
98
178
  if [ "$ERP_ENABLED" = "true" ]; then
99
- # Build structured JSON payload from tracking.json (matches ERP contract /api/v1/reports)
100
- # v4: include milestone_name, milestones[], team_id, project_id, git_remote,
101
- # session_started_at, last_pushed_at, build_count, deploy_count — the ERP
102
- # uses these to render the project tree (milestone → phases → unphased) correctly.
103
- PAYLOAD=$(node -e "
104
- const fs = require('fs');
105
- const t = JSON.parse(fs.readFileSync('.planning/tracking.json', 'utf8'));
106
- const notes = fs.readFileSync('$REPORT_FILE', 'utf8').substring(0, 60000);
107
- const commits = [];
108
- try {
109
- const { spawnSync } = require('child_process');
110
- const r = spawnSync('git', ['log', '--oneline', '--since=8 hours ago', '--format=%h'], { encoding: 'utf8', timeout: 3000 });
111
- if (r.stdout) commits.push(...r.stdout.trim().split('\n').filter(Boolean));
112
- } catch {}
113
- console.log(JSON.stringify({
114
- project: t.project || require('path').basename(process.cwd()),
115
- project_id: t.project_id || '',
116
- team_id: t.team_id || '',
117
- git_remote: t.git_remote || '',
118
- client: t.client || '',
119
- milestone: t.milestone || 1,
120
- milestone_name: t.milestone_name || '',
121
- milestones: Array.isArray(t.milestones) ? t.milestones : [],
122
- phase: t.phase,
123
- phase_name: t.phase_name,
124
- total_phases: t.total_phases,
125
- status: t.status,
126
- tasks_done: t.tasks_done || 0,
127
- tasks_total: t.tasks_total || 0,
128
- verification: t.verification || 'pending',
129
- gap_cycles: (t.gap_cycles || {})[String(t.phase)] || 0,
130
- build_count: t.build_count || 0,
131
- deploy_count: t.deploy_count || 0,
132
- deployed_url: t.deployed_url || '',
133
- session_started_at: t.session_started_at || '',
134
- last_pushed_at: t.last_pushed_at || '',
135
- lifetime: t.lifetime || {},
136
- commits: commits,
137
- notes: notes,
138
- submitted_by: '$SUBMITTED_BY',
139
- submitted_at: '$SUBMITTED_AT'
140
- }));
141
- ")
142
-
143
- curl -s -X POST "$ERP_URL/api/v1/reports" \
144
- -H "Authorization: Bearer $API_KEY" \
145
- -H "Content-Type: application/json" \
146
- -d "$PAYLOAD"
179
+ MAX_ATTEMPTS=3
180
+ ATTEMPT=1
181
+ SUCCESS=false
182
+ while [ $ATTEMPT -le $MAX_ATTEMPTS ]; do
183
+ RESPONSE=$(curl -sS -X POST "$ERP_URL/api/v1/reports" \
184
+ -H "Authorization: Bearer $API_KEY" \
185
+ -H "Content-Type: application/json" \
186
+ -d "$PAYLOAD" \
187
+ --max-time 10 \
188
+ -w "\n__HTTP__%{http_code}" 2>&1)
189
+ HTTP_CODE=$(echo "$RESPONSE" | grep -o "__HTTP__[0-9]*" | sed 's/__HTTP__//')
190
+ BODY=$(echo "$RESPONSE" | sed 's/__HTTP__[0-9]*//g')
191
+
192
+ if [ "$HTTP_CODE" = "200" ]; then
193
+ SUCCESS=true
194
+ # Parse and display the ERP-returned report_id alongside our local QS-REPORT-NN
195
+ ERP_REPORT_ID=$(echo "$BODY" | node -e "try{const d=JSON.parse(require('fs').readFileSync(0,'utf8'));process.stdout.write(d.report_id||'')}catch{}")
196
+ node ~/.claude/bin/qualia-ui.js ok "Uploaded as $CLIENT_REPORT_ID (ERP: ${ERP_REPORT_ID:-none})"
197
+ break
198
+ fi
199
+
200
+ # 401 / 422 are permanent failures — no retry.
201
+ if [ "$HTTP_CODE" = "401" ] || [ "$HTTP_CODE" = "422" ]; then
202
+ node ~/.claude/bin/qualia-ui.js warn "ERP rejected report (HTTP $HTTP_CODE). Ask Fawzi."
203
+ echo "$BODY" | head -3
204
+ break
205
+ fi
206
+
207
+ # Transient failure — back off and retry.
208
+ if [ $ATTEMPT -lt $MAX_ATTEMPTS ]; then
209
+ SLEEP=$(( 1 * 3 ** (ATTEMPT - 1) ))
210
+ node ~/.claude/bin/qualia-ui.js warn "ERP upload attempt $ATTEMPT failed (HTTP ${HTTP_CODE:-timeout}), retrying in ${SLEEP}s..."
211
+ sleep $SLEEP
212
+ fi
213
+ ATTEMPT=$(( ATTEMPT + 1 ))
214
+ done
215
+
216
+ if [ "$SUCCESS" != "true" ]; then
217
+ node ~/.claude/bin/qualia-ui.js warn "ERP upload failed after $MAX_ATTEMPTS attempts. $CLIENT_REPORT_ID is committed locally; it will NOT appear in the ERP until you retry with 'curl' or re-run /qualia-report."
218
+ fi
219
+ fi
220
+
221
+ if [ "$ERP_ENABLED" != "true" ]; then
222
+ node ~/.claude/bin/qualia-ui.js info "ERP upload skipped (disabled in config). Report committed locally as $CLIENT_REPORT_ID."
147
223
  fi
148
224
  ```
149
225
 
150
- If the upload succeeds, print: "Report uploaded to ERP. You can now clock out."
151
- If it fails (no API key, network error), print the error and tell the employee to ask Fawzi.
152
- If ERP is disabled, print: "ERP upload skipped (disabled in config)."
226
+ Summary rules:
227
+ - **Upload succeeds:** print "Uploaded as QS-REPORT-NN (ERP: {uuid})". Employee can clock out.
228
+ - **401/422:** no retry. Print the error, tell the employee to ask Fawzi.
229
+ - **Transient (timeout, 5xx, network):** retry 3x with 1s/3s/9s backoff.
230
+ - **All retries fail:** tell employee the report is committed locally, ERP will be stale until retry.
231
+ - **ERP disabled:** skip silently with a note, local commit still happens.
153
232
 
154
- ### 6. Update State
233
+ ### 7. Update State (SKIP on --dry-run)
155
234
 
156
235
  ```bash
157
- node ~/.claude/bin/state.js transition --to activity --notes "Session report generated"
236
+ if [ "$DRY_RUN" != "true" ]; then
237
+ node ~/.claude/bin/state.js transition --to activity --notes "Session report $CLIENT_REPORT_ID generated"
238
+ fi
158
239
  ```
159
240
 
160
241
  Do NOT manually edit STATE.md or tracking.json — state.js handles both.
@@ -40,9 +40,11 @@ ls package.json next.config.* tsconfig.json supabase/ app/ src/ 2>/dev/null
40
40
 
41
41
  ### 1. Security Scan
42
42
 
43
- Run every command. Record each finding with severity.
43
+ **Run the independent greps as parallel Bash calls in a single response** (they don't depend on each other; serial execution wastes 15–30s on large codebases). Only the `find … | for` loops are sequential.
44
44
 
45
45
  ```bash
46
+ # PARALLEL BATCH (issue these in one response turn):
47
+
46
48
  # CRITICAL: service_role in client code
47
49
  grep -rn "service_role" --include="*.ts" --include="*.tsx" --include="*.js" app/ components/ src/ lib/ 2>/dev/null | grep -v node_modules | grep -v "\.server\.\|[\\/]server[\\/]\|[\\/]app[\\/]api[\\/]\|route\.\|middleware\."
48
50
 
@@ -55,21 +57,26 @@ grep -rn "dangerouslySetInnerHTML\|eval(" --include="*.ts" --include="*.tsx" --i
55
57
  # CRITICAL: .env files tracked in git
56
58
  git ls-files | grep -i "\.env" | grep -v "\.example\|\.template\|\.sample"
57
59
 
60
+ # HIGH: client-side database mutations
61
+ grep -rn "\.insert\|\.update\|\.delete\|\.upsert" --include="*.tsx" --include="*.jsx" app/ components/ 2>/dev/null | grep -v "use server" | grep -v "\.server\."
62
+
63
+ # MEDIUM: npm vulnerabilities
64
+ npm audit --json 2>/dev/null | node -e "try{const d=JSON.parse(require('fs').readFileSync(0,'utf8'));const v=d.metadata?.vulnerabilities||{};console.log('critical:',v.critical||0,'high:',v.high||0,'moderate:',v.moderate||0)}catch{console.log('audit unavailable')}"
65
+
66
+ # END PARALLEL BATCH
67
+
68
+ # SEQUENTIAL (depends on find):
58
69
  # HIGH: API routes without auth
59
70
  for f in $(find app/api -name "route.ts" -o -name "route.js" 2>/dev/null); do
60
- grep -qL "getUser\|getSession\|auth()\|createClient" "$f" && echo "UNPROTECTED: $f"
71
+ if ! grep -q "getUser\|getSession\|auth()\|createClient" "$f" 2>/dev/null; then
72
+ echo "UNPROTECTED: $f"
73
+ fi
61
74
  done
62
75
 
63
76
  # HIGH: API routes without input validation
64
77
  for f in $(find app/api -name "route.ts" -o -name "route.js" 2>/dev/null); do
65
78
  grep -L "z\.\|zod\|Zod\|parse\|safeParse" "$f" 2>/dev/null
66
79
  done
67
-
68
- # HIGH: client-side database mutations
69
- grep -rn "\.insert\|\.update\|\.delete\|\.upsert" --include="*.tsx" --include="*.jsx" app/ components/ 2>/dev/null | grep -v "use server" | grep -v "\.server\."
70
-
71
- # MEDIUM: npm vulnerabilities
72
- npm audit --json 2>/dev/null | node -e "try{const d=JSON.parse(require('fs').readFileSync(0,'utf8'));const v=d.metadata?.vulnerabilities||{};console.log('critical:',v.critical||0,'high:',v.high||0,'moderate:',v.moderate||0)}catch{console.log('audit unavailable')}"
73
80
  ```
74
81
 
75
82
  ### 2. Code Quality Scan
@@ -94,8 +101,14 @@ grep -rn "console\.log" --include="*.ts" --include="*.tsx" app/ components/ src/
94
101
  ### 3. Performance Scan
95
102
 
96
103
  ```bash
97
- # Build output — route sizes and first load JS
98
- npx next build 2>&1 | grep -E "Route|First Load|shared by all|○|●|ƒ|λ" | tail -25
104
+ # Build output — read existing build artifacts (don't trigger a fresh build during a scan)
105
+ if [ -d ".next" ]; then
106
+ du -sh .next/static/chunks/*.js 2>/dev/null | sort -rh | head -10
107
+ echo "---"
108
+ find .next -name "*.js" -size +200k 2>/dev/null | head -5
109
+ else
110
+ echo "No .next/ build output — run 'npx next build' separately for bundle analysis (review skill does NOT trigger builds — it's a scan)"
111
+ fi
99
112
 
100
113
  # Heavy files (>300 lines often means split needed)
101
114
  find app/ components/ src/ -name "*.tsx" -o -name "*.ts" 2>/dev/null | xargs wc -l 2>/dev/null | sort -rn | head -10
@@ -144,12 +157,19 @@ Write to `.planning/REVIEW.md`:
144
157
  {PASS: no critical/high | FAIL: N blockers — fix before /qualia-ship}
145
158
  ```
146
159
 
147
- **Scoring:**
148
- - 5 = zero high/critical, fewer than 3 medium
149
- - 4 = zero critical, 1 high or fewer than 5 medium
150
- - 3 = zero critical, 2-3 high
151
- - 2 = 1 critical or 4+ high
152
- - 1 = multiple critical
160
+ **Scoring (deterministic — see `rules/grounding.md` for full rubric):**
161
+ ```
162
+ weighted_sum = (critical × 8) + (high × 4) + (medium × 2) + (low × 1)
163
+ category_score = max(1, 5 − floor(weighted_sum / 8))
164
+ ```
165
+ Same inputs always produce the same score. No subjective thresholds.
166
+
167
+ Quick reference (computed from the formula — verified):
168
+ - 0 findings, or only LOW/MEDIUM with weighted_sum < 8 (e.g. up to 3 MEDIUM), or 1 HIGH → 5
169
+ - 2–3 HIGH, or 1 CRITICAL → 4
170
+ - 2 CRITICAL, or 1 CRITICAL + 2–3 HIGH → 3
171
+ - 3 CRITICAL, or 2 CRITICAL + 2–3 HIGH → 2
172
+ - 4+ CRITICAL → 1
153
173
 
154
174
  ```bash
155
175
  node ~/.claude/bin/qualia-ui.js divider
@@ -161,7 +161,7 @@ git add skills/{name}/
161
161
  git commit -m "feat: add /{name} skill"
162
162
  ```
163
163
 
164
- Remind the user to run `npx qualia-framework update` on their other machines, or bump the version and `npm publish`.
164
+ Remind the user to run `npx qualia-framework@latest update` on their other machines (always pin `@latest` — npx caches aggressively), or bump the version and `npm publish`.
165
165
 
166
166
  ## Anti-Patterns
167
167
 
@@ -41,6 +41,10 @@ node ~/.claude/bin/qualia-ui.js spawn verifier "Goal-backward check..."
41
41
  ```
42
42
  Agent(prompt="
43
43
  Read your role: @~/.claude/agents/verifier.md
44
+ Grounding + rubrics: @~/.claude/rules/grounding.md
45
+
46
+ Project conventions (MUST consult before scoring Quality):
47
+ @.planning/PROJECT.md
44
48
 
45
49
  Phase plan with success criteria AND verification contracts:
46
50
  @.planning/phase-{N}-plan.md
@@ -48,7 +52,7 @@ Phase plan with success criteria AND verification contracts:
48
52
  {If re-verification: Previous verification with gaps:}
49
53
  {@.planning/phase-{N}-verification.md}
50
54
 
51
- Verify this phase. Write report to .planning/phase-{N}-verification.md
55
+ Verify this phase. Apply the Grounding Protocol — every finding needs file:line evidence. Use the Severity Rubric for all severity labels. Write report to .planning/phase-{N}-verification.md
52
56
  ", subagent_type="qualia-verifier", description="Verify phase {N}")
53
57
  ```
54
58
 
@@ -26,6 +26,7 @@
26
26
  "build_count": 0,
27
27
  "deploy_count": 0,
28
28
  "deployed_url": "",
29
+ "report_seq": 0,
29
30
  "notes": "",
30
31
  "submitted_by": "",
31
32
  "lifetime": {
package/tests/runner.js CHANGED
@@ -1288,6 +1288,104 @@ waves: 1
1288
1288
  fs.rmSync(tmpDir, { recursive: true, force: true });
1289
1289
  }
1290
1290
  });
1291
+
1292
+ // ─── v4.0.4: next-report-id ────────────────────────────────
1293
+ it("next-report-id returns QS-REPORT-01 on fresh project and increments", () => {
1294
+ const tmpDir = makeProject();
1295
+ try {
1296
+ const r1 = spawnSync(process.execPath,
1297
+ [path.join(BIN, "state.js"), "next-report-id"],
1298
+ { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
1299
+ assert.equal(r1.status, 0, `next-report-id failed: ${r1.stderr || r1.stdout}`);
1300
+ const j1 = JSON.parse(r1.stdout);
1301
+ assert.equal(j1.report_id, "QS-REPORT-01");
1302
+ assert.equal(j1.report_seq, 1);
1303
+ assert.equal(j1.peeked, false);
1304
+
1305
+ const r2 = spawnSync(process.execPath,
1306
+ [path.join(BIN, "state.js"), "next-report-id"],
1307
+ { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
1308
+ const j2 = JSON.parse(r2.stdout);
1309
+ assert.equal(j2.report_id, "QS-REPORT-02");
1310
+ assert.equal(j2.report_seq, 2);
1311
+ } finally {
1312
+ fs.rmSync(tmpDir, { recursive: true, force: true });
1313
+ }
1314
+ });
1315
+
1316
+ it("next-report-id --peek does NOT increment the counter", () => {
1317
+ const tmpDir = makeProject();
1318
+ try {
1319
+ const r1 = spawnSync(process.execPath,
1320
+ [path.join(BIN, "state.js"), "next-report-id", "--peek"],
1321
+ { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
1322
+ const j1 = JSON.parse(r1.stdout);
1323
+ assert.equal(j1.report_id, "QS-REPORT-01");
1324
+ assert.equal(j1.peeked, true);
1325
+
1326
+ // Peek again — should still return QS-REPORT-01 since nothing incremented
1327
+ const r2 = spawnSync(process.execPath,
1328
+ [path.join(BIN, "state.js"), "next-report-id", "--peek"],
1329
+ { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
1330
+ const j2 = JSON.parse(r2.stdout);
1331
+ assert.equal(j2.report_id, "QS-REPORT-01");
1332
+ assert.equal(j2.report_seq, 1);
1333
+
1334
+ // On-disk report_seq should still be 0
1335
+ const t = JSON.parse(fs.readFileSync(path.join(tmpDir, ".planning", "tracking.json"), "utf8"));
1336
+ assert.ok(!t.report_seq || t.report_seq === 0,
1337
+ `report_seq should remain 0 after peek, got ${t.report_seq}`);
1338
+ } finally {
1339
+ fs.rmSync(tmpDir, { recursive: true, force: true });
1340
+ }
1341
+ });
1342
+
1343
+ // ─── v4.0.4: close-milestone pre-populates next milestone_name from JOURNEY.md
1344
+ it("close-milestone pre-populates next milestone_name from JOURNEY.md", () => {
1345
+ const tmpDir = makeProject();
1346
+ try {
1347
+ // Write JOURNEY.md with Milestone 2 definition
1348
+ fs.writeFileSync(path.join(tmpDir, ".planning", "JOURNEY.md"), `# Journey
1349
+
1350
+ ## Milestone 1 · Foundation [CURRENT]
1351
+ Exit: scaffolding done
1352
+
1353
+ ## Milestone 2 · Core Features
1354
+ Exit: auth + dashboard
1355
+
1356
+ ## Milestone 3 · Handoff [FINAL]
1357
+ Exit: client takeover
1358
+ `);
1359
+ const r = spawnSync(process.execPath,
1360
+ [path.join(BIN, "state.js"), "close-milestone", "--force"],
1361
+ { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
1362
+ assert.equal(r.status, 0, `close-milestone failed: ${r.stderr || r.stdout}`);
1363
+
1364
+ const t = JSON.parse(fs.readFileSync(path.join(tmpDir, ".planning", "tracking.json"), "utf8"));
1365
+ assert.equal(t.milestone, 2);
1366
+ assert.equal(t.milestone_name, "Core Features",
1367
+ `milestone_name should be pre-populated from JOURNEY.md, got '${t.milestone_name}'`);
1368
+ } finally {
1369
+ fs.rmSync(tmpDir, { recursive: true, force: true });
1370
+ }
1371
+ });
1372
+
1373
+ it("close-milestone leaves milestone_name blank when JOURNEY.md is missing", () => {
1374
+ const tmpDir = makeProject();
1375
+ try {
1376
+ // No JOURNEY.md — milestone_name should fall back to blank (legacy behavior)
1377
+ const r = spawnSync(process.execPath,
1378
+ [path.join(BIN, "state.js"), "close-milestone", "--force"],
1379
+ { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
1380
+ assert.equal(r.status, 0);
1381
+
1382
+ const t = JSON.parse(fs.readFileSync(path.join(tmpDir, ".planning", "tracking.json"), "utf8"));
1383
+ assert.equal(t.milestone_name, "",
1384
+ "milestone_name must be blank when JOURNEY.md is absent (fallback unchanged)");
1385
+ } finally {
1386
+ fs.rmSync(tmpDir, { recursive: true, force: true });
1387
+ }
1388
+ });
1291
1389
  });
1292
1390
 
1293
1391
  // ═══════════════════════════════════════════════════════════