gsd-lite 0.3.5 → 0.3.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,7 +13,7 @@
13
13
  "name": "gsd",
14
14
  "source": "./",
15
15
  "description": "AI orchestration tool — GSD management shell + Superpowers quality core. 5 commands, 4 agents, 5 workflows, MCP server, context monitoring.",
16
- "version": "0.3.4",
16
+ "version": "0.3.6",
17
17
  "keywords": ["orchestration", "mcp", "tdd", "task-management"],
18
18
  "category": "Development workflows"
19
19
  }
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gsd",
3
- "version": "0.3.5",
3
+ "version": "0.3.6",
4
4
  "description": "AI orchestration tool for Claude Code — GSD management shell + Superpowers quality core",
5
5
  "author": {
6
6
  "name": "sdsrss",
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  name: debugger
3
3
  description: Systematic debugging with root cause analysis
4
- tools: Read, Write, Edit, Bash, Grep, Glob
4
+ tools: Read, Bash, Grep, Glob
5
5
  ---
6
6
 
7
7
  <role>
@@ -54,11 +54,11 @@ Phase 3 假设测试:
54
54
  2. 最小变更测试 (一次只改一个变量)
55
55
  3. 验证: 有效 → Phase 4 / 无效 → 新假设
56
56
 
57
- Phase 4 实施修复:
58
- 1. 写失败测试 (复现 bug)
59
- 2. 修复根因 (不是症状)
60
- 3. 验证测试通过 + 无回归
61
- → 3 次修复失败停止。质疑架构。报告给编排器。
57
+ Phase 4 修复方向建议:
58
+ 1. 提出修复方案 (针对根因,不是症状)
59
+ 2. 建议失败测试用例 (供 executor 实现)
60
+ 3. 评估修复影响范围 (哪些下游可能受影响)
61
+ → 3 次修复方向均被 executor 验证无效 停止。标记 architecture_concern: true。报告给编排器。
62
62
  </four_phases>
63
63
 
64
64
  <result_contract>
@@ -39,3 +39,12 @@ tools: Read, Write, Bash, WebSearch, WebFetch, mcp__plugin_context7_context7__*
39
39
  ```
40
40
  </result_contract>
41
41
  </research_output>
42
+
43
+ <uncertainty_handling>
44
+ ## 遇到不确定性时
45
+ 子代理不能直接与用户交互。遇到不确定性时:
46
+ 1. 来源冲突 → 报告双方立场及置信度,让编排器决定。在 result 中标注 "[DECISION] 选择了X因为Y"
47
+ 2. 所有来源不可用 (Context7 + WebSearch + 官方文档均失败) → 返回 "[BLOCKED] 需要: 研究来源不可用,请提供替代信息或缩小范围"
48
+ 3. 研究范围过广无法收敛 → 返回 "[BLOCKED] 需要: 研究范围过广,请指定重点领域"
49
+ 4. 发现结论与已有 decisions 矛盾 → 在 result 中标注冲突,让编排器决定是否更新 decision
50
+ </uncertainty_handling>
package/commands/prd.md CHANGED
@@ -100,12 +100,16 @@ argument-hint: File path to requirements doc, or inline description text
100
100
 
101
101
  → 自审修正后再展示给用户
102
102
 
103
+ <HARD-GATE id="plan-confirmation">
103
104
  ## STEP 9: 展示计划,等待用户确认
104
105
 
105
106
  - 展示完整分阶段计划
106
107
  - 用户指出问题 → 调整 → 再展示
107
108
  - 用户确认 → 继续
108
109
 
110
+ ⛔ 不得在用户确认前执行 STEP 10-12。未确认 = 不写文件、不执行代码。
111
+ </HARD-GATE>
112
+
109
113
  ## STEP 10: 生成文档
110
114
 
111
115
  - 创建 .gsd/ 目录
@@ -35,17 +35,18 @@ description: Resume project execution from saved state with workspace validation
35
35
  - 不一致 → 覆写 `workflow_mode = reconcile_workspace`
36
36
 
37
37
  2. **计划版本校验:**
38
- - 如果本地 plan.md 或 phases/*.md 被手动修改,且 `plan_version` 不匹配
38
+ - 如果本地 plan.md 或 phases/*.md 被手动修改 (mtime > last_session)
39
39
  - → 覆写 `workflow_mode = replan_required`
40
40
 
41
- 3. **研究过期校验:**
41
+ 3. **方向漂移校验:**
42
+ - 如果当前或任何未完成 phase 的 `phase_handoff.direction_ok === false`
43
+ - → 覆写 `workflow_mode = awaiting_user`
44
+
45
+ 4. **研究过期校验:**
42
46
  - 如果 `research.expires_at` 已过期 (早于当前时间)
47
+ - 或 research.decision_index 中有条目的 expires_at 已过期
43
48
  - → 覆写 `workflow_mode = research_refresh_needed`
44
49
 
45
- 4. **工作区冲突校验:**
46
- - 运行 `git status` 检查是否存在冲突或脏工作区
47
- - 存在未解决的合并冲突 → 覆写 `workflow_mode = awaiting_user`
48
-
49
50
  5. **全部通过:**
50
51
  - 保持原 `workflow_mode` 不变
51
52
 
@@ -176,6 +177,14 @@ description: Resume project execution from saved state with workspace validation
176
177
 
177
178
  ---
178
179
 
180
+ ### `planning` — 计划中断
181
+
182
+ - 计划编制过程中被中断
183
+ - 告知用户: "项目仍在计划阶段。请运行 /gsd:start 或 /gsd:prd 重新启动计划流程"
184
+ - 不自动执行
185
+
186
+ ---
187
+
179
188
  ### `failed` — 已失败
180
189
 
181
190
  - 展示失败信息:
package/commands/start.md CHANGED
@@ -30,7 +30,7 @@ argument-hint: Optional feature or project description
30
30
  ## STEP 4 — 需求追问
31
31
 
32
32
  用户回答后,跟进追问直到需求清晰:
33
- - 使用 `references/questioning.md` 技巧 (挑战模糊、具象化、发现边界)
33
+ - 使用 Read 工具读取 `references/questioning.md`,按其中的技巧进行提问 (挑战模糊、具象化、发现边界)
34
34
  - 每个问题提供选项,标识 ⭐ 推荐选项
35
35
  - 多轮对话直到需求清晰 (通常 2-4 轮)
36
36
  - 每轮最多 3-5 个问题,避免过度追问
@@ -113,12 +113,17 @@ argument-hint: Optional feature or project description
113
113
 
114
114
  → 自审修正后再展示给用户。
115
115
 
116
+ <HARD-GATE id="plan-confirmation">
116
117
  ## STEP 9 — 用户确认计划
117
118
 
118
119
  展示计划给用户,等待确认:
119
120
  - 用户指出问题 → 调整计划 → 重新展示
120
121
  - 用户确认 → 继续
121
122
 
123
+ ⛔ 不得在用户确认前执行 STEP 10-12。未确认 = 不写文件、不执行代码。
124
+ </HARD-GATE>
125
+
126
+ <HARD-GATE id="docs-written">
122
127
  ## STEP 10 — 生成文档
123
128
 
124
129
  1. 创建 `.gsd/` 目录
@@ -141,6 +146,13 @@ argument-hint: Optional feature or project description
141
146
  - `phases/*.md` 是 task 规格的唯一 source of truth
142
147
  - `plan.md` 不包含 task 级细节,避免与 `phases/*.md` 重复
143
148
 
149
+ □ state.json 已写入且包含所有 canonical fields
150
+ □ plan.md 已写入
151
+ □ phases/*.md 已写入 (每个 phase 一个文件)
152
+ □ 所有 task 都有 lifecycle / level / requires / review_required
153
+ → 全部满足才可继续
154
+ </HARD-GATE>
155
+
144
156
  ## STEP 11 — 自动执行主路径
145
157
 
146
158
  进入执行主循环。phase = 管理边界,task = 执行边界。
package/commands/stop.md CHANGED
@@ -11,8 +11,9 @@ description: Save current state and pause project execution
11
11
 
12
12
  ## STEP 1: 保存完整状态
13
13
 
14
- 读取并更新 `.gsd/state.json`:
14
+ 读取 `.gsd/state.json`:
15
15
  - 如果文件不存在 → 告知用户 "未找到 GSD 项目状态,无需停止",停止
16
+ - 如果 `workflow_mode` 已是 `completed` 或 `failed` → 告知用户 "项目已终结 ({workflow_mode}),无需停止",停止
16
17
 
17
18
  确保以下信息已保存到 state.json:
18
19
  - `current_phase` / `current_task` — 当前执行位置
@@ -31,10 +31,10 @@ export function postToolUse(basePath) {
31
31
  const gsdDir = join(basePath || process.cwd(), '.gsd');
32
32
  const health = parseInt(readFileSync(join(gsdDir, '.context-health'), 'utf-8'), 10);
33
33
 
34
- if (health < 25) {
34
+ if (health <= 25) {
35
35
  return `🛑 CONTEXT EMERGENCY (${health}% remaining): Save state NOW. Set workflow_mode = awaiting_clear. Tell user to /clear then /gsd:resume.`;
36
36
  }
37
- if (health < 35) {
37
+ if (health <= 35) {
38
38
  return `⚠️ CONTEXT LOW (${health}% remaining): Complete current task, save state, set workflow_mode = awaiting_clear. Tell user to /clear then /gsd:resume.`;
39
39
  }
40
40
  } catch (err) {
@@ -68,14 +68,18 @@ process.stdin.on('end', () => {
68
68
  process.exit(0);
69
69
  }
70
70
 
71
+ // Non-GSD sessions: don't interfere — let Claude's auto-compaction handle it
72
+ const isGsdActive = metrics.has_gsd === true;
73
+ if (!isGsdActive) {
74
+ process.exit(0);
75
+ }
76
+
71
77
  // Debounce logic
72
78
  const warnPath = path.join(tmpDir, `gsd-ctx-${sessionId}-warned.json`);
73
79
  let warnData = { callsSinceWarn: 0, lastLevel: null };
74
- let firstWarn = true;
75
80
 
76
81
  try {
77
82
  warnData = JSON.parse(fs.readFileSync(warnPath, 'utf8'));
78
- firstWarn = false;
79
83
  } catch {
80
84
  // No prior warning state — first warning this session
81
85
  }
@@ -85,25 +89,24 @@ process.stdin.on('end', () => {
85
89
  const isCritical = remaining <= CRITICAL_THRESHOLD;
86
90
  const currentLevel = isCritical ? 'critical' : 'warning';
87
91
 
88
- // Severity escalation bypasses debounce
92
+ // Atomic debounce state write helper
93
+ const writeWarnData = (data) => {
94
+ const tmpFile = warnPath + `.${process.pid}-${Date.now()}.tmp`;
95
+ fs.writeFileSync(tmpFile, JSON.stringify(data));
96
+ fs.renameSync(tmpFile, warnPath);
97
+ };
98
+
99
+ // Severity escalation bypasses debounce (lastLevel null = first warning, always fire)
89
100
  const severityEscalated = currentLevel === 'critical' && warnData.lastLevel === 'warning';
90
- if (!firstWarn && warnData.callsSinceWarn < DEBOUNCE_CALLS && !severityEscalated) {
91
- fs.writeFileSync(warnPath, JSON.stringify(warnData));
101
+ if (warnData.lastLevel !== null && warnData.callsSinceWarn < DEBOUNCE_CALLS && !severityEscalated) {
102
+ writeWarnData(warnData);
92
103
  process.exit(0);
93
104
  }
94
105
 
95
106
  // Reset debounce
96
107
  warnData.callsSinceWarn = 0;
97
108
  warnData.lastLevel = currentLevel;
98
- fs.writeFileSync(warnPath, JSON.stringify(warnData));
99
-
100
- // Use bridge data to avoid extra filesystem check
101
- const isGsdActive = metrics.has_gsd === true;
102
-
103
- // Non-GSD sessions: don't interfere — let Claude's auto-compaction handle it
104
- if (!isGsdActive) {
105
- process.exit(0);
106
- }
109
+ writeWarnData(warnData);
107
110
 
108
111
  let message;
109
112
  if (isCritical) {
@@ -7,6 +7,24 @@ const fs = require('node:fs');
7
7
  const path = require('node:path');
8
8
  const os = require('node:os');
9
9
 
10
/**
 * Walk from startDir up to the filesystem root looking for a .gsd directory.
 *
 * @param {string} startDir - Absolute directory to start the search from.
 * @returns {string|null} Absolute path to the .gsd directory, or null if none
 *   exists on the path from startDir up to the filesystem root.
 */
function findGsdDir(startDir) {
  let dir = startDir;
  while (true) {
    const candidate = path.join(dir, '.gsd');
    // Probe separately from the ascent step: if `.gsd` exists but is a plain
    // file, statSync succeeds without throwing, so ascending only inside the
    // catch block would loop forever on the same directory.
    let isDir = false;
    try {
      isDir = fs.statSync(candidate).isDirectory();
    } catch {
      // candidate does not exist (or is unreadable) — treat as not found here
    }
    if (isDir) return candidate;
    const parent = path.dirname(dir);
    if (parent === dir) return null; // reached filesystem root
    dir = parent;
  }
}
27
+
10
28
  let input = '';
11
29
  const stdinTimeout = setTimeout(() => process.exit(0), 3000);
12
30
  process.stdin.setEncoding('utf8');
@@ -24,8 +42,8 @@ process.stdin.on('end', () => {
24
42
  // Current GSD task from state.json
25
43
  let task = '';
26
44
  let hasGsd = false;
27
- const gsdDir = path.join(cwd, '.gsd');
28
- try {
45
+ const gsdDir = findGsdDir(cwd);
46
+ if (gsdDir) try {
29
47
  const state = JSON.parse(fs.readFileSync(path.join(gsdDir, 'state.json'), 'utf8'));
30
48
  hasGsd = true;
31
49
  if (state.current_task && state.current_phase) {
@@ -71,21 +89,24 @@ process.stdin.on('end', () => {
71
89
  }
72
90
 
73
91
  // Also write to .gsd/.context-health for MCP server reads (atomic, skip if unchanged)
74
- try {
75
- const healthPath = path.join(gsdDir, '.context-health');
76
- let needsHealthWrite = true;
92
+ // Only write if a .gsd directory was found — never create .gsd from the hook
93
+ if (gsdDir) {
77
94
  try {
78
- const current = fs.readFileSync(healthPath, 'utf8').trim();
79
- if (current === String(remaining)) needsHealthWrite = false;
80
- } catch { /* file doesn't exist yet */ }
81
- if (needsHealthWrite) {
82
- fs.mkdirSync(gsdDir, { recursive: true });
83
- const tmpHealth = path.join(gsdDir, `.context-health.${process.pid}-${Date.now()}.tmp`);
84
- fs.writeFileSync(tmpHealth, String(remaining));
85
- fs.renameSync(tmpHealth, healthPath);
95
+ const healthPath = path.join(gsdDir, '.context-health');
96
+ let needsHealthWrite = true;
97
+ try {
98
+ const current = fs.readFileSync(healthPath, 'utf8').trim();
99
+ if (current === String(remaining)) needsHealthWrite = false;
100
+ } catch { /* file doesn't exist yet */ }
101
+ if (needsHealthWrite) {
102
+ fs.mkdirSync(gsdDir, { recursive: true });
103
+ const tmpHealth = path.join(gsdDir, `.context-health.${process.pid}-${Date.now()}.tmp`);
104
+ fs.writeFileSync(tmpHealth, String(remaining));
105
+ fs.renameSync(tmpHealth, healthPath);
106
+ }
107
+ } catch (e) {
108
+ if (process.env.GSD_DEBUG) process.stderr.write(`gsd-statusline: context-health write failed: ${e.message}\n`);
86
109
  }
87
- } catch (e) {
88
- if (process.env.GSD_DEBUG) process.stderr.write(`gsd-statusline: context-health write failed: ${e.message}\n`);
89
110
  }
90
111
 
91
112
  // Progress bar (10 segments)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gsd-lite",
3
- "version": "0.3.5",
3
+ "version": "0.3.7",
4
4
  "description": "AI orchestration tool for Claude Code — GSD management shell + Superpowers quality core",
5
5
  "type": "module",
6
6
  "bin": {
@@ -102,13 +102,12 @@ if (Object.keys(state.evidence).length > MAX_EVIDENCE_ENTRIES) {
102
102
 
103
103
  `_pruneEvidenceFromState(state, currentPhase, gsdDir)`:
104
104
 
105
- 1. 计算阈值: `threshold = currentPhase - 1`
106
- 2. 遍历所有 evidence 条目
107
- 3. 对每条 evidence 调用 `parseScopePhase(entry.scope)` 提取 phase 编号
108
- 4. 如果 `phaseNum !== null && phaseNum < threshold` -> 标记为待归档
109
- 5. 其余保留 (包括 scope 无法解析的条目)
105
+ 1. 遍历所有 evidence 条目
106
+ 2. 对每条 evidence 调用 `parseScopePhase(entry.scope)` 提取 phase 编号
107
+ 3. 如果 `phaseNum !== null && phaseNum < currentPhase` -> 标记为待归档
108
+ 4. 其余保留 (包括 scope 无法解析的条目)
110
109
 
111
- 规则: 保留当前 phase 和前一个 phase 的 evidence,归档更早 phase 的 evidence。
110
+ 规则: 仅保留当前 phase 的 evidence,归档所有更早 phase 的 evidence。
112
111
 
113
112
  ## 归档生命周期
114
113
 
@@ -123,7 +123,7 @@ executor 上下文传递协议 (orchestrator → executor):
123
123
  阶段完成后,编排器批量更新 state.json:
124
124
  - 更新 phase lifecycle → `accepted`
125
125
  - 更新 phase_handoff 信息
126
- - 归档旧 phase 的 evidence (只保留当前 phase 和上一 phase)
126
+ - 归档旧 phase 的 evidence (仅保留当前 phase)
127
127
  - 推进 `current_phase` 到下一个 pending phase
128
128
 
129
129
  **规则:** 只有编排器写 state.json,避免并发竞态。
@@ -133,13 +133,13 @@ executor 上下文传递协议 (orchestrator → executor):
133
133
  每次派发子代理前和阶段切换时检查上下文健康度:
134
134
 
135
135
  ```
136
- remaining < 35%:
136
+ remaining <= 35%:
137
137
  1. 保存完整状态到 state.json
138
138
  2. workflow_mode = awaiting_clear
139
- 3. 输出: "上下文剩余 <35%,已保存进度。请执行 /clear 然后 /gsd:resume 继续"
139
+ 3. 输出: "上下文剩余 <=35%,已保存进度。请执行 /clear 然后 /gsd:resume 继续"
140
140
  4. 停止执行
141
141
 
142
- remaining < 25%:
142
+ remaining <= 25%:
143
143
  1. 紧急保存状态到 state.json
144
144
  2. workflow_mode = awaiting_clear
145
145
  3. 输出: "上下文即将耗尽,已保存进度。请立即执行 /clear 然后 /gsd:resume"
package/src/schema.js CHANGED
@@ -33,9 +33,11 @@ export const PHASE_LIFECYCLE = {
33
33
  reviewing: ['accepted', 'active'],
34
34
  accepted: [],
35
35
  blocked: ['active'],
36
- failed: [],
36
+ failed: ['active'], // H-3: Allow recovery from failed state (gated behind explicit user action)
37
37
  };
38
38
 
39
+ export const TASK_LEVELS = ['L0', 'L1', 'L2', 'L3'];
40
+
39
41
  export const PHASE_REVIEW_STATUS = ['pending', 'reviewing', 'accepted', 'rework_required'];
40
42
 
41
43
  export const CANONICAL_FIELDS = [
@@ -223,6 +225,17 @@ export function validateStateUpdate(state, updates) {
223
225
  case 'evidence':
224
226
  if (!isPlainObject(updates.evidence)) {
225
227
  errors.push('evidence must be an object');
228
+ } else {
229
+ // M-5: Validate evidence entry structure
230
+ for (const [id, entry] of Object.entries(updates.evidence)) {
231
+ if (!isPlainObject(entry)) {
232
+ errors.push(`evidence["${id}"] must be an object`);
233
+ continue;
234
+ }
235
+ if (typeof entry.scope !== 'string' || entry.scope.length === 0) {
236
+ errors.push(`evidence["${id}"].scope must be a non-empty string`);
237
+ }
238
+ }
226
239
  }
227
240
  break;
228
241
  case 'research':
@@ -235,6 +248,14 @@ export function validateStateUpdate(state, updates) {
235
248
  }
236
249
  }
237
250
 
251
+ // M-4: Cross-field check — current_phase ≤ total_phases (skip degenerate 0-phase case)
252
+ const effectivePhase = 'current_phase' in updates ? updates.current_phase : state.current_phase;
253
+ const effectiveTotal = 'total_phases' in updates ? updates.total_phases : state.total_phases;
254
+ if (Number.isFinite(effectivePhase) && Number.isFinite(effectiveTotal)
255
+ && effectiveTotal > 0 && effectivePhase > effectiveTotal) {
256
+ errors.push(`current_phase (${effectivePhase}) must not exceed total_phases (${effectiveTotal})`);
257
+ }
258
+
238
259
  return { valid: errors.length === 0, errors };
239
260
  }
240
261
 
@@ -318,6 +339,22 @@ export function validateState(state) {
318
339
  }
319
340
  if (!isPlainObject(state.evidence)) {
320
341
  errors.push('evidence must be an object');
342
+ } else {
343
+ // M-5: Validate evidence entry structure
344
+ for (const [id, entry] of Object.entries(state.evidence)) {
345
+ if (!isPlainObject(entry)) {
346
+ errors.push(`evidence["${id}"] must be an object`);
347
+ continue;
348
+ }
349
+ if (typeof entry.scope !== 'string' || entry.scope.length === 0) {
350
+ errors.push(`evidence["${id}"].scope must be a non-empty string`);
351
+ }
352
+ }
353
+ }
354
+ // M-4: Cross-field check — current_phase ≤ total_phases (skip degenerate 0-phase case)
355
+ if (Number.isFinite(state.current_phase) && Number.isFinite(state.total_phases)
356
+ && state.total_phases > 0 && state.current_phase > state.total_phases) {
357
+ errors.push(`current_phase (${state.current_phase}) must not exceed total_phases (${state.total_phases})`);
321
358
  }
322
359
  if (Array.isArray(state.phases)) {
323
360
  if (typeof state.total_phases === 'number' && state.total_phases !== state.phases.length) {
@@ -387,8 +424,8 @@ export function validateState(state) {
387
424
  if (!TASK_LIFECYCLE[task.lifecycle]) {
388
425
  errors.push(`Task ${task.id}: invalid lifecycle ${task.lifecycle}`);
389
426
  }
390
- if (typeof task.level !== 'string') {
391
- errors.push(`Task ${task.id}: level must be a string`);
427
+ if (!TASK_LEVELS.includes(task.level)) {
428
+ errors.push(`Task ${task.id}: level must be one of ${TASK_LEVELS.join(', ')}`);
392
429
  }
393
430
  if (!Array.isArray(task.requires)) {
394
431
  errors.push(`Task ${task.id}: requires must be an array`);
@@ -530,6 +567,33 @@ export function validateDebuggerResult(r) {
530
567
  return { valid: errors.length === 0, errors };
531
568
  }
532
569
 
570
// C-1: Schema migration infrastructure
export const CURRENT_SCHEMA_VERSION = 1;

/**
 * Upgrade a persisted state object from an older schema version to the
 * current one, applying each sequential migration step (v0→v1, v1→v2, …).
 * The state object is mutated in place and also returned. Non-object
 * inputs (null, undefined, primitives) are returned untouched.
 *
 * @param {object|null|undefined} state - Parsed state.json contents.
 * @returns {*} The same reference, migrated to CURRENT_SCHEMA_VERSION.
 */
export function migrateState(state) {
  if (!state || typeof state !== 'object') return state;

  const fromVersion = state.schema_version || 0;

  // v0 → v1: backfill fields introduced in schema version 1.
  if (fromVersion < 1) {
    state.evidence = state.evidence || {};
    state.research = state.research || null;
    state.decisions = state.decisions || [];
    state.context = state.context || {
      last_session: new Date().toISOString(),
      remaining_percentage: 100,
    };
    state.schema_version = 1;
  }

  // Future migration steps chain here:
  // if (fromVersion < 2) { migrateV1toV2(state); state.schema_version = 2; }

  return state;
}
596
+
533
597
  export function createInitialState({ project, phases }) {
534
598
  if (!Array.isArray(phases)) {
535
599
  return { error: true, message: 'phases must be an array' };
@@ -548,6 +612,38 @@ export function createInitialState({ project, phases }) {
548
612
  seenIds.add(id);
549
613
  }
550
614
  }
615
+
616
+ // M-7: Detect circular dependencies within each phase (Kahn's algorithm)
617
+ for (const [pi, p] of phases.entries()) {
618
+ const tasks = p.tasks || [];
619
+ const taskIds = tasks.map((t, ti) => `${pi + 1}.${t.index ?? (ti + 1)}`);
620
+ const inDegree = new Map(taskIds.map(id => [id, 0]));
621
+ const adj = new Map(taskIds.map(id => [id, []]));
622
+ for (const [ti, t] of tasks.entries()) {
623
+ const id = `${pi + 1}.${t.index ?? (ti + 1)}`;
624
+ for (const dep of (t.requires || [])) {
625
+ if (dep.kind === 'task' && inDegree.has(dep.id)) {
626
+ adj.get(dep.id).push(id);
627
+ inDegree.set(id, inDegree.get(id) + 1);
628
+ }
629
+ }
630
+ }
631
+ const queue = [...inDegree.entries()].filter(([, d]) => d === 0).map(([id]) => id);
632
+ let sorted = 0;
633
+ while (queue.length > 0) {
634
+ const node = queue.shift();
635
+ sorted++;
636
+ for (const neighbor of adj.get(node)) {
637
+ const d = inDegree.get(neighbor) - 1;
638
+ inDegree.set(neighbor, d);
639
+ if (d === 0) queue.push(neighbor);
640
+ }
641
+ }
642
+ if (sorted < taskIds.length) {
643
+ const cycleNodes = [...inDegree.entries()].filter(([, d]) => d > 0).map(([id]) => id);
644
+ return { error: true, message: `Circular dependency detected in phase ${pi + 1}: ${cycleNodes.join(', ')}` };
645
+ }
646
+ }
551
647
  return {
552
648
  project,
553
649
  schema_version: 1,
@@ -15,7 +15,9 @@ import { validateDebuggerResult, validateExecutorResult, validateResearcherResul
15
15
  import { getGitHead, getGsdDir } from '../utils.js';
16
16
 
17
17
  const MAX_DEBUG_RETRY = 3;
18
+ const MAX_RESUME_DEPTH = 3;
18
19
  const CONTEXT_RESUME_THRESHOLD = 40;
20
+ const MAX_DECISIONS = 200;
19
21
 
20
22
  function isTerminalWorkflowMode(workflowMode) {
21
23
  return workflowMode === 'completed' || workflowMode === 'failed';
@@ -482,7 +484,11 @@ async function resumeExecutingTask(state, basePath) {
482
484
  };
483
485
  }
484
486
 
485
- export async function resumeWorkflow({ basePath = process.cwd() } = {}) {
487
+ export async function resumeWorkflow({ basePath = process.cwd(), _depth = 0 } = {}) {
488
+ if (_depth >= MAX_RESUME_DEPTH) {
489
+ return { error: true, message: `resumeWorkflow recursive depth limit exceeded (max ${MAX_RESUME_DEPTH})` };
490
+ }
491
+
486
492
  const state = await read({ basePath });
487
493
  if (state.error) {
488
494
  return state;
@@ -540,7 +546,7 @@ export async function resumeWorkflow({ basePath = process.cwd() } = {}) {
540
546
  current_review: null,
541
547
  });
542
548
  if (persistError) return persistError;
543
- const resumed = await resumeWorkflow({ basePath });
549
+ const resumed = await resumeWorkflow({ basePath, _depth: _depth + 1 });
544
550
  if (resumed.error) return resumed;
545
551
  return { ...resumed, auto_unblocked: autoUnblock.autoUnblocked };
546
552
  }
@@ -614,17 +620,48 @@ export async function resumeWorkflow({ basePath = process.cwd() } = {}) {
614
620
  total_phases: state.total_phases,
615
621
  message: 'Workflow already completed',
616
622
  };
617
- case 'failed':
623
+ case 'failed': {
624
+ const failedPhases = [];
625
+ const failedTasks = [];
626
+ for (const phase of state.phases || []) {
627
+ if (phase.lifecycle === 'failed') failedPhases.push({ id: phase.id, name: phase.name });
628
+ for (const t of phase.todo || []) {
629
+ if (t.lifecycle === 'failed') {
630
+ failedTasks.push({
631
+ id: t.id,
632
+ name: t.name,
633
+ phase_id: phase.id,
634
+ retry_count: t.retry_count || 0,
635
+ last_failure_summary: t.last_failure_summary || null,
636
+ debug_context: t.debug_context || null,
637
+ });
638
+ }
639
+ }
640
+ }
618
641
  return {
619
642
  success: true,
620
- action: 'noop',
643
+ action: 'await_recovery_decision',
621
644
  workflow_mode: state.workflow_mode,
622
- failed_phases: (state.phases || []).filter((phase) => phase.lifecycle === 'failed').map((phase) => phase.id),
623
- failed_tasks: (state.phases || []).flatMap((phase) =>
624
- (phase.todo || []).filter((task) => task.lifecycle === 'failed').map((task) => task.id)),
625
- message: 'Workflow is in failed state',
645
+ failed_phases: failedPhases,
646
+ failed_tasks: failedTasks,
647
+ recovery_options: ['retry_failed', 'skip_failed', 'replan'],
648
+ message: 'Workflow is in failed state. Recovery options available.',
626
649
  };
650
+ }
627
651
  case 'paused_by_user':
652
+ return {
653
+ success: true,
654
+ action: 'await_manual_intervention',
655
+ workflow_mode: state.workflow_mode,
656
+ resume_to: state.current_review?.scope === 'phase'
657
+ ? 'reviewing_phase'
658
+ : state.current_review?.scope === 'task'
659
+ ? 'reviewing_task'
660
+ : 'executing_task',
661
+ current_review: state.current_review || null,
662
+ current_task: state.current_task || null,
663
+ message: 'Project is paused. Confirm to resume execution.',
664
+ };
628
665
  case 'planning':
629
666
  case 'reconcile_workspace':
630
667
  case 'replan_required':
@@ -660,7 +697,9 @@ export async function handleExecutorResult({ result, basePath = process.cwd() }
660
697
  }
661
698
 
662
699
  const decisionEntries = buildDecisionEntries(result.decisions, phase.id, task.id, (state.decisions || []).length);
663
- const decisions = [...(state.decisions || []), ...decisionEntries];
700
+ const allDecisions = [...(state.decisions || []), ...decisionEntries];
701
+ // H-1: Cap decisions to prevent unbounded growth
702
+ const decisions = allDecisions.length > MAX_DECISIONS ? allDecisions.slice(-MAX_DECISIONS) : allDecisions;
664
703
 
665
704
  if (result.outcome === 'checkpointed') {
666
705
  const reviewLevel = reclassifyReviewLevel(task, result);
@@ -2,7 +2,7 @@
2
2
 
3
3
  import { join, dirname } from 'node:path';
4
4
  import { stat, writeFile, rename, unlink } from 'node:fs/promises';
5
- import { ensureDir, readJson, writeJson, writeAtomic, getStatePath, getGitHead, isPlainObject, clearGsdDirCache } from '../utils.js';
5
+ import { ensureDir, readJson, writeJson, writeAtomic, getStatePath, getGitHead, isPlainObject, clearGsdDirCache, withFileLock } from '../utils.js';
6
6
  import {
7
7
  CANONICAL_FIELDS,
8
8
  TASK_LIFECYCLE,
@@ -13,16 +13,42 @@ import {
13
13
  validateStateUpdate,
14
14
  validateTransition,
15
15
  createInitialState,
16
+ migrateState,
16
17
  } from '../schema.js';
17
18
  import { runAll } from './verify.js';
18
19
 
19
20
  const RESEARCH_FILES = ['STACK.md', 'ARCHITECTURE.md', 'PITFALLS.md', 'SUMMARY.md'];
20
21
  const MAX_EVIDENCE_ENTRIES = 200;
22
+ const MAX_ARCHIVE_ENTRIES = 1000;
23
+
24
+ // M-10: Structured error codes
25
+ export const ERROR_CODES = {
26
+ NO_PROJECT_DIR: 'NO_PROJECT_DIR',
27
+ INVALID_INPUT: 'INVALID_INPUT',
28
+ VALIDATION_FAILED: 'VALIDATION_FAILED',
29
+ STATE_EXISTS: 'STATE_EXISTS',
30
+ NOT_FOUND: 'NOT_FOUND',
31
+ TERMINAL_STATE: 'TERMINAL_STATE',
32
+ TRANSITION_ERROR: 'TRANSITION_ERROR',
33
+ HANDOFF_GATE: 'HANDOFF_GATE',
34
+ };
21
35
 
22
36
  // C-1: Serialize all state mutations to prevent TOCTOU races
37
+ // C-2: Layer cross-process advisory file lock on top of in-process queue
23
38
  let _mutationQueue = Promise.resolve();
39
+ let _fileLockPath = null;
40
+
41
+ export function setLockPath(lockPath) {
42
+ _fileLockPath = lockPath;
43
+ }
44
+
24
45
  function withStateLock(fn) {
25
- const p = _mutationQueue.then(fn);
46
+ const p = _mutationQueue.then(() => {
47
+ if (_fileLockPath) {
48
+ return withFileLock(_fileLockPath, fn);
49
+ }
50
+ return fn();
51
+ });
26
52
  _mutationQueue = p.catch(() => {});
27
53
  return p;
28
54
  }
@@ -47,10 +73,10 @@ function normalizeResearchArtifacts(artifacts) {
47
73
  */
48
74
  export async function init({ project, phases, research, force = false, basePath = process.cwd() }) {
49
75
  if (!project || typeof project !== 'string') {
50
- return { error: true, message: 'project must be a non-empty string' };
76
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'project must be a non-empty string' };
51
77
  }
52
78
  if (!Array.isArray(phases)) {
53
- return { error: true, message: 'phases must be an array' };
79
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'phases must be an array' };
54
80
  }
55
81
  const gsdDir = join(basePath, '.gsd');
56
82
  const statePath = join(gsdDir, 'state.json');
@@ -60,8 +86,16 @@ export async function init({ project, phases, research, force = false, basePath
60
86
  if (!force) {
61
87
  try {
62
88
  await stat(statePath);
63
- return { error: true, message: 'state.json already exists; pass force: true to reinitialize' };
89
+ return { error: true, code: ERROR_CODES.STATE_EXISTS, message: 'state.json already exists; pass force: true to reinitialize' };
64
90
  } catch {} // File doesn't exist, proceed
91
+ } else {
92
+ // H-8: Backup existing state before force overwrite
93
+ try {
94
+ const existing = await readJson(statePath);
95
+ if (existing.ok) {
96
+ await writeJson(join(gsdDir, 'state.json.bak'), existing.data);
97
+ }
98
+ } catch {} // No existing state to backup
65
99
  }
66
100
 
67
101
  const phasesDir = join(gsdDir, 'phases');
@@ -105,17 +139,25 @@ export async function init({ project, phases, research, force = false, basePath
105
139
  /**
106
140
  * Read state.json, optionally filtering to specific fields.
107
141
  */
108
- export async function read({ fields, basePath = process.cwd() } = {}) {
142
+ export async function read({ fields, basePath = process.cwd(), validate = false } = {}) {
109
143
  const statePath = await getStatePath(basePath);
110
144
  if (!statePath) {
111
- return { error: true, message: 'No .gsd directory found' };
145
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
112
146
  }
113
147
 
114
148
  const result = await readJson(statePath);
115
149
  if (!result.ok) {
116
- return { error: true, message: result.error };
150
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: result.error };
151
+ }
152
+ const state = migrateState(result.data);
153
+
154
+ // H-7: Optional semantic validation on read
155
+ if (validate) {
156
+ const validation = validateState(state);
157
+ if (!validation.valid) {
158
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `State validation failed: ${validation.errors.join('; ')}` };
159
+ }
117
160
  }
118
- const state = result.data;
119
161
 
120
162
  if (fields && Array.isArray(fields) && fields.length > 0) {
121
163
  const filtered = {};
@@ -135,7 +177,7 @@ export async function read({ fields, basePath = process.cwd() } = {}) {
135
177
  */
136
178
  export async function update({ updates, basePath = process.cwd() } = {}) {
137
179
  if (!updates || typeof updates !== 'object' || Array.isArray(updates)) {
138
- return { error: true, message: 'updates must be a non-null object' };
180
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'updates must be a non-null object' };
139
181
  }
140
182
  // Guard: reject non-canonical fields
141
183
  const nonCanonical = Object.keys(updates).filter(
@@ -144,22 +186,34 @@ export async function update({ updates, basePath = process.cwd() } = {}) {
144
186
  if (nonCanonical.length > 0) {
145
187
  return {
146
188
  error: true,
189
+ code: ERROR_CODES.INVALID_INPUT,
147
190
  message: `Non-canonical fields rejected: ${nonCanonical.join(', ')}`,
148
191
  };
149
192
  }
150
193
 
151
194
  const statePath = await getStatePath(basePath);
152
195
  if (!statePath) {
153
- return { error: true, message: 'No .gsd directory found' };
196
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
154
197
  }
198
+ // C-2: Initialize cross-process lock path on first mutation
199
+ if (!_fileLockPath) _fileLockPath = join(dirname(statePath), 'state.lock');
155
200
 
156
201
  return withStateLock(async () => {
157
202
  const result = await readJson(statePath);
158
203
  if (!result.ok) {
159
- return { error: true, message: result.error };
204
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: result.error };
160
205
  }
161
206
  const state = result.data;
162
207
 
208
+ // Guard: reject workflow_mode changes FROM terminal states
209
+ if (updates.workflow_mode) {
210
+ const currentMode = state.workflow_mode;
211
+ if ((currentMode === 'completed' || currentMode === 'failed')
212
+ && updates.workflow_mode !== currentMode) {
213
+ return { error: true, code: ERROR_CODES.TERMINAL_STATE, message: `Cannot change workflow_mode from terminal state '${currentMode}'` };
214
+ }
215
+ }
216
+
163
217
  // Validate lifecycle transitions before merging
164
218
  if (updates.phases && Array.isArray(updates.phases)) {
165
219
  for (const newPhase of updates.phases) {
@@ -169,7 +223,7 @@ export async function update({ updates, basePath = process.cwd() } = {}) {
169
223
  // Check phase lifecycle transition
170
224
  if (newPhase.lifecycle && newPhase.lifecycle !== oldPhase.lifecycle) {
171
225
  const tr = validateTransition('phase', oldPhase.lifecycle, newPhase.lifecycle);
172
- if (!tr.valid) return { error: true, message: tr.error };
226
+ if (!tr.valid) return { error: true, code: ERROR_CODES.TRANSITION_ERROR, message: tr.error };
173
227
  }
174
228
 
175
229
  // Check task lifecycle transitions
@@ -179,7 +233,7 @@ export async function update({ updates, basePath = process.cwd() } = {}) {
179
233
  if (!oldTask) continue;
180
234
  if (newTask.lifecycle && newTask.lifecycle !== oldTask.lifecycle) {
181
235
  const tr = validateTransition('task', oldTask.lifecycle, newTask.lifecycle);
182
- if (!tr.valid) return { error: true, message: tr.error };
236
+ if (!tr.valid) return { error: true, code: ERROR_CODES.TRANSITION_ERROR, message: tr.error };
183
237
  }
184
238
  }
185
239
  }
@@ -229,6 +283,7 @@ export async function update({ updates, basePath = process.cwd() } = {}) {
229
283
  if (!validation.valid) {
230
284
  return {
231
285
  error: true,
286
+ code: ERROR_CODES.VALIDATION_FAILED,
232
287
  message: `Validation failed: ${validation.errors.join('; ')}`,
233
288
  };
234
289
  }
@@ -266,20 +321,20 @@ export async function phaseComplete({
266
321
  direction_ok,
267
322
  } = {}) {
268
323
  if (typeof phase_id !== 'number') {
269
- return { error: true, message: 'phase_id must be a number' };
324
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'phase_id must be a number' };
270
325
  }
271
326
  if (verification != null && (typeof verification !== 'object' || Array.isArray(verification))) {
272
- return { error: true, message: 'verification must be an object when provided' };
327
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'verification must be an object when provided' };
273
328
  }
274
329
  if (typeof run_verify !== 'boolean') {
275
- return { error: true, message: 'run_verify must be a boolean' };
330
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'run_verify must be a boolean' };
276
331
  }
277
332
  if (direction_ok !== undefined && typeof direction_ok !== 'boolean') {
278
- return { error: true, message: 'direction_ok must be a boolean when provided' };
333
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'direction_ok must be a boolean when provided' };
279
334
  }
280
335
  const statePath = await getStatePath(basePath);
281
336
  if (!statePath) {
282
- return { error: true, message: 'No .gsd directory found' };
337
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
283
338
  }
284
339
 
285
340
  return withStateLock(async () => {
@@ -291,13 +346,13 @@ export async function phaseComplete({
291
346
 
292
347
  const phase = state.phases.find((p) => p.id === phase_id);
293
348
  if (!phase) {
294
- return { error: true, message: `Phase ${phase_id} not found` };
349
+ return { error: true, code: ERROR_CODES.NOT_FOUND, message: `Phase ${phase_id} not found` };
295
350
  }
296
351
  if (!Array.isArray(phase.todo)) {
297
- return { error: true, message: `Phase ${phase_id} has invalid todo list` };
352
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Phase ${phase_id} has invalid todo list` };
298
353
  }
299
354
  if (!phase.phase_handoff || typeof phase.phase_handoff !== 'object') {
300
- return { error: true, message: `Phase ${phase_id} is missing phase_handoff metadata` };
355
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Phase ${phase_id} is missing phase_handoff metadata` };
301
356
  }
302
357
 
303
358
  // Validate phase lifecycle transition FIRST (fail-fast) [I-4]
@@ -307,7 +362,7 @@ export async function phaseComplete({
307
362
  'accepted',
308
363
  );
309
364
  if (!transitionResult.valid) {
310
- return { error: true, message: transitionResult.error };
365
+ return { error: true, code: ERROR_CODES.TRANSITION_ERROR, message: transitionResult.error };
311
366
  }
312
367
 
313
368
  // Check handoff gate: all tasks must be accepted
@@ -315,6 +370,7 @@ export async function phaseComplete({
315
370
  if (pendingTasks.length > 0) {
316
371
  return {
317
372
  error: true,
373
+ code: ERROR_CODES.HANDOFF_GATE,
318
374
  message: `Handoff gate not met: ${pendingTasks.length} task(s) not accepted — ${pendingTasks.map((t) => `${t.id}:${t.lifecycle}`).join(', ')}`,
319
375
  };
320
376
  }
@@ -323,6 +379,7 @@ export async function phaseComplete({
323
379
  if (phase.phase_handoff.critical_issues_open > 0) {
324
380
  return {
325
381
  error: true,
382
+ code: ERROR_CODES.HANDOFF_GATE,
326
383
  message: `Handoff gate not met: ${phase.phase_handoff.critical_issues_open} critical issue(s) open`,
327
384
  };
328
385
  }
@@ -332,6 +389,7 @@ export async function phaseComplete({
332
389
  if (!reviewPassed) {
333
390
  return {
334
391
  error: true,
392
+ code: ERROR_CODES.HANDOFF_GATE,
335
393
  message: 'Handoff gate not met: required reviews not passed',
336
394
  };
337
395
  }
@@ -343,6 +401,7 @@ export async function phaseComplete({
343
401
  if (!testsPassed) {
344
402
  return {
345
403
  error: true,
404
+ code: ERROR_CODES.HANDOFF_GATE,
346
405
  message: `Handoff gate not met: verification checks failed — ${verificationSummary(verificationResult)}`,
347
406
  };
348
407
  }
@@ -360,7 +419,7 @@ export async function phaseComplete({
360
419
  phase.phase_handoff.direction_ok = false;
361
420
  const driftValidation = validateState(state);
362
421
  if (!driftValidation.valid) {
363
- return { error: true, message: `Validation failed: ${driftValidation.errors.join('; ')}` };
422
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Validation failed: ${driftValidation.errors.join('; ')}` };
364
423
  }
365
424
  await writeJson(statePath, state);
366
425
  return {
@@ -383,11 +442,15 @@ export async function phaseComplete({
383
442
  // Increment current_phase if this was the active phase
384
443
  if (state.current_phase === phase_id && phase_id < state.total_phases) {
385
444
  state.current_phase = phase_id + 1;
386
- // Activate the next phase
445
+ // Activate the next phase (M-3: use validateTransition for consistency)
387
446
  const nextPhase = state.phases.find((p) => p.id === state.current_phase);
388
- if (nextPhase && nextPhase.lifecycle === 'pending') {
389
- nextPhase.lifecycle = 'active';
447
+ if (nextPhase) {
448
+ const nextTr = validateTransition('phase', nextPhase.lifecycle, 'active');
449
+ if (nextTr.valid) nextPhase.lifecycle = 'active';
390
450
  }
451
+ } else if (state.current_phase === phase_id && phase_id >= state.total_phases) {
452
+ // Final phase completed — mark workflow as completed
453
+ state.workflow_mode = 'completed';
391
454
  }
392
455
 
393
456
  // Update git_head to current commit
@@ -408,18 +471,18 @@ export async function phaseComplete({
408
471
  export async function addEvidence({ id, data, basePath = process.cwd() }) {
409
472
  // I-8: Validate inputs
410
473
  if (!id || typeof id !== 'string') {
411
- return { error: true, message: 'id must be a non-empty string' };
474
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'id must be a non-empty string' };
412
475
  }
413
476
  if (!data || typeof data !== 'object' || Array.isArray(data)) {
414
- return { error: true, message: 'data must be a non-null object' };
477
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'data must be a non-null object' };
415
478
  }
416
479
  if (typeof data.scope !== 'string') {
417
- return { error: true, message: 'data.scope must be a string' };
480
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'data.scope must be a string' };
418
481
  }
419
482
 
420
483
  const statePath = await getStatePath(basePath);
421
484
  if (!statePath) {
422
- return { error: true, message: 'No .gsd directory found' };
485
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
423
486
  }
424
487
 
425
488
  return withStateLock(async () => {
@@ -454,13 +517,12 @@ export async function addEvidence({ id, data, basePath = process.cwd() }) {
454
517
  async function _pruneEvidenceFromState(state, currentPhase, gsdDir) {
455
518
  if (!state.evidence) return 0;
456
519
 
457
- const threshold = currentPhase - 1;
458
520
  const toArchive = {};
459
521
  const toKeep = {};
460
522
 
461
523
  for (const [id, entry] of Object.entries(state.evidence)) {
462
524
  const phaseNum = parseScopePhase(entry.scope);
463
- if (phaseNum !== null && phaseNum < threshold) {
525
+ if (phaseNum !== null && phaseNum < currentPhase) {
464
526
  toArchive[id] = entry;
465
527
  } else {
466
528
  toKeep[id] = entry;
@@ -474,8 +536,15 @@ async function _pruneEvidenceFromState(state, currentPhase, gsdDir) {
474
536
  const existing = await readJson(archivePath);
475
537
  const archive = existing.ok ? existing.data : {};
476
538
  Object.assign(archive, toArchive);
477
- await writeJson(archivePath, archive);
478
539
 
540
+ // H-2: Cap archive size to prevent unbounded growth
541
+ const archiveKeys = Object.keys(archive);
542
+ if (archiveKeys.length > MAX_ARCHIVE_ENTRIES) {
543
+ const toRemove = archiveKeys.slice(0, archiveKeys.length - MAX_ARCHIVE_ENTRIES);
544
+ for (const key of toRemove) delete archive[key];
545
+ }
546
+
547
+ await writeJson(archivePath, archive);
479
548
  state.evidence = toKeep;
480
549
  }
481
550
 
@@ -483,13 +552,16 @@ async function _pruneEvidenceFromState(state, currentPhase, gsdDir) {
483
552
  }
484
553
 
485
554
  /**
486
- * Prune evidence: archive entries from phases older than currentPhase - 1.
555
+ * Prune evidence: archive entries from phases before currentPhase (keep only current phase).
487
556
  * Scope format is "task:X.Y" where X is the phase number.
488
557
  */
489
558
  export async function pruneEvidence({ currentPhase, basePath = process.cwd() }) {
559
+ if (typeof currentPhase !== 'number' || !Number.isFinite(currentPhase)) {
560
+ return { error: true, code: ERROR_CODES.INVALID_INPUT, message: 'currentPhase must be a finite number' };
561
+ }
490
562
  const statePath = await getStatePath(basePath);
491
563
  if (!statePath) {
492
- return { error: true, message: 'No .gsd directory found' };
564
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
493
565
  }
494
566
 
495
567
  return withStateLock(async () => {
@@ -874,7 +946,7 @@ export function applyResearchRefresh(state, newResearch) {
874
946
  export async function storeResearch({ result, artifacts, decision_index, basePath = process.cwd() } = {}) {
875
947
  const resultValidation = validateResearcherResult(result || {});
876
948
  if (!resultValidation.valid) {
877
- return { error: true, message: `Invalid researcher result: ${resultValidation.errors.join('; ')}` };
949
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Invalid researcher result: ${resultValidation.errors.join('; ')}` };
878
950
  }
879
951
 
880
952
  const artifactsValidation = validateResearchArtifacts(artifacts, {
@@ -883,17 +955,17 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
883
955
  expiresAt: result.expires_at,
884
956
  });
885
957
  if (!artifactsValidation.valid) {
886
- return { error: true, message: `Invalid research artifacts: ${artifactsValidation.errors.join('; ')}` };
958
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Invalid research artifacts: ${artifactsValidation.errors.join('; ')}` };
887
959
  }
888
960
 
889
961
  const decisionIndexValidation = validateResearchDecisionIndex(decision_index, result.decision_ids);
890
962
  if (!decisionIndexValidation.valid) {
891
- return { error: true, message: `Invalid research decision_index: ${decisionIndexValidation.errors.join('; ')}` };
963
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Invalid research decision_index: ${decisionIndexValidation.errors.join('; ')}` };
892
964
  }
893
965
 
894
966
  const statePath = await getStatePath(basePath);
895
967
  if (!statePath) {
896
- return { error: true, message: 'No .gsd directory found' };
968
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
897
969
  }
898
970
 
899
971
  return withStateLock(async () => {
@@ -930,11 +1002,10 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
930
1002
  throw err;
931
1003
  }
932
1004
 
933
- const { decision_index: _, ...nextResearchBase } = {
1005
+ const nextResearchBase = {
934
1006
  volatility: result.volatility,
935
1007
  expires_at: result.expires_at,
936
1008
  sources: result.sources,
937
- decision_index,
938
1009
  files: RESEARCH_FILES,
939
1010
  updated_at: new Date().toISOString(),
940
1011
  };
@@ -957,7 +1028,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
957
1028
 
958
1029
  const validation = validateState(state);
959
1030
  if (!validation.valid) {
960
- return { error: true, message: `State validation failed: ${validation.errors.join('; ')}` };
1031
+ return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `State validation failed: ${validation.errors.join('; ')}` };
961
1032
  }
962
1033
 
963
1034
  await writeJson(statePath, state);
package/src/utils.js CHANGED
@@ -23,7 +23,7 @@ export async function getGsdDir(startDir = process.cwd()) {
23
23
  } catch {}
24
24
  const parent = dirname(dir);
25
25
  if (parent === dir) {
26
- _gsdDirCache.set(resolved, null);
26
+ // H-9: Don't cache negative results — .gsd may be created later by init()
27
27
  return null;
28
28
  }
29
29
  dir = parent;
@@ -52,6 +52,52 @@ export async function getGitHead(cwd = process.cwd()) {
52
52
  }
53
53
  }
54
54
 
55
+ // C-2: Advisory file lock for cross-process serialization
56
+ const LOCK_STALE_MS = 10_000;
57
+ const LOCK_RETRY_MS = 50;
58
+ const LOCK_MAX_RETRIES = 100; // 5 seconds total
59
+
60
+ /**
61
+ * Execute fn while holding an advisory file lock.
62
+ * Uses O_CREAT|O_EXCL (via 'wx' flag) for atomic lock acquisition.
63
+ * Stale locks (>10s) are automatically broken.
64
+ * Falls through without locking on non-EEXIST errors (e.g., read-only fs).
65
+ */
66
+ export async function withFileLock(lockPath, fn) {
67
+ let acquired = false;
68
+ for (let i = 0; i < LOCK_MAX_RETRIES; i++) {
69
+ try {
70
+ await writeFile(lockPath, String(process.pid), { flag: 'wx' });
71
+ acquired = true;
72
+ break;
73
+ } catch (err) {
74
+ if (err.code === 'EEXIST') {
75
+ try {
76
+ const s = await stat(lockPath);
77
+ if (Date.now() - s.mtimeMs > LOCK_STALE_MS) {
78
+ try { await unlink(lockPath); } catch {}
79
+ continue;
80
+ }
81
+ } catch {
82
+ // stat failed — lock may have been released between checks
83
+ continue;
84
+ }
85
+ await new Promise(r => setTimeout(r, LOCK_RETRY_MS));
86
+ } else {
87
+ break; // Non-EEXIST error — proceed without lock
88
+ }
89
+ }
90
+ }
91
+
92
+ try {
93
+ return await fn();
94
+ } finally {
95
+ if (acquired) {
96
+ try { await unlink(lockPath); } catch {}
97
+ }
98
+ }
99
+ }
100
+
55
101
  let _tmpCounter = 0;
56
102
  function tmpPath(filePath) {
57
103
  return `${filePath}.${process.pid}-${Date.now()}-${_tmpCounter++}.tmp`;