gsd-lite 0.6.9 → 0.7.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,7 +13,7 @@
13
13
  "name": "gsd",
14
14
  "source": "./",
15
15
  "description": "AI orchestration tool — GSD management shell + Superpowers quality core. 5 commands, 4 agents, 5 workflows, MCP server, context monitoring.",
16
- "version": "0.6.9",
16
+ "version": "0.7.3",
17
17
  "keywords": [
18
18
  "orchestration",
19
19
  "mcp",
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gsd",
3
- "version": "0.6.9",
3
+ "version": "0.7.3",
4
4
  "description": "AI orchestration tool for Claude Code — GSD management shell + Superpowers quality core",
5
5
  "author": {
6
6
  "name": "sdsrss",
package/README.md CHANGED
@@ -291,7 +291,7 @@ gsd-lite/
291
291
  │ ├── gsd-session-stop.cjs # Graceful shutdown with crash markers
292
292
  │ ├── gsd-statusline.cjs # StatusLine display (composite-aware)
293
293
  │ └── lib/ # Shared hook utilities (gsd-finder, composite statusline, semver)
294
- ├── tests/ # 909 tests (unit + simulation + E2E integration)
294
+ ├── tests/ # 966 tests (unit + simulation + E2E integration)
295
295
  ├── cli.js # Install/uninstall CLI entry
296
296
  ├── install.js # Installation script (plugin-aware, idempotent)
297
297
  └── uninstall.js # Uninstall script
@@ -300,7 +300,7 @@ gsd-lite/
300
300
  ## Testing
301
301
 
302
302
  ```bash
303
- npm test # Run all 909 tests
303
+ npm test # Run all 966 tests
304
304
  npm run test:coverage # Tests + coverage report (94%+ lines, 83%+ branches)
305
305
  npm run lint # Biome lint
306
306
  node --test tests/file.js # Run a single test file
@@ -49,9 +49,9 @@ Phase 2 模式分析:
49
49
  2. 对比差异,列出所有不同点
50
50
  3. 不要假设"那个不重要"
51
51
 
52
- Phase 3 假设测试:
52
+ Phase 3 假设测试 (通过观察验证,不直接修改代码):
53
53
  1. 明确陈述: "我认为 X 是根因,因为 Y"
54
- 2. 最小变更测试 (一次只改一个变量)
54
+ 2. 通过 Bash 运行测试/添加日志、读取运行时输出来验证假设
55
55
  3. 验证: 有效 → Phase 4 / 无效 → 新假设
56
56
 
57
57
  Phase 4 修复方向建议:
@@ -52,7 +52,7 @@ tools: Read, Write, Edit, Bash, Grep, Glob
52
52
  "summary": "Implemented PUT /api/users/:id endpoint",
53
53
  "checkpoint_commit": "a1b2c3d",
54
54
  "files_changed": ["src/api/users.ts", "tests/users.test.ts"],
55
- "decisions": ["[DECISION] use optimistic locking by version column"],
55
+ "decisions": [{"id": "d1", "summary": "use optimistic locking by version column", "rationale": "prevents concurrent update conflicts"}],
56
56
  "blockers": [],
57
57
  "contract_changed": true,
58
58
  "confidence": "high",
@@ -28,18 +28,25 @@ Call the `health` MCP tool:
28
28
 
29
29
  Check if GSD hooks are registered in Claude settings:
30
30
  - Read `~/.claude/settings.json` (or `~/.claude/settings.local.json`)
31
- - StatusLine check (check BOTH paths):
32
- 1. Direct: `statusLine` entry containing `gsd-statusline`
33
- 2. Composite: read `~/.cache/code-graph/statusline-registry.json` — if any entry's `command` contains `gsd-statusline`, it is registered through the composite statusline system
34
- - Either path present: StatusLine = registered
35
- - Check for `PostToolUse` hook entry containing `gsd-context-monitor`
36
- - Both present: record PASS
37
- - Partial: record WARN with which hook is missing
38
- - Neither: record FAIL "No GSD hooks registered"
39
-
40
- Also verify the hook files exist on disk:
31
+ - StatusLine check (registered if ANY path matches):
32
+ 1. Direct: `statusLine.command` contains `gsd-statusline`
33
+ 2. Composite cache registry: `~/.cache/code-graph/statusline-registry.json` — any entry whose `command` contains `gsd-statusline`
34
+ 3. Composite backup mirror: `~/.claude/statusline-providers.json` same match rule (durable mirror written by code-graph's chain CLI)
35
+ - Any path present: StatusLine = registered
36
+ - Check the three hook arrays in `settings.hooks`:
37
+ - `PostToolUse` entry referencing `gsd-context-monitor`
38
+ - `SessionStart` entry referencing `gsd-session-init`
39
+ - `Stop` entry referencing `gsd-session-stop`
40
+ - All four (statusLine + three hooks) present: record PASS
41
+ - Partial: record WARN naming each missing hook
42
+ - None: record FAIL "No GSD hooks registered"
43
+
44
+ Also verify the hook files exist on disk (install.js copies all five):
41
45
  - `~/.claude/hooks/gsd-statusline.cjs`
42
46
  - `~/.claude/hooks/gsd-context-monitor.cjs`
47
+ - `~/.claude/hooks/gsd-session-init.cjs`
48
+ - `~/.claude/hooks/gsd-session-stop.cjs`
49
+ - `~/.claude/hooks/gsd-auto-update.cjs`
43
50
  - Files missing but settings present: record WARN "Hook registered but file missing"
44
51
 
45
52
  ## STEP 4: Lock File Check
@@ -91,6 +91,7 @@ Blocked 任务:
91
91
 
92
92
  | workflow_mode | 建议 |
93
93
  |---|---|
94
+ | planning | "仍在计划阶段,运行 /gsd:start 或 /gsd:prd 重新启动计划" |
94
95
  | executing_task | "自动执行中,等待完成" |
95
96
  | reviewing_task | "L2 审查进行中" |
96
97
  | reviewing_phase | "L1 阶段审查进行中" |
package/commands/stop.md CHANGED
@@ -29,8 +29,6 @@ description: Save current state and pause project execution
29
29
 
30
30
  使用 `state-update` MCP 工具更新状态,确保通过 schema 校验和乐观锁。
31
31
 
32
- 使用原子写入: 先写 `.gsd/state.json.tmp`,成功后 rename 为 `.gsd/state.json`
33
-
34
32
  ## STEP 3: 确认输出
35
33
 
36
34
  输出: "已暂停。运行 /gsd:resume 继续"
@@ -323,7 +323,7 @@ function validateExtractedPackage(extractDir) {
323
323
  const pkgPath = path.join(extractDir, 'package.json');
324
324
  const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
325
325
  if (pkg.name !== 'gsd-lite') return false;
326
- if (!pkg.version || !/^\d+\.\d+\.\d+/.test(pkg.version)) return false;
326
+ if (!pkg.version || !/^\d+\.\d+\.\d+(-[\w.]+)?$/.test(pkg.version)) return false;
327
327
  // Verify install.js exists and is a regular file (lstat rejects symlinks)
328
328
  const installPath = path.join(extractDir, 'install.js');
329
329
  const lstat = fs.lstatSync(installPath);
@@ -404,8 +404,26 @@ async function downloadAndInstall(tarballUrl, verbose = false, token = null) {
404
404
  // Write tarball to file, then extract with spawnSync (no shell)
405
405
  const tarPath = path.join(tmpDir, 'release.tar.gz');
406
406
  fs.writeFileSync(tarPath, tarData);
407
- const tar = spawnSync('tar', ['xzf', tarPath, '-C', tmpDir, '--strip-components=1'], { timeout: 30000 });
408
- if (tar.status !== 0) throw new Error(`tar extract failed: ${(tar.stderr || '').toString().slice(0, 200)}`);
407
+ const stripFlag = process.platform === 'win32' ? [] : ['--strip-components=1'];
408
+ const tar = spawnSync('tar', ['xzf', tarPath, '-C', tmpDir, ...stripFlag], { timeout: 30000 });
409
+ if (tar.status !== 0) {
410
+ const errMsg = (tar.stderr || '').toString().slice(0, 200);
411
+ if (process.platform === 'win32') {
412
+ console.error('[gsd] Auto-update: tar extraction failed on Windows — manual update may be required');
413
+ }
414
+ throw new Error(`tar extract failed: ${errMsg}`);
415
+ }
416
+ // On Windows without --strip-components, the content is nested in a subdirectory
417
+ if (process.platform === 'win32') {
418
+ const entries = fs.readdirSync(tmpDir).filter(e => e !== 'release.tar.gz');
419
+ if (entries.length === 1 && fs.statSync(path.join(tmpDir, entries[0])).isDirectory()) {
420
+ const nested = path.join(tmpDir, entries[0]);
421
+ for (const f of fs.readdirSync(nested)) {
422
+ fs.renameSync(path.join(nested, f), path.join(tmpDir, f));
423
+ }
424
+ fs.rmdirSync(nested);
425
+ }
426
+ }
409
427
 
410
428
  // Validate extracted package before installing
411
429
  if (!validateExtractedPackage(tmpDir)) {
@@ -499,7 +517,7 @@ function pruneOldCacheVersions(cacheBase, keepCount = 3, verbose = false) {
499
517
  try {
500
518
  if (!fs.existsSync(cacheBase)) return;
501
519
  const entries = fs.readdirSync(cacheBase, { withFileTypes: true })
502
- .filter(e => e.isDirectory() && /^\d+\.\d+\.\d+$/.test(e.name))
520
+ .filter(e => e.isDirectory() && /^\d+\.\d+\.\d+(-[\w.]+)?$/.test(e.name))
503
521
  .map(e => e.name);
504
522
  if (entries.length <= keepCount) return;
505
523
 
@@ -546,7 +564,7 @@ function syncPluginCache(extractedDir, verbose = false) {
546
564
  const newPkgPath = path.join(extractedDir, 'package.json');
547
565
  if (!fs.existsSync(newPkgPath)) return;
548
566
  const newVersion = JSON.parse(fs.readFileSync(newPkgPath, 'utf8')).version;
549
- if (!newVersion) return;
567
+ if (!newVersion || !/^\d+\.\d+\.\d+(-[\w.]+)?$/.test(newVersion)) return;
550
568
 
551
569
  // Determine new cache path
552
570
  const cacheBase = path.join(claudeDir, 'plugins', 'cache', 'gsd', 'gsd');
@@ -2,17 +2,45 @@
2
2
  'use strict';
3
3
 
4
4
  /**
5
- * Compare two semver version strings (e.g. "1.2.3") for sorting.
6
- * Returns negative if a < b, positive if a > b, 0 if equal.
5
+ * Compare two semver version strings for sorting.
6
+ * Handles pre-release suffixes: 1.0.0-beta.1 < 1.0.0 (per semver spec).
7
7
  * @param {string} a
8
8
  * @param {string} b
9
9
  * @returns {number}
10
10
  */
11
11
  function semverSortComparator(a, b) {
12
- const pa = a.split('.').map(s => parseInt(s, 10) || 0);
13
- const pb = b.split('.').map(s => parseInt(s, 10) || 0);
12
+ const [coreA, preA] = String(a).split('-', 2);
13
+ const [coreB, preB] = String(b).split('-', 2);
14
+ const pa = coreA.split('.').map(s => parseInt(s, 10) || 0);
15
+ const pb = coreB.split('.').map(s => parseInt(s, 10) || 0);
14
16
  for (let i = 0; i < 3; i++) {
15
- if (pa[i] !== pb[i]) return pa[i] - pb[i];
17
+ if ((pa[i] || 0) !== (pb[i] || 0)) return (pa[i] || 0) - (pb[i] || 0);
18
+ }
19
+ // Same core version: pre-release < release (1.0.0-beta < 1.0.0)
20
+ if (preA && !preB) return -1;
21
+ if (!preA && preB) return 1;
22
+ if (preA && preB) {
23
+ // Compare pre-release identifiers left-to-right
24
+ const partsA = preA.split('.');
25
+ const partsB = preB.split('.');
26
+ for (let i = 0; i < Math.max(partsA.length, partsB.length); i++) {
27
+ if (i >= partsA.length) return -1; // fewer fields = lower precedence
28
+ if (i >= partsB.length) return 1;
29
+ const na = parseInt(partsA[i], 10);
30
+ const nb = parseInt(partsB[i], 10);
31
+ const aIsNum = !Number.isNaN(na);
32
+ const bIsNum = !Number.isNaN(nb);
33
+ if (aIsNum && bIsNum) {
34
+ if (na !== nb) return na - nb;
35
+ } else if (aIsNum) {
36
+ return -1; // numeric < string
37
+ } else if (bIsNum) {
38
+ return 1;
39
+ } else {
40
+ const cmp = partsA[i].localeCompare(partsB[i]);
41
+ if (cmp !== 0) return cmp;
42
+ }
43
+ }
16
44
  }
17
45
  return 0;
18
46
  }
@@ -1,14 +1,26 @@
1
1
  'use strict';
2
2
  // Detect and register with composite statusline systems (e.g., code-graph).
3
3
  // Used by install.js, gsd-session-init.cjs, and uninstall.js.
4
+ //
5
+ // Preferred path (code-graph ≥ shipping statusline-chain.js): invoke that CLI
6
+ // with `register gsd <cmd> --stdin` / `unregister gsd`. The CLI owns both the
7
+ // primary cache registry and the ~/.claude/statusline-providers.json backup
8
+ // mirror, so we do not have to know their layout.
9
+ // Fallback path (older code-graph without the CLI): write the cache registry
10
+ // directly, same as the original behavior.
4
11
 
5
12
  const fs = require('node:fs');
6
13
  const path = require('node:path');
7
14
  const os = require('node:os');
15
+ const { execFileSync } = require('node:child_process');
16
+ const { semverSortComparator } = require('./semver-sort.cjs');
8
17
 
9
- // Known composite statusline registry paths
18
+ const CLAUDE_DIR = process.env.CLAUDE_CONFIG_DIR || path.join(os.homedir(), '.claude');
19
+
20
+ // Known composite statusline registry paths (fallback-only — chain CLI is preferred).
10
21
  const REGISTRY_PATHS = [
11
22
  path.join(os.homedir(), '.cache', 'code-graph', 'statusline-registry.json'),
23
+ path.join(CLAUDE_DIR, 'statusline-providers.json'),
12
24
  ];
13
25
 
14
26
  function isCompositeStatusLine(command) {
@@ -22,13 +34,46 @@ function findCompositeRegistry() {
22
34
  return null;
23
35
  }
24
36
 
37
+ /**
38
+ * Find code-graph's statusline-chain.js in the plugin cache (newest semver).
39
+ * Returns absolute path or null if the CLI hasn't shipped yet.
40
+ */
41
+ function findChainScript() {
42
+ const base = path.join(CLAUDE_DIR, 'plugins', 'cache', 'code-graph-mcp', 'code-graph-mcp');
43
+ if (!fs.existsSync(base)) return null;
44
+ let versions;
45
+ try {
46
+ versions = fs.readdirSync(base).filter(v => /^\d+\.\d+\.\d+/.test(v));
47
+ } catch { return null; }
48
+ versions.sort(semverSortComparator).reverse();
49
+ for (const v of versions) {
50
+ const p = path.join(base, v, 'scripts', 'statusline-chain.js');
51
+ if (fs.existsSync(p)) return p;
52
+ }
53
+ return null;
54
+ }
55
+
56
+ function runChainCLI(args) {
57
+ const chainScript = findChainScript();
58
+ if (!chainScript) return false;
59
+ try {
60
+ execFileSync(process.execPath, [chainScript, ...args], { stdio: 'pipe', timeout: 5000 });
61
+ return true;
62
+ } catch { return false; }
63
+ }
64
+
25
65
  /**
26
66
  * Register GSD as a provider in the composite statusline registry.
67
+ * Prefers code-graph's statusline-chain.js CLI when available; falls back to
68
+ * writing the cache registry directly for older code-graph versions.
27
69
  * Idempotent: updates existing entry or inserts before code-graph.
28
70
  * @param {string} statuslineScriptPath - Absolute path to gsd-statusline.cjs
29
71
  * @returns {boolean} true if registered/updated
30
72
  */
31
73
  function registerProvider(statuslineScriptPath) {
74
+ const command = `node ${JSON.stringify(statuslineScriptPath)}`;
75
+ if (runChainCLI(['register', 'gsd', command, '--stdin'])) return true;
76
+
32
77
  let registryPath = findCompositeRegistry();
33
78
 
34
79
  // If composite statusLine is configured but registry file is missing,
@@ -54,7 +99,6 @@ function registerProvider(statuslineScriptPath) {
54
99
  }
55
100
  if (!Array.isArray(registry)) return false;
56
101
 
57
- const command = `node ${JSON.stringify(statuslineScriptPath)}`;
58
102
  const provider = { id: 'gsd', command, needsStdin: true };
59
103
 
60
104
  // Find existing GSD entry (by id or command)
@@ -82,9 +126,13 @@ function registerProvider(statuslineScriptPath) {
82
126
 
83
127
  /**
84
128
  * Remove GSD entry from composite statusline registry.
129
+ * Prefers code-graph's statusline-chain.js CLI when available; falls back to
130
+ * rewriting the cache registry directly for older code-graph versions.
85
131
  * @returns {boolean} true if an entry was removed
86
132
  */
87
133
  function removeProvider() {
134
+ if (runChainCLI(['unregister', 'gsd'])) return true;
135
+
88
136
  const registryPath = findCompositeRegistry();
89
137
  if (!registryPath) return false;
90
138
 
package/install.js CHANGED
@@ -145,7 +145,7 @@ export function main() {
145
145
  const preserveRuntime = existsSync(runtimeSubdir);
146
146
  let runtimeBackup;
147
147
  if (preserveRuntime) {
148
- runtimeBackup = join(RUNTIME_DIR, '..', '.gsd-runtime-backup');
148
+ runtimeBackup = join(RUNTIME_DIR, '..', `.gsd-runtime-backup-${process.pid}`);
149
149
  try { cpSync(runtimeSubdir, runtimeBackup, { recursive: true }); } catch { runtimeBackup = null; }
150
150
  }
151
151
  rmSync(RUNTIME_DIR, { recursive: true, force: true });
@@ -272,7 +272,7 @@ export function main() {
272
272
  if (existsSync(cacheBase)) {
273
273
  try {
274
274
  const entries = readdirSync(cacheBase, { withFileTypes: true })
275
- .filter(e => e.isDirectory() && /^\d+\.\d+\.\d+$/.test(e.name)).map(e => e.name);
275
+ .filter(e => e.isDirectory() && /^\d+\.\d+\.\d+(-[\w.]+)?$/.test(e.name)).map(e => e.name);
276
276
  if (entries.length > 3) {
277
277
  const sorted = entries.slice().sort(semverSortComparator);
278
278
  // Detect versions with active processes to avoid disrupting running sessions
package/launcher.js CHANGED
@@ -14,6 +14,7 @@ if (!existsSync(join(__dirname, 'node_modules', '@modelcontextprotocol'))) {
14
14
  execSync('npm install --omit=dev --ignore-scripts', {
15
15
  cwd: __dirname,
16
16
  stdio: 'pipe',
17
+ timeout: 60000,
17
18
  });
18
19
  } catch (err) {
19
20
  console.error('Failed to install dependencies:', err.stderr?.toString() || err.message);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gsd-lite",
3
- "version": "0.6.9",
3
+ "version": "0.7.3",
4
4
  "description": "AI orchestration tool for Claude Code — GSD management shell + Superpowers quality core",
5
5
  "type": "module",
6
6
  "bin": {
@@ -50,6 +50,10 @@
50
50
  "dependencies": {
51
51
  "@modelcontextprotocol/sdk": "^1.27.1"
52
52
  },
53
+ "overrides": {
54
+ "hono": "^4.12.14",
55
+ "@hono/node-server": "^1.19.14"
56
+ },
53
57
  "devDependencies": {
54
58
  "@biomejs/biome": "^2.4.6",
55
59
  "c8": "^11.0.0"
@@ -34,6 +34,7 @@ task.level 当前值?
34
34
  └── L0 或 L1
35
35
  ├── executor decisions 含 [LEVEL-UP]? -> 升级为 L2
36
36
  ├── contract_changed: true + task.name 匹配敏感关键词? -> 升级为 L2
37
+ ├── L1 + confidence: 'low'? -> 升级为 L2
37
38
  ├── L1 + confidence: 'high' + !contract_changed + 有 evidence 且无测试失败? -> 降为 L0
38
39
  └── 否 -> 保持当前级别
39
40
  ```
@@ -23,6 +23,7 @@ export async function handleExecutorResult({ result, basePath = process.cwd() }
23
23
  // Note: read() is outside the state lock. This is safe because the MCP server
24
24
  // processes tool calls sequentially (single-session, promise-queue serialized).
25
25
  // persist() below re-acquires the lock and applies changes atomically.
26
+ // TODO: if MCP SDK supports concurrent tool calls, move this read inside withStateLock.
26
27
  const state = await read({ basePath });
27
28
  if (state.error) return state;
28
29
  const { phase, task } = getPhaseAndTask(state, result.task_id);
@@ -1,5 +1,7 @@
1
1
  import { read, selectRunnableTask } from '../state/index.js';
2
- import { getGitHead } from '../../utils.js';
2
+ import { getGitHead, getGsdDir } from '../../utils.js';
3
+ import { join } from 'node:path';
4
+ import { unlink } from 'node:fs/promises';
3
5
  import {
4
6
  MAX_RESUME_DEPTH,
5
7
  CONTEXT_RESUME_THRESHOLD,
@@ -269,6 +271,12 @@ export async function resumeWorkflow({ basePath = process.cwd(), _depth = 0, unb
269
271
  return state;
270
272
  }
271
273
 
274
+ // Clear session-end marker if present (crash recovery)
275
+ try {
276
+ const gsdDir = await getGsdDir(basePath);
277
+ if (gsdDir) await unlink(join(gsdDir, '.session-end')).catch(() => {});
278
+ } catch {}
279
+
272
280
  // Force-unblock specified tasks before normal resume flow
273
281
  if (Array.isArray(unblock_tasks) && unblock_tasks.length > 0 && _depth === 0) {
274
282
  const phase = getCurrentPhase(state);
@@ -485,8 +493,8 @@ export async function resumeWorkflow({ basePath = process.cwd(), _depth = 0, unb
485
493
  success: true,
486
494
  action: 'await_manual_intervention',
487
495
  workflow_mode: state.workflow_mode,
488
- guidance: 'Complete planning and call state-init to initialize the project',
489
- message: 'Project is in planning mode; complete the plan and initialize with state-init',
496
+ guidance: 'Plan is being revised. Run /gsd:start or /gsd:prd to continue planning, or state-update workflow_mode back to executing_task when ready.',
497
+ message: 'Project is in planning mode (plan revision). Finish the plan then set workflow_mode back to executing_task.',
490
498
  };
491
499
  break;
492
500
  case 'reconcile_workspace': {
@@ -22,32 +22,43 @@ export const ERROR_CODES = {
22
22
 
23
23
  // C-1: Serialize all state mutations to prevent TOCTOU races
24
24
  // C-2: Layer cross-process advisory file lock on top of in-process queue
25
- let _mutationQueue = Promise.resolve();
26
- let _fileLockPath = null;
25
+ // Per-basePath keyed maps — safe for multi-project concurrent use
26
+ const _mutationQueues = new Map();
27
+ const _fileLockPaths = new Map();
27
28
 
28
29
  export function setLockPath(lockPath) {
29
- _fileLockPath = lockPath;
30
+ // Legacy API for tests — sets/clears the default (null-key) lock path
31
+ if (lockPath === null) {
32
+ _fileLockPaths.delete(null);
33
+ _mutationQueues.delete(null);
34
+ } else {
35
+ _fileLockPaths.set(null, lockPath);
36
+ }
30
37
  }
31
38
 
32
39
  /**
33
- * Ensure _fileLockPath is set from a known state path.
40
+ * Ensure lock path is set for a given state path.
34
41
  * Must be called before withStateLock in all mutation paths.
35
42
  */
36
43
  export function ensureLockPathFromStatePath(statePath) {
37
44
  if (statePath) {
38
- _fileLockPath = join(dirname(statePath), 'state.lock');
45
+ const lockPath = join(dirname(statePath), 'state.lock');
46
+ _fileLockPaths.set(statePath, lockPath);
39
47
  }
40
48
  }
41
49
 
42
- export function withStateLock(fn) {
43
- const p = _mutationQueue.then(() => {
44
- if (_fileLockPath) {
45
- return withFileLock(_fileLockPath, fn);
50
+ export function withStateLock(fn, statePath) {
51
+ const lockPath = _fileLockPaths.get(statePath) ?? _fileLockPaths.get(null);
52
+ const queueKey = statePath ?? null;
53
+ const prev = _mutationQueues.get(queueKey) ?? Promise.resolve();
54
+ const p = prev.then(() => {
55
+ if (lockPath) {
56
+ return withFileLock(lockPath, fn);
46
57
  }
47
58
  process.stderr.write('[gsd] WARNING: withStateLock called without lock path — cross-process safety not guaranteed\n');
48
59
  return fn();
49
60
  });
50
- _mutationQueue = p.catch(() => {});
61
+ _mutationQueues.set(queueKey, p.catch(() => {}));
51
62
  return p;
52
63
  }
53
64
 
@@ -147,7 +147,7 @@ export async function init({ project, phases, research, force = false, basePath
147
147
  })),
148
148
  research: !!research,
149
149
  };
150
- });
150
+ }, statePath);
151
151
  }
152
152
 
153
153
  /**
@@ -156,7 +156,7 @@ export async function init({ project, phases, research, force = false, basePath
156
156
  export async function read({ fields, basePath = process.cwd(), validate = false } = {}) {
157
157
  const statePath = await getStatePath(basePath);
158
158
  if (!statePath) {
159
- return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
159
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No GSD project found (.gsd directory missing). Run /gsd:start or /gsd:prd to begin.' };
160
160
  }
161
161
 
162
162
  const result = await readJson(statePath);
@@ -207,7 +207,7 @@ export async function update({ updates, basePath = process.cwd(), expectedVersio
207
207
 
208
208
  const statePath = await getStatePath(basePath);
209
209
  if (!statePath) {
210
- return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
210
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No GSD project found (.gsd directory missing). Run /gsd:start or /gsd:prd to begin.' };
211
211
  }
212
212
  ensureLockPathFromStatePath(statePath);
213
213
 
@@ -378,7 +378,7 @@ export async function update({ updates, basePath = process.cwd(), expectedVersio
378
378
 
379
379
  await writeJson(statePath, merged);
380
380
  return { success: true, state: merged };
381
- });
381
+ }, statePath);
382
382
  }
383
383
 
384
384
  /**
@@ -425,7 +425,7 @@ export async function phaseComplete({
425
425
  }
426
426
  const statePath = await getStatePath(basePath);
427
427
  if (!statePath) {
428
- return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
428
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No GSD project found (.gsd directory missing). Run /gsd:start or /gsd:prd to begin.' };
429
429
  }
430
430
  ensureLockPathFromStatePath(statePath);
431
431
 
@@ -592,7 +592,7 @@ export async function phaseComplete({
592
592
  workflow_mode: state.workflow_mode,
593
593
  ...(isCompleted ? { message: 'All phases completed — project finished' } : {}),
594
594
  };
595
- });
595
+ }, statePath);
596
596
  }
597
597
 
598
598
  /**
@@ -612,7 +612,7 @@ export async function addEvidence({ id, data, basePath = process.cwd() }) {
612
612
 
613
613
  const statePath = await getStatePath(basePath);
614
614
  if (!statePath) {
615
- return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
615
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No GSD project found (.gsd directory missing). Run /gsd:start or /gsd:prd to begin.' };
616
616
  }
617
617
  ensureLockPathFromStatePath(statePath);
618
618
 
@@ -639,7 +639,7 @@ export async function addEvidence({ id, data, basePath = process.cwd() }) {
639
639
  state._version = (state._version ?? 0) + 1;
640
640
  await writeJson(statePath, state);
641
641
  return { success: true };
642
- });
642
+ }, statePath);
643
643
  }
644
644
 
645
645
  /**
@@ -693,7 +693,7 @@ export async function pruneEvidence({ currentPhase, basePath = process.cwd() })
693
693
  }
694
694
  const statePath = await getStatePath(basePath);
695
695
  if (!statePath) {
696
- return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
696
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No GSD project found (.gsd directory missing). Run /gsd:start or /gsd:prd to begin.' };
697
697
  }
698
698
  ensureLockPathFromStatePath(statePath);
699
699
 
@@ -712,7 +712,7 @@ export async function pruneEvidence({ currentPhase, basePath = process.cwd() })
712
712
  }
713
713
 
714
714
  return { success: true, archived };
715
- });
715
+ }, statePath);
716
716
  }
717
717
 
718
718
  /**
@@ -733,7 +733,7 @@ export async function patchPlan({ operations, basePath = process.cwd() } = {}) {
733
733
 
734
734
  const statePath = await getStatePath(basePath);
735
735
  if (!statePath) {
736
- return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
736
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No GSD project found (.gsd directory missing). Run /gsd:start or /gsd:prd to begin.' };
737
737
  }
738
738
  ensureLockPathFromStatePath(statePath);
739
739
 
@@ -792,7 +792,7 @@ export async function patchPlan({ operations, basePath = process.cwd() } = {}) {
792
792
  state._version = (state._version ?? 0) + 1;
793
793
  await writeJson(statePath, state);
794
794
  return { success: true, applied, plan_version: state.plan_version };
795
- });
795
+ }, statePath);
796
796
  }
797
797
 
798
798
  function _applyPatchOp(state, op) {
@@ -1,7 +1,6 @@
1
1
  // Automation/business logic functions
2
2
 
3
3
  import { dirname, join } from 'node:path';
4
- import { writeFileSync, unlinkSync } from 'node:fs';
5
4
  import { writeFile, rename, unlink } from 'node:fs/promises';
6
5
  import { ensureDir, readJson, writeJson, getStatePath } from '../../utils.js';
7
6
  import {
@@ -431,7 +430,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
431
430
 
432
431
  const statePath = await getStatePath(basePath);
433
432
  if (!statePath) {
434
- return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No .gsd directory found' };
433
+ return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: 'No GSD project found (.gsd directory missing). Run /gsd:start or /gsd:prd to begin.' };
435
434
  }
436
435
  ensureLockPathFromStatePath(statePath);
437
436
 
@@ -450,7 +449,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
450
449
  // state.json write. On recovery (future iteration), presence of this file
451
450
  // indicates a potentially inconsistent research state.
452
451
  const sentinelPath = join(gsdDir, '.research-commit-pending');
453
- writeFileSync(sentinelPath, JSON.stringify({ timestamp: Date.now(), pid: process.pid }));
452
+ await writeFile(sentinelPath, JSON.stringify({ timestamp: Date.now(), pid: process.pid }));
454
453
 
455
454
  // Atomic multi-file write: write all artifacts first, then rename in batch
456
455
  const normalizedArtifacts = normalizeResearchArtifacts(artifacts);
@@ -472,7 +471,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
472
471
  for (const { tmp } of tmpPaths) {
473
472
  try { await unlink(tmp); } catch {}
474
473
  }
475
- try { unlinkSync(sentinelPath); } catch {}
474
+ try { await unlink(sentinelPath); } catch {}
476
475
  throw err;
477
476
  }
478
477
 
@@ -509,7 +508,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
509
508
 
510
509
  const validation = validateState(state);
511
510
  if (!validation.valid) {
512
- try { unlinkSync(sentinelPath); } catch {}
511
+ try { await unlink(sentinelPath); } catch {}
513
512
  return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `State validation failed: ${validation.errors.join('; ')}` };
514
513
  }
515
514
 
@@ -517,7 +516,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
517
516
  await writeJson(statePath, state);
518
517
 
519
518
  // Remove sentinel after successful state write — crash consistency window closed
520
- try { unlinkSync(sentinelPath); } catch {}
519
+ try { await unlink(sentinelPath); } catch {}
521
520
 
522
521
  return {
523
522
  success: true,
@@ -527,5 +526,5 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
527
526
  warnings: refreshResult.warnings,
528
527
  research: state.research,
529
528
  };
530
- });
529
+ }, statePath);
531
530
  }
@@ -77,7 +77,7 @@
77
77
 
78
78
  ### STOP: 3 次失败停止
79
79
 
80
- **条件:** 同一错误指纹 (file+line msg[:50]) 出现 3 次。
80
+ **条件:** 同一 task 连续失败 3 次 (retry_count >= 3)。error_fingerprint 仅用于调试上下文,不参与触发判断。
81
81
 
82
82
  处理:
83
83
  1. 返回 `outcome: "failed"`