gsd-pi 2.65.0-dev.16e10d7 → 2.65.0-dev.6cc5110

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. package/dist/resources/extensions/gsd/auto/session.js +4 -0
  2. package/dist/resources/extensions/gsd/auto-dispatch.js +5 -1
  3. package/dist/resources/extensions/gsd/auto-recovery.js +28 -14
  4. package/dist/resources/extensions/gsd/auto-start.js +7 -10
  5. package/dist/resources/extensions/gsd/auto.js +19 -13
  6. package/dist/resources/extensions/gsd/db-writer.js +13 -3
  7. package/dist/resources/extensions/gsd/json-persistence.js +5 -2
  8. package/dist/resources/extensions/gsd/state.js +12 -10
  9. package/dist/resources/extensions/gsd/tools/complete-milestone.js +15 -3
  10. package/dist/resources/extensions/gsd/tools/complete-slice.js +15 -3
  11. package/dist/resources/extensions/gsd/tools/complete-task.js +15 -3
  12. package/dist/resources/extensions/gsd/triage-resolution.js +8 -7
  13. package/dist/resources/extensions/gsd/undo.js +3 -2
  14. package/dist/resources/extensions/gsd/workflow-logger.js +1 -1
  15. package/dist/resources/extensions/gsd/workflow-reconcile.js +99 -6
  16. package/dist/web/standalone/.next/BUILD_ID +1 -1
  17. package/dist/web/standalone/.next/app-path-routes-manifest.json +20 -20
  18. package/dist/web/standalone/.next/build-manifest.json +2 -2
  19. package/dist/web/standalone/.next/prerender-manifest.json +3 -3
  20. package/dist/web/standalone/.next/server/app/_global-error.html +2 -2
  21. package/dist/web/standalone/.next/server/app/_global-error.rsc +1 -1
  22. package/dist/web/standalone/.next/server/app/_global-error.segments/_full.segment.rsc +1 -1
  23. package/dist/web/standalone/.next/server/app/_global-error.segments/_global-error/__PAGE__.segment.rsc +1 -1
  24. package/dist/web/standalone/.next/server/app/_global-error.segments/_global-error.segment.rsc +1 -1
  25. package/dist/web/standalone/.next/server/app/_global-error.segments/_head.segment.rsc +1 -1
  26. package/dist/web/standalone/.next/server/app/_global-error.segments/_index.segment.rsc +1 -1
  27. package/dist/web/standalone/.next/server/app/_global-error.segments/_tree.segment.rsc +1 -1
  28. package/dist/web/standalone/.next/server/app/_not-found.html +1 -1
  29. package/dist/web/standalone/.next/server/app/_not-found.rsc +1 -1
  30. package/dist/web/standalone/.next/server/app/_not-found.segments/_full.segment.rsc +1 -1
  31. package/dist/web/standalone/.next/server/app/_not-found.segments/_head.segment.rsc +1 -1
  32. package/dist/web/standalone/.next/server/app/_not-found.segments/_index.segment.rsc +1 -1
  33. package/dist/web/standalone/.next/server/app/_not-found.segments/_not-found/__PAGE__.segment.rsc +1 -1
  34. package/dist/web/standalone/.next/server/app/_not-found.segments/_not-found.segment.rsc +1 -1
  35. package/dist/web/standalone/.next/server/app/_not-found.segments/_tree.segment.rsc +1 -1
  36. package/dist/web/standalone/.next/server/app/index.html +1 -1
  37. package/dist/web/standalone/.next/server/app/index.rsc +1 -1
  38. package/dist/web/standalone/.next/server/app/index.segments/__PAGE__.segment.rsc +1 -1
  39. package/dist/web/standalone/.next/server/app/index.segments/_full.segment.rsc +1 -1
  40. package/dist/web/standalone/.next/server/app/index.segments/_head.segment.rsc +1 -1
  41. package/dist/web/standalone/.next/server/app/index.segments/_index.segment.rsc +1 -1
  42. package/dist/web/standalone/.next/server/app/index.segments/_tree.segment.rsc +1 -1
  43. package/dist/web/standalone/.next/server/app-paths-manifest.json +20 -20
  44. package/dist/web/standalone/.next/server/pages/404.html +1 -1
  45. package/dist/web/standalone/.next/server/pages/500.html +2 -2
  46. package/dist/web/standalone/.next/server/server-reference-manifest.json +1 -1
  47. package/package.json +1 -1
  48. package/src/resources/extensions/gsd/auto/session.ts +4 -0
  49. package/src/resources/extensions/gsd/auto-dispatch.ts +5 -1
  50. package/src/resources/extensions/gsd/auto-recovery.ts +19 -15
  51. package/src/resources/extensions/gsd/auto-start.ts +7 -10
  52. package/src/resources/extensions/gsd/auto.ts +17 -7
  53. package/src/resources/extensions/gsd/db-writer.ts +11 -3
  54. package/src/resources/extensions/gsd/json-persistence.ts +6 -3
  55. package/src/resources/extensions/gsd/state.ts +11 -9
  56. package/src/resources/extensions/gsd/tests/integration/auto-recovery.test.ts +6 -6
  57. package/src/resources/extensions/gsd/tests/wave1-critical-regressions.test.ts +49 -0
  58. package/src/resources/extensions/gsd/tests/wave2-events-regressions.test.ts +48 -0
  59. package/src/resources/extensions/gsd/tests/wave3-session-regressions.test.ts +47 -0
  60. package/src/resources/extensions/gsd/tests/wave4-write-safety-regressions.test.ts +70 -0
  61. package/src/resources/extensions/gsd/tests/workflow-logger-audit.test.ts +6 -3
  62. package/src/resources/extensions/gsd/tools/complete-milestone.ts +13 -3
  63. package/src/resources/extensions/gsd/tools/complete-slice.ts +13 -3
  64. package/src/resources/extensions/gsd/tools/complete-task.ts +13 -3
  65. package/src/resources/extensions/gsd/triage-resolution.ts +8 -7
  66. package/src/resources/extensions/gsd/undo.ts +3 -2
  67. package/src/resources/extensions/gsd/workflow-events.ts +1 -1
  68. package/src/resources/extensions/gsd/workflow-logger.ts +1 -1
  69. package/src/resources/extensions/gsd/workflow-reconcile.ts +107 -5
  70. /package/dist/web/standalone/.next/static/{Z3TgDP0c7kG9j8CVQVGcl → iueakR5x5bQbax2sGz8Yr}/_buildManifest.js +0 -0
  71. /package/dist/web/standalone/.next/static/{Z3TgDP0c7kG9j8CVQVGcl → iueakR5x5bQbax2sGz8Yr}/_ssgManifest.js +0 -0
@@ -1141,9 +1141,9 @@ export async function startAuto(
1141
1141
  s.stepMode = meta.stepMode ?? requestedStepMode;
1142
1142
  s.autoStartTime = meta.autoStartTime || Date.now();
1143
1143
  s.paused = true;
1144
- try { unlinkSync(pausedPath); } catch (err) { /* non-fatal */
1145
- logWarning("session", `pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" });
1146
- }
1144
+ // Don't delete pause file yet — defer until lock is acquired.
1145
+ // If lock fails, the file must survive for retry.
1146
+ s.pausedSessionFile = pausedPath;
1147
1147
  ctx.ui.notify(
1148
1148
  `Resuming paused custom workflow${meta.activeRunDir ? ` (${meta.activeRunDir})` : ""}.`,
1149
1149
  "info",
@@ -1167,10 +1167,9 @@ export async function startAuto(
1167
1167
  s.stepMode = meta.stepMode ?? requestedStepMode;
1168
1168
  s.autoStartTime = meta.autoStartTime || Date.now();
1169
1169
  s.paused = true;
1170
- // Clean up the persisted file — we're consuming it
1171
- try { unlinkSync(pausedPath); } catch (err) { /* non-fatal */
1172
- logWarning("session", `pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" });
1173
- }
1170
+ // Don't delete pause file yet — defer until lock is acquired.
1171
+ // If lock fails, the file must survive for retry.
1172
+ s.pausedSessionFile = pausedPath;
1174
1173
  ctx.ui.notify(
1175
1174
  `Resuming paused session for ${meta.milestoneId}${meta.worktreePath ? ` (worktree)` : ""}.`,
1176
1175
  "info",
@@ -1187,10 +1186,21 @@ export async function startAuto(
1187
1186
  if (s.paused) {
1188
1187
  const resumeLock = acquireSessionLock(base);
1189
1188
  if (!resumeLock.acquired) {
1189
+ // Reset paused state so isAutoPaused() doesn't stick true after lock failure.
1190
+ // Pause file is preserved on disk for retry — not deleted.
1191
+ s.paused = false;
1190
1192
  ctx.ui.notify(`Cannot resume: ${resumeLock.reason}`, "error");
1191
1193
  return;
1192
1194
  }
1193
1195
 
1196
+ // Lock acquired — now safe to delete the pause file
1197
+ if (s.pausedSessionFile) {
1198
+ try { unlinkSync(s.pausedSessionFile); } catch (err) {
1199
+ logWarning("session", `pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" });
1200
+ }
1201
+ s.pausedSessionFile = null;
1202
+ }
1203
+
1194
1204
  s.paused = false;
1195
1205
  s.active = true;
1196
1206
  s.verbose = verboseMode;
@@ -345,8 +345,12 @@ export async function saveRequirementToDb(
345
345
  await saveFile(filePath, md);
346
346
  } catch (diskErr) {
347
347
  logError('manifest', 'disk write failed, rolling back DB row', { fn: 'saveRequirementToDb', error: String((diskErr as Error).message) });
348
- const rollbackAdapter = db._getAdapter();
349
- rollbackAdapter?.prepare('DELETE FROM requirements WHERE id = :id').run({ ':id': id });
348
+ try {
349
+ const rollbackAdapter = db._getAdapter();
350
+ rollbackAdapter?.prepare('DELETE FROM requirements WHERE id = :id').run({ ':id': id });
351
+ } catch (rollbackErr) {
352
+ logError('manifest', 'SPLIT BRAIN: disk write failed AND DB rollback failed — DB has orphaned row', { fn: 'saveRequirementToDb', id, error: String((rollbackErr as Error).message) });
353
+ }
350
354
  throw diskErr;
351
355
  }
352
356
  invalidateStateCache();
@@ -466,7 +470,11 @@ export async function saveDecisionToDb(
466
470
  await saveFile(filePath, md);
467
471
  } catch (diskErr) {
468
472
  logError('manifest', 'disk write failed, rolling back DB row', { fn: 'saveDecisionToDb', error: String((diskErr as Error).message) });
469
- adapter?.prepare('DELETE FROM decisions WHERE id = :id').run({ ':id': id });
473
+ try {
474
+ adapter?.prepare('DELETE FROM decisions WHERE id = :id').run({ ':id': id });
475
+ } catch (rollbackErr) {
476
+ logError('manifest', 'SPLIT BRAIN: disk write failed AND DB rollback failed — DB has orphaned row', { fn: 'saveDecisionToDb', id, error: String((rollbackErr as Error).message) });
477
+ }
470
478
  throw diskErr;
471
479
  }
472
480
  // #2661: When a decision defers a slice, update the slice status in the DB
@@ -1,5 +1,6 @@
1
- import { existsSync, readFileSync, writeFileSync, mkdirSync, renameSync } from "node:fs";
1
+ import { existsSync, readFileSync, writeFileSync, mkdirSync, renameSync, unlinkSync } from "node:fs";
2
2
  import { dirname } from "node:path";
3
+ import { randomBytes } from "node:crypto";
3
4
 
4
5
  /**
5
6
  * Load a JSON file with validation, returning a default on failure.
@@ -51,9 +52,11 @@ export function loadJsonFileOrNull<T>(
51
52
  export function saveJsonFile<T>(filePath: string, data: T): void {
52
53
  try {
53
54
  mkdirSync(dirname(filePath), { recursive: true });
54
- const tmp = filePath + ".tmp";
55
+ // Use randomized tmp suffix to prevent concurrent-write data loss
56
+ const tmp = `${filePath}.tmp.${randomBytes(4).toString("hex")}`;
55
57
  writeFileSync(tmp, JSON.stringify(data, null, 2) + "\n", "utf-8");
56
58
  renameSync(tmp, filePath);
59
+ // No cleanup needed — renameSync atomically removes tmp on success
57
60
  } catch {
58
61
  // Non-fatal — don't let persistence failures break operation
59
62
  }
@@ -66,7 +69,7 @@ export function saveJsonFile<T>(filePath: string, data: T): void {
66
69
  export function writeJsonFileAtomic<T>(filePath: string, data: T): void {
67
70
  try {
68
71
  mkdirSync(dirname(filePath), { recursive: true });
69
- const tmp = filePath + ".tmp";
72
+ const tmp = `${filePath}.tmp.${randomBytes(4).toString("hex")}`;
70
73
  writeFileSync(tmp, JSON.stringify(data, null, 2), "utf-8");
71
74
  renameSync(tmp, filePath);
72
75
  } catch {
@@ -189,7 +189,7 @@ export async function getActiveMilestoneId(basePath: string): Promise<string | n
189
189
  const byId = new Map(allMilestones.map(m => [m.id, m]));
190
190
  for (const id of sortedIds) {
191
191
  const m = byId.get(id)!;
192
- if (m.status === "complete" || m.status === "done" || m.status === "parked") continue;
192
+ if (isClosedStatus(m.status) || m.status === "parked") continue;
193
193
  return m.id;
194
194
  }
195
195
  return null;
@@ -442,13 +442,10 @@ export async function deriveStateFromDb(basePath: string): Promise<GSDState> {
442
442
  continue;
443
443
  }
444
444
 
445
- // Check roadmap: all slices done means milestone is complete
446
- const slices = getMilestoneSlices(m.id);
447
- if (slices.length > 0 && slices.every(s => isStatusDone(s.status))) {
448
- // All slices done but no summary — still counts as complete for dep resolution
449
- // if a summary file exists
450
- // Note: without summary file, the milestone is in validating/completing state, not complete
451
- }
445
+ // Milestones with all slices done but no SUMMARY file are in
446
+ // validating/completing state — intentionally NOT added to
447
+ // completeMilestoneIds. The SUMMARY file (checked above) is the
448
+ // terminal artifact that proves completion per #864.
452
449
  }
453
450
 
454
451
  // Phase 2: Build registry and find active milestone
@@ -954,7 +951,12 @@ export async function deriveStateFromDb(basePath: string): Promise<GSDState> {
954
951
  // ── REPLAN-TRIGGER detection ─────────────────────────────────────────
955
952
  if (!blockerTaskId) {
956
953
  const sliceRow = getSlice(activeMilestone.id, activeSlice.id);
957
- if (sliceRow?.replan_triggered_at) {
954
+ // Check DB column first, fall back to disk trigger file when DB write
955
+ // was best-effort and failed (triage-resolution.ts dual-write gap).
956
+ const dbTriggered = !!sliceRow?.replan_triggered_at;
957
+ const diskTriggered = !dbTriggered &&
958
+ !!resolveSliceFile(basePath, activeMilestone.id, activeSlice.id, "REPLAN-TRIGGER");
959
+ if (dbTriggered || diskTriggered) {
958
960
  // Loop protection: if replan_history has entries, replan was already done
959
961
  const replanHistory = getReplanHistory(activeMilestone.id, activeSlice.id);
960
962
  if (replanHistory.length === 0) {
@@ -684,7 +684,7 @@ function makeGitBase(): string {
684
684
  return base;
685
685
  }
686
686
 
687
- test("hasImplementationArtifacts returns false when only .gsd/ files committed (#1703)", (t) => {
687
+ test("hasImplementationArtifacts returns 'absent' when only .gsd/ files committed (#1703)", (t) => {
688
688
  const base = makeGitBase();
689
689
  t.after(() => cleanup(base));
690
690
 
@@ -697,10 +697,10 @@ test("hasImplementationArtifacts returns false when only .gsd/ files committed (
697
697
  execFileSync("git", ["commit", "-m", "chore: add plan files"], { cwd: base, stdio: "ignore" });
698
698
 
699
699
  const result = hasImplementationArtifacts(base);
700
- assert.equal(result, false, "should return false when only .gsd/ files were committed");
700
+ assert.equal(result, "absent", "should return 'absent' when only .gsd/ files were committed");
701
701
  });
702
702
 
703
- test("hasImplementationArtifacts returns true when implementation files committed (#1703)", (t) => {
703
+ test("hasImplementationArtifacts returns 'present' when implementation files committed (#1703)", (t) => {
704
704
  const base = makeGitBase();
705
705
  t.after(() => cleanup(base));
706
706
 
@@ -714,16 +714,16 @@ test("hasImplementationArtifacts returns true when implementation files committe
714
714
  execFileSync("git", ["commit", "-m", "feat: add feature"], { cwd: base, stdio: "ignore" });
715
715
 
716
716
  const result = hasImplementationArtifacts(base);
717
- assert.equal(result, true, "should return true when implementation files are present");
717
+ assert.equal(result, "present", "should return 'present' when implementation files are present");
718
718
  });
719
719
 
720
- test("hasImplementationArtifacts returns true on non-git directory (fail-open)", (t) => {
720
+ test("hasImplementationArtifacts returns 'unknown' on non-git directory (fail-open)", (t) => {
721
721
  const base = join(tmpdir(), `gsd-test-nogit-${randomUUID()}`);
722
722
  mkdirSync(base, { recursive: true });
723
723
  t.after(() => cleanup(base));
724
724
 
725
725
  const result = hasImplementationArtifacts(base);
726
- assert.equal(result, true, "should return true (fail-open) in non-git directory");
726
+ assert.equal(result, "unknown", "should return 'unknown' (fail-open) in non-git directory");
727
727
  });
728
728
 
729
729
  // ─── verifyExpectedArtifact: complete-milestone requires impl artifacts (#1703) ──
@@ -0,0 +1,49 @@
1
+ // GSD State Machine — Wave 1 Critical Regression Tests
2
+ // Validates fixes for event log format mismatch, skipped milestone status,
3
+ // dead code removal, and replan disk-file fallback.
4
+
5
+ import { describe, test } from "node:test";
6
+ import assert from "node:assert/strict";
7
+ import { extractEntityKey } from "../workflow-reconcile.js";
8
+ import { isClosedStatus } from "../status-guards.js";
9
+ import type { WorkflowEvent } from "../workflow-events.js";
10
+
11
+ // ── Fix 1: Event log cmd format — hyphens and underscores both accepted ──
12
+
13
+ describe("extractEntityKey normalizes cmd format", () => {
14
+ const baseEvent = { params: {}, ts: "", hash: "", actor: "agent" as const, session_id: "" };
15
+
16
+ test("accepts hyphenated complete-task", () => {
17
+ const event: WorkflowEvent = { ...baseEvent, cmd: "complete-task", params: { taskId: "T01" } };
18
+ const key = extractEntityKey(event);
19
+ assert.deepStrictEqual(key, { type: "task", id: "T01" });
20
+ });
21
+
22
+ test("accepts underscored complete_task (legacy)", () => {
23
+ const event: WorkflowEvent = { ...baseEvent, cmd: "complete_task", params: { taskId: "T01" } };
24
+ const key = extractEntityKey(event);
25
+ assert.deepStrictEqual(key, { type: "task", id: "T01" });
26
+ });
27
+
28
+ test("accepts hyphenated complete-slice", () => {
29
+ const event: WorkflowEvent = { ...baseEvent, cmd: "complete-slice", params: { sliceId: "S01" } };
30
+ const key = extractEntityKey(event);
31
+ assert.deepStrictEqual(key, { type: "slice", id: "S01" });
32
+ });
33
+
34
+ test("accepts hyphenated complete-milestone", () => {
35
+ const event: WorkflowEvent = { ...baseEvent, cmd: "complete-milestone", params: { milestoneId: "M001" } };
36
+ const key = extractEntityKey(event);
37
+ assert.deepStrictEqual(key, { type: "milestone", id: "M001" });
38
+ });
39
+ });
40
+
41
+ // ── Fix 3: getActiveMilestoneId must skip "skipped" milestones ──
42
+
43
+ describe("isClosedStatus includes skipped", () => {
44
+ test("complete is closed", () => assert.ok(isClosedStatus("complete")));
45
+ test("done is closed", () => assert.ok(isClosedStatus("done")));
46
+ test("skipped is closed", () => assert.ok(isClosedStatus("skipped")));
47
+ test("pending is not closed", () => assert.ok(!isClosedStatus("pending")));
48
+ test("active is not closed", () => assert.ok(!isClosedStatus("active")));
49
+ });
@@ -0,0 +1,48 @@
1
+ // GSD State Machine — Wave 2 Event Log Regression Tests
2
+ // Validates fixes for appendEvent isolation, entity replay handlers,
3
+ // and post-reconcile cache invalidation.
4
+
5
+ import { describe, test } from "node:test";
6
+ import assert from "node:assert/strict";
7
+ import { extractEntityKey } from "../workflow-reconcile.js";
8
+ import type { WorkflowEvent } from "../workflow-events.js";
9
+
10
+ const base = { params: {}, ts: "", hash: "", actor: "agent" as const, session_id: "" };
11
+
12
+ // ── Fix 8: New entity event types handled by extractEntityKey ──
13
+
14
+ describe("extractEntityKey handles plan events", () => {
15
+ test("plan-milestone → milestone type", () => {
16
+ const event: WorkflowEvent = { ...base, cmd: "plan-milestone", params: { milestoneId: "M001" } };
17
+ const key = extractEntityKey(event);
18
+ assert.deepStrictEqual(key, { type: "milestone", id: "M001" });
19
+ });
20
+
21
+ test("plan-task → task type", () => {
22
+ const event: WorkflowEvent = { ...base, cmd: "plan-task", params: { taskId: "T01" } };
23
+ const key = extractEntityKey(event);
24
+ assert.deepStrictEqual(key, { type: "task", id: "T01" });
25
+ });
26
+
27
+ test("plan-slice preserves slice_plan type (conflict isolation)", () => {
28
+ const event: WorkflowEvent = { ...base, cmd: "plan-slice", params: { sliceId: "S01" } };
29
+ const key = extractEntityKey(event);
30
+ assert.deepStrictEqual(key, { type: "slice_plan", id: "S01" });
31
+ });
32
+
33
+ test("replan-slice → slice type", () => {
34
+ const event: WorkflowEvent = { ...base, cmd: "replan-slice", params: { sliceId: "S01" } };
35
+ const key = extractEntityKey(event);
36
+ assert.deepStrictEqual(key, { type: "slice", id: "S01" });
37
+ });
38
+ });
39
+
40
+ // ── Fix 8b: Unknown commands return null (don't crash) ──
41
+
42
+ describe("extractEntityKey handles unknown commands gracefully", () => {
43
+ test("unknown-command returns null", () => {
44
+ const event: WorkflowEvent = { ...base, cmd: "unknown-future-cmd", params: { foo: "bar" } };
45
+ const key = extractEntityKey(event);
46
+ assert.strictEqual(key, null);
47
+ });
48
+ });
@@ -0,0 +1,47 @@
1
+ // GSD State Machine — Wave 3 Session Regression Tests
2
+ // Validates tri-state hasImplementationArtifacts and AutoSession.consecutiveCompleteBootstraps.
3
+
4
+ import { describe, test } from "node:test";
5
+ import assert from "node:assert/strict";
6
+ import { hasImplementationArtifacts } from "../auto-recovery.js";
7
+ import { AutoSession } from "../auto/session.js";
8
+
9
+ // ── Fix 9: hasImplementationArtifacts returns tri-state ──
10
+
11
+ describe("hasImplementationArtifacts tri-state return", () => {
12
+ test("returns 'unknown' for non-git directory", () => {
13
+ const result = hasImplementationArtifacts("/tmp/not-a-git-repo-" + Date.now());
14
+ assert.strictEqual(result, "unknown");
15
+ });
16
+
17
+ test("return type is one of present/absent/unknown", () => {
18
+ const result = hasImplementationArtifacts(process.cwd());
19
+ assert.ok(
20
+ result === "present" || result === "absent" || result === "unknown",
21
+ `Expected present/absent/unknown, got: ${result}`,
22
+ );
23
+ });
24
+ });
25
+
26
+ // ── Fix 11: consecutiveCompleteBootstraps is per-session ──
27
+
28
+ describe("AutoSession.consecutiveCompleteBootstraps", () => {
29
+ test("initial value is 0", () => {
30
+ const s = new AutoSession();
31
+ assert.strictEqual(s.consecutiveCompleteBootstraps, 0);
32
+ });
33
+
34
+ test("reset() clears the counter", () => {
35
+ const s = new AutoSession();
36
+ s.consecutiveCompleteBootstraps = 5;
37
+ s.reset();
38
+ assert.strictEqual(s.consecutiveCompleteBootstraps, 0);
39
+ });
40
+
41
+ test("two sessions have independent counters", () => {
42
+ const s1 = new AutoSession();
43
+ const s2 = new AutoSession();
44
+ s1.consecutiveCompleteBootstraps = 3;
45
+ assert.strictEqual(s2.consecutiveCompleteBootstraps, 0);
46
+ });
47
+ });
@@ -0,0 +1,70 @@
1
+ // GSD State Machine — Wave 4 Write Safety Regression Tests
2
+ // Validates randomized tmp suffix in json-persistence and atomic writes.
3
+
4
+ import { describe, test } from "node:test";
5
+ import assert from "node:assert/strict";
6
+ import { mkdtempSync, readFileSync, readdirSync, rmSync } from "node:fs";
7
+ import { join } from "node:path";
8
+ import { tmpdir } from "node:os";
9
+ import { saveJsonFile, loadJsonFile } from "../json-persistence.js";
10
+
11
+ // ── Fix 15: json-persistence uses randomized tmp suffix ──
12
+
13
+ describe("saveJsonFile atomic write", () => {
14
+ test("writes JSON file correctly", () => {
15
+ const tmp = mkdtempSync(join(tmpdir(), "gsd-json-test-"));
16
+ try {
17
+ const file = join(tmp, "test.json");
18
+ saveJsonFile(file, { key: "value" });
19
+ const content = JSON.parse(readFileSync(file, "utf-8"));
20
+ assert.deepStrictEqual(content, { key: "value" });
21
+ } finally {
22
+ rmSync(tmp, { recursive: true, force: true });
23
+ }
24
+ });
25
+
26
+ test("no .tmp file left after successful write", () => {
27
+ const tmp = mkdtempSync(join(tmpdir(), "gsd-json-test-"));
28
+ try {
29
+ const file = join(tmp, "test.json");
30
+ saveJsonFile(file, { data: 123 });
31
+ const files = readdirSync(tmp);
32
+ const tmpFiles = files.filter((f: string) => f.includes(".tmp"));
33
+ assert.strictEqual(tmpFiles.length, 0, "No .tmp files should remain after write");
34
+ } finally {
35
+ rmSync(tmp, { recursive: true, force: true });
36
+ }
37
+ });
38
+
39
+ test("concurrent writes don't corrupt data", () => {
40
+ const tmp = mkdtempSync(join(tmpdir(), "gsd-json-test-"));
41
+ try {
42
+ const file = join(tmp, "shared.json");
43
+ // Write two different values rapidly — both should succeed without corruption
44
+ saveJsonFile(file, { writer: "first" });
45
+ saveJsonFile(file, { writer: "second" });
46
+ const content = JSON.parse(readFileSync(file, "utf-8"));
47
+ assert.strictEqual(content.writer, "second");
48
+ } finally {
49
+ rmSync(tmp, { recursive: true, force: true });
50
+ }
51
+ });
52
+
53
+ test("round-trip through loadJsonFile", () => {
54
+ const tmp = mkdtempSync(join(tmpdir(), "gsd-json-test-"));
55
+ try {
56
+ const file = join(tmp, "roundtrip.json");
57
+ const data = { items: [1, 2, 3], name: "test" };
58
+ saveJsonFile(file, data);
59
+ const loaded = loadJsonFile(
60
+ file,
61
+ (d): d is typeof data => typeof d === "object" && d !== null && "items" in d,
62
+ () => ({ items: [], name: "" }),
63
+ );
64
+ assert.deepStrictEqual(loaded.items, [1, 2, 3]);
65
+ assert.strictEqual(loaded.name, "test");
66
+ } finally {
67
+ rmSync(tmp, { recursive: true, force: true });
68
+ }
69
+ });
70
+ });
@@ -90,18 +90,21 @@ describe("workflow-logger audit persistence", () => {
90
90
  assert.ok(ctx, "context should exist");
91
91
  assert.equal(ctx.fn, "saveDecisionToDb");
92
92
  assert.equal(ctx.tool, "gsd_decision_save");
93
- assert.equal(ctx.error, undefined, "error key must be stripped from persisted context");
93
+ assert.equal(ctx.error, "SQLITE_BUSY: database is locked", "error key should be preserved in persisted context");
94
94
  assert.equal(ctx.file, undefined, "file key must be stripped from persisted context");
95
95
  });
96
96
 
97
- test("persisted errors omit context when no safe keys present", () => {
97
+ test("persisted errors preserve error key but strip other unsafe keys", () => {
98
98
  logError("bootstrap", "ensureDbOpen failed", {
99
99
  error: "ENOENT",
100
100
  cwd: "/home/user/project",
101
101
  });
102
102
  const lines = readAuditLines(tmp);
103
103
  assert.equal(lines.length, 1);
104
- assert.equal(lines[0].context, undefined, "context should be omitted when no safe keys match");
104
+ const ctx = lines[0].context as Record<string, string>;
105
+ assert.ok(ctx, "context should exist when error key is present");
106
+ assert.equal(ctx.error, "ENOENT", "error key should be preserved");
107
+ assert.equal(ctx.cwd, undefined, "cwd key must be stripped");
105
108
  });
106
109
 
107
110
  test("mixed warnings and errors only persist errors", () => {
@@ -23,7 +23,7 @@ import { invalidateStateCache } from "../state.js";
23
23
  import { renderAllProjections, stripIdPrefix } from "../workflow-projections.js";
24
24
  import { writeManifest } from "../workflow-manifest.js";
25
25
  import { appendEvent } from "../workflow-events.js";
26
- import { logWarning } from "../workflow-logger.js";
26
+ import { logWarning, logError } from "../workflow-logger.js";
27
27
 
28
28
  export interface CompleteMilestoneParams {
29
29
  milestoneId: string;
@@ -218,9 +218,19 @@ export async function handleCompleteMilestone(
218
218
  clearParseCache();
219
219
 
220
220
  // ── Post-mutation hook: projections, manifest, event log ───────────────
221
+ // Separate try/catch per step so a projection failure doesn't prevent
222
+ // the event log entry (critical for worktree reconciliation).
221
223
  try {
222
224
  await renderAllProjections(basePath, params.milestoneId);
225
+ } catch (projErr) {
226
+ logWarning("tool", `complete-milestone projection warning: ${(projErr as Error).message}`);
227
+ }
228
+ try {
223
229
  writeManifest(basePath);
230
+ } catch (mfErr) {
231
+ logWarning("tool", `complete-milestone manifest warning: ${(mfErr as Error).message}`);
232
+ }
233
+ try {
224
234
  appendEvent(basePath, {
225
235
  cmd: "complete-milestone",
226
236
  params: { milestoneId: params.milestoneId },
@@ -229,8 +239,8 @@ export async function handleCompleteMilestone(
229
239
  actor_name: params.actorName,
230
240
  trigger_reason: params.triggerReason,
231
241
  });
232
- } catch (hookErr) {
233
- logWarning("tool", `complete-milestone post-mutation hook warning: ${(hookErr as Error).message}`);
242
+ } catch (eventErr) {
243
+ logError("tool", `complete-milestone event log FAILED — completion invisible to reconciliation`, { error: (eventErr as Error).message });
234
244
  }
235
245
 
236
246
  return {
@@ -30,7 +30,7 @@ import { renderRoadmapCheckboxes } from "../markdown-renderer.js";
30
30
  import { renderAllProjections } from "../workflow-projections.js";
31
31
  import { writeManifest } from "../workflow-manifest.js";
32
32
  import { appendEvent } from "../workflow-events.js";
33
- import { logWarning } from "../workflow-logger.js";
33
+ import { logWarning, logError } from "../workflow-logger.js";
34
34
 
35
35
  export interface CompleteSliceResult {
36
36
  sliceId: string;
@@ -336,9 +336,19 @@ export async function handleCompleteSlice(
336
336
  clearParseCache();
337
337
 
338
338
  // ── Post-mutation hook: projections, manifest, event log ───────────────
339
+ // Separate try/catch per step so a projection failure doesn't prevent
340
+ // the event log entry (critical for worktree reconciliation).
339
341
  try {
340
342
  await renderAllProjections(basePath, params.milestoneId);
343
+ } catch (projErr) {
344
+ logWarning("tool", `complete-slice projection warning for ${params.milestoneId}/${params.sliceId}: ${(projErr as Error).message}`);
345
+ }
346
+ try {
341
347
  writeManifest(basePath);
348
+ } catch (mfErr) {
349
+ logWarning("tool", `complete-slice manifest warning: ${(mfErr as Error).message}`);
350
+ }
351
+ try {
342
352
  appendEvent(basePath, {
343
353
  cmd: "complete-slice",
344
354
  params: { milestoneId: params.milestoneId, sliceId: params.sliceId },
@@ -347,8 +357,8 @@ export async function handleCompleteSlice(
347
357
  actor_name: params.actorName,
348
358
  trigger_reason: params.triggerReason,
349
359
  });
350
- } catch (hookErr) {
351
- logWarning("tool", `complete-slice post-mutation hook failed for ${params.milestoneId}/${params.sliceId}`, { error: (hookErr as Error).message });
360
+ } catch (eventErr) {
361
+ logError("tool", `complete-slice event log FAILED completion invisible to reconciliation`, { error: (eventErr as Error).message });
352
362
  }
353
363
 
354
364
  return {
@@ -33,7 +33,7 @@ import { renderPlanCheckboxes } from "../markdown-renderer.js";
33
33
  import { renderAllProjections, renderSummaryContent } from "../workflow-projections.js";
34
34
  import { writeManifest } from "../workflow-manifest.js";
35
35
  import { appendEvent } from "../workflow-events.js";
36
- import { logWarning } from "../workflow-logger.js";
36
+ import { logWarning, logError } from "../workflow-logger.js";
37
37
 
38
38
  export interface CompleteTaskResult {
39
39
  taskId: string;
@@ -242,9 +242,19 @@ export async function handleCompleteTask(
242
242
  clearParseCache();
243
243
 
244
244
  // ── Post-mutation hook: projections, manifest, event log ───────────────
245
+ // Separate try/catch per step so a projection failure doesn't prevent
246
+ // the event log entry (critical for worktree reconciliation).
245
247
  try {
246
248
  await renderAllProjections(basePath, params.milestoneId);
249
+ } catch (projErr) {
250
+ logWarning("tool", `complete-task projection warning: ${(projErr as Error).message}`);
251
+ }
252
+ try {
247
253
  writeManifest(basePath);
254
+ } catch (mfErr) {
255
+ logWarning("tool", `complete-task manifest warning: ${(mfErr as Error).message}`);
256
+ }
257
+ try {
248
258
  appendEvent(basePath, {
249
259
  cmd: "complete-task",
250
260
  params: { milestoneId: params.milestoneId, sliceId: params.sliceId, taskId: params.taskId },
@@ -253,8 +263,8 @@ export async function handleCompleteTask(
253
263
  actor_name: params.actorName,
254
264
  trigger_reason: params.triggerReason,
255
265
  });
256
- } catch (hookErr) {
257
- logWarning("tool", `complete-task post-mutation hook warning: ${(hookErr as Error).message}`);
266
+ } catch (eventErr) {
267
+ logError("tool", `complete-task event log FAILED — completion invisible to reconciliation`, { error: (eventErr as Error).message });
258
268
  }
259
269
 
260
270
  return {
@@ -10,7 +10,8 @@
10
10
  * Also provides detectFileOverlap() for surfacing downstream impact on quick tasks.
11
11
  */
12
12
 
13
- import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync } from "node:fs";
13
+ import { existsSync, mkdirSync, readFileSync, unlinkSync } from "node:fs";
14
+ import { atomicWriteSync } from "./atomic-write.js";
14
15
  import { join } from "node:path";
15
16
  import { createRequire } from "node:module";
16
17
  import { gsdRoot, milestonesDir } from "./paths.js";
@@ -65,10 +66,10 @@ export function executeInject(
65
66
  const filesSection = content.indexOf("## Files Likely Touched");
66
67
  if (filesSection !== -1) {
67
68
  const updated = content.slice(0, filesSection) + newTask + "\n\n" + content.slice(filesSection);
68
- writeFileSync(planPath, updated, "utf-8");
69
+ atomicWriteSync(planPath, updated, "utf-8");
69
70
  } else {
70
71
  // No Files section — append at end
71
- writeFileSync(planPath, content.trimEnd() + "\n\n" + newTask + "\n", "utf-8");
72
+ atomicWriteSync(planPath, content.trimEnd() + "\n\n" + newTask + "\n", "utf-8");
72
73
  }
73
74
 
74
75
  return newId;
@@ -105,7 +106,7 @@ export function executeReplan(
105
106
  `will detect it and enter the replanning-slice phase.`,
106
107
  ].join("\n");
107
108
 
108
- writeFileSync(triggerPath, content, "utf-8");
109
+ atomicWriteSync(triggerPath, content, "utf-8");
109
110
 
110
111
  // Also write replan_triggered_at column for DB-backed detection
111
112
  try {
@@ -183,7 +184,7 @@ export function executeBacktrack(
183
184
  `3. Resume auto-mode — the state machine will re-enter discussion for the target`,
184
185
  ].join("\n");
185
186
 
186
- writeFileSync(triggerPath, content, "utf-8");
187
+ atomicWriteSync(triggerPath, content, "utf-8");
187
188
 
188
189
  // If we have a valid target, also reset that milestone's completion status
189
190
  // so deriveState() will re-enter it as the active milestone.
@@ -194,7 +195,7 @@ export function executeBacktrack(
194
195
  // Write a regression marker so the state machine knows this milestone
195
196
  // needs re-discussion, not just re-execution
196
197
  const regressionPath = join(targetDir, `${targetMilestoneId}-REGRESSION.md`);
197
- writeFileSync(regressionPath, [
198
+ atomicWriteSync(regressionPath, [
198
199
  `# Milestone Regression`,
199
200
  ``,
200
201
  `**From:** ${currentMilestoneId}`,
@@ -361,7 +362,7 @@ export function ensureDeferMilestoneDir(
361
362
  ``,
362
363
  ].join("\n");
363
364
 
364
- writeFileSync(
365
+ atomicWriteSync(
365
366
  join(msDir, `${targetMilestone}-CONTEXT-DRAFT.md`),
366
367
  draftContent,
367
368
  "utf-8",
@@ -4,9 +4,10 @@
4
4
  // handleResetSlice: Reset a slice and all its tasks, re-rendering plan + roadmap.
5
5
 
6
6
  import type { ExtensionCommandContext, ExtensionAPI } from "@gsd/pi-coding-agent";
7
- import { existsSync, readFileSync, writeFileSync, unlinkSync, readdirSync } from "node:fs";
7
+ import { existsSync, readFileSync, unlinkSync, readdirSync } from "node:fs";
8
8
  import { join, basename } from "node:path";
9
9
  import { nativeRevertCommit, nativeRevertAbort } from "./native-git-bridge.js";
10
+ import { atomicWriteSync } from "./atomic-write.js";
10
11
  import { parseUnitId } from "./unit-id.js";
11
12
  import { deriveState } from "./state.js";
12
13
  import { invalidateAllCaches } from "./cache.js";
@@ -393,7 +394,7 @@ export function uncheckTaskInPlan(basePath: string, mid: string, sid: string, ti
393
394
  const regex = new RegExp(`^(\\s*-\\s*)\\[x\\](\\s*\\**${tid}\\**[:\\s])`, "mi");
394
395
  if (regex.test(content)) {
395
396
  content = content.replace(regex, "$1[ ]$2");
396
- writeFileSync(planFile, content, "utf-8");
397
+ atomicWriteSync(planFile, content);
397
398
  return true;
398
399
  }
399
400
  return false;
@@ -19,7 +19,7 @@ export function getSessionId(): string {
19
19
  // ─── Event Types ─────────────────────────────────────────────────────────
20
20
 
21
21
  export interface WorkflowEvent {
22
- cmd: string; // e.g. "complete_task"
22
+ cmd: string; // e.g. "complete-task" (canonical: hyphens; legacy: underscores — both accepted by replay)
23
23
  params: Record<string, unknown>;
24
24
  ts: string; // ISO 8601
25
25
  hash: string; // content hash (hex, 16 chars)