ppef 1.0.1 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (175)
  1. package/bin/ppef.mjs +20 -0
  2. package/dist/__tests__/framework-pipeline.integration.test.js +13 -11
  3. package/dist/__tests__/framework-pipeline.integration.test.js.map +1 -1
  4. package/dist/__tests__/index-exports.unit.test.d.ts +8 -0
  5. package/dist/__tests__/index-exports.unit.test.d.ts.map +1 -0
  6. package/dist/__tests__/index-exports.unit.test.js +127 -0
  7. package/dist/__tests__/index-exports.unit.test.js.map +1 -0
  8. package/dist/__tests__/registry-executor.integration.test.js +12 -9
  9. package/dist/__tests__/registry-executor.integration.test.js.map +1 -1
  10. package/dist/aggregation/__tests__/aggregators.unit.test.d.ts +7 -0
  11. package/dist/aggregation/__tests__/aggregators.unit.test.d.ts.map +1 -0
  12. package/dist/aggregation/__tests__/aggregators.unit.test.js +350 -0
  13. package/dist/aggregation/__tests__/aggregators.unit.test.js.map +1 -0
  14. package/dist/aggregation/__tests__/pipeline.unit.test.d.ts +7 -0
  15. package/dist/aggregation/__tests__/pipeline.unit.test.d.ts.map +1 -0
  16. package/dist/aggregation/__tests__/pipeline.unit.test.js +213 -0
  17. package/dist/aggregation/__tests__/pipeline.unit.test.js.map +1 -0
  18. package/dist/aggregation/aggregators.d.ts +9 -0
  19. package/dist/aggregation/aggregators.d.ts.map +1 -1
  20. package/dist/aggregation/aggregators.js +1 -1
  21. package/dist/aggregation/aggregators.js.map +1 -1
  22. package/dist/aggregation/index.d.ts +1 -1
  23. package/dist/aggregation/index.d.ts.map +1 -1
  24. package/dist/aggregation/index.js +1 -1
  25. package/dist/aggregation/index.js.map +1 -1
  26. package/dist/claims/__tests__/evaluator.unit.test.d.ts +12 -0
  27. package/dist/claims/__tests__/evaluator.unit.test.d.ts.map +1 -0
  28. package/dist/claims/__tests__/evaluator.unit.test.js +801 -0
  29. package/dist/claims/__tests__/evaluator.unit.test.js.map +1 -0
  30. package/dist/cli/__tests__/aggregate.command.unit.test.d.ts +7 -0
  31. package/dist/cli/__tests__/aggregate.command.unit.test.d.ts.map +1 -0
  32. package/dist/cli/__tests__/aggregate.command.unit.test.js +396 -0
  33. package/dist/cli/__tests__/aggregate.command.unit.test.js.map +1 -0
  34. package/dist/cli/__tests__/commands.unit.test.d.ts +10 -0
  35. package/dist/cli/__tests__/commands.unit.test.d.ts.map +1 -0
  36. package/dist/cli/__tests__/commands.unit.test.js +217 -0
  37. package/dist/cli/__tests__/commands.unit.test.js.map +1 -0
  38. package/dist/cli/__tests__/index.unit.test.d.ts +10 -0
  39. package/dist/cli/__tests__/index.unit.test.d.ts.map +1 -0
  40. package/dist/cli/__tests__/index.unit.test.js +65 -0
  41. package/dist/cli/__tests__/index.unit.test.js.map +1 -0
  42. package/dist/cli/__tests__/logger.unit.test.d.ts +11 -0
  43. package/dist/cli/__tests__/logger.unit.test.d.ts.map +1 -0
  44. package/dist/cli/__tests__/logger.unit.test.js +180 -0
  45. package/dist/cli/__tests__/logger.unit.test.js.map +1 -0
  46. package/dist/cli/__tests__/module-loader.unit.test.d.ts +11 -0
  47. package/dist/cli/__tests__/module-loader.unit.test.d.ts.map +1 -0
  48. package/dist/cli/__tests__/module-loader.unit.test.js +262 -0
  49. package/dist/cli/__tests__/module-loader.unit.test.js.map +1 -0
  50. package/dist/cli/__tests__/output-writer.unit.test.d.ts +10 -0
  51. package/dist/cli/__tests__/output-writer.unit.test.d.ts.map +1 -0
  52. package/dist/cli/__tests__/output-writer.unit.test.js +216 -0
  53. package/dist/cli/__tests__/output-writer.unit.test.js.map +1 -0
  54. package/dist/cli/__tests__/plan.command.unit.test.d.ts +7 -0
  55. package/dist/cli/__tests__/plan.command.unit.test.d.ts.map +1 -0
  56. package/dist/cli/__tests__/plan.command.unit.test.js +289 -0
  57. package/dist/cli/__tests__/plan.command.unit.test.js.map +1 -0
  58. package/dist/cli/__tests__/run.command.unit.test.d.ts +7 -0
  59. package/dist/cli/__tests__/run.command.unit.test.d.ts.map +1 -0
  60. package/dist/cli/__tests__/run.command.unit.test.js +422 -0
  61. package/dist/cli/__tests__/run.command.unit.test.js.map +1 -0
  62. package/dist/cli/__tests__/validate.command.unit.test.d.ts +7 -0
  63. package/dist/cli/__tests__/validate.command.unit.test.d.ts.map +1 -0
  64. package/dist/cli/__tests__/validate.command.unit.test.js +226 -0
  65. package/dist/cli/__tests__/validate.command.unit.test.js.map +1 -0
  66. package/dist/cli/command-deps.d.ts +125 -0
  67. package/dist/cli/command-deps.d.ts.map +1 -0
  68. package/dist/cli/command-deps.js +7 -0
  69. package/dist/cli/command-deps.js.map +1 -0
  70. package/dist/cli/commands/aggregate.d.ts +35 -0
  71. package/dist/cli/commands/aggregate.d.ts.map +1 -0
  72. package/dist/cli/commands/aggregate.js +121 -0
  73. package/dist/cli/commands/aggregate.js.map +1 -0
  74. package/dist/cli/commands/plan.d.ts +36 -0
  75. package/dist/cli/commands/plan.d.ts.map +1 -0
  76. package/dist/cli/commands/plan.js +109 -0
  77. package/dist/cli/commands/plan.js.map +1 -0
  78. package/dist/cli/commands/run.d.ts +33 -0
  79. package/dist/cli/commands/run.d.ts.map +1 -0
  80. package/dist/cli/commands/run.js +185 -0
  81. package/dist/cli/commands/run.js.map +1 -0
  82. package/dist/cli/commands/validate.d.ts +27 -0
  83. package/dist/cli/commands/validate.d.ts.map +1 -0
  84. package/dist/cli/commands/validate.js +88 -0
  85. package/dist/cli/commands/validate.js.map +1 -0
  86. package/dist/cli/config-loader.d.ts +30 -0
  87. package/dist/cli/config-loader.d.ts.map +1 -0
  88. package/dist/cli/config-loader.js +181 -0
  89. package/dist/cli/config-loader.js.map +1 -0
  90. package/dist/cli/index.d.ts +26 -0
  91. package/dist/cli/index.d.ts.map +1 -0
  92. package/dist/cli/index.js +58 -0
  93. package/dist/cli/index.js.map +1 -0
  94. package/dist/cli/logger.d.ts +75 -0
  95. package/dist/cli/logger.d.ts.map +1 -0
  96. package/dist/cli/logger.js +131 -0
  97. package/dist/cli/logger.js.map +1 -0
  98. package/dist/cli/module-loader.d.ts +46 -0
  99. package/dist/cli/module-loader.d.ts.map +1 -0
  100. package/dist/cli/module-loader.js +116 -0
  101. package/dist/cli/module-loader.js.map +1 -0
  102. package/dist/cli/output-writer.d.ts +51 -0
  103. package/dist/cli/output-writer.d.ts.map +1 -0
  104. package/dist/cli/output-writer.js +65 -0
  105. package/dist/cli/output-writer.js.map +1 -0
  106. package/dist/cli/types.d.ts +174 -0
  107. package/dist/cli/types.d.ts.map +1 -0
  108. package/dist/cli/types.js +7 -0
  109. package/dist/cli/types.js.map +1 -0
  110. package/dist/collector/__tests__/result-collector.unit.test.d.ts +7 -0
  111. package/dist/collector/__tests__/result-collector.unit.test.d.ts.map +1 -0
  112. package/dist/collector/__tests__/result-collector.unit.test.js +1021 -0
  113. package/dist/collector/__tests__/result-collector.unit.test.js.map +1 -0
  114. package/dist/collector/__tests__/schema.unit.test.d.ts +7 -0
  115. package/dist/collector/__tests__/schema.unit.test.d.ts.map +1 -0
  116. package/dist/collector/__tests__/schema.unit.test.js +360 -0
  117. package/dist/collector/__tests__/schema.unit.test.js.map +1 -0
  118. package/dist/executor/__tests__/checkpoint-manager.unit.test.js +83 -1
  119. package/dist/executor/__tests__/checkpoint-manager.unit.test.js.map +1 -1
  120. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.d.ts +3 -6
  121. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.d.ts.map +1 -1
  122. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.js +428 -159
  123. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.js.map +1 -1
  124. package/dist/executor/__tests__/checkpoint-storage.unit.test.js +105 -1
  125. package/dist/executor/__tests__/checkpoint-storage.unit.test.js.map +1 -1
  126. package/dist/executor/__tests__/executor.unit.test.js +69 -1
  127. package/dist/executor/__tests__/executor.unit.test.js.map +1 -1
  128. package/dist/executor/__tests__/memory-monitor.unit.test.d.ts +7 -0
  129. package/dist/executor/__tests__/memory-monitor.unit.test.d.ts.map +1 -0
  130. package/dist/executor/__tests__/memory-monitor.unit.test.js +285 -0
  131. package/dist/executor/__tests__/memory-monitor.unit.test.js.map +1 -0
  132. package/dist/executor/__tests__/parallel-executor.unit.test.d.ts +2 -1
  133. package/dist/executor/__tests__/parallel-executor.unit.test.d.ts.map +1 -1
  134. package/dist/executor/__tests__/parallel-executor.unit.test.js +426 -156
  135. package/dist/executor/__tests__/parallel-executor.unit.test.js.map +1 -1
  136. package/dist/executor/__tests__/run-id.unit.test.d.ts +8 -0
  137. package/dist/executor/__tests__/run-id.unit.test.d.ts.map +1 -0
  138. package/dist/executor/__tests__/run-id.unit.test.js +156 -0
  139. package/dist/executor/__tests__/run-id.unit.test.js.map +1 -0
  140. package/dist/executor/__tests__/worker-entry.integration.test.d.ts +24 -0
  141. package/dist/executor/__tests__/worker-entry.integration.test.d.ts.map +1 -0
  142. package/dist/executor/__tests__/worker-entry.integration.test.js +82 -0
  143. package/dist/executor/__tests__/worker-entry.integration.test.js.map +1 -0
  144. package/dist/executor/__tests__/worker-entry.unit.test.d.ts +7 -0
  145. package/dist/executor/__tests__/worker-entry.unit.test.d.ts.map +1 -0
  146. package/dist/executor/__tests__/worker-entry.unit.test.js +364 -0
  147. package/dist/executor/__tests__/worker-entry.unit.test.js.map +1 -0
  148. package/dist/executor/parallel-executor.d.ts +186 -0
  149. package/dist/executor/parallel-executor.d.ts.map +1 -1
  150. package/dist/executor/parallel-executor.js +218 -83
  151. package/dist/executor/parallel-executor.js.map +1 -1
  152. package/dist/executor/run-id.d.ts.map +1 -1
  153. package/dist/executor/run-id.js +8 -1
  154. package/dist/executor/run-id.js.map +1 -1
  155. package/dist/executor/worker-entry.d.ts +2 -0
  156. package/dist/executor/worker-entry.d.ts.map +1 -1
  157. package/dist/executor/worker-entry.js +29 -54
  158. package/dist/executor/worker-entry.js.map +1 -1
  159. package/dist/executor/worker-executor.d.ts +156 -0
  160. package/dist/executor/worker-executor.d.ts.map +1 -0
  161. package/dist/executor/worker-executor.js +88 -0
  162. package/dist/executor/worker-executor.js.map +1 -0
  163. package/dist/robustness/__tests__/analyzer.unit.test.d.ts +11 -0
  164. package/dist/robustness/__tests__/analyzer.unit.test.d.ts.map +1 -0
  165. package/dist/robustness/__tests__/analyzer.unit.test.js +455 -0
  166. package/dist/robustness/__tests__/analyzer.unit.test.js.map +1 -0
  167. package/dist/robustness/__tests__/perturbations.unit.test.d.ts +11 -0
  168. package/dist/robustness/__tests__/perturbations.unit.test.d.ts.map +1 -0
  169. package/dist/robustness/__tests__/perturbations.unit.test.js +284 -0
  170. package/dist/robustness/__tests__/perturbations.unit.test.js.map +1 -0
  171. package/dist/statistical/__tests__/mann-whitney-u.unit.test.d.ts +7 -0
  172. package/dist/statistical/__tests__/mann-whitney-u.unit.test.d.ts.map +1 -0
  173. package/dist/statistical/__tests__/mann-whitney-u.unit.test.js +185 -0
  174. package/dist/statistical/__tests__/mann-whitney-u.unit.test.js.map +1 -0
  175. package/package.json +8 -1
package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.js
@@ -1,21 +1,18 @@
  /**
- * Diagnostic Tests for Checkpoint Integration Bug
+ * Checkpoint Integration Tests
  *
- * Tests to diagnose why parallel workers aren't properly using checkpoints.
- * Symptoms:
- * - Workers report "No checkpoint"
- * - Workers report "Total runs: 0, From checkpoint: 0, New this run: 0"
- * - Main checkpoint has 123/132 runs but workers start fresh
+ * Tests checkpoint save/load functionality, shard merging,
+ * and config hash validation.
  */
  import { createHash, randomBytes } from "node:crypto";
  import { rmSync } from "node:fs";
  import { tmpdir } from "node:os";
  import { join } from "node:path";
- import { strict as assert } from "node:assert";
  import { afterEach, beforeEach, describe, it } from "node:test";
+ import { strict as assert } from "node:assert";
  import { CheckpointManager } from "../checkpoint-manager.js";
  import { FileStorage } from "../checkpoint-storage.js";
- describe("Checkpoint Integration Bug Diagnostics", () => {
+ describe("Checkpoint Integration", () => {
  let testDir;
  let checkpoint;
  let checkpointPath;
@@ -27,164 +24,437 @@ describe("Checkpoint Integration Bug Diagnostics", () => {
  afterEach(() => {
  rmSync(testDir, { recursive: true, force: true });
  });
- it("diagnostic-1: should save and load single run", async () => {
- const sut = createMockSut();
- const testCase = createMockCase("case-001");
- const { Executor } = await import("../executor.js");
- const executor = new Executor({
- repetitions: 1,
- seedBase: 42,
- timeoutMs: 5000,
- collectProvenance: false,
- onResult: async (result) => {
+ describe("save and load", () => {
+ it("should save and load single run", async () => {
+ const result = {
+ run: {
+ runId: "test-run-001",
+ sut: "test-sut",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ caseClass: "test-class",
+ seed: 42,
+ repetition: 0,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: 1 } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ // Save
+ await checkpoint.saveIncremental(result);
+ // Load fresh checkpoint
+ const fresh = new CheckpointManager({ storage: new FileStorage(checkpointPath) });
+ await fresh.load();
+ const results = fresh.getResults();
+ assert.strictEqual(results.length, 1);
+ assert.strictEqual(results[0]?.run.runId, "test-run-001");
+ });
+ it("should save and load multiple runs", async () => {
+ const results = [];
+ for (let i = 0; i < 5; i++) {
+ const result = {
+ run: {
+ runId: `test-run-${i}`,
+ sut: "test-sut",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ caseClass: "test-class",
+ seed: 42,
+ repetition: i,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: i } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ results.push(result);
  await checkpoint.saveIncremental(result);
- },
+ }
+ // Load fresh checkpoint
+ const fresh = new CheckpointManager({ storage: new FileStorage(checkpointPath) });
+ await fresh.load();
+ const loaded = fresh.getResults();
+ assert.strictEqual(loaded.length, 5);
+ for (let i = 0; i < 5; i++) {
+ assert.strictEqual(loaded[i]?.run.runId, `test-run-${i}`);
+ }
+ });
+ it("should persist results to file", async () => {
+ const result = {
+ run: {
+ runId: "test-run-001",
+ sut: "test-sut",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ caseClass: "test-class",
+ seed: 42,
+ repetition: 0,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: 1 } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ await checkpoint.saveIncremental(result);
+ // Verify file exists
+ const fresh = new CheckpointManager({ storage: new FileStorage(checkpointPath) });
+ await fresh.load();
+ assert.strictEqual(fresh.getResults().length, 1);
  });
- // Execute
- const summary = await executor.execute([sut], [testCase], () => ({}));
- // Verify
- assert.strictEqual(summary.successfulRuns, 1);
- const results = checkpoint.getResults();
- assert.strictEqual(results.length, 1);
- // Load fresh checkpoint
- const fresh = new CheckpointManager({ storage: new FileStorage(checkpointPath) });
- await fresh.load();
- assert.strictEqual(fresh.getResults().length, 1);
- });
- it("diagnostic-2: should detect config hash mismatch", async () => {
- // Save with one config
- const result = {
- run: {
- runId: "test-run-001",
- sut: "test-sut",
- sutRole: "primary",
- sutVersion: "1.0.0",
- caseId: "case-001",
- caseClass: "test-class",
- seed: 42,
- repetition: 0,
- },
- correctness: {
- expectedExists: false,
- producedOutput: true,
- valid: true,
- matchesExpected: null,
- },
- outputs: { summary: {} },
- metrics: { numeric: { test: 1 } },
- provenance: {
- runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
- },
- };
- await checkpoint.saveIncremental(result);
- // Try to load with different config
- const fresh = new CheckpointManager({ storage: new FileStorage(checkpointPath) });
- await fresh.load();
- // Check if stale with different config
- const isStale = fresh.isStale([createMockSut()], [createMockCase("case-001")], {
- repetitions: 2, // Different from original (1)
- seedBase: 42,
- timeoutMs: 5000,
- collectProvenance: false,
- }, 1);
- assert.strictEqual(isStale, true);
  });
- it("diagnostic-3: should find worker shards", async () => {
- // Create mock worker shards
- const shard1 = join(testDir, "checkpoint-worker-00.json");
- const shard2 = join(testDir, "checkpoint-worker-01.json");
- const storage1 = new FileStorage(shard1);
- const storage2 = new FileStorage(shard2);
- await storage1.save({
- configHash: "test-hash",
- createdAt: new Date().toISOString(),
- updatedAt: new Date().toISOString(),
- completedRunIds: ["run-001", "run-002"],
- results: {},
- totalPlanned: 0,
+ describe("config hash", () => {
+ it("should detect config hash mismatch", async () => {
+ const result = {
+ run: {
+ runId: "test-run-001",
+ sut: "test-sut",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ caseClass: "test-class",
+ seed: 42,
+ repetition: 0,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: 1 } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ await checkpoint.saveIncremental(result);
+ // Try to load with different config
+ const fresh = new CheckpointManager({ storage: new FileStorage(checkpointPath) });
+ await fresh.load();
+ // Check if stale with different config
+ const isStale = fresh.isStale([createMockSut()], [createMockCase("case-001")], {
+ repetitions: 2, // Different from original (1)
+ seedBase: 42,
+ timeoutMs: 5000,
+ collectProvenance: false,
+ }, 1);
+ assert.strictEqual(isStale, true);
+ });
+ it("should not be stale when config matches", async () => {
+ const result = {
+ run: {
+ runId: "test-run-001",
+ sut: "test-sut",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ caseClass: "test-class",
+ seed: 42,
+ repetition: 0,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: 1 } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ // Initialize with proper config hash first
+ checkpoint.initializeEmpty([createMockSut()], [createMockCase("case-001")], {
+ repetitions: 1,
+ seedBase: 42,
+ timeoutMs: 5000,
+ collectProvenance: false,
+ }, 1);
+ // Save the initialized checkpoint to persist the config hash
+ await checkpoint.save();
+ await checkpoint.saveIncremental(result);
+ const fresh = new CheckpointManager({ storage: new FileStorage(checkpointPath) });
+ await fresh.load();
+ // Same config should not be stale
+ const isStale = fresh.isStale([createMockSut()], [createMockCase("case-001")], {
+ repetitions: 1,
+ seedBase: 42,
+ timeoutMs: 5000,
+ collectProvenance: false,
+ }, 1);
+ assert.strictEqual(isStale, false);
  });
- await storage2.save({
- configHash: "test-hash",
- createdAt: new Date().toISOString(),
- updatedAt: new Date().toISOString(),
- completedRunIds: ["run-003", "run-004"],
- results: {},
- totalPlanned: 0,
+ it("should include all executor config properties in hash", async () => {
+ const config1 = {
+ continueOnError: true,
+ repetitions: 1,
+ seedBase: 42,
+ timeoutMs: 300_000,
+ collectProvenance: true,
+ };
+ const config2 = {
+ ...config1,
+ concurrency: 12, // Additional property
+ };
+ const hash1 = createHash("sha256")
+ .update(JSON.stringify(config1, Object.keys(config1).sort()))
+ .digest("hex");
+ const hash2 = createHash("sha256")
+ .update(JSON.stringify(config2, Object.keys(config2).sort()))
+ .digest("hex");
+ // They should be different because concurrency is different
+ assert.notStrictEqual(hash1, hash2);
  });
- // Find shards
- const shards = await FileStorage.findShards(testDir);
- assert.strictEqual(shards.length, 2);
- assert.ok(shards.includes(shard1));
- assert.ok(shards.includes(shard2));
  });
- it("diagnostic-4: should merge shards without duplicates", async () => {
- // Create main checkpoint
- const result1 = {
- run: {
- runId: "run-001",
- sut: "sut-1",
- sutRole: "primary",
- sutVersion: "1.0.0",
- caseId: "case-001",
- caseClass: "test-class",
- seed: 42,
- repetition: 0,
- },
- correctness: {
- expectedExists: false,
- producedOutput: true,
- valid: true,
- matchesExpected: null,
- },
- outputs: { summary: {} },
- metrics: { numeric: { test: 1 } },
- provenance: {
- runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
- },
- };
- await checkpoint.saveIncremental(result1);
- // Create worker shard with overlapping run
- const workerStorage = new FileStorage(join(testDir, "checkpoint-worker-00.json"));
- await workerStorage.save({
- configHash: "test-hash",
- createdAt: new Date().toISOString(),
- updatedAt: new Date().toISOString(),
- completedRunIds: ["run-001", "run-002"], // run-001 overlaps with main
- results: {},
- totalPlanned: 0,
+ describe("shard operations", () => {
+ it("should find worker shards", async () => {
+ // Create mock worker shards
+ const shard1 = join(testDir, "checkpoint-worker-00.json");
+ const shard2 = join(testDir, "checkpoint-worker-01.json");
+ const storage1 = new FileStorage(shard1);
+ const storage2 = new FileStorage(shard2);
+ await storage1.save({
+ configHash: "test-hash",
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-001", "run-002"],
+ results: {},
+ totalPlanned: 0,
+ });
+ await storage2.save({
+ configHash: "test-hash",
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-003", "run-004"],
+ results: {},
+ totalPlanned: 0,
+ });
+ // Find shards
+ const shards = await FileStorage.findShards(testDir);
+ assert.strictEqual(shards.length, 2);
+ assert.ok(shards.includes(shard1));
+ assert.ok(shards.includes(shard2));
+ });
+ it("should return empty array when no shards exist", async () => {
+ const shards = await FileStorage.findShards(testDir);
+ assert.deepStrictEqual(shards, []);
+ });
+ it("should filter shards by config hash", async () => {
+ const shard1 = join(testDir, "checkpoint-worker-00.json");
+ const shard2 = join(testDir, "checkpoint-worker-01.json");
+ const storage1 = new FileStorage(shard1);
+ const storage2 = new FileStorage(shard2);
+ await storage1.save({
+ configHash: "hash-1",
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-001"],
+ results: {},
+ totalPlanned: 0,
+ });
+ await storage2.save({
+ configHash: "hash-2",
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-002"],
+ results: {},
+ totalPlanned: 0,
+ });
+ // Find shards with specific hash
+ const shards = await FileStorage.findShards(testDir);
+ assert.strictEqual(shards.length, 2);
+ });
+ it("should merge shards without duplicates", async () => {
+ // Create main checkpoint
+ const result1 = {
+ run: {
+ runId: "run-001",
+ sut: "sut-1",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ caseClass: "test-class",
+ seed: 42,
+ repetition: 0,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: 1 } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ await checkpoint.saveIncremental(result1);
+ // Create worker shard with overlapping run
+ const workerStorage = new FileStorage(join(testDir, "checkpoint-worker-00.json"));
+ await workerStorage.save({
+ configHash: "test-hash",
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-001", "run-002"], // run-001 overlaps with main
+ results: {},
+ totalPlanned: 0,
+ });
+ // Merge
+ const shards = await FileStorage.findShards(testDir);
+ const merged = await checkpoint.mergeShards(shards);
+ // Verify no duplicates
+ const uniqueIds = new Set(merged.completedRunIds);
+ assert.strictEqual(uniqueIds.size, 2); // run-001, run-002 (no duplicates)
+ assert.strictEqual(merged.completedRunIds.length, 2);
+ });
+ it("should merge all completed run IDs from shards", async () => {
+ // Save main checkpoint with some runs
+ const result1 = {
+ run: {
+ runId: "run-001",
+ sut: "sut-1",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ seed: 42,
+ repetition: 0,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: 1 } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ await checkpoint.saveIncremental(result1);
+ // Reload to update in-memory data (saveIncremental only writes to file)
+ await checkpoint.load();
+ // Create two worker shards with different runs
+ const shard1 = join(testDir, "checkpoint-worker-00.json");
+ const shard2 = join(testDir, "checkpoint-worker-01.json");
+ const storage1 = new FileStorage(shard1);
+ const storage2 = new FileStorage(shard2);
+ await storage1.save({
+ configHash: "test-hash",
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-002", "run-003"],
+ results: {},
+ totalPlanned: 0,
+ });
+ await storage2.save({
+ configHash: "test-hash",
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-004", "run-005"],
+ results: {},
+ totalPlanned: 0,
+ });
+ // Merge shards
+ const shards = await FileStorage.findShards(testDir);
+ const merged = await checkpoint.mergeShards(shards);
+ // Verify all runs are included
+ const runIds = new Set(merged.completedRunIds);
+ assert.strictEqual(runIds.size, 5);
+ assert.ok(runIds.has("run-001"));
+ assert.ok(runIds.has("run-002"));
+ assert.ok(runIds.has("run-003"));
+ assert.ok(runIds.has("run-004"));
+ assert.ok(runIds.has("run-005"));
  });
- // Merge
- const shards = await FileStorage.findShards(testDir);
- const merged = await checkpoint.mergeShards(shards);
- // Verify no duplicates
- const uniqueIds = new Set(merged.completedRunIds);
- assert.strictEqual(uniqueIds.size, 2); // run-001, run-002 (no duplicates)
- assert.strictEqual(merged.completedRunIds.length, 2);
  });
- it("diagnostic-5: config hash should include all executor config properties", async () => {
- // This test verifies which properties are included in the hash
- const config1 = {
- continueOnError: true,
- repetitions: 1,
- seedBase: 42,
- timeoutMs: 300_000,
- collectProvenance: true,
- };
- const config2 = {
- ...config1,
- concurrency: 12, // Additional property
- };
- const hash1 = createHash("sha256")
- .update(JSON.stringify(config1, Object.keys(config1).sort()))
- .digest("hex");
- const hash2 = createHash("sha256")
- .update(JSON.stringify(config2, Object.keys(config2).sort()))
- .digest("hex");
- console.log("Hash without concurrency:", hash1);
- console.log("Hash with concurrency:", hash2);
- console.log("Match:", hash1 === hash2);
- // They should be different because concurrency is different
- assert.notStrictEqual(hash1, hash2);
+ describe("edge cases", () => {
+ it("should handle loading non-existent checkpoint file", async () => {
+ const fresh = new CheckpointManager({
+ storage: new FileStorage(join(testDir, "non-existent.json")),
+ });
+ // Should not throw, just return empty checkpoint
+ await fresh.load();
+ assert.strictEqual(fresh.getResults().length, 0);
+ });
+ it("should handle empty shard list during merge", async () => {
+ const merged = await checkpoint.mergeShards([]);
+ assert.strictEqual(merged.completedRunIds.length, 0);
+ assert.strictEqual(merged.totalPlanned, 0);
+ });
+ it("should handle shard with different config hash", async () => {
+ // Save main checkpoint with one config
+ const result = {
+ run: {
+ runId: "run-001",
+ sut: "test-sut",
+ sutRole: "primary",
+ sutVersion: "1.0.0",
+ caseId: "case-001",
+ seed: 42,
+ repetition: 0,
+ },
+ correctness: {
+ expectedExists: false,
+ producedOutput: true,
+ valid: true,
+ matchesExpected: null,
+ },
+ outputs: { summary: {} },
+ metrics: { numeric: { test: 1 } },
+ provenance: {
+ runtime: { platform: "linux", arch: "x64", nodeVersion: "v22.0.0" },
+ },
+ };
+ await checkpoint.saveIncremental(result);
+ // Reload to update in-memory data
+ await checkpoint.load();
+ // Create shard with different config hash
+ const shard1 = join(testDir, "checkpoint-worker-00.json");
+ const storage1 = new FileStorage(shard1);
+ await storage1.save({
+ configHash: "different-hash", // Different config
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ completedRunIds: ["run-002"],
+ results: {},
+ totalPlanned: 0,
+ });
+ // Merge merges all shards regardless of config hash
+ // (implementation doesn't filter by hash)
+ const shards = await FileStorage.findShards(testDir);
+ const merged = await checkpoint.mergeShards(shards);
+ // Should include both runs (merge doesn't filter by hash)
+ assert.ok(merged.completedRunIds.includes("run-001"));
+ assert.ok(merged.completedRunIds.includes("run-002"));
+ });
  });
  });
  /**
@@ -207,7 +477,6 @@ const createMockSut = () => ({
  });
  /**
  * Create a mock case for testing.
- * @param id
  */
  const createMockCase = (id) => ({
  case: {