ppef 1.0.1 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (175)
  1. package/bin/ppef.mjs +20 -0
  2. package/dist/__tests__/framework-pipeline.integration.test.js +13 -11
  3. package/dist/__tests__/framework-pipeline.integration.test.js.map +1 -1
  4. package/dist/__tests__/index-exports.unit.test.d.ts +8 -0
  5. package/dist/__tests__/index-exports.unit.test.d.ts.map +1 -0
  6. package/dist/__tests__/index-exports.unit.test.js +127 -0
  7. package/dist/__tests__/index-exports.unit.test.js.map +1 -0
  8. package/dist/__tests__/registry-executor.integration.test.js +12 -9
  9. package/dist/__tests__/registry-executor.integration.test.js.map +1 -1
  10. package/dist/aggregation/__tests__/aggregators.unit.test.d.ts +7 -0
  11. package/dist/aggregation/__tests__/aggregators.unit.test.d.ts.map +1 -0
  12. package/dist/aggregation/__tests__/aggregators.unit.test.js +350 -0
  13. package/dist/aggregation/__tests__/aggregators.unit.test.js.map +1 -0
  14. package/dist/aggregation/__tests__/pipeline.unit.test.d.ts +7 -0
  15. package/dist/aggregation/__tests__/pipeline.unit.test.d.ts.map +1 -0
  16. package/dist/aggregation/__tests__/pipeline.unit.test.js +213 -0
  17. package/dist/aggregation/__tests__/pipeline.unit.test.js.map +1 -0
  18. package/dist/aggregation/aggregators.d.ts +9 -0
  19. package/dist/aggregation/aggregators.d.ts.map +1 -1
  20. package/dist/aggregation/aggregators.js +1 -1
  21. package/dist/aggregation/aggregators.js.map +1 -1
  22. package/dist/aggregation/index.d.ts +1 -1
  23. package/dist/aggregation/index.d.ts.map +1 -1
  24. package/dist/aggregation/index.js +1 -1
  25. package/dist/aggregation/index.js.map +1 -1
  26. package/dist/claims/__tests__/evaluator.unit.test.d.ts +12 -0
  27. package/dist/claims/__tests__/evaluator.unit.test.d.ts.map +1 -0
  28. package/dist/claims/__tests__/evaluator.unit.test.js +801 -0
  29. package/dist/claims/__tests__/evaluator.unit.test.js.map +1 -0
  30. package/dist/cli/__tests__/aggregate.command.unit.test.d.ts +7 -0
  31. package/dist/cli/__tests__/aggregate.command.unit.test.d.ts.map +1 -0
  32. package/dist/cli/__tests__/aggregate.command.unit.test.js +396 -0
  33. package/dist/cli/__tests__/aggregate.command.unit.test.js.map +1 -0
  34. package/dist/cli/__tests__/commands.unit.test.d.ts +10 -0
  35. package/dist/cli/__tests__/commands.unit.test.d.ts.map +1 -0
  36. package/dist/cli/__tests__/commands.unit.test.js +217 -0
  37. package/dist/cli/__tests__/commands.unit.test.js.map +1 -0
  38. package/dist/cli/__tests__/index.unit.test.d.ts +10 -0
  39. package/dist/cli/__tests__/index.unit.test.d.ts.map +1 -0
  40. package/dist/cli/__tests__/index.unit.test.js +65 -0
  41. package/dist/cli/__tests__/index.unit.test.js.map +1 -0
  42. package/dist/cli/__tests__/logger.unit.test.d.ts +11 -0
  43. package/dist/cli/__tests__/logger.unit.test.d.ts.map +1 -0
  44. package/dist/cli/__tests__/logger.unit.test.js +180 -0
  45. package/dist/cli/__tests__/logger.unit.test.js.map +1 -0
  46. package/dist/cli/__tests__/module-loader.unit.test.d.ts +11 -0
  47. package/dist/cli/__tests__/module-loader.unit.test.d.ts.map +1 -0
  48. package/dist/cli/__tests__/module-loader.unit.test.js +262 -0
  49. package/dist/cli/__tests__/module-loader.unit.test.js.map +1 -0
  50. package/dist/cli/__tests__/output-writer.unit.test.d.ts +10 -0
  51. package/dist/cli/__tests__/output-writer.unit.test.d.ts.map +1 -0
  52. package/dist/cli/__tests__/output-writer.unit.test.js +216 -0
  53. package/dist/cli/__tests__/output-writer.unit.test.js.map +1 -0
  54. package/dist/cli/__tests__/plan.command.unit.test.d.ts +7 -0
  55. package/dist/cli/__tests__/plan.command.unit.test.d.ts.map +1 -0
  56. package/dist/cli/__tests__/plan.command.unit.test.js +289 -0
  57. package/dist/cli/__tests__/plan.command.unit.test.js.map +1 -0
  58. package/dist/cli/__tests__/run.command.unit.test.d.ts +7 -0
  59. package/dist/cli/__tests__/run.command.unit.test.d.ts.map +1 -0
  60. package/dist/cli/__tests__/run.command.unit.test.js +422 -0
  61. package/dist/cli/__tests__/run.command.unit.test.js.map +1 -0
  62. package/dist/cli/__tests__/validate.command.unit.test.d.ts +7 -0
  63. package/dist/cli/__tests__/validate.command.unit.test.d.ts.map +1 -0
  64. package/dist/cli/__tests__/validate.command.unit.test.js +226 -0
  65. package/dist/cli/__tests__/validate.command.unit.test.js.map +1 -0
  66. package/dist/cli/command-deps.d.ts +125 -0
  67. package/dist/cli/command-deps.d.ts.map +1 -0
  68. package/dist/cli/command-deps.js +7 -0
  69. package/dist/cli/command-deps.js.map +1 -0
  70. package/dist/cli/commands/aggregate.d.ts +35 -0
  71. package/dist/cli/commands/aggregate.d.ts.map +1 -0
  72. package/dist/cli/commands/aggregate.js +121 -0
  73. package/dist/cli/commands/aggregate.js.map +1 -0
  74. package/dist/cli/commands/plan.d.ts +36 -0
  75. package/dist/cli/commands/plan.d.ts.map +1 -0
  76. package/dist/cli/commands/plan.js +109 -0
  77. package/dist/cli/commands/plan.js.map +1 -0
  78. package/dist/cli/commands/run.d.ts +33 -0
  79. package/dist/cli/commands/run.d.ts.map +1 -0
  80. package/dist/cli/commands/run.js +185 -0
  81. package/dist/cli/commands/run.js.map +1 -0
  82. package/dist/cli/commands/validate.d.ts +27 -0
  83. package/dist/cli/commands/validate.d.ts.map +1 -0
  84. package/dist/cli/commands/validate.js +88 -0
  85. package/dist/cli/commands/validate.js.map +1 -0
  86. package/dist/cli/config-loader.d.ts +30 -0
  87. package/dist/cli/config-loader.d.ts.map +1 -0
  88. package/dist/cli/config-loader.js +181 -0
  89. package/dist/cli/config-loader.js.map +1 -0
  90. package/dist/cli/index.d.ts +26 -0
  91. package/dist/cli/index.d.ts.map +1 -0
  92. package/dist/cli/index.js +58 -0
  93. package/dist/cli/index.js.map +1 -0
  94. package/dist/cli/logger.d.ts +75 -0
  95. package/dist/cli/logger.d.ts.map +1 -0
  96. package/dist/cli/logger.js +131 -0
  97. package/dist/cli/logger.js.map +1 -0
  98. package/dist/cli/module-loader.d.ts +46 -0
  99. package/dist/cli/module-loader.d.ts.map +1 -0
  100. package/dist/cli/module-loader.js +116 -0
  101. package/dist/cli/module-loader.js.map +1 -0
  102. package/dist/cli/output-writer.d.ts +51 -0
  103. package/dist/cli/output-writer.d.ts.map +1 -0
  104. package/dist/cli/output-writer.js +65 -0
  105. package/dist/cli/output-writer.js.map +1 -0
  106. package/dist/cli/types.d.ts +174 -0
  107. package/dist/cli/types.d.ts.map +1 -0
  108. package/dist/cli/types.js +7 -0
  109. package/dist/cli/types.js.map +1 -0
  110. package/dist/collector/__tests__/result-collector.unit.test.d.ts +7 -0
  111. package/dist/collector/__tests__/result-collector.unit.test.d.ts.map +1 -0
  112. package/dist/collector/__tests__/result-collector.unit.test.js +1021 -0
  113. package/dist/collector/__tests__/result-collector.unit.test.js.map +1 -0
  114. package/dist/collector/__tests__/schema.unit.test.d.ts +7 -0
  115. package/dist/collector/__tests__/schema.unit.test.d.ts.map +1 -0
  116. package/dist/collector/__tests__/schema.unit.test.js +360 -0
  117. package/dist/collector/__tests__/schema.unit.test.js.map +1 -0
  118. package/dist/executor/__tests__/checkpoint-manager.unit.test.js +83 -1
  119. package/dist/executor/__tests__/checkpoint-manager.unit.test.js.map +1 -1
  120. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.d.ts +3 -6
  121. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.d.ts.map +1 -1
  122. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.js +428 -159
  123. package/dist/executor/__tests__/checkpoint-merge-bug.diagnostic.test.js.map +1 -1
  124. package/dist/executor/__tests__/checkpoint-storage.unit.test.js +105 -1
  125. package/dist/executor/__tests__/checkpoint-storage.unit.test.js.map +1 -1
  126. package/dist/executor/__tests__/executor.unit.test.js +69 -1
  127. package/dist/executor/__tests__/executor.unit.test.js.map +1 -1
  128. package/dist/executor/__tests__/memory-monitor.unit.test.d.ts +7 -0
  129. package/dist/executor/__tests__/memory-monitor.unit.test.d.ts.map +1 -0
  130. package/dist/executor/__tests__/memory-monitor.unit.test.js +285 -0
  131. package/dist/executor/__tests__/memory-monitor.unit.test.js.map +1 -0
  132. package/dist/executor/__tests__/parallel-executor.unit.test.d.ts +2 -1
  133. package/dist/executor/__tests__/parallel-executor.unit.test.d.ts.map +1 -1
  134. package/dist/executor/__tests__/parallel-executor.unit.test.js +426 -156
  135. package/dist/executor/__tests__/parallel-executor.unit.test.js.map +1 -1
  136. package/dist/executor/__tests__/run-id.unit.test.d.ts +8 -0
  137. package/dist/executor/__tests__/run-id.unit.test.d.ts.map +1 -0
  138. package/dist/executor/__tests__/run-id.unit.test.js +156 -0
  139. package/dist/executor/__tests__/run-id.unit.test.js.map +1 -0
  140. package/dist/executor/__tests__/worker-entry.integration.test.d.ts +24 -0
  141. package/dist/executor/__tests__/worker-entry.integration.test.d.ts.map +1 -0
  142. package/dist/executor/__tests__/worker-entry.integration.test.js +82 -0
  143. package/dist/executor/__tests__/worker-entry.integration.test.js.map +1 -0
  144. package/dist/executor/__tests__/worker-entry.unit.test.d.ts +7 -0
  145. package/dist/executor/__tests__/worker-entry.unit.test.d.ts.map +1 -0
  146. package/dist/executor/__tests__/worker-entry.unit.test.js +364 -0
  147. package/dist/executor/__tests__/worker-entry.unit.test.js.map +1 -0
  148. package/dist/executor/parallel-executor.d.ts +186 -0
  149. package/dist/executor/parallel-executor.d.ts.map +1 -1
  150. package/dist/executor/parallel-executor.js +218 -83
  151. package/dist/executor/parallel-executor.js.map +1 -1
  152. package/dist/executor/run-id.d.ts.map +1 -1
  153. package/dist/executor/run-id.js +8 -1
  154. package/dist/executor/run-id.js.map +1 -1
  155. package/dist/executor/worker-entry.d.ts +2 -0
  156. package/dist/executor/worker-entry.d.ts.map +1 -1
  157. package/dist/executor/worker-entry.js +29 -54
  158. package/dist/executor/worker-entry.js.map +1 -1
  159. package/dist/executor/worker-executor.d.ts +156 -0
  160. package/dist/executor/worker-executor.d.ts.map +1 -0
  161. package/dist/executor/worker-executor.js +88 -0
  162. package/dist/executor/worker-executor.js.map +1 -0
  163. package/dist/robustness/__tests__/analyzer.unit.test.d.ts +11 -0
  164. package/dist/robustness/__tests__/analyzer.unit.test.d.ts.map +1 -0
  165. package/dist/robustness/__tests__/analyzer.unit.test.js +455 -0
  166. package/dist/robustness/__tests__/analyzer.unit.test.js.map +1 -0
  167. package/dist/robustness/__tests__/perturbations.unit.test.d.ts +11 -0
  168. package/dist/robustness/__tests__/perturbations.unit.test.d.ts.map +1 -0
  169. package/dist/robustness/__tests__/perturbations.unit.test.js +284 -0
  170. package/dist/robustness/__tests__/perturbations.unit.test.js.map +1 -0
  171. package/dist/statistical/__tests__/mann-whitney-u.unit.test.d.ts +7 -0
  172. package/dist/statistical/__tests__/mann-whitney-u.unit.test.d.ts.map +1 -0
  173. package/dist/statistical/__tests__/mann-whitney-u.unit.test.js +185 -0
  174. package/dist/statistical/__tests__/mann-whitney-u.unit.test.js.map +1 -0
  175. package/package.json +8 -1
package/dist/executor/__tests__/parallel-executor.unit.test.js
@@ -1,12 +1,150 @@
  /**
  * Unit tests for ParallelExecutor
  *
- * Tests worker name generation, run batch distribution, and shard path generation.
+ * Tests worker name generation, run batch distribution, shard path generation,
+ * and the new DI-enabled ParallelExecutor class.
  */
- import { describe, it } from "node:test";
+ import { describe, it, beforeEach } from "node:test";
  import { strict as assert } from "node:assert";
- import { shardPath } from "../parallel-executor.js";
+ import { shardPath, generateWorkerNames, ParallelExecutor, } from "../parallel-executor.js";
+ /**
+ * Create a minimal ExecutorConfig for testing.
+ */
+ function createTestConfig(timeoutMs = 0) {
+ return {
+ continueOnError: false,
+ repetitions: 1,
+ seedBase: 42,
+ timeoutMs,
+ collectProvenance: false,
+ };
+ }
+ /**
+ * Create a minimal PlannedRun for testing.
+ */
+ function createTestRun(override) {
+ return {
+ sutId: "test-sut",
+ caseId: "test-case",
+ repetition: 0,
+ seed: 42,
+ ...override,
+ };
+ }
+ /**
+ * Mock logger for testing.
+ */
+ class MockLogger {
+ logs = [];
+ log(message) {
+ this.logs.push(message);
+ }
+ debug(message) {
+ this.logs.push(message);
+ }
+ info(message) {
+ this.logs.push(message);
+ }
+ warn(message) {
+ this.logs.push(message);
+ }
+ clear() {
+ this.logs = [];
+ }
+ }
+ /**
+ * Mock child process for testing.
+ */
+ class MockChildProcess {
+ exitCode = 0;
+ listeners = new Map();
+ on(event, listener) {
+ this.listeners.set(event, listener);
+ return this;
+ }
+ /**
+ * Simulate process exit.
+ */
+ exit(code) {
+ const listener = this.listeners.get("exit");
+ if (listener) {
+ listener(code);
+ }
+ }
+ }
+ /**
+ * Mock process spawner for testing.
+ */
+ class MockProcessSpawner {
+ spawnedProcesses = [];
+ spawn(command, args, options) {
+ const process = new MockChildProcess();
+ this.spawnedProcesses.push({
+ command,
+ args,
+ options: {
+ cwd: options.cwd ?? "/default",
+ stdio: options.stdio ?? "inherit",
+ env: options.env ?? {},
+ },
+ process,
+ });
+ return process;
+ }
+ clear() {
+ this.spawnedProcesses = [];
+ }
+ }
+ /**
+ * Mock system info for testing.
+ */
+ class MockSystemInfo {
+ cpuCount = 4;
+ nodePath = "/path/to/node";
+ packageRoot = "/path/to/project";
+ env = {
+ NODE_ENV: "test",
+ PATH: "/usr/bin:/bin",
+ };
+ }
  describe("ParallelExecutor", () => {
+ describe("generateWorkerNames", () => {
+ it("should generate the requested number of unique names", () => {
+ const count = 5;
+ const names = generateWorkerNames(count);
+ assert.strictEqual(names.length, count);
+ assert.strictEqual(new Set(names).size, count); // All unique
+ });
+ it("should generate names with hyphen-separated pattern", () => {
+ const names = generateWorkerNames(3);
+ for (const name of names) {
+ assert.ok(name.includes("-"), `Name ${name} should contain hyphens`);
+ const parts = name.split("-");
+ assert.strictEqual(parts.length, 3, `Name ${name} should have 3 parts`);
+ }
+ });
+ it("should generate unique names for large counts", () => {
+ const count = 20;
+ const names = generateWorkerNames(count);
+ assert.strictEqual(names.length, count);
+ assert.strictEqual(new Set(names).size, count);
+ });
+ it("should handle single worker", () => {
+ const names = generateWorkerNames(1);
+ assert.strictEqual(names.length, 1);
+ assert.ok(names[0].includes("-"));
+ });
+ it("should not repeat names across multiple calls", () => {
+ const count = 10;
+ const names1 = generateWorkerNames(count);
+ const names2 = generateWorkerNames(count);
+ // Each set should have unique names
+ assert.strictEqual(new Set(names1).size, count);
+ assert.strictEqual(new Set(names2).size, count);
+ // The two sets may overlap, but that's acceptable due to randomness
+ // What matters is each individual call returns unique names
+ });
+ });
  describe("shardPath", () => {
  it("should generate zero-padded worker indices", () => {
  const path0 = shardPath("/tmp/checkpoints", 0);
@@ -27,176 +165,308 @@ describe("ParallelExecutor", () => {
  assert.ok(path.includes("/my/checkpoint/dir"));
  assert.ok(path.includes("checkpoint-worker-05.json"));
  });
+ it("should handle relative checkpoint directories", () => {
+ const path = shardPath("checkpoints", 3);
+ assert.ok(path.includes("checkpoints"));
+ assert.ok(path.includes("checkpoint-worker-03.json"));
+ });
+ it("should handle empty checkpoint directory", () => {
+ const path = shardPath("", 7);
+ assert.ok(path.includes("checkpoint-worker-07.json"));
+ });
+ });
+ });
+ describe("ParallelExecutorOptions", () => {
+ describe("defaults", () => {
+ it("should use default checkpoint directory when not specified", () => {
+ const options = {};
+ const checkpointDir = options.checkpointDir ?? "/default/path";
+ assert.strictEqual(checkpointDir, "/default/path");
+ });
+ it("should use custom checkpoint directory when specified", () => {
+ const options = { checkpointDir: "/custom/path" };
+ const checkpointDir = options.checkpointDir;
+ assert.strictEqual(checkpointDir, "/custom/path");
+ });
+ it("should use default workers when not specified", () => {
+ const options = {};
+ // Default is CPU count, but we can't test that directly
+ // Just verify the undefined case falls back
+ assert.strictEqual(options.workers, undefined);
+ });
+ it("should use custom workers when specified", () => {
+ const options = { workers: 4 };
+ assert.strictEqual(options.workers, 4);
+ });
+ });
+ });
+ describe("path handling utilities", () => {
+ describe("dist directory detection", () => {
+ it("should detect Unix-style dist paths", () => {
+ const unixDistPath = "/path/to/project/dist/cli.js";
+ // Extract directory from file path
+ const lastSlashIndex = unixDistPath.lastIndexOf("/");
+ const entryDir = unixDistPath.substring(0, lastSlashIndex);
+ const endsWithUnixDist = entryDir.endsWith("/dist");
+ assert.ok(endsWithUnixDist);
+ });
+ it("should not detect non-dist paths", () => {
+ const nonDistPath = "/path/to/project/index.js";
+ const lastSlashIndex = nonDistPath.lastIndexOf("/");
+ const entryDir = nonDistPath.substring(0, lastSlashIndex);
+ const endsWithDist = entryDir.endsWith("/dist");
+ assert.ok(!endsWithDist);
+ });
+ it("should handle paths with dist in middle", () => {
+ const pathWithDistInMiddle = "/path/to/dist/middle/cli.js";
+ const lastSlashIndex = pathWithDistInMiddle.lastIndexOf("/");
+ const entryDir = pathWithDistInMiddle.substring(0, lastSlashIndex);
+ const endsWithDist = entryDir.endsWith("/dist");
+ assert.ok(!endsWithDist);
+ });
+ it("should detect nested dist paths", () => {
+ const nestedDistPath = "/path/to/project/sub/dist/cli.js";
+ const lastSlashIndex = nestedDistPath.lastIndexOf("/");
+ const entryDir = nestedDistPath.substring(0, lastSlashIndex);
+ const endsWithDist = entryDir.endsWith("/dist");
+ assert.ok(endsWithDist);
+ });
+ it("should handle Windows-style backslash dist paths", () => {
+ // Windows paths use backslashes
+ // String.raw`\dist` produces a literal backslash
+ const windowsBackslashDist = String.raw `\dist`;
+ assert.strictEqual(windowsBackslashDist, "\\dist");
+ // Test that endsWith works with backslash paths
+ const windowsDir = "C:\\path\\to\\project\\dist";
+ assert.ok(windowsDir.endsWith(windowsBackslashDist));
+ });
+ });
+ });
+ describe("ParallelExecutor class with DI", () => {
+ let mockLogger;
+ let mockSpawner;
+ let mockSystemInfo;
+ beforeEach(() => {
+ mockLogger = new MockLogger();
+ mockSpawner = new MockProcessSpawner();
+ mockSystemInfo = new MockSystemInfo();
+ });
+ describe("constructor", () => {
+ it("should use default dependencies when none provided", () => {
+ const executor = new ParallelExecutor();
+ assert.ok(executor);
+ });
+ it("should use provided dependencies", () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ assert.ok(executor);
+ });
+ it("should use partial dependencies", () => {
+ const executor = new ParallelExecutor(mockLogger);
+ assert.ok(executor);
+ });
  });
- describe("run batch distribution (conceptual)", () => {
- /**
- * The executeParallel function uses the following algorithm:
- * 1. Calculate batch size = ceil(runs.length / numWorkers)
- * 2. Split runs into batches using slice(i, i + batchSize)
- *
- * This test verifies the algorithm produces expected results.
- */
+ describe("createBatches", () => {
  it("should distribute runs evenly across workers", () => {
- const runs = Array.from({ length: 100 }, (_, index) => ({
- runId: `run${index}`,
- sutId: "sut",
- caseId: "case",
- repetition: 0,
- seed: index,
- }));
- const numberWorkers = 4;
- const batchSize = Math.ceil(runs.length / numberWorkers); // 25
- const batches = [];
- for (let index = 0; index < runs.length; index += batchSize) {
- batches.push(runs.slice(index, index + batchSize));
- }
- assert.strictEqual(batches.length, 4);
- assert.strictEqual(batches[0].length, 25);
- assert.strictEqual(batches[1].length, 25);
- assert.strictEqual(batches[2].length, 25);
- assert.strictEqual(batches[3].length, 25);
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [
+ { runId: "run1", sutId: "sut1", caseId: "case1", repetition: 0, seed: 1 },
+ { runId: "run2", sutId: "sut1", caseId: "case2", repetition: 0, seed: 2 },
+ { runId: "run3", sutId: "sut1", caseId: "case3", repetition: 0, seed: 3 },
+ { runId: "run4", sutId: "sut1", caseId: "case4", repetition: 0, seed: 4 },
+ ];
+ // Access private method via testing
+ //
+ const batches = executor._createBatches(runs, 2);
+ assert.strictEqual(batches.length, 2);
+ assert.strictEqual(batches[0].runIds.length, 2);
+ assert.strictEqual(batches[1].runIds.length, 2);
+ assert.deepStrictEqual(batches[0].runIds, ["run1", "run2"]);
+ assert.deepStrictEqual(batches[1].runIds, ["run3", "run4"]);
  });
  it("should handle uneven distribution", () => {
- const runs = Array.from({ length: 10 }, (_, index) => ({
- runId: `run${index}`,
- sutId: "sut",
- caseId: "case",
- repetition: 0,
- seed: index,
- }));
- const numberWorkers = 3;
- const batchSize = Math.ceil(runs.length / numberWorkers); // 4
- const batches = [];
- for (let index = 0; index < runs.length; index += batchSize) {
- batches.push(runs.slice(index, index + batchSize));
- }
- assert.strictEqual(batches.length, 3);
- assert.strictEqual(batches[0].length, 4);
- assert.strictEqual(batches[1].length, 4);
- assert.strictEqual(batches[2].length, 2); // Last batch gets remainder
- });
- it("should handle single worker", () => {
- const runs = Array.from({ length: 50 }, (_, index) => ({
- runId: `run${index}`,
- sutId: "sut",
- caseId: "case",
- repetition: 0,
- seed: index,
- }));
- const numberWorkers = 1;
- const batchSize = Math.ceil(runs.length / numberWorkers); // 50
- const batches = [];
- for (let index = 0; index < runs.length; index += batchSize) {
- batches.push(runs.slice(index, index + batchSize));
- }
- assert.strictEqual(batches.length, 1);
- assert.strictEqual(batches[0].length, 50);
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [
+ { runId: "run1", sutId: "sut1", caseId: "case1", repetition: 0, seed: 1 },
+ { runId: "run2", sutId: "sut1", caseId: "case2", repetition: 0, seed: 2 },
+ { runId: "run3", sutId: "sut1", caseId: "case3", repetition: 0, seed: 3 },
+ ];
+ //
+ const batches = executor._createBatches(runs, 2);
+ assert.strictEqual(batches.length, 2);
+ assert.strictEqual(batches[0].runIds.length, 2);
+ assert.strictEqual(batches[1].runIds.length, 1);
  });
  it("should handle more workers than runs", () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
  const runs = [
- { runId: "run0", sutId: "sut", caseId: "case", repetition: 0, seed: 0 },
- { runId: "run1", sutId: "sut", caseId: "case", repetition: 0, seed: 1 },
+ { runId: "run1", sutId: "sut1", caseId: "case1", repetition: 0, seed: 1 },
  ];
- const numberWorkers = 5;
- const batchSize = Math.ceil(runs.length / numberWorkers); // 1
- const batches = [];
- for (let index = 0; index < runs.length; index += batchSize) {
- batches.push(runs.slice(index, index + batchSize));
- }
- // With batchSize=1, we get 2 batches (one per run)
- // The last 3 workers would get empty batches in the actual implementation
- assert.ok(batches.length >= 2);
- assert.strictEqual(batches[0].length, 1);
- assert.strictEqual(batches[1].length, 1);
+ //
+ const batches = executor._createBatches(runs, 4);
+ assert.strictEqual(batches.length, 1);
+ assert.strictEqual(batches[0].runIds.length, 1);
  });
- });
- describe("run filter generation (conceptual)", () => {
- /**
- * Each worker receives a run filter as a JSON array of run IDs.
- * This test verifies the filter format.
- */
- it("should generate JSON array of run IDs for each batch", () => {
+ it("should handle empty runs", () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [];
+ //
+ const batches = executor._createBatches(runs, 2);
+ assert.strictEqual(batches.length, 0);
+ });
+ it("should create proper batch metadata", () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
  const runs = [
- { runId: "alpha", sutId: "sut", caseId: "case", repetition: 0, seed: 0 },
- { runId: "bravo", sutId: "sut", caseId: "case", repetition: 0, seed: 1 },
- { runId: "charlie", sutId: "sut", caseId: "case", repetition: 0, seed: 2 },
+ { runId: "run1", sutId: "sut1", caseId: "case1", repetition: 0, seed: 1 },
+ { runId: "run2", sutId: "sut1", caseId: "case2", repetition: 0, seed: 2 },
  ];
- const numberWorkers = 2;
- const batchSize = Math.ceil(runs.length / numberWorkers); // 2
- const runFilters = [];
- for (let index = 0; index < runs.length; index += batchSize) {
- const batch = runs.slice(index, index + batchSize);
- const runIds = new Set(batch.map((r) => r.runId));
- runFilters.push(JSON.stringify([...runIds]));
- }
- assert.strictEqual(runFilters.length, 2);
- const filter0 = JSON.parse(runFilters[0]);
- const filter1 = JSON.parse(runFilters[1]);
- assert.ok(filter0.includes("alpha"));
- assert.ok(filter0.includes("bravo"));
- assert.ok(filter1.includes("charlie"));
+ //
+ const batches = executor._createBatches(runs, 2);
+ assert.strictEqual(batches[0].index, 0);
+ assert.strictEqual(batches[0].firstRunId, "run1");
+ assert.strictEqual(batches[0].lastRunId, "run1");
+ assert.ok(batches[0].filter.startsWith("["));
+ assert.ok(batches[0].filter.endsWith("]"));
  });
  });
- describe("worker names generation (conceptual)", () => {
- /**
- * Worker names follow the pattern: {adjective}-{noun}-{hex-suffix}
- * This test verifies names are unique and well-formatted.
- */
- it("should generate unique worker names", () => {
- // The actual implementation uses random adjectives, nouns, and hex suffix
- // Here we verify the pattern is followed
- const adjectives = ["swift", "nimble", "quick"];
- const nouns = ["runner", "worker", "processor"];
- const usedNames = new Set();
- const names = [];
- for (let index = 0; index < 10; index++) {
- const adj = adjectives[index % adjectives.length];
- const noun = nouns[index % nouns.length];
- const suffix = index.toString(16).padStart(4, "0"); // Simulate hex suffix
- const name = `${adj}-${noun}-${suffix}`;
- // Verify uniqueness
- assert.ok(!usedNames.has(name));
- usedNames.add(name);
- names.push(name);
- }
- assert.strictEqual(names.length, 10);
- assert.strictEqual(new Set(names).size, 10); // All unique
+ describe("createWorkerConfigs", () => {
+ it("should create worker configs with proper arguments", () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const batches = [
+ {
+ index: 0,
+ runIds: ["run1", "run2"],
+ filter: '["run1","run2"]',
+ firstRunId: "run1",
+ lastRunId: "run2",
+ },
+ ];
+ //
+ const configs = executor._createWorkerConfigs(batches, ["worker-1"], "/path/to/cli.js", "/checkpoints", 5000);
+ assert.strictEqual(configs.length, 1);
+ assert.strictEqual(configs[0].index, 0);
+ assert.strictEqual(configs[0].name, "worker-1");
+ assert.strictEqual(configs[0].checkpointPath, "/checkpoints/checkpoint-worker-00.json");
+ assert.deepStrictEqual(configs[0].arguments, [
+ "/path/to/cli.js",
+ "evaluate",
+ "--phase=execute",
+ "--checkpoint-mode=file",
+ '--run-filter=["run1","run2"]',
+ "--timeout=5000",
+ ]);
  });
- });
- describe("spawn arguments (conceptual)", () => {
- /**
- * Verify that workers are spawned with correct arguments.
- */
- it("should include checkpoint-path argument for each worker", () => {
- const checkpointDir = "/tmp/checkpoints";
- const workerIndex = 2;
- const checkpointPath = shardPath(checkpointDir, workerIndex);
- assert.ok(checkpointPath.includes("checkpoint-worker-02.json"));
- assert.ok(checkpointPath.includes(checkpointDir));
- });
- it("should include worker-index in environment variables", () => {
- const workerIndex = 3;
- const environmentVariable = `GRAPHBOX_WORKER_INDEX=${workerIndex}`;
- assert.strictEqual(environmentVariable, "GRAPHBOX_WORKER_INDEX=3");
- });
- it("should include total-workers in environment variables", () => {
- const totalWorkers = 5;
- const environmentVariable = `GRAPHBOX_TOTAL_WORKERS=${totalWorkers}`;
- assert.strictEqual(environmentVariable, "GRAPHBOX_TOTAL_WORKERS=5");
+ it("should not include timeout when timeoutMs is 0", () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const batches = [
+ {
+ index: 0,
+ runIds: ["run1"],
+ filter: '["run1"]',
+ firstRunId: "run1",
+ lastRunId: "run1",
+ },
+ ];
+ //
+ const configs = executor._createWorkerConfigs(batches, ["worker-1"], "/path/to/cli.js", "/checkpoints", 0);
+ assert.ok(!configs[0].arguments.includes("--timeout="));
+ });
+ it("should include proper environment variables", () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const batches = [
+ {
+ index: 0,
+ runIds: ["run1"],
+ filter: '["run1"]',
+ firstRunId: "run1",
+ lastRunId: "run1",
+ },
+ ];
+ //
+ const configs = executor._createWorkerConfigs(batches, ["worker-1"], "/path/to/cli.js", "/checkpoints", 5000);
+ assert.strictEqual(configs[0].env.GRAPHBOX_WORKER_NAME, "worker-1");
+ assert.strictEqual(configs[0].env.GRAPHBOX_WORKER_INDEX, "0");
+ assert.strictEqual(configs[0].env.GRAPHBOX_TOTAL_WORKERS, "1");
+ assert.strictEqual(configs[0].env.GRAPHBOX_CHECKPOINT_DIR, "/checkpoints");
+ assert.strictEqual(configs[0].env.GRAPHBOX_CHECKPOINT_PATH, "/checkpoints/checkpoint-worker-00.json");
+ assert.strictEqual(configs[0].env.NODE_OPTIONS, "--max-old-space-size=4096");
  });
  });
- });
- describe("ParallelExecutorOptions", () => {
- describe("defaults", () => {
- it("should use default checkpoint directory when not specified", () => {
- const options = {};
- const checkpointDir = options.checkpointDir ?? "/default/path";
- assert.strictEqual(checkpointDir, "/default/path");
+ describe("execute", () => {
+ it("should spawn workers for each batch", async () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [
+ createTestRun({ runId: "run1", caseId: "case1" }),
+ createTestRun({ runId: "run2", caseId: "case2" }),
+ createTestRun({ runId: "run3", caseId: "case3" }),
+ createTestRun({ runId: "run4", caseId: "case4" }),
+ ];
+ const config = createTestConfig(1000);
+ // Simulate worker exits
+ setTimeout(() => {
+ for (const spawned of mockSpawner.spawnedProcesses) {
+ spawned.process.exit(0);
+ }
+ }, 10);
+ const result = await executor.execute(runs, [], [], config);
+ assert.strictEqual(mockSpawner.spawnedProcesses.length, mockSystemInfo.cpuCount);
+ assert.strictEqual(result.results.length, 0);
+ assert.strictEqual(result.errors.length, 0);
  });
- it("should use custom checkpoint directory when specified", () => {
- const options = { checkpointDir: "/custom/path" };
- const checkpointDir = options.checkpointDir;
- assert.strictEqual(checkpointDir, "/custom/path");
+ it("should use system defaults when options not provided", async () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [createTestRun({ runId: "run1", caseId: "case1" })];
+ // Simulate worker exit
+ setTimeout(() => {
+ for (const spawned of mockSpawner.spawnedProcesses) {
+ spawned.process.exit(0);
+ }
+ }, 10);
+ await executor.execute(runs, [], [], createTestConfig(0));
+ assert.strictEqual(mockSpawner.spawnedProcesses.length, 1);
+ assert.strictEqual(mockSpawner.spawnedProcesses[0].command, mockSystemInfo.nodePath);
+ });
+ it("should log execution information", async () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [createTestRun({ runId: "run1", caseId: "case1" })];
+ // Simulate worker exit
+ setTimeout(() => {
+ for (const spawned of mockSpawner.spawnedProcesses) {
+ spawned.process.exit(0);
+ }
+ }, 10);
+ await executor.execute(runs, [], [], createTestConfig(5000));
+ assert.ok(mockLogger.logs.some((log) => log.includes("Spawning")));
+ assert.ok(mockLogger.logs.some((log) => log.includes("Checkpoint directory")));
+ assert.ok(mockLogger.logs.some((log) => log.includes("timeout")));
+ });
+ it("should handle custom worker count", async () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [
+ createTestRun({ runId: "run1", caseId: "case1" }),
+ createTestRun({ runId: "run2", caseId: "case2" }),
+ createTestRun({ runId: "run3", caseId: "case3" }),
+ ];
+ // Simulate worker exits
+ setTimeout(() => {
+ for (const spawned of mockSpawner.spawnedProcesses) {
+ spawned.process.exit(0);
+ }
+ }, 10);
+ const result = await executor.execute(runs, [], [], createTestConfig(0), { workers: 2 });
+ assert.strictEqual(mockSpawner.spawnedProcesses.length, 2);
+ assert.strictEqual(result.results.length, 0);
+ });
+ it("should return empty results by design", async () => {
+ const executor = new ParallelExecutor(mockLogger, mockSpawner, mockSystemInfo);
+ const runs = [createTestRun({ runId: "run1", caseId: "case1" })];
+ // Simulate worker exit
+ setTimeout(() => {
+ for (const spawned of mockSpawner.spawnedProcesses) {
+ spawned.process.exit(0);
+ }
+ }, 10);
+ const result = await executor.execute(runs, [], [], createTestConfig(0));
+ assert.deepStrictEqual(result, { results: [], errors: [] });
  });
  });
  });
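
Note: the removed "conceptual" tests above documented the batching algorithm directly (batch size = ceil(runs.length / numWorkers), then slice(i, i + batchSize)), while the new createBatches tests exercise it through ParallelExecutor._createBatches and assert on batch metadata (index, runIds, filter, firstRunId, lastRunId). The following is a minimal TypeScript sketch of that batching step, reconstructed only from the test expectations shown in this diff; the RunBatch and PlannedRun type names and the standalone createBatches signature are illustrative assumptions, not the package's actual source:

interface PlannedRun {
  runId: string;
  sutId: string;
  caseId: string;
  repetition: number;
  seed: number;
}

interface RunBatch {
  index: number;       // batch position (field names inferred from the assertions above)
  runIds: string[];
  filter: string;      // JSON array of run IDs, passed to each worker as --run-filter=...
  firstRunId: string;
  lastRunId: string;
}

// Contiguous batching: batchSize = ceil(runs.length / workers), then slice.
// Produces fewer batches than workers when there are not enough runs,
// and no batches at all for an empty run list, matching the tests above.
function createBatches(runs: PlannedRun[], workers: number): RunBatch[] {
  if (runs.length === 0) return [];
  const batchSize = Math.ceil(runs.length / workers);
  const batches: RunBatch[] = [];
  for (let i = 0; i < runs.length; i += batchSize) {
    const runIds = runs.slice(i, i + batchSize).map((run) => run.runId);
    batches.push({
      index: batches.length,
      runIds,
      filter: JSON.stringify(runIds),
      firstRunId: runIds[0],
      lastRunId: runIds[runIds.length - 1],
    });
  }
  return batches;
}

Under this sketch, 3 runs across 2 workers yield batches of 2 and 1, and a single run with 4 workers yields one batch, consistent with the uneven-distribution and more-workers-than-runs cases tested in the diff.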