elsabro 7.3.0 → 7.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,476 @@
1
+ 'use strict';
2
+
3
+ const { describe, it, before, after, beforeEach } = require('node:test');
4
+ const assert = require('node:assert/strict');
5
+ const { executeCondition, ExecutorError } = require('../src/executors');
6
+ const { CheckpointManager } = require('../src/checkpoint');
7
+ const fs = require('fs');
8
+ const path = require('path');
9
+
10
+ // Helper to create a mock context
11
+ function makeContext(overrides = {}) {
12
+ return {
13
+ inputs: {},
14
+ nodes: {},
15
+ steps: {},
16
+ state: {},
17
+ _iterations: {},
18
+ ...overrides
19
+ };
20
+ }
21
+
22
+ describe('Checkpoint save before condition errors', () => {
23
+ it('saves checkpoint before error when condition field is missing', async () => {
24
+ let checkpointData = null;
25
+
26
+ const callbacks = {
27
+ onCheckpoint: async (data) => {
28
+ checkpointData = data;
29
+ }
30
+ };
31
+
32
+ const node = {
33
+ id: 'bad_condition',
34
+ type: 'condition',
35
+ true: 'success_node',
36
+ false: 'failure_node'
37
+ // condition is missing!
38
+ };
39
+
40
+ const context = makeContext({ inputs: { test: 'value' } });
41
+
42
+ // Should throw ExecutorError
43
+ await assert.rejects(
44
+ executeCondition(node, context, callbacks),
45
+ ExecutorError
46
+ );
47
+
48
+ // Should have saved a checkpoint
49
+ assert.notEqual(checkpointData, null, 'Checkpoint should be saved');
50
+ assert.equal(checkpointData.currentNode, 'bad_condition');
51
+ assert.equal(checkpointData.nextNode, null);
52
+ assert.equal(checkpointData.stoppedAt, 'bad_condition');
53
+ assert.equal(checkpointData.reason, 'Condition node has no "condition" field');
54
+ assert.ok(checkpointData.context, 'Context should be serialized');
55
+ });
56
+
57
+ it('saves checkpoint before error when true branch is undefined', async () => {
58
+ let checkpointData = null;
59
+
60
+ const callbacks = {
61
+ onCheckpoint: async (data) => {
62
+ checkpointData = data;
63
+ }
64
+ };
65
+
66
+ const node = {
67
+ id: 'missing_true',
68
+ type: 'condition',
69
+ condition: '{{inputs.test === "pass"}}',
70
+ // true branch is missing!
71
+ false: 'failure_node'
72
+ };
73
+
74
+ const context = makeContext({ inputs: { test: 'pass' } });
75
+
76
+ // Should throw ExecutorError
77
+ await assert.rejects(
78
+ executeCondition(node, context, callbacks),
79
+ ExecutorError
80
+ );
81
+
82
+ // Should have saved a checkpoint
83
+ assert.notEqual(checkpointData, null, 'Checkpoint should be saved');
84
+ assert.equal(checkpointData.currentNode, 'missing_true');
85
+ assert.equal(checkpointData.nextNode, null);
86
+ assert.equal(checkpointData.stoppedAt, 'missing_true');
87
+ assert.ok(checkpointData.reason.includes('true'), 'Reason should mention true branch');
88
+ assert.ok(checkpointData.context, 'Context should be serialized');
89
+ });
90
+
91
+ it('saves checkpoint before error when false branch is undefined', async () => {
92
+ let checkpointData = null;
93
+
94
+ const callbacks = {
95
+ onCheckpoint: async (data) => {
96
+ checkpointData = data;
97
+ }
98
+ };
99
+
100
+ const node = {
101
+ id: 'missing_false',
102
+ type: 'condition',
103
+ condition: '{{inputs.test === "pass"}}',
104
+ true: 'success_node'
105
+ // false branch is missing!
106
+ };
107
+
108
+ const context = makeContext({ inputs: { test: 'fail' } });
109
+
110
+ // Should throw ExecutorError
111
+ await assert.rejects(
112
+ executeCondition(node, context, callbacks),
113
+ ExecutorError
114
+ );
115
+
116
+ // Should have saved a checkpoint
117
+ assert.notEqual(checkpointData, null, 'Checkpoint should be saved');
118
+ assert.equal(checkpointData.currentNode, 'missing_false');
119
+ assert.equal(checkpointData.nextNode, null);
120
+ assert.equal(checkpointData.stoppedAt, 'missing_false');
121
+ assert.ok(checkpointData.reason.includes('false'), 'Reason should mention false branch');
122
+ assert.ok(checkpointData.context, 'Context should be serialized');
123
+ });
124
+
125
+ it('does not error when onCheckpoint callback is not provided', async () => {
126
+ const node = {
127
+ id: 'no_callback',
128
+ type: 'condition',
129
+ true: 'a',
130
+ false: 'b'
131
+ // condition is missing
132
+ };
133
+
134
+ const context = makeContext();
135
+
136
+ // Should still throw ExecutorError even without checkpoint callback
137
+ await assert.rejects(
138
+ executeCondition(node, context, {}),
139
+ ExecutorError
140
+ );
141
+ });
142
+
143
+ it('preserves context data in checkpoint', async () => {
144
+ let checkpointData = null;
145
+
146
+ const callbacks = {
147
+ onCheckpoint: async (data) => {
148
+ checkpointData = data;
149
+ }
150
+ };
151
+
152
+ const node = {
153
+ id: 'context_test',
154
+ type: 'condition',
155
+ condition: '{{inputs.check}}',
156
+ true: 'next'
157
+ // false branch missing
158
+ };
159
+
160
+ const context = makeContext({
161
+ inputs: { check: false, user: 'alice' },
162
+ nodes: { prev: { outputs: { result: 42 } } },
163
+ state: { counter: 10 }
164
+ });
165
+
166
+ // Should throw ExecutorError
167
+ await assert.rejects(
168
+ executeCondition(node, context, callbacks),
169
+ ExecutorError
170
+ );
171
+
172
+ // Verify context was properly serialized
173
+ assert.notEqual(checkpointData, null);
174
+ assert.ok(checkpointData.context.inputs, 'Context should have inputs');
175
+ assert.equal(checkpointData.context.inputs.user, 'alice');
176
+ assert.ok(checkpointData.context.nodes, 'Context should have nodes');
177
+ assert.ok(checkpointData.context.state, 'Context should have state');
178
+ });
179
+ });
180
+
181
+ describe('CheckpointManager auto-cleanup', () => {
182
+ const testDir = path.join(process.cwd(), '.test-checkpoints');
183
+ let manager;
184
+
185
+ before(() => {
186
+ // Create test directory
187
+ if (fs.existsSync(testDir)) {
188
+ fs.rmSync(testDir, { recursive: true });
189
+ }
190
+ fs.mkdirSync(testDir, { recursive: true });
191
+ manager = new CheckpointManager({ dir: testDir });
192
+ });
193
+
194
+ beforeEach(() => {
195
+ // Clean directory before each test to ensure isolation
196
+ if (fs.existsSync(testDir)) {
197
+ const files = fs.readdirSync(testDir);
198
+ files.forEach(file => {
199
+ fs.unlinkSync(path.join(testDir, file));
200
+ });
201
+ }
202
+ });
203
+
204
+ after(() => {
205
+ // Clean up test directory
206
+ if (fs.existsSync(testDir)) {
207
+ fs.rmSync(testDir, { recursive: true });
208
+ }
209
+ });
210
+
211
+ it('removes checkpoints older than 7 days', () => {
212
+ const flowId = 'test_flow';
213
+ const now = Date.now();
214
+ const eightDaysAgo = now - (8 * 24 * 60 * 60 * 1000);
215
+ const sixDaysAgo = now - (6 * 24 * 60 * 60 * 1000);
216
+
217
+ // Create old checkpoint (8 days ago)
218
+ const oldFilename = `${flowId}-${eightDaysAgo}-0001.json`;
219
+ fs.writeFileSync(
220
+ path.join(testDir, oldFilename),
221
+ JSON.stringify({ flowId, timestamp: eightDaysAgo })
222
+ );
223
+
224
+ // Create recent checkpoint (6 days ago)
225
+ const recentFilename = `${flowId}-${sixDaysAgo}-0002.json`;
226
+ fs.writeFileSync(
227
+ path.join(testDir, recentFilename),
228
+ JSON.stringify({ flowId, timestamp: sixDaysAgo })
229
+ );
230
+
231
+ // Run auto-cleanup with 7-day threshold
232
+ const removed = manager.autoCleanup(flowId, 7);
233
+
234
+ // Verify old checkpoint removed, recent kept
235
+ assert.equal(removed, 1, 'Should remove 1 old checkpoint');
236
+ assert.ok(!fs.existsSync(path.join(testDir, oldFilename)), 'Old checkpoint should be removed');
237
+ assert.ok(fs.existsSync(path.join(testDir, recentFilename)), 'Recent checkpoint should be kept');
238
+ });
239
+
240
+ it('removes checkpoints for all flows when flowId not specified', () => {
241
+ const now = Date.now();
242
+ const eightDaysAgo = now - (8 * 24 * 60 * 60 * 1000);
243
+
244
+ // Create old checkpoints for different flows
245
+ const flow1File = `flow1-${eightDaysAgo}-0001.json`;
246
+ const flow2File = `flow2-${eightDaysAgo}-0001.json`;
247
+
248
+ fs.writeFileSync(
249
+ path.join(testDir, flow1File),
250
+ JSON.stringify({ flowId: 'flow1', timestamp: eightDaysAgo })
251
+ );
252
+ fs.writeFileSync(
253
+ path.join(testDir, flow2File),
254
+ JSON.stringify({ flowId: 'flow2', timestamp: eightDaysAgo })
255
+ );
256
+
257
+ // Run auto-cleanup without flowId (cleans all flows)
258
+ const removed = manager.autoCleanup(null, 7);
259
+
260
+ // Verify both old checkpoints removed
261
+ assert.equal(removed, 2, 'Should remove 2 old checkpoints');
262
+ assert.ok(!fs.existsSync(path.join(testDir, flow1File)), 'flow1 checkpoint should be removed');
263
+ assert.ok(!fs.existsSync(path.join(testDir, flow2File)), 'flow2 checkpoint should be removed');
264
+ });
265
+
266
+ it('returns 0 when checkpoint directory does not exist', () => {
267
+ const nonExistentManager = new CheckpointManager({ dir: '/non/existent/path' });
268
+ const removed = nonExistentManager.autoCleanup('test_flow', 7);
269
+ assert.equal(removed, 0, 'Should return 0 when directory does not exist');
270
+ });
271
+
272
+ it('handles custom days threshold', () => {
273
+ const flowId = 'custom_flow';
274
+ const now = Date.now();
275
+ const tenDaysAgo = now - (10 * 24 * 60 * 60 * 1000);
276
+ const fiveDaysAgo = now - (5 * 24 * 60 * 60 * 1000);
277
+
278
+ // Create checkpoints at different ages
279
+ const oldFilename = `${flowId}-${tenDaysAgo}-0001.json`;
280
+ const recentFilename = `${flowId}-${fiveDaysAgo}-0002.json`;
281
+
282
+ fs.writeFileSync(
283
+ path.join(testDir, oldFilename),
284
+ JSON.stringify({ flowId, timestamp: tenDaysAgo })
285
+ );
286
+ fs.writeFileSync(
287
+ path.join(testDir, recentFilename),
288
+ JSON.stringify({ flowId, timestamp: fiveDaysAgo })
289
+ );
290
+
291
+ // Run auto-cleanup with 3-day threshold
292
+ const removed = manager.autoCleanup(flowId, 3);
293
+
294
+ // Both should be removed (both older than 3 days)
295
+ assert.equal(removed, 2, 'Should remove 2 checkpoints older than 3 days');
296
+ assert.ok(!fs.existsSync(path.join(testDir, oldFilename)), 'Old checkpoint should be removed');
297
+ assert.ok(!fs.existsSync(path.join(testDir, recentFilename)), 'Recent checkpoint should be removed');
298
+ });
299
+
300
+ it('keeps checkpoints exactly at threshold boundary', () => {
301
+ const flowId = 'boundary_flow';
302
+ const now = Date.now();
303
+ // Use a margin to account for time between Date.now() calls
304
+ // Make one clearly within threshold and one clearly outside
305
+ const sixDaysAgo = now - (6 * 24 * 60 * 60 * 1000);
306
+ const eightDaysAgo = now - (8 * 24 * 60 * 60 * 1000);
307
+
308
+ // Create checkpoint within threshold (6 days old, should be kept)
309
+ const recentFilename = `${flowId}-${sixDaysAgo}-0001.json`;
310
+ fs.writeFileSync(
311
+ path.join(testDir, recentFilename),
312
+ JSON.stringify({ flowId, timestamp: sixDaysAgo })
313
+ );
314
+
315
+ // Create checkpoint outside threshold (8 days old, should be removed)
316
+ const oldFilename = `${flowId}-${eightDaysAgo}-0002.json`;
317
+ fs.writeFileSync(
318
+ path.join(testDir, oldFilename),
319
+ JSON.stringify({ flowId, timestamp: eightDaysAgo })
320
+ );
321
+
322
+ // Run auto-cleanup with 7-day threshold
323
+ const removed = manager.autoCleanup(flowId, 7);
324
+
325
+ // Only the 8-day-old checkpoint should be removed
326
+ assert.equal(removed, 1, 'Should remove only checkpoints older than threshold');
327
+ assert.ok(fs.existsSync(path.join(testDir, recentFilename)), 'Recent checkpoint (6 days) should be kept');
328
+ assert.ok(!fs.existsSync(path.join(testDir, oldFilename)), 'Old checkpoint (8 days) should be removed');
329
+ });
330
+
331
+ it('skips files with invalid filename patterns', () => {
332
+ const flowId = 'pattern_flow';
333
+ const now = Date.now();
334
+ const tenDaysAgo = now - (10 * 24 * 60 * 60 * 1000);
335
+
336
+ // Create valid checkpoint
337
+ const validFilename = `${flowId}-${tenDaysAgo}-0001.json`;
338
+ fs.writeFileSync(
339
+ path.join(testDir, validFilename),
340
+ JSON.stringify({ flowId, timestamp: tenDaysAgo })
341
+ );
342
+
343
+ // Create files with invalid patterns (should be ignored)
344
+ const invalidFiles = [
345
+ 'invalid.json',
346
+ `${flowId}-notanumber-0001.json`,
347
+ `${flowId}.json`,
348
+ 'random-file.txt'
349
+ ];
350
+
351
+ invalidFiles.forEach(filename => {
352
+ fs.writeFileSync(path.join(testDir, filename), 'test content');
353
+ });
354
+
355
+ // Run auto-cleanup
356
+ const removed = manager.autoCleanup(flowId, 7);
357
+
358
+ // Only the valid old checkpoint should be removed
359
+ assert.equal(removed, 1, 'Should only remove valid checkpoint files');
360
+ assert.ok(!fs.existsSync(path.join(testDir, validFilename)), 'Valid old checkpoint should be removed');
361
+
362
+ // Invalid files should still exist
363
+ invalidFiles.forEach(filename => {
364
+ assert.ok(fs.existsSync(path.join(testDir, filename)), `Invalid file ${filename} should be ignored and kept`);
365
+ });
366
+ });
367
+
368
+ it('handles zero threshold (removes all checkpoints)', () => {
369
+ const flowId = 'zero_flow';
370
+ const now = Date.now();
371
+ const oneHourAgo = now - (60 * 60 * 1000);
372
+
373
+ // Create very recent checkpoint
374
+ const recentFilename = `${flowId}-${oneHourAgo}-0001.json`;
375
+ fs.writeFileSync(
376
+ path.join(testDir, recentFilename),
377
+ JSON.stringify({ flowId, timestamp: oneHourAgo })
378
+ );
379
+
380
+ // Run auto-cleanup with 0-day threshold
381
+ const removed = manager.autoCleanup(flowId, 0);
382
+
383
+ // Even recent checkpoint should be removed with 0 threshold
384
+ assert.equal(removed, 1, 'Should remove all checkpoints with 0-day threshold');
385
+ assert.ok(!fs.existsSync(path.join(testDir, recentFilename)), 'Recent checkpoint should be removed with 0 threshold');
386
+ });
387
+
388
+ it('returns 0 when no checkpoints match criteria', () => {
389
+ const flowId = 'no_match_flow';
390
+ const now = Date.now();
391
+ const twoDaysAgo = now - (2 * 24 * 60 * 60 * 1000);
392
+
393
+ // Create recent checkpoint
394
+ const recentFilename = `${flowId}-${twoDaysAgo}-0001.json`;
395
+ fs.writeFileSync(
396
+ path.join(testDir, recentFilename),
397
+ JSON.stringify({ flowId, timestamp: twoDaysAgo })
398
+ );
399
+
400
+ // Run auto-cleanup with 7-day threshold (checkpoint is only 2 days old)
401
+ const removed = manager.autoCleanup(flowId, 7);
402
+
403
+ // Nothing should be removed
404
+ assert.equal(removed, 0, 'Should return 0 when no checkpoints match removal criteria');
405
+ assert.ok(fs.existsSync(path.join(testDir, recentFilename)), 'Recent checkpoint should be kept');
406
+ });
407
+
408
+ it('cleans up multiple old checkpoints for same flow', () => {
409
+ const flowId = 'multi_flow';
410
+ const now = Date.now();
411
+
412
+ // Create multiple old checkpoints
413
+ const oldCheckpoints = [];
414
+ for (let i = 0; i < 5; i++) {
415
+ const daysAgo = (8 + i) * 24 * 60 * 60 * 1000;
416
+ const timestamp = now - daysAgo;
417
+ const filename = `${flowId}-${timestamp}-${String(i).padStart(4, '0')}.json`;
418
+ oldCheckpoints.push(filename);
419
+ fs.writeFileSync(
420
+ path.join(testDir, filename),
421
+ JSON.stringify({ flowId, timestamp })
422
+ );
423
+ }
424
+
425
+ // Create one recent checkpoint
426
+ const recentTimestamp = now - (2 * 24 * 60 * 60 * 1000);
427
+ const recentFilename = `${flowId}-${recentTimestamp}-0005.json`;
428
+ fs.writeFileSync(
429
+ path.join(testDir, recentFilename),
430
+ JSON.stringify({ flowId, timestamp: recentTimestamp })
431
+ );
432
+
433
+ // Run auto-cleanup with 7-day threshold
434
+ const removed = manager.autoCleanup(flowId, 7);
435
+
436
+ // All 5 old checkpoints should be removed
437
+ assert.equal(removed, 5, 'Should remove all old checkpoints');
438
+
439
+ oldCheckpoints.forEach(filename => {
440
+ assert.ok(!fs.existsSync(path.join(testDir, filename)), `Old checkpoint ${filename} should be removed`);
441
+ });
442
+
443
+ assert.ok(fs.existsSync(path.join(testDir, recentFilename)), 'Recent checkpoint should be kept');
444
+ });
445
+
446
+ it('does not remove non-json files in checkpoint directory', () => {
447
+ const flowId = 'mixed_flow';
448
+ const now = Date.now();
449
+ const tenDaysAgo = now - (10 * 24 * 60 * 60 * 1000);
450
+
451
+ // Create old checkpoint
452
+ const oldCheckpoint = `${flowId}-${tenDaysAgo}-0001.json`;
453
+ fs.writeFileSync(
454
+ path.join(testDir, oldCheckpoint),
455
+ JSON.stringify({ flowId, timestamp: tenDaysAgo })
456
+ );
457
+
458
+ // Create non-JSON files
459
+ const otherFiles = ['.gitkeep', 'README.md', 'backup.txt'];
460
+ otherFiles.forEach(filename => {
461
+ fs.writeFileSync(path.join(testDir, filename), 'content');
462
+ });
463
+
464
+ // Run auto-cleanup
465
+ const removed = manager.autoCleanup(flowId, 7);
466
+
467
+ // Only the old checkpoint should be removed
468
+ assert.equal(removed, 1, 'Should only remove JSON checkpoint files');
469
+ assert.ok(!fs.existsSync(path.join(testDir, oldCheckpoint)), 'Old checkpoint should be removed');
470
+
471
+ // Other files should remain
472
+ otherFiles.forEach(filename => {
473
+ assert.ok(fs.existsSync(path.join(testDir, filename)), `Non-JSON file ${filename} should not be removed`);
474
+ });
475
+ });
476
+ });