@unrdf/self-healing-workflows 26.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,772 @@
1
+ /**
2
+ * @file Self-healing workflows test suite
3
+ * @description Comprehensive tests for error recovery, retry, circuit breaker, and health monitoring
4
+ */
5
+
6
+ import { describe, it, expect, beforeEach, vi } from 'vitest';
7
+ import {
8
+ SelfHealingEngine,
9
+ ErrorClassifier,
10
+ RetryStrategy,
11
+ CircuitBreaker,
12
+ RecoveryActionExecutor,
13
+ HealthMonitor,
14
+ immediateRetry,
15
+ exponentialRetry
16
+ } from '../src/index.mjs';
17
+
18
describe('ErrorClassifier', () => {
  // Fresh classifier per test so custom patterns never leak between cases.
  let classifier;

  beforeEach(() => {
    classifier = new ErrorClassifier();
  });

  it('should classify network errors correctly', () => {
    const result = classifier.classify(new Error('ECONNREFUSED: connection refused'));

    expect(result.category).toBe('network');
    expect(result.severity).toBe('medium');
    expect(result.retryable).toBe(true);
    expect(result.matchedPattern).toBe('NetworkError');
  });

  it('should classify timeout errors correctly', () => {
    const result = classifier.classify(new Error('Operation timed out after 5000ms'));

    expect(result.category).toBe('timeout');
    expect(result.severity).toBe('medium');
    expect(result.retryable).toBe(true);
  });

  it('should classify validation errors as non-retryable', () => {
    const result = classifier.classify(new Error('Validation failed: invalid input'));

    expect(result.category).toBe('validation');
    expect(result.retryable).toBe(false);
  });

  it('should classify resource errors correctly', () => {
    const result = classifier.classify(new Error('ENOMEM: out of memory'));

    expect(result.category).toBe('resource');
    expect(result.severity).toBe('high');
    expect(result.retryable).toBe(true);
  });

  it('should classify unknown errors', () => {
    const result = classifier.classify(new Error('Something went wrong'));

    expect(result.category).toBe('unknown');
    expect(result.retryable).toBe(false);
  });

  it('should add custom patterns', () => {
    classifier.addPattern({
      name: 'CustomError',
      category: 'network',
      severity: 'low',
      pattern: /custom error/i
    });

    const result = classifier.classify(new Error('Custom error occurred'));

    expect(result.matchedPattern).toBe('CustomError');
  });

  it('should classify batch of errors', () => {
    const inputs = ['ECONNREFUSED', 'timeout', 'validation'].map(msg => new Error(msg));

    const results = classifier.classifyBatch(inputs);

    expect(results).toHaveLength(3);
    // Categories come back in input order.
    expect(results.map(r => r.category)).toEqual(['network', 'timeout', 'validation']);
  });

  it('should filter retryable errors', () => {
    const sample = [
      { category: 'network', retryable: true },
      { category: 'validation', retryable: false },
      { category: 'timeout', retryable: true }
    ];

    const retryable = classifier.filterRetryable(sample);

    expect(retryable).toHaveLength(2);
  });
});
describe('RetryStrategy', () => {
  it('should retry operation on failure', async () => {
    const strategy = new RetryStrategy({ maxAttempts: 3, initialDelay: 10 });
    let callCount = 0;

    // Fails twice, then succeeds on the third call.
    const task = vi.fn(async () => {
      callCount++;
      if (callCount < 3) {
        throw new Error('Temporary failure');
      }
      return 'success';
    });

    const outcome = await strategy.execute(task);

    expect(outcome).toBe('success');
    expect(callCount).toBe(3);
  });

  it('should throw after max attempts exhausted', async () => {
    const strategy = new RetryStrategy({ maxAttempts: 2, initialDelay: 10 });
    const task = vi.fn(async () => {
      throw new Error('Permanent failure');
    });

    await expect(strategy.execute(task)).rejects.toThrow('Operation failed after 2 attempts');
    expect(task).toHaveBeenCalledTimes(2);
  });

  it('should calculate exponential backoff delays', () => {
    const strategy = new RetryStrategy({
      initialDelay: 1000,
      backoffMultiplier: 2,
      jitter: false
    });

    // Delay doubles per attempt: 1000, 2000, 4000, 8000.
    [1000, 2000, 4000, 8000].forEach((expectedMs, idx) => {
      expect(strategy.calculateDelay(idx + 1)).toBe(expectedMs);
    });
  });

  it('should cap delay at maxDelay', () => {
    const strategy = new RetryStrategy({
      initialDelay: 1000,
      maxDelay: 5000,
      backoffMultiplier: 2,
      jitter: false
    });

    expect(strategy.calculateDelay(5)).toBe(5000);
    expect(strategy.calculateDelay(10)).toBe(5000);
  });

  it('should add jitter to delays', () => {
    const strategy = new RetryStrategy({
      initialDelay: 1000,
      jitter: true
    });

    const delayMs = strategy.calculateDelay(1);

    // Jitter keeps the delay within +-20% of the base value.
    expect(delayMs).toBeGreaterThanOrEqual(800);
    expect(delayMs).toBeLessThanOrEqual(1200);
  });

  it('should call onRetry callback', async () => {
    const strategy = new RetryStrategy({ maxAttempts: 3, initialDelay: 10 });
    const onRetry = vi.fn();
    let callCount = 0;

    await strategy.execute(
      async () => {
        callCount++;
        if (callCount < 2) throw new Error('Fail');
        return 'success';
      },
      { onRetry }
    );

    // One failure means exactly one retry notification.
    expect(onRetry).toHaveBeenCalledTimes(1);
  });

  it('should execute with metadata', async () => {
    const strategy = new RetryStrategy({ maxAttempts: 3, initialDelay: 10 });
    let callCount = 0;

    const report = await strategy.executeWithMetadata(async () => {
      callCount++;
      if (callCount < 2) throw new Error('Fail');
      return 'success';
    });

    expect(report.success).toBe(true);
    expect(report.result).toBe('success');
    expect(report.attempts).toBe(2);
    expect(report.retryHistory).toHaveLength(1);
  });

  it('should support immediate retry helper', async () => {
    let callCount = 0;

    const outcome = await immediateRetry(async () => {
      callCount++;
      if (callCount < 2) throw new Error('Fail');
      return 'success';
    });

    expect(outcome).toBe('success');
    expect(callCount).toBe(2);
  });

  it('should support exponential retry helper', async () => {
    let callCount = 0;

    const outcome = await exponentialRetry(async () => {
      callCount++;
      if (callCount < 2) throw new Error('Fail');
      return 'success';
    });

    expect(outcome).toBe('success');
  });
});
describe('CircuitBreaker', () => {
  /**
   * Push `count` failing operations through the breaker so it trips open.
   * Replaces the six copy-pasted try/catch-and-ignore loops: `rejects.toThrow()`
   * still tolerates the expected failure (or the breaker's own OPEN rejection)
   * but fails the test if a call unexpectedly succeeds, instead of silently
   * swallowing every outcome.
   * @param {CircuitBreaker} breaker - breaker under test
   * @param {number} count - number of failing calls to issue
   */
  async function tripBreaker(breaker, count) {
    for (let i = 0; i < count; i++) {
      await expect(
        breaker.execute(async () => {
          throw new Error('Failure');
        })
      ).rejects.toThrow();
    }
  }

  it('should remain closed on successful operations', async () => {
    const breaker = new CircuitBreaker({ failureThreshold: 3 });

    await breaker.execute(async () => 'success');

    expect(breaker.getState()).toBe('closed');
  });

  it('should open after threshold failures', async () => {
    const breaker = new CircuitBreaker({ failureThreshold: 3, timeout: 100 });

    await tripBreaker(breaker, 3);

    expect(breaker.getState()).toBe('open');
  });

  it('should reject requests when open', async () => {
    const breaker = new CircuitBreaker({ failureThreshold: 2, timeout: 100 });

    // Trigger failures to open circuit
    await tripBreaker(breaker, 2);

    // Next request should be rejected without running the operation
    await expect(
      breaker.execute(async () => 'success')
    ).rejects.toThrow('Circuit breaker is OPEN');
  });

  it('should use fallback when circuit is open', async () => {
    const breaker = new CircuitBreaker({ failureThreshold: 2, timeout: 100 });
    const fallback = vi.fn(() => 'fallback-result');

    await tripBreaker(breaker, 2);

    // With the circuit open, the fallback supplies the result
    const result = await breaker.execute(
      async () => 'primary',
      { fallback }
    );

    expect(result).toBe('fallback-result');
    expect(fallback).toHaveBeenCalled();
  });

  it('should transition to half-open after reset timeout', async () => {
    const breaker = new CircuitBreaker({
      failureThreshold: 2,
      resetTimeout: 50,
      timeout: 100
    });

    await tripBreaker(breaker, 2);
    expect(breaker.getState()).toBe('open');

    // Wait for reset timeout to elapse
    await new Promise(resolve => setTimeout(resolve, 60));

    // The probe request may itself fail; only the state transition matters here
    await breaker.execute(async () => 'success').catch(() => {});

    expect(['half-open', 'closed']).toContain(breaker.getState());
  });

  it('should close after success threshold in half-open', async () => {
    const breaker = new CircuitBreaker({
      failureThreshold: 2,
      successThreshold: 2,
      resetTimeout: 50,
      timeout: 100
    });

    await tripBreaker(breaker, 2);

    // Wait for reset so the breaker accepts probe requests again
    await new Promise(resolve => setTimeout(resolve, 60));

    // Two consecutive successes meet successThreshold and re-close the circuit
    await breaker.execute(async () => 'success');
    await breaker.execute(async () => 'success');

    expect(breaker.getState()).toBe('closed');
  });

  it('should track statistics', async () => {
    const breaker = new CircuitBreaker({ timeout: 100 });

    await breaker.execute(async () => 'success');
    await tripBreaker(breaker, 1);

    const stats = breaker.getStats();

    expect(stats.totalRequests).toBe(2);
    expect(stats.successfulRequests).toBe(1);
    expect(stats.failedRequests).toBe(1);
    expect(stats.successRate).toBe(0.5);
  });

  it('should enforce timeout on operations', async () => {
    const breaker = new CircuitBreaker({ timeout: 50 });

    // Operation sleeps past the 50ms budget, so the breaker must cut it off
    await expect(
      breaker.execute(async () => {
        await new Promise(resolve => setTimeout(resolve, 100));
        return 'success';
      })
    ).rejects.toThrow('Operation timed out');
  });
});
describe('RecoveryActionExecutor', () => {
  it('should register and execute recovery actions', async () => {
    const executor = new RecoveryActionExecutor();
    const handler = vi.fn(async () => ({ recovered: true }));

    executor.register({
      type: 'retry',
      name: 'test-action',
      execute: handler,
      priority: 50
    });

    const outcome = await executor.execute('retry', 'test-action', { data: 'test' });

    expect(outcome).toEqual({ recovered: true });
    expect(handler).toHaveBeenCalled();
  });

  it('should select best action based on priority', () => {
    const executor = new RecoveryActionExecutor();

    // Two candidates for the same error type; only priority differs.
    for (const [name, priority] of [['low-priority', 10], ['high-priority', 90]]) {
      executor.register({
        type: 'retry',
        name,
        execute: async () => ({}),
        priority
      });
    }

    const selected = executor.selectAction({ category: 'network', retryable: true });

    expect(selected.action.name).toBe('high-priority');
  });

  it('should filter actions by condition', () => {
    const executor = new RecoveryActionExecutor();

    executor.register({
      type: 'retry',
      name: 'conditional',
      execute: async () => ({}),
      condition: (error) => error.category === 'network',
      // Higher priority than default retry-operation (80)
      priority: 90
    });

    const pickedForNetwork = executor.selectAction({ category: 'network', retryable: true });
    const pickedForTimeout = executor.selectAction({ category: 'timeout', retryable: true });

    expect(pickedForNetwork.action.name).toBe('conditional');
    expect(pickedForTimeout.action.name).not.toBe('conditional');
  });

  it('should track action statistics', async () => {
    const executor = new RecoveryActionExecutor();

    await executor.execute('retry', 'retry-operation', {
      operation: async () => 'success',
      maxAttempts: 1
    });

    const retryStats = executor.getStats()['retry:retry-operation'];

    expect(retryStats.attempts).toBeGreaterThan(0);
    expect(retryStats.successes).toBeGreaterThan(0);
  });

  it('should execute skip action', async () => {
    const executor = new RecoveryActionExecutor();

    const outcome = await executor.execute('skip', 'skip-and-continue', {
      error: { message: 'Test error' }
    });

    expect(outcome.skipped).toBe(true);
  });

  it('should execute compensate action', async () => {
    const executor = new RecoveryActionExecutor();
    const compensationFn = vi.fn(async () => {});

    const outcome = await executor.execute('compensate', 'compensating-transaction', {
      compensationFn
    });

    expect(outcome.compensated).toBe(true);
    expect(compensationFn).toHaveBeenCalled();
  });
});
describe('HealthMonitor', () => {
  it('should register and execute health checks', async () => {
    const monitor = new HealthMonitor({ timeout: 100 });
    const probe = vi.fn(async () => {});

    monitor.registerCheck('test-check', probe);

    const report = await monitor.check();

    expect(report.status).toBe('healthy');
    expect(report.checks).toHaveLength(1);
    expect(probe).toHaveBeenCalled();
  });

  it('should detect unhealthy checks', async () => {
    const monitor = new HealthMonitor({ timeout: 100, unhealthyThreshold: 1 });

    monitor.registerCheck('failing-check', async () => {
      throw new Error('Check failed');
    });

    const report = await monitor.check();

    expect(report.checks[0].status).toBe('unhealthy');
  });

  it('should calculate overall status', async () => {
    const monitor = new HealthMonitor({ timeout: 100 });

    monitor.registerCheck('healthy-1', async () => {});
    monitor.registerCheck('healthy-2', async () => {});

    const report = await monitor.check();

    expect(report.status).toBe('healthy');
  });

  it('should report degraded status with partial failures', async () => {
    const monitor = new HealthMonitor({ timeout: 100 });

    // One passing and one failing check should yield 'degraded', not 'unhealthy'.
    monitor.registerCheck('healthy', async () => {});
    monitor.registerCheck('unhealthy', async () => {
      throw new Error('Failed');
    });

    const report = await monitor.check();

    expect(report.status).toBe('degraded');
  });

  it('should enforce check timeout', async () => {
    const monitor = new HealthMonitor({ timeout: 50 });

    // Probe deliberately outlasts the monitor's 50ms budget.
    monitor.registerCheck('slow-check', async () => {
      await new Promise(resolve => setTimeout(resolve, 100));
    });

    const report = await monitor.check();

    expect(report.checks[0].status).toBe('unhealthy');
    expect(report.checks[0].message).toContain('timeout');
  });

  it('should start and stop periodic checks', async () => {
    const monitor = new HealthMonitor({ interval: 50, timeout: 100 });
    const probe = vi.fn(async () => {});

    monitor.registerCheck('periodic', probe);

    monitor.start();
    // Two 50ms intervals fit inside 120ms, so the probe should fire at least once.
    await new Promise(resolve => setTimeout(resolve, 120));
    monitor.stop();

    expect(probe).toHaveBeenCalled();
  });

  it('should notify listeners on status change', async () => {
    const monitor = new HealthMonitor({ timeout: 100 });
    const listener = vi.fn();

    monitor.onStatusChange(listener);
    await monitor.check();

    expect(listener).toHaveBeenCalled();
  });
});
describe('SelfHealingEngine', () => {
  it('should execute operation successfully', async () => {
    const engine = new SelfHealingEngine();

    const result = await engine.execute(async () => 'success');

    expect(result).toBe('success');
  });

  it('should retry on transient failures', async () => {
    const engine = new SelfHealingEngine({
      retry: { maxAttempts: 3, initialDelay: 10 }
    });

    let attempts = 0;

    // Fails twice with a retryable (network) error, then succeeds.
    const result = await engine.execute(async () => {
      attempts++;
      if (attempts < 3) {
        throw new Error('ECONNREFUSED');
      }
      return 'success';
    });

    expect(result).toBe('success');
    expect(attempts).toBe(3);
  });

  it('should use fallback on failure', async () => {
    const engine = new SelfHealingEngine({
      retry: { maxAttempts: 1, initialDelay: 10 }
    });

    const result = await engine.execute(
      async () => {
        throw new Error('Permanent failure');
      },
      {
        fallback: () => 'fallback-value'
      }
    );

    expect(result).toBe('fallback-value');
  });

  it('should track recovery statistics', async () => {
    const engine = new SelfHealingEngine();

    await engine.execute(async () => 'success');

    const stats = engine.getStats();

    expect(stats.totalAttempts).toBe(1);
    expect(stats.successfulRecoveries).toBe(1);
    expect(stats.successRate).toBe(1);
  });

  it('should enforce max concurrent recoveries', async () => {
    const engine = new SelfHealingEngine({ maxConcurrentRecoveries: 2 });

    // Launch three overlapping operations against a limit of two; at least
    // one must be rejected by the concurrency guard.
    const promises = Array.from({ length: 3 }, () =>
      engine.execute(async () => {
        await new Promise(resolve => setTimeout(resolve, 50));
        return 'success';
      }).catch(e => e)
    );

    const results = await Promise.all(promises);
    const errors = results.filter(r => r instanceof Error);

    expect(errors.length).toBeGreaterThan(0);
  });

  it('should measure success rate above 85%', async () => {
    const engine = new SelfHealingEngine({
      retry: { maxAttempts: 3, initialDelay: 10 },
      maxConcurrentRecoveries: 150 // Allow all 100 operations to run
    });

    // 15% of operations fail on their first attempt but succeed on retry.
    const operations = Array.from({ length: 100 }, (_, i) => {
      let attempts = 0;
      return async () => {
        attempts++;
        if (i < 15 && attempts === 1) {
          throw new Error('ECONNREFUSED');
        }
        return 'success';
      };
    });

    const results = await Promise.allSettled(
      operations.map(op => engine.execute(op))
    );

    const successful = results.filter(r => r.status === 'fulfilled').length;
    const successRate = successful / results.length;

    const stats = engine.getStats();

    // Should achieve >85% success rate with retries (100% in this case)
    expect(successRate).toBeGreaterThanOrEqual(0.85);
    expect(stats.successRate).toBeGreaterThanOrEqual(0.85);
  });

  it('should integrate with circuit breaker', async () => {
    const engine = new SelfHealingEngine({
      circuitBreaker: { failureThreshold: 3, timeout: 100 }
    });

    // Drive enough failures to open the circuit. rejects.toThrow() replaces
    // the previous empty catch block: an unexpectedly-successful call now
    // fails the test instead of being silently ignored.
    for (let i = 0; i < 3; i++) {
      await expect(
        engine.execute(async () => {
          throw new Error('Service unavailable');
        })
      ).rejects.toThrow();
    }

    expect(engine.getCircuitBreakerState()).toBe('open');
  });

  it('should provide comprehensive status', () => {
    const engine = new SelfHealingEngine();

    const status = engine.getStatus();

    for (const key of ['stats', 'circuitBreaker', 'health', 'activeRecoveries']) {
      expect(status).toHaveProperty(key);
    }
  });

  it('should support custom error patterns', () => {
    const engine = new SelfHealingEngine();

    engine.addErrorPattern({
      name: 'CustomAPI',
      category: 'dependency',
      severity: 'high',
      pattern: /API_ERROR/
    });

    const custom = engine.classifier.getPatterns().find(p => p.name === 'CustomAPI');

    expect(custom).toBeDefined();
  });

  it('should support custom recovery actions', () => {
    const engine = new SelfHealingEngine();
    const customAction = vi.fn(async () => ({ custom: true }));

    engine.addRecoveryAction({
      type: 'fallback',
      name: 'custom-action',
      execute: customAction,
      priority: 70
    });

    const custom = engine.recoveryExecutor.getActions().find(a => a.name === 'custom-action');

    expect(custom).toBeDefined();
  });

  it('should monitor health status', async () => {
    const engine = new SelfHealingEngine();

    engine.startHealthMonitoring();

    const health = await engine.getHealth();

    expect(health.status).toBeDefined();
    expect(health.checks.length).toBeGreaterThan(0);

    // Stop the monitor so no timer keeps the test process alive.
    engine.stopHealthMonitoring();
  });
});