@julr/tenace 1.0.0-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1034 -0
- package/build/src/adapters/cache/memory.d.ts +23 -0
- package/build/src/adapters/cache/memory.js +2 -0
- package/build/src/adapters/cache/types.d.ts +56 -0
- package/build/src/adapters/cache/types.js +1 -0
- package/build/src/adapters/lock/types.d.ts +104 -0
- package/build/src/adapters/lock/types.js +1 -0
- package/build/src/adapters/rate_limiter/memory.d.ts +14 -0
- package/build/src/adapters/rate_limiter/memory.js +2 -0
- package/build/src/adapters/rate_limiter/types.d.ts +101 -0
- package/build/src/adapters/rate_limiter/types.js +1 -0
- package/build/src/backoff.d.ts +79 -0
- package/build/src/chaos/manager.d.ts +29 -0
- package/build/src/chaos/policies.d.ts +10 -0
- package/build/src/chaos/types.d.ts +75 -0
- package/build/src/collection.d.ts +81 -0
- package/build/src/config.d.ts +38 -0
- package/build/src/errors/errors.d.ts +79 -0
- package/build/src/errors/main.d.ts +1 -0
- package/build/src/errors/main.js +2 -0
- package/build/src/errors-BODHnryv.js +67 -0
- package/build/src/internal/adapter_policies.d.ts +31 -0
- package/build/src/internal/cockatiel_factories.d.ts +18 -0
- package/build/src/internal/telemetry.d.ts +50 -0
- package/build/src/main.d.ts +176 -0
- package/build/src/main.js +1125 -0
- package/build/src/memory-DWyezb1O.js +37 -0
- package/build/src/memory-DXkg8s6y.js +60 -0
- package/build/src/plugin.d.ts +30 -0
- package/build/src/policy_configurator.d.ts +108 -0
- package/build/src/semaphore.d.ts +71 -0
- package/build/src/tenace_builder.d.ts +22 -0
- package/build/src/tenace_policy.d.ts +41 -0
- package/build/src/types/backoff.d.ts +57 -0
- package/build/src/types/collection.d.ts +46 -0
- package/build/src/types/main.d.ts +5 -0
- package/build/src/types/main.js +1 -0
- package/build/src/types/plugin.d.ts +61 -0
- package/build/src/types/types.d.ts +241 -0
- package/build/src/wait_for.d.ts +23 -0
- package/package.json +135 -0
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import type { CacheAdapter } from './types.ts';
/**
 * In-memory cache adapter using Bentocache.
 *
 * Default cache backend used when no custom `CacheAdapter` is supplied.
 * NOTE(review): the implementation is hidden behind `#private`, so eviction
 * semantics should be confirmed against the Bentocache documentation.
 */
export declare class MemoryCacheAdapter implements CacheAdapter {
    #private;
    /**
     * @param options.maxSize - Maximum total cache size; a string, presumably
     *   a human-readable size such as '10mb' — TODO confirm against Bentocache.
     * @param options.maxItems - Maximum number of entries to keep.
     */
    constructor(options?: {
        maxSize?: string;
        maxItems?: number;
    });
    /** Get a value from cache; resolves to `undefined` on a miss. */
    get<T>(key: string): Promise<T | undefined>;
    /** Set a value in cache with a time-to-live in milliseconds. */
    set<T>(key: string, value: T, ttlMs: number): Promise<void>;
    /** Delete a value from cache. */
    delete(key: string): Promise<void>;
    /** Check if a key exists. */
    has(key: string): Promise<boolean>;
    /**
     * Clear all entries
     */
    clear(): Promise<void>;
    /**
     * Disconnect from cache
     */
    disconnect(): Promise<void>;
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
/**
 * Interface for cache adapters.
 * Implement this to create custom cache backends (Redis, Memcached, etc.)
 */
export interface CacheAdapter {
    /**
     * Get a value from cache.
     * Resolves to `undefined` when the key is absent.
     */
    get<T>(key: string): Promise<T | undefined>;
    /**
     * Set a value in cache with TTL.
     * @param ttlMs - Time to live in milliseconds
     */
    set<T>(key: string, value: T, ttlMs: number): Promise<void>;
    /**
     * Delete a value from cache
     */
    delete(key: string): Promise<void>;
    /**
     * Check if a key exists
     */
    has(key: string): Promise<boolean>;
}
|
|
23
|
+
/**
 * Options for cache behavior
 */
export interface CacheOptions {
    /**
     * Custom cache adapter (uses default if not provided)
     */
    adapter?: CacheAdapter;
    /**
     * Cache key (required)
     */
    key: string;
    /**
     * Time to live in milliseconds
     */
    ttl: number;
    /**
     * If true, continue without cache when adapter fails (Redis down, etc.)
     * @default false
     */
    optional?: boolean;
    /**
     * Called on cache hit
     */
    onHit?: (event: {
        key: string;
    }) => void;
    /**
     * Called on cache miss
     */
    onMiss?: (event: {
        key: string;
    }) => void;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
/**
 * Result of a lock acquisition attempt
 */
export interface LockAcquireResult {
    /**
     * Whether the lock was successfully acquired
     */
    acquired: boolean;
}
|
|
10
|
+
/**
 * Options for acquiring a lock
 */
export interface LockAcquireOptions {
    /**
     * Retry policy for acquiring the lock
     */
    retry?: {
        /**
         * Maximum number of retry attempts (default: 0 - no retry)
         */
        attempts?: number;
        /**
         * Delay between retries in milliseconds (default: 250)
         */
        delay?: number;
        /**
         * Maximum time to wait before giving up in milliseconds
         */
        timeout?: number;
    };
}
|
|
32
|
+
/**
 * Adapter interface for distributed locks.
 * Implementations can use Redis, PostgreSQL, or any other backend.
 *
 * @example
 * ```ts
 * // Using with Verrou
 * import { Verrou } from '@verrou/core'
 *
 * class VerrouLockAdapter implements LockAdapter {
 *   #verrou: Verrou
 *
 *   constructor(verrou: Verrou) {
 *     this.#verrou = verrou
 *   }
 *
 *   async run<T>(key: string, ttl: number, fn: () => Promise<T>, options?: LockAcquireOptions): Promise<[boolean, T | undefined]> {
 *     const lock = this.#verrou.createLock(key, ttl)
 *     return lock.run(fn, { retry: options?.retry })
 *   }
 * }
 * ```
 */
export interface LockAdapter {
    /**
     * Acquires a lock and executes the function.
     * Returns a tuple with [acquired, result].
     * When acquisition fails, `result` is `undefined` per the tuple type.
     *
     * @param key - The resource key to lock
     * @param ttl - Time-to-live in milliseconds (lock auto-releases after this duration)
     * @param fn - The function to execute while holding the lock
     * @param options - Options for acquiring the lock
     * @returns A tuple of [wasAcquired, result]
     */
    run<T>(key: string, ttl: number, fn: () => Promise<T>, options?: LockAcquireOptions): Promise<[boolean, T | undefined]>;
}
|
|
68
|
+
/**
 * Options for distributed lock
 */
export interface DistributedLockOptions {
    /**
     * The resource key to lock
     */
    key: string;
    /**
     * Time-to-live in milliseconds. Lock auto-releases after this duration.
     * This prevents deadlocks if the process crashes.
     */
    ttl: number;
    /**
     * The lock adapter to use.
     * Unlike cache and rate limiter, there is no built-in adapter.
     * You must provide an adapter (e.g., from @verrou/core) either here
     * or globally via configStore.configure({ lock: adapter }).
     */
    adapter?: LockAdapter;
    /**
     * Retry policy for acquiring the lock
     */
    retry?: LockAcquireOptions['retry'];
    /**
     * Called when lock is successfully acquired
     */
    onAcquired?: (event: {
        key: string;
    }) => void;
    /**
     * Called when lock acquisition fails
     */
    onRejected?: (event: {
        key: string;
    }) => void;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type { RateLimiterAdapter, RateLimitConfig, RateLimitResult, RateLimitState } from './types.ts';
/**
 * In-memory rate limiter adapter using rate-limiter-flexible.
 *
 * Default rate-limiter backend used when no custom `RateLimiterAdapter`
 * is supplied. State is process-local (implementation hidden behind
 * `#private`), so it does not coordinate across processes.
 */
export declare class MemoryRateLimiterAdapter implements RateLimiterAdapter {
    #private;
    /** Try to acquire a permit for `key`; see `RateLimitResult.allowed`. */
    acquire(key: string, options: RateLimitConfig): Promise<RateLimitResult>;
    /** Get the current window state for `key`. */
    getState(key: string): Promise<RateLimitState>;
    /** Reset the rate limiter for `key`. */
    reset(key: string): Promise<void>;
    /**
     * Clear all rate limit state
     */
    clear(): void;
}
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
/**
 * Interface for rate limiter adapters.
 * Implement this to create distributed rate limiters (Redis, etc.)
 */
export interface RateLimiterAdapter {
    /**
     * Try to acquire a permit. Returns true if allowed, false if rate limited.
     * (See `RateLimitResult.allowed` for the flag and remaining/reset info.)
     */
    acquire(key: string, options: RateLimitConfig): Promise<RateLimitResult>;
    /**
     * Get current state for a key
     */
    getState(key: string): Promise<RateLimitState>;
    /**
     * Reset the rate limiter for a key
     */
    reset(key: string): Promise<void>;
}
|
|
19
|
+
/**
 * Rate limit configuration
 */
export interface RateLimitConfig {
    /**
     * Maximum number of calls allowed in the window
     */
    maxCalls: number;
    /**
     * Time window in milliseconds
     */
    windowMs: number;
    /**
     * Strategy for rate limiting
     * - fixed-window: Simple counter that resets at window boundaries
     * - sliding-window: Smoother rate limiting with sliding time window
     * @default 'sliding-window'
     */
    strategy?: 'fixed-window' | 'sliding-window';
}
|
|
39
|
+
/**
 * Result of a rate limit check
 */
export interface RateLimitResult {
    /**
     * Whether the request is allowed
     */
    allowed: boolean;
    /**
     * Number of remaining calls in current window
     */
    remaining: number;
    /**
     * Time in ms until the rate limit resets
     */
    resetInMs: number;
    /**
     * If not allowed, time in ms to wait before retrying
     * (only meaningful when `allowed` is false, hence optional)
     */
    retryAfterMs?: number;
}
|
|
60
|
+
/**
 * Current state of a rate limiter
 */
export interface RateLimitState {
    /**
     * Number of calls made in current window
     */
    calls: number;
    /**
     * Number of remaining calls
     */
    remaining: number;
    /**
     * Time in ms until reset
     */
    resetInMs: number;
}
|
|
77
|
+
/**
 * Options for rate limiting behavior
 * (inherits maxCalls/windowMs/strategy from RateLimitConfig)
 */
export interface RateLimitOptions extends RateLimitConfig {
    /**
     * Custom rate limiter adapter (uses default if not provided)
     */
    adapter?: RateLimiterAdapter;
    /**
     * Key to identify the rate limit bucket
     */
    key: string;
    /**
     * If true, continue without rate limiting when adapter fails (Redis down, etc.)
     * @default false
     */
    optional?: boolean;
    /**
     * Called when rate limit is exceeded
     */
    onRejected?: (event: {
        key: string;
        retryAfterMs?: number;
    }) => void;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import type { ExponentialBackoffOptions, ExponentialJitterBackoffOptions, LinearBackoffOptions } from './types/backoff.ts';
import { type Duration, type RetryDelayFunction } from './types/types.ts';
/**
 * Backoff strategies for retry delays.
 *
 * Each factory returns a `RetryDelayFunction` suitable for the `delay`
 * option of `withRetry`. Per the examples below, `Duration` accepts either
 * a number of milliseconds or a human-readable string like '1s'.
 *
 * @example
 * ```ts
 * import { backoff, tenace } from '@julr/tenace'
 *
 * // Constant delay
 * tenace()
 *   .withRetry({ times: 3, delay: backoff.constant('1s') })
 *   .execute(fn)
 *
 * // Exponential backoff
 * tenace()
 *   .withRetry({ times: 5, delay: backoff.exponential({ initial: 100, max: 30_000 }) })
 *   .execute(fn)
 *
 * // Exponential with jitter (prevents thundering herd)
 * tenace()
 *   .withRetry({ times: 5, delay: backoff.exponentialWithJitter({ initial: 100, max: 30_000 }) })
 *   .execute(fn)
 * ```
 */
export declare const backoff: {
    /**
     * Constant delay between retries.
     *
     * @example
     * backoff.constant(1000)
     * backoff.constant('1s')
     */
    constant(delay: Duration): RetryDelayFunction;
    /**
     * Exponential backoff: initial * exponent^attempt
     * Delay doubles (by default) with each attempt, capped at max.
     *
     * @example
     * // 100ms, 200ms, 400ms, 800ms, 1600ms...
     * backoff.exponential({ initial: 100 })
     *
     * // With custom max
     * backoff.exponential({ initial: 100, max: 10_000 })
     *
     * // With custom exponent (triple each time)
     * backoff.exponential({ initial: 100, exponent: 3 })
     */
    exponential(options?: ExponentialBackoffOptions): RetryDelayFunction;
    /**
     * Exponential backoff with jitter to prevent thundering herd.
     *
     * Jitter strategies:
     * - 'full': Random delay between 0 and calculated exponential delay
     * - 'decorrelated': AWS-style decorrelated jitter (recommended)
     *
     * @see https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
     *
     * @example
     * // Full jitter (random between 0 and exponential delay)
     * backoff.exponentialWithJitter({ initial: 100, max: 30_000 })
     *
     * // Decorrelated jitter (better distribution)
     * backoff.exponentialWithJitter({ initial: 100, max: 30_000, jitter: 'decorrelated' })
     */
    exponentialWithJitter(options?: ExponentialJitterBackoffOptions): RetryDelayFunction;
    /**
     * Linear backoff: initial + (step * attempt)
     * Delay increases linearly with each attempt, capped at max.
     *
     * @example
     * // 100ms, 200ms, 300ms, 400ms...
     * backoff.linear({ initial: 100, step: 100 })
     *
     * // With max cap
     * backoff.linear({ initial: 100, step: 500, max: 5_000 })
     */
    linear(options?: LinearBackoffOptions): RetryDelayFunction;
};
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import type { GlobalChaosConfig, NormalizedGlobalChaosConfig } from './types.ts';
/**
 * Manages global chaos configuration.
 * Singleton that can be enabled/disabled to affect all Tenace calls.
 */
declare class ChaosManager {
    #private;
    /**
     * Enable global chaos with the given configuration.
     * Supports shorthands:
     * - { fault: 1 } = 100% fault rate with default error
     * - { latency: 500 } = 500ms fixed delay with rate=1
     */
    enable(config: GlobalChaosConfig): void;
    /**
     * Disable global chaos
     */
    disable(): void;
    /**
     * Check if global chaos is enabled
     */
    isEnabled(): boolean;
    /**
     * Get the normalized global chaos config
     * (returns null, presumably while chaos is disabled — TODO confirm)
     */
    getConfig(): NormalizedGlobalChaosConfig | null;
}
// Shared singleton instance; the class itself is not exported.
export declare const chaosManager: ChaosManager;
export {};
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { IPolicy } from 'cockatiel';
import type { NormalizedChaosFaultConfig, NormalizedChaosLatencyConfig } from './types.ts';
/**
 * Creates a chaos fault policy that randomly throws errors
 * (a cockatiel `IPolicy`, composable with the other resilience policies).
 */
export declare function createChaosFaultPolicy(config: NormalizedChaosFaultConfig): IPolicy;
/**
 * Creates a chaos latency policy that adds random delays
 */
export declare function createChaosLatencyPolicy(config: NormalizedChaosLatencyConfig): IPolicy;
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
 * Configuration for chaos fault injection
 */
export interface ChaosFaultConfig {
    /**
     * Injection rate (0-1). 1 means 100% of calls will throw an error.
     */
    rate: number;
    /**
     * Error to throw. Defaults to Error('Chaos fault') if not specified.
     */
    error?: Error;
    /**
     * Multiple errors to randomly pick from
     */
    errors?: Error[];
}
/**
 * Configuration for chaos latency injection
 */
export interface ChaosLatencyConfig {
    /**
     * Injection rate (0-1). 1 means 100% of calls will have latency added.
     */
    rate: number;
    /**
     * Delay in milliseconds, or a range { min, max } for random delay
     */
    delay: number | {
        min: number;
        max: number;
    };
}
/**
 * Normalized chaos fault config (always has rate and error/errors)
 */
export interface NormalizedChaosFaultConfig {
    rate: number;
    error?: Error;
    errors?: Error[];
}
/**
 * Normalized chaos latency config (always has rate and delay object)
 */
export interface NormalizedChaosLatencyConfig {
    rate: number;
    delay: {
        min: number;
        max: number;
    };
}
/**
 * Global chaos configuration with shorthand support
 */
export interface GlobalChaosConfig {
    /**
     * Fault injection config.
     * - number: injection rate (0-1) with default error
     * - ChaosFaultConfig: full configuration
     */
    fault?: ChaosFaultConfig | number;
    /**
     * Latency injection config.
     * - number: fixed delay in ms with rate=1
     * - ChaosLatencyConfig: full configuration
     */
    latency?: ChaosLatencyConfig | number;
}
/**
 * Normalized global chaos config (after shorthand expansion)
 */
export interface NormalizedGlobalChaosConfig {
    fault?: NormalizedChaosFaultConfig;
    latency?: NormalizedChaosLatencyConfig;
}
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import type { SettledResult, TaskProgress, TaskRetryOptions } from './types/main.ts';
import { type Duration } from './types/types.ts';
/**
 * Builder for executing collections of async tasks with concurrency control
 * and per-task tenace policies.
 *
 * @example
 * ```ts
 * // Execute tasks with limited concurrency
 * const results = await Tenace.all([
 *   () => fetchUser(1),
 *   () => fetchUser(2),
 *   () => fetchUser(3),
 * ])
 *   .withConcurrency(2)
 *   .withRetryPerTask(3)
 *   .execute()
 *
 * // Map over items
 * const users = await Tenace.map([1, 2, 3], (id) => fetchUser(id))
 *   .withConcurrency(5)
 *   .execute()
 *
 * // Get all results (success and failures)
 * const settled = await Tenace.all(tasks)
 *   .withConcurrency(5)
 *   .settle()
 * ```
 */
export declare class CollectionBuilder<T> {
    #private;
    constructor(tasks: Array<() => Promise<T>>);
    /**
     * Set the maximum number of concurrent tasks.
     * Default is Infinity (no limit).
     */
    withConcurrency(limit: number): this;
    /**
     * Add retry logic to each task individually.
     * Each task will be retried up to `attempts` times on failure.
     */
    withRetryPerTask(attempts: number, options?: TaskRetryOptions): this;
    /**
     * Add timeout to each task individually.
     * NOTE(review): 'aggressive' vs 'cooperative' semantics are not visible
     * here — confirm against the timeout policy documentation.
     */
    withTimeoutPerTask(duration: Duration, strategy?: 'aggressive' | 'cooperative'): this;
    /**
     * Whether to stop execution when a task fails.
     * Default is true (stop on first error).
     * Set to false to continue executing remaining tasks.
     */
    stopOnError(stop: boolean): this;
    /**
     * Provide an AbortSignal to cancel all pending tasks.
     */
    withSignal(signal: AbortSignal): this;
    /**
     * Callback when a task completes successfully.
     */
    onTaskComplete(callback: (value: T, progress: TaskProgress) => void): this;
    /**
     * Callback when a task fails (after all retries).
     */
    onTaskError(callback: (error: Error, progress: TaskProgress) => void): this;
    /**
     * Callback for progress updates (called after each task completes or fails).
     */
    onProgress(callback: (progress: TaskProgress) => void): this;
    /**
     * Execute all tasks and return results.
     * Throws on first error by default (unless stopOnError(false) is set).
     * Results are in the same order as input tasks.
     */
    execute(): Promise<T[]>;
    /**
     * Execute all tasks and return settled results (never throws).
     * Each result indicates success or failure with the value/error.
     * Results are in the same order as input tasks.
     */
    settle(): Promise<SettledResult<T>[]>;
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import type { RateLimiterAdapter } from './adapters/rate_limiter/types.ts';
import type { LockAdapter } from './adapters/lock/types.ts';
import type { CacheAdapter } from './adapters/cache/types.ts';
/**
 * Global configuration for Tenace
 */
export interface TenaceConfig {
    /**
     * Default cache adapter
     */
    cache?: CacheAdapter;
    /**
     * Default rate limiter adapter
     */
    rateLimiter?: RateLimiterAdapter;
    /**
     * Default lock adapter for distributed locks.
     * Unlike cache and rate limiter, there is no built-in memory adapter.
     * You must provide an adapter (e.g., from @verrou/core).
     */
    lock?: LockAdapter;
}
/**
 * Internal configuration state
 */
declare class ConfigStore {
    #private;
    /** Set the global default adapters. */
    configure(config: TenaceConfig): void;
    // Non-optional return types suggest cache/rate limiter fall back to the
    // built-in memory adapters when unconfigured — confirm in implementation.
    getCache(): CacheAdapter;
    getRateLimiter(): RateLimiterAdapter;
    /** Lock has no built-in fallback, hence the `undefined` case. */
    getLock(): LockAdapter | undefined;
    /**
     * Reset configuration (useful for testing)
     */
    reset(): void;
}
// Shared singleton store; the class itself is not exported.
export declare const configStore: ConfigStore;
export {};
|