@julr/tenace 1.0.0-next.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +1034 -0
  2. package/build/src/adapters/cache/memory.d.ts +23 -0
  3. package/build/src/adapters/cache/memory.js +2 -0
  4. package/build/src/adapters/cache/types.d.ts +56 -0
  5. package/build/src/adapters/cache/types.js +1 -0
  6. package/build/src/adapters/lock/types.d.ts +104 -0
  7. package/build/src/adapters/lock/types.js +1 -0
  8. package/build/src/adapters/rate_limiter/memory.d.ts +14 -0
  9. package/build/src/adapters/rate_limiter/memory.js +2 -0
  10. package/build/src/adapters/rate_limiter/types.d.ts +101 -0
  11. package/build/src/adapters/rate_limiter/types.js +1 -0
  12. package/build/src/backoff.d.ts +79 -0
  13. package/build/src/chaos/manager.d.ts +29 -0
  14. package/build/src/chaos/policies.d.ts +10 -0
  15. package/build/src/chaos/types.d.ts +75 -0
  16. package/build/src/collection.d.ts +81 -0
  17. package/build/src/config.d.ts +38 -0
  18. package/build/src/errors/errors.d.ts +79 -0
  19. package/build/src/errors/main.d.ts +1 -0
  20. package/build/src/errors/main.js +2 -0
  21. package/build/src/errors-BODHnryv.js +67 -0
  22. package/build/src/internal/adapter_policies.d.ts +31 -0
  23. package/build/src/internal/cockatiel_factories.d.ts +18 -0
  24. package/build/src/internal/telemetry.d.ts +50 -0
  25. package/build/src/main.d.ts +176 -0
  26. package/build/src/main.js +1125 -0
  27. package/build/src/memory-DWyezb1O.js +37 -0
  28. package/build/src/memory-DXkg8s6y.js +60 -0
  29. package/build/src/plugin.d.ts +30 -0
  30. package/build/src/policy_configurator.d.ts +108 -0
  31. package/build/src/semaphore.d.ts +71 -0
  32. package/build/src/tenace_builder.d.ts +22 -0
  33. package/build/src/tenace_policy.d.ts +41 -0
  34. package/build/src/types/backoff.d.ts +57 -0
  35. package/build/src/types/collection.d.ts +46 -0
  36. package/build/src/types/main.d.ts +5 -0
  37. package/build/src/types/main.js +1 -0
  38. package/build/src/types/plugin.d.ts +61 -0
  39. package/build/src/types/types.d.ts +241 -0
  40. package/build/src/wait_for.d.ts +23 -0
  41. package/package.json +135 -0
package/build/src/memory-DWyezb1O.js
@@ -0,0 +1,37 @@
+ import { memoryDriver } from "bentocache/drivers/memory";
+ import { BentoCache, bentostore } from "bentocache";
+ var MemoryCacheAdapter = class {
+   #bento;
+   constructor(options = {}) {
+     this.#bento = new BentoCache({
+       default: "cache",
+       stores: { cache: bentostore().useL1Layer(memoryDriver({
+         maxSize: options.maxSize ?? "10mb",
+         maxItems: options.maxItems ?? 1e3
+       })) }
+     });
+   }
+   async get(key) {
+     return this.#bento.get({ key });
+   }
+   async set(key, value, ttlMs) {
+     await this.#bento.set({
+       key,
+       value,
+       ttl: `${ttlMs}ms`
+     });
+   }
+   async delete(key) {
+     await this.#bento.delete({ key });
+   }
+   async has(key) {
+     return this.#bento.has({ key });
+   }
+   async clear() {
+     await this.#bento.clear();
+   }
+   async disconnect() {
+     await this.#bento.disconnect();
+   }
+ };
+ export { MemoryCacheAdapter as t };
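
This bundled chunk implements the in-memory cache adapter on top of BentoCache's memory driver. A minimal usage sketch follows; the import path is a guess, since the diff only shows the internal chunk, not the public export it is published under:

```ts
// Hypothetical import path: only the bundled chunk (memory-DWyezb1O.js) is visible in this diff.
import { MemoryCacheAdapter } from '@julr/tenace/cache/memory'

const cache = new MemoryCacheAdapter({ maxSize: '5mb', maxItems: 500 })

await cache.set('user:1', { name: 'Ada' }, 60_000) // TTL is passed in milliseconds
const user = await cache.get('user:1')             // resolves the cached value while fresh
await cache.delete('user:1')
await cache.disconnect()                           // tears down the underlying BentoCache instance
```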
package/build/src/memory-DXkg8s6y.js
@@ -0,0 +1,60 @@
+ import { RateLimiterMemory } from "rate-limiter-flexible";
+ var MemoryRateLimiterAdapter = class {
+   #limiters = /* @__PURE__ */ new Map();
+   #getLimiter(options) {
+     const cacheKey = `${options.key}:${options.config.maxCalls}:${options.config.windowMs}`;
+     let limiter = this.#limiters.get(cacheKey);
+     if (!limiter) {
+       limiter = new RateLimiterMemory({
+         points: options.config.maxCalls,
+         duration: Math.ceil(options.config.windowMs / 1e3)
+       });
+       this.#limiters.set(cacheKey, limiter);
+     }
+     return limiter;
+   }
+   async acquire(key, options) {
+     const limiter = this.#getLimiter({
+       key,
+       config: options
+     });
+     try {
+       const result = await limiter.consume(key, 1);
+       return {
+         allowed: true,
+         remaining: result.remainingPoints,
+         resetInMs: result.msBeforeNext
+       };
+     } catch (error) {
+       const result = error;
+       return {
+         allowed: false,
+         remaining: result.remainingPoints,
+         resetInMs: result.msBeforeNext,
+         retryAfterMs: result.msBeforeNext
+       };
+     }
+   }
+   async getState(key) {
+     for (const [cacheKey, limiter] of this.#limiters) if (cacheKey.startsWith(`${key}:`)) {
+       const result = await limiter.get(key);
+       if (result) return {
+         calls: result.consumedPoints,
+         remaining: result.remainingPoints,
+         resetInMs: result.msBeforeNext
+       };
+     }
+     return {
+       calls: 0,
+       remaining: Infinity,
+       resetInMs: 0
+     };
+   }
+   async reset(key) {
+     for (const [cacheKey, limiter] of this.#limiters) if (cacheKey.startsWith(`${key}:`)) await limiter.delete(key);
+   }
+   clear() {
+     this.#limiters.clear();
+   }
+ };
+ export { MemoryRateLimiterAdapter as t };
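
The rate-limiter adapter keeps one `RateLimiterMemory` per `key:maxCalls:windowMs` combination. A hedged sketch of the acquire/getState/reset cycle, again with an assumed import path:

```ts
// Assumed import path; the bundled chunk name (memory-DXkg8s6y.js) is internal to the build.
import { MemoryRateLimiterAdapter } from '@julr/tenace/rate_limiter/memory'

const limiter = new MemoryRateLimiterAdapter()
const config = { maxCalls: 5, windowMs: 10_000 } // 5 calls per 10-second window

const result = await limiter.acquire('api:search', config)
if (!result.allowed) {
  // retryAfterMs mirrors rate-limiter-flexible's msBeforeNext
  console.log(`throttled, retry in ${result.retryAfterMs}ms`)
}

const state = await limiter.getState('api:search') // { calls, remaining, resetInMs }
await limiter.reset('api:search')                  // drop the counters for this key
```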
package/build/src/plugin.d.ts
@@ -0,0 +1,30 @@
+ /**
+  * Plugin system for Tenace.
+  * Allows external packages (like @julr/tenace-otel) to hook into resilience events.
+  */
+ import type { TenacePlugin } from './types/plugin.ts';
+ export type { TenacePlugin } from './types/plugin.ts';
+ /**
+  * Register a plugin to receive resilience events.
+  */
+ export declare function use(plugin: TenacePlugin): void;
+ /**
+  * Get all registered plugins.
+  */
+ export declare function getPlugins(): TenacePlugin[];
+ /**
+  * Clear all registered plugins (useful for testing).
+  */
+ export declare function clearPlugins(): void;
+ /**
+  * Register a single hook and return an unsubscribe function.
+  */
+ export declare function registerHook<K extends keyof TenacePlugin>(hook: K, fn: NonNullable<TenacePlugin[K]>): () => void;
+ /**
+  * Notify all plugins of an event.
+  */
+ export declare function notifyPlugins<K extends keyof TenacePlugin>(hook: K, event: TenacePlugin[K] extends ((e: infer E) => void) | undefined ? E : never): void;
+ /**
+  * Wrap execution with span if any plugin supports it.
+  */
+ export declare function wrapWithPluginSpan<T>(name: string, attributes: Record<string, string | number | boolean>, fn: () => Promise<T>): Promise<T>;
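
A short sketch of the registration surface declared above; the `RetryEvent` and `TimeoutEvent` payload shapes come from `types/plugin.d.ts` later in this diff, and the import path is an assumption:

```ts
// Assumed entry point for the helpers declared in plugin.d.ts.
import { registerHook, use, type TenacePlugin } from '@julr/tenace/plugin'

// Register a whole plugin object...
const loggingPlugin: TenacePlugin = {
  onRetry: (event) =>
    console.warn(`retry #${event.attempt} after ${event.delay}ms`, event.error),
}
use(loggingPlugin)

// ...or a single hook, keeping the unsubscribe function around.
const unsubscribe = registerHook('onTimeout', (event) => {
  console.warn(`timeout in ${event.operationName ?? 'unnamed operation'}`)
})
unsubscribe()
```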
package/build/src/policy_configurator.d.ts
@@ -0,0 +1,108 @@
+ import type { ChaosFaultConfig, ChaosLatencyConfig } from './chaos/types.ts';
+ import type { RateLimitOptions } from './adapters/rate_limiter/types.ts';
+ import type { DistributedLockOptions } from './adapters/lock/types.ts';
+ import type { CacheOptions } from './adapters/cache/types.ts';
+ import { type CircuitBreakerHooks, type Duration, type PolicyLayer, type RetryConfig } from './types/types.ts';
+ /**
+  * Abstract base class for resilience configuration.
+  * Provides all the `.withX()` methods that add layers to the policy stack.
+  * The order of `.withX()` calls determines the execution order.
+  */
+ export declare abstract class PolicyConfigurator<T, TSelf extends PolicyConfigurator<T, TSelf>> {
+   #private;
+   protected layers: PolicyLayer[];
+   protected abstract createInstance(layers: PolicyLayer[]): TSelf;
+   /**
+    * Adds a timeout to the operation.
+    * Position in the chain determines behavior:
+    * - Before retry: timeout applies to each attempt
+    * - After retry: timeout applies to the whole operation including retries
+    */
+   withTimeout(duration: Duration, strategyOrOptions?: 'aggressive' | 'cooperative' | {
+     strategy?: 'aggressive' | 'cooperative';
+     onTimeout?: () => void;
+   }): TSelf;
+   /**
+    * Adds retry logic with customizable delay.
+    * Position in the chain determines what gets retried.
+    *
+    * @example
+    * // Simple retry with 3 attempts
+    * .withRetry()
+    *
+    * @example
+    * // Retry 5 times with 1 second delay
+    * .withRetry({ times: 5, delay: 1000 })
+    *
+    * @example
+    * // Exponential backoff
+    * .withRetry({ times: 5, delay: (attempt) => Math.min(1000 * 2 ** attempt, 30000) })
+    */
+   withRetry(options?: RetryConfig): TSelf;
+   /**
+    * Adds circuit breaker protection with consecutive failure threshold.
+    * The circuit breaker instance is created immediately and shared across all executions.
+    */
+   withCircuitBreaker(options: {
+     failureThreshold: number;
+     halfOpenAfter: Duration;
+     hooks?: CircuitBreakerHooks;
+   }): TSelf;
+   /**
+    * Adds circuit breaker with sampling strategy.
+    */
+   withSamplingCircuitBreaker(options: {
+     threshold: number;
+     duration: Duration;
+     halfOpenAfter: Duration;
+     minimumRps?: number;
+     hooks?: CircuitBreakerHooks;
+   }): TSelf;
+   /**
+    * Adds a fallback function that provides a default value on failure.
+    * Typically placed at the outermost position to catch all errors.
+    */
+   withFallback(fn: () => T | Promise<T>, options?: {
+     onFallback?: () => void;
+   }): TSelf;
+   /**
+    * Adds OpenTelemetry span tracing.
+    */
+   withSpan(name: string, attributes?: Record<string, string | number | boolean>): TSelf;
+   /**
+    * Adds bulkhead isolation (concurrency limiter).
+    * The bulkhead instance is created immediately and shared across all executions.
+    */
+   withBulkhead(limit: number, queueOrOptions?: number | {
+     queue?: number;
+     onRejected?: () => void;
+   }): TSelf;
+   /**
+    * Adds caching for the operation.
+    */
+   withCache(options: CacheOptions): TSelf;
+   /**
+    * Adds rate limiting for the operation.
+    */
+   withRateLimit(options: RateLimitOptions): TSelf;
+   /**
+    * Adds distributed lock protection.
+    */
+   withDistributedLock(options: DistributedLockOptions): TSelf;
+   /**
+    * Adds chaos fault injection (throws random errors).
+    * Place at the end of the chain (innermost) to test resilience patterns.
+    *
+    * @example
+    * .withChaosFault({ rate: 0.1, error: new Error('Random failure') })
+    */
+   withChaosFault(options: ChaosFaultConfig): TSelf;
+   /**
+    * Adds chaos latency injection (adds random delays).
+    * Place at the end of the chain (innermost) to test resilience patterns.
+    *
+    * @example
+    * .withChaosLatency({ rate: 0.2, delay: { min: 100, max: 2000 } })
+    */
+   withChaosLatency(options: ChaosLatencyConfig): TSelf;
+ }
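
The doc comments above stress that layer order matters: a timeout added before `.withRetry()` bounds each attempt, while one added after bounds the whole retried operation. A hedged sketch of both orderings, assuming `Tenace.policy()` as the entry point (it appears in the `TenacePolicy` doc example further down; the import itself is a guess):

```ts
import { Tenace } from '@julr/tenace' // assumed main export

// Per-attempt timeout: each try gets 2s, retried up to 3 times.
const perAttempt = Tenace.policy()
  .withTimeout('2s')
  .withRetry({ times: 3 })

// Overall timeout: retries keep going, but the whole chain must finish within 5s.
const overall = Tenace.policy()
  .withRetry({ times: 3 })
  .withTimeout('5s')
  .withFallback(() => 'default value') // outermost layer: catches whatever still fails
```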
package/build/src/semaphore.d.ts
@@ -0,0 +1,71 @@
+ /**
+  * A semaphore for limiting concurrent operations.
+  * Inspired by Effect's Semaphore and p-limit.
+  *
+  * @example
+  * ```ts
+  * const sem = new Semaphore(3) // max 3 concurrent operations
+  *
+  * // Option 1: Use run() for individual calls
+  * const result = await sem.run(() => fetchData())
+  *
+  * // Option 2: Use wrap() to create a limited function
+  * const limitedFetch = sem.wrap(fetchData)
+  * await limitedFetch()
+  *
+  * // Option 3: Use acquire() for manual control
+  * const release = await sem.acquire()
+  * try {
+  *   await doSomething()
+  * } finally {
+  *   release()
+  * }
+  * ```
+  */
+ export declare class Semaphore {
+   #private;
+   constructor(permits: number);
+   /**
+    * Current number of available permits
+    */
+   get availablePermits(): number;
+   /**
+    * Number of currently running operations
+    */
+   get activeCount(): number;
+   /**
+    * Number of operations waiting in queue
+    */
+   get pendingCount(): number;
+   /**
+    * Maximum number of concurrent operations
+    */
+   get permits(): number;
+   /**
+    * Acquire a permit. Returns a release function.
+    */
+   acquire(): Promise<() => void>;
+   /**
+    * Run a function with a permit.
+    * Automatically acquires and releases the permit.
+    */
+   run<T>(fn: () => T | Promise<T>): Promise<T>;
+   /**
+    * Wrap a function to automatically use this semaphore.
+    */
+   wrap<TArgs extends unknown[], TReturn>(fn: (...args: TArgs) => TReturn | Promise<TReturn>): (...args: TArgs) => Promise<TReturn>;
+   /**
+    * Process an array of items with limited concurrency.
+    * Similar to p-limit's map() or p-map.
+    */
+   map<TInput, TOutput>(items: Iterable<TInput>, fn: (item: TInput, index: number) => TOutput | Promise<TOutput>): Promise<TOutput[]>;
+   /**
+    * Clear all pending operations in the queue.
+    * Does not affect currently running operations.
+    */
+   clearQueue(): void;
+ }
+ /**
+  * Create a semaphore with the given number of permits.
+  */
+ export declare function semaphore(permits: number): Semaphore;
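
The `map()` helper is described but not exemplified in the class doc comment. A small sketch of concurrency-limited mapping in the style of the existing examples, assuming `semaphore()` is exported from the package's main entry:

```ts
import { semaphore } from '@julr/tenace' // assumed export of the factory declared above

const sem = semaphore(4) // at most 4 requests in flight at once

const urls = ['https://example.com/a', 'https://example.com/b', 'https://example.com/c']

// Results come back in input order, like p-map with a concurrency limit.
const statuses = await sem.map(urls, async (url, index) => {
  const response = await fetch(url)
  return { index, status: response.status }
})

console.log(sem.pendingCount) // 0 once the map above has settled
```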
package/build/src/tenace_builder.d.ts
@@ -0,0 +1,22 @@
+ import { type PolicyLayer, type TenaceFunction } from './types/types.ts';
+ import { PolicyConfigurator } from './policy_configurator.ts';
+ /**
+  * Fluent builder for Tenace policies.
+  * Each `.withX()` call adds a layer to the execution chain in order.
+  */
+ export declare class TenaceBuilder<T> extends PolicyConfigurator<T, TenaceBuilder<T>> {
+   #private;
+   constructor(options?: {
+     fn?: TenaceFunction<T> | undefined;
+     layers?: PolicyLayer[];
+   });
+   protected createInstance(layers: PolicyLayer[]): TenaceBuilder<T>;
+   /**
+    * Executes the function with the configured policies.
+    */
+   execute(fn?: TenaceFunction<T>): Promise<T>;
+   /**
+    * Creates a wrapped function that applies the policy when called.
+    */
+   wrap(fn: TenaceFunction<T>): () => Promise<T>;
+ }
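
A hedged sketch of the builder surface declared above, constructing a `TenaceBuilder` directly via its declared constructor; in practice it is presumably obtained through the package's fluent entry point, and both the export name and the shape of `TenaceFunction` are assumptions here:

```ts
import { TenaceBuilder } from '@julr/tenace' // assumed export name

const fetchUser = async () => {
  const res = await fetch('https://api.example.com/user/1')
  if (!res.ok) throw new Error(`HTTP ${res.status}`)
  return res.json()
}

const builder = new TenaceBuilder({ fn: fetchUser })
  .withTimeout('2s')
  .withRetry({ times: 3, delay: 500 })

// execute() runs the configured chain once...
const user = await builder.execute()

// ...while wrap() returns a reusable zero-argument function.
const resilientFetchUser = builder.wrap(fetchUser)
await resilientFetchUser()
```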
package/build/src/tenace_policy.d.ts
@@ -0,0 +1,41 @@
+ import { type CircuitBreakerAccessor, type PolicyLayer, type TenaceFunction } from './types/types.ts';
+ import { TenaceBuilder } from './tenace_builder.ts';
+ import { PolicyConfigurator } from './policy_configurator.ts';
+ /**
+  * Reusable policy that can be applied to multiple functions.
+  * Circuit breaker and bulkhead state is shared across all calls.
+  */
+ export declare class TenacePolicy extends PolicyConfigurator<unknown, TenacePolicy> {
+   #private;
+   constructor(layers?: PolicyLayer[]);
+   protected createInstance(layers: PolicyLayer[]): TenacePolicy;
+   /**
+    * Access the circuit breaker state and controls.
+    * Returns null if no circuit breaker is configured.
+    *
+    * @example
+    * ```ts
+    * const policy = Tenace.policy()
+    *   .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })
+    *
+    * // Check state
+    * if (policy.circuitBreaker?.isOpen) {
+    *   console.log('Circuit is open, service might be down')
+    * }
+    *
+    * // Manual isolation (maintenance mode)
+    * const handle = policy.circuitBreaker?.isolate()
+    * // ... all calls now fail with CircuitIsolatedError
+    * handle?.dispose() // release isolation
+    * ```
+    */
+   get circuitBreaker(): CircuitBreakerAccessor | null;
+   /**
+    * Creates a wrapped function.
+    */
+   wrap<T>(fn: TenaceFunction<T>): () => Promise<T>;
+   /**
+    * Creates a builder for the given function with the policy's shared state.
+    */
+   call<T>(fn: TenaceFunction<T>): TenaceBuilder<T>;
+ }
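
Beyond the circuit-breaker accessor shown in the doc comment, `TenacePolicy` exposes `wrap()` and `call()` for applying one shared policy to several functions. A hedged sketch, reusing the `Tenace.policy()` entry point from the example above (the import path is assumed):

```ts
import { Tenace } from '@julr/tenace' // assumed main export, as in the doc example above

const policy = Tenace.policy()
  .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })
  .withBulkhead(10)

// wrap(): both wrapped functions share the same breaker and bulkhead state.
const getOrders = policy.wrap(() => fetch('/orders').then((r) => r.json()))
const getInvoices = policy.wrap(() => fetch('/invoices').then((r) => r.json()))

// call(): returns a TenaceBuilder, so per-call layers can still be appended.
const invoices = await policy
  .call(() => fetch('/invoices').then((r) => r.json()))
  .withFallback(() => [])
  .execute()
```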
package/build/src/types/backoff.d.ts
@@ -0,0 +1,57 @@
+ import type { Duration } from './types.ts';
+ /**
+  * Options for exponential backoff
+  */
+ export interface ExponentialBackoffOptions {
+   /**
+    * Initial delay in ms or as a duration string
+    * @default 100
+    */
+   initial?: Duration;
+   /**
+    * Maximum delay in ms or as a duration string
+    * @default 30000
+    */
+   max?: Duration;
+   /**
+    * Exponential factor
+    * @default 2
+    */
+   exponent?: number;
+ }
+ /**
+  * Jitter strategy for exponential backoff
+  * - 'full': Random delay between 0 and calculated delay
+  * - 'decorrelated': AWS-style decorrelated jitter (better distribution)
+  */
+ export type JitterStrategy = 'full' | 'decorrelated';
+ /**
+  * Options for exponential backoff with jitter
+  */
+ export interface ExponentialJitterBackoffOptions extends ExponentialBackoffOptions {
+   /**
+    * Jitter strategy
+    * @default 'full'
+    */
+   jitter?: JitterStrategy;
+ }
+ /**
+  * Options for linear backoff
+  */
+ export interface LinearBackoffOptions {
+   /**
+    * Initial delay in ms or as a duration string
+    * @default 100
+    */
+   initial?: Duration;
+   /**
+    * Step to add for each attempt in ms or as a duration string
+    * @default 100
+    */
+   step?: Duration;
+   /**
+    * Maximum delay in ms or as a duration string
+    * @default 30000
+    */
+   max?: Duration;
+ }
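
These option shapes feed the package's backoff helpers (declared in `backoff.d.ts`, which is not expanded in this diff). As a reference for what a full-jitter exponential delay looks like, here is a standalone sketch that follows the documented defaults (initial 100ms, factor 2, cap 30000ms); it is an illustration, not the package's implementation:

```ts
// Illustrative only: mirrors ExponentialJitterBackoffOptions defaults, using plain numbers for delays.
function exponentialFullJitter(
  attempt: number,
  options?: { initial?: number; max?: number; exponent?: number },
): number {
  const initial = options?.initial ?? 100
  const max = options?.max ?? 30_000
  const exponent = options?.exponent ?? 2

  const ceiling = Math.min(max, initial * exponent ** attempt)
  return Math.random() * ceiling // 'full' jitter: uniform between 0 and the capped delay
}

// attempt 0 -> up to 100ms, attempt 1 -> up to 200ms, ..., capped at 30s
const delay = exponentialFullJitter(3) // somewhere in [0, 800)
```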
package/build/src/types/collection.d.ts
@@ -0,0 +1,46 @@
+ import type { Duration, RetryDelayFunction } from './types.ts';
+ /**
+  * Result of a settled task
+  */
+ export type SettledResult<T> = {
+   status: 'fulfilled';
+   value: T;
+   index: number;
+ } | {
+   status: 'rejected';
+   reason: Error;
+   index: number;
+ };
+ /**
+  * Progress info passed to callbacks
+  */
+ export interface TaskProgress {
+   completed: number;
+   failed: number;
+   total: number;
+   index: number;
+ }
+ /**
+  * Options for retry per task
+  */
+ export interface TaskRetryOptions {
+   /**
+    * Only retry if this function returns true
+    */
+   retryIf?: (error: Error) => boolean;
+   /**
+    * Abort retrying if this function returns true
+    */
+   abortIf?: (error: Error) => boolean;
+   /**
+    * Delay between retries in ms, or a function that returns the delay.
+    * Can be a number, a string duration ('1s', '500ms'), or a function.
+    * Use backoff helpers for common patterns.
+    *
+    * @example
+    * delay: 1000
+    * delay: '1s'
+    * delay: backoff.exponential({ initial: 100, max: 30_000 })
+    */
+   delay?: Duration | RetryDelayFunction;
+ }
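
A hedged example of a `TaskRetryOptions` value, following the `delay` examples in the doc comment above; the collection functions that consume it live in `collection.d.ts`, which is not expanded in this diff, and the import paths are assumptions:

```ts
import { backoff, type TaskRetryOptions } from '@julr/tenace' // assumed exports

const retryOptions: TaskRetryOptions = {
  // Only network-ish failures are worth retrying.
  retryIf: (error) => error.message.includes('ECONNRESET'),
  // Give up immediately on client errors.
  abortIf: (error) => error.message.includes('400'),
  // Exponential backoff between attempts, as in the doc comment above.
  delay: backoff.exponential({ initial: 100, max: 30_000 }),
}
```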
package/build/src/types/main.d.ts
@@ -0,0 +1,5 @@
+ export type * from './collection.ts';
+ export type * from './backoff.ts';
+ export type * from './plugin.ts';
+ export type { BreakerConfig, CircuitBreakerAccessor, CircuitBreakerConfig, CircuitBreakerHooks, CircuitState, Duration, IsolationHandle, TenaceContext, TenaceFunction, RetryConfig, RetryDelayFunction, SpanConfig, TimeoutStrategy, WaitForOptions, } from './types.ts';
+ export type { ChaosFaultConfig, ChaosLatencyConfig, GlobalChaosConfig } from '../chaos/types.ts';
package/build/src/types/main.js
@@ -0,0 +1 @@
+ export {};
package/build/src/types/plugin.d.ts
@@ -0,0 +1,61 @@
+ export interface RetryEvent {
+   attempt: number;
+   delay: number;
+   error: Error;
+   operationName?: string;
+ }
+ export interface RetryExhaustedEvent {
+   error: Error;
+   operationName?: string;
+ }
+ export interface TimeoutEvent {
+   operationName?: string;
+ }
+ export interface CircuitEvent {
+   operationName?: string;
+ }
+ export interface BulkheadEvent {
+   operationName?: string;
+ }
+ export interface RateLimitEvent {
+   key: string;
+   retryAfterMs?: number | undefined;
+   operationName?: string;
+ }
+ export interface CacheEvent {
+   key: string;
+   operationName?: string;
+ }
+ export interface FallbackEvent {
+   operationName?: string;
+ }
+ export interface LockEvent {
+   key: string;
+   operationName?: string;
+ }
+ export interface PipelineEvent {
+   operationName?: string;
+   duration?: number;
+   error?: Error;
+ }
+ export interface TenacePlugin {
+   /**
+    * Wrapper for creating spans (used by withSpan and retry attempts)
+    */
+   wrapWithSpan?<T>(name: string, attributes: Record<string, string | number | boolean>, fn: () => Promise<T>): Promise<T>;
+   onRetry?(event: RetryEvent): void;
+   onRetryExhausted?(event: RetryExhaustedEvent): void;
+   onTimeout?(event: TimeoutEvent): void;
+   onCircuitOpened?(event: CircuitEvent): void;
+   onCircuitClosed?(event: CircuitEvent): void;
+   onCircuitHalfOpened?(event: CircuitEvent): void;
+   onBulkheadRejected?(event: BulkheadEvent): void;
+   onRateLimitRejected?(event: RateLimitEvent): void;
+   onCacheHit?(event: CacheEvent): void;
+   onCacheMiss?(event: CacheEvent): void;
+   onFallback?(event: FallbackEvent): void;
+   onLockAcquired?(event: LockEvent): void;
+   onLockRejected?(event: LockEvent): void;
+   onPipelineStart?(event: PipelineEvent): void;
+   onPipelineEnd?(event: PipelineEvent): void;
+ }
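
As a closing illustration of the plugin contract above, a hedged sketch of a plugin that implements `wrapWithSpan` plus a few event hooks; the console-based "tracing" is a placeholder, and the registration import path is an assumption:

```ts
// Assumed entry point for use() as declared in plugin.d.ts.
import { use, type TenacePlugin } from '@julr/tenace/plugin'

const timingPlugin: TenacePlugin = {
  // Used by withSpan and retry attempts to wrap execution with a "span".
  async wrapWithSpan<T>(
    name: string,
    attributes: Record<string, string | number | boolean>,
    fn: () => Promise<T>,
  ): Promise<T> {
    const start = performance.now()
    try {
      return await fn()
    } finally {
      console.debug(`[span] ${name}`, { ...attributes, durationMs: performance.now() - start })
    }
  },
  onRetry: (event) => console.debug(`[retry] attempt ${event.attempt}, waiting ${event.delay}ms`),
  onCircuitOpened: (event) => console.warn(`[circuit] opened for ${event.operationName ?? 'unnamed'}`),
  onCacheHit: (event) => console.debug(`[cache] hit ${event.key}`),
}

use(timingPlugin)
```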