@git-stunts/alfred 0.2.1 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -14,20 +14,22 @@ o88o o8888o o888o o888o d888b `Y8bod8P' `Y8bod88P"
  [![NPM Version](https://img.shields.io/npm/v/@git-stunts/alfred)](https://www.npmjs.com/package/@git-stunts/alfred)
  [![CI](https://github.com/git-stunts/alfred/actions/workflows/ci.yml/badge.svg)](https://github.com/git-stunts/alfred/actions/workflows/ci.yml)

- > *"Why do we fall, Bruce?"*
+ > _"Why do we fall, Bruce?"_
  >
- > *"So we can `retry({ backoff: 'exponential', jitter: 'decorrelated' })`."*
+ > _"So we can `retry({ backoff: 'exponential', jitter: 'decorrelated' })`."_

- Resilience patterns for async operations. *Tuff 'nuff for most stuff!*
+ Resilience patterns for async operations. _Tuff 'nuff for most stuff!_

  ## Installation

  ### NPM
+
  ```bash
  npm install @git-stunts/alfred
  ```

  ### JSR (Deno, Bun, Node)
+
  ```bash
  npx jsr add @git-stunts/alfred
  ```
@@ -35,6 +37,7 @@ npx jsr add @git-stunts/alfred
  ## Multi-Runtime Support

  Alfred is designed to be platform-agnostic and is tested against:
+
  - **Node.js** (>= 20.0.0)
  - **Bun** (>= 1.0.0)
  - **Deno** (>= 1.35.0)
@@ -47,10 +50,11 @@ It uses standard Web APIs (AbortController, AbortSignal) and provides runtime-aw
  import { retry, circuitBreaker, timeout, compose } from '@git-stunts/alfred';

  // Simple retry with exponential backoff
- const data = await retry(
- () => fetch('https://api.example.com/data'),
- { retries: 3, backoff: 'exponential', delay: 100 }
- );
+ const data = await retry(() => fetch('https://api.example.com/data'), {
+ retries: 3,
+ backoff: 'exponential',
+ delay: 100,
+ });

  // Circuit breaker - fail fast when service is down
  const breaker = circuitBreaker({ threshold: 5, duration: 60000 });
@@ -84,13 +88,13 @@ await retry(() => mightFail(), {
  retries: 5,
  backoff: 'exponential',
  delay: 100,
- maxDelay: 10000
+ maxDelay: 10000,
  });

  // Only retry specific errors
  await retry(() => mightFail(), {
  retries: 3,
- shouldRetry: (err) => err.code === 'ECONNREFUSED'
+ shouldRetry: (err) => err.code === 'ECONNREFUSED',
  });

  // With jitter to prevent thundering herd
@@ -98,21 +102,21 @@ await retry(() => mightFail(), {
  retries: 3,
  backoff: 'exponential',
  delay: 100,
- jitter: 'full' // or 'equal' or 'decorrelated'
+ jitter: 'full', // or 'equal' or 'decorrelated'
  });
  ```

  **Options:**

- | Option | Type | Default | Description |
- |--------|------|---------|-------------|
- | `retries` | `number` | `3` | Maximum retry attempts |
- | `delay` | `number` | `1000` | Base delay in milliseconds |
- | `maxDelay` | `number` | `30000` | Maximum delay cap |
- | `backoff` | `'constant' \| 'linear' \| 'exponential'` | `'constant'` | Backoff strategy |
- | `jitter` | `'none' \| 'full' \| 'equal' \| 'decorrelated'` | `'none'` | Jitter strategy |
- | `shouldRetry` | `(error) => boolean` | `() => true` | Predicate to filter retryable errors |
- | `onRetry` | `(error, attempt, delay) => void` | - | Callback on each retry |
+ | Option | Type | Default | Description |
+ | ------------- | ----------------------------------------------- | ------------ | ------------------------------------ |
+ | `retries` | `number` | `3` | Maximum retry attempts |
+ | `delay` | `number` | `1000` | Base delay in milliseconds |
+ | `maxDelay` | `number` | `30000` | Maximum delay cap |
+ | `backoff` | `'constant' \| 'linear' \| 'exponential'` | `'constant'` | Backoff strategy |
+ | `jitter` | `'none' \| 'full' \| 'equal' \| 'decorrelated'` | `'none'` | Jitter strategy |
+ | `shouldRetry` | `(error) => boolean` | `() => true` | Predicate to filter retryable errors |
+ | `onRetry` | `(error, attempt, delay) => void` | - | Callback on each retry |

  ### `circuitBreaker(options)`

@@ -122,11 +126,11 @@ Fails fast when a service is degraded, preventing cascade failures.
  import { circuitBreaker } from '@git-stunts/alfred';

  const breaker = circuitBreaker({
- threshold: 5, // Open after 5 failures
- duration: 60000, // Stay open for 60 seconds
+ threshold: 5, // Open after 5 failures
+ duration: 60000, // Stay open for 60 seconds
  onOpen: () => console.log('Circuit opened!'),
  onClose: () => console.log('Circuit closed!'),
- onHalfOpen: () => console.log('Testing recovery...')
+ onHalfOpen: () => console.log('Testing recovery...'),
  });

  // Circuit has three states:
@@ -145,15 +149,15 @@ try {

  **Options:**

- | Option | Type | Default | Description |
- |--------|------|---------|-------------|
- | `threshold` | `number` | required | Failures before opening |
- | `duration` | `number` | required | How long to stay open (ms) |
- | `successThreshold` | `number` | `1` | Successes to close from half-open |
- | `shouldTrip` | `(error) => boolean` | `() => true` | Which errors count as failures |
- | `onOpen` | `() => void` | - | Called when circuit opens |
- | `onClose` | `() => void` | - | Called when circuit closes |
- | `onHalfOpen` | `() => void` | - | Called when entering half-open |
+ | Option | Type | Default | Description |
+ | ------------------ | -------------------- | ------------ | --------------------------------- |
+ | `threshold` | `number` | required | Failures before opening |
+ | `duration` | `number` | required | How long to stay open (ms) |
+ | `successThreshold` | `number` | `1` | Successes to close from half-open |
+ | `shouldTrip` | `(error) => boolean` | `() => true` | Which errors count as failures |
+ | `onOpen` | `() => void` | - | Called when circuit opens |
+ | `onClose` | `() => void` | - | Called when circuit closes |
+ | `onHalfOpen` | `() => void` | - | Called when entering half-open |

  ### `bulkhead(options)`

@@ -163,8 +167,8 @@ Limits the number of concurrent executions to prevent resource exhaustion.
  import { bulkhead } from '@git-stunts/alfred';

  const limiter = bulkhead({
- limit: 10, // Max 10 concurrent executions
- queueLimit: 20 // Max 20 pending requests in queue
+ limit: 10, // Max 10 concurrent executions
+ queueLimit: 20, // Max 20 pending requests in queue
  });

  // Returns an object with:
@@ -183,10 +187,10 @@ console.log(`Current load: ${limiter.stats.active} active tasks`);

  **Options:**

- | Option | Type | Default | Description |
- |--------|------|---------|-------------|
- | `limit` | `number` | required | Maximum concurrent executions |
- | `queueLimit` | `number` | `0` | Maximum pending requests in queue |
+ | Option | Type | Default | Description |
+ | ------------ | -------- | -------- | --------------------------------- |
+ | `limit` | `number` | required | Maximum concurrent executions |
+ | `queueLimit` | `number` | `0` | Maximum pending requests in queue |

  ### `timeout(ms, options)`

@@ -200,7 +204,7 @@ const result = await timeout(5000, () => slowOperation());

  // With callback
  const result = await timeout(5000, () => slowOperation(), {
- onTimeout: (elapsed) => console.log(`Timed out after ${elapsed}ms`)
+ onTimeout: (elapsed) => console.log(`Timed out after ${elapsed}ms`),
  });
  ```

@@ -231,10 +235,10 @@ Combines multiple policies. Policies execute from left to right (outermost to in
  import { compose, retry, circuitBreaker, timeout } from '@git-stunts/alfred';

  const resilient = compose(
- timeout(30000), // Total timeout
- retry({ retries: 3, backoff: 'exponential' }), // Retry failures
+ timeout(30000), // Total timeout
+ retry({ retries: 3, backoff: 'exponential' }), // Retry failures
  circuitBreaker({ threshold: 5, duration: 60000 }), // Fail fast if broken
- bulkhead({ limit: 5, queueLimit: 10 }) // Limit concurrency
+ bulkhead({ limit: 5, queueLimit: 10 }) // Limit concurrency
  );

  // Execution order:
@@ -249,23 +253,15 @@ await resilient.execute(() => riskyOperation());
  Alfred provides a composable telemetry system to monitor policy behavior.

  ```javascript
- import {
- Policy,
- ConsoleSink,
- InMemorySink,
- MultiSink
- } from '@git-stunts/alfred';
+ import { Policy, ConsoleSink, InMemorySink, MultiSink } from '@git-stunts/alfred';

  // 1. Create a sink (or multiple)
- const sink = new MultiSink([
- new ConsoleSink(),
- new InMemorySink()
- ]);
+ const sink = new MultiSink([new ConsoleSink(), new InMemorySink()]);

  // 2. Attach to policies
  const policy = Policy.retry({
  retries: 3,
- telemetry: sink
+ telemetry: sink,
  });

  // All policies emit events:
@@ -297,7 +293,7 @@ test('retries with exponential backoff', async () => {
  retries: 3,
  backoff: 'exponential',
  delay: 1000,
- clock
+ clock,
  });

  // First attempt fails immediately
@@ -319,11 +315,7 @@ test('retries with exponential backoff', async () => {
  ## Error Types

  ```javascript
- import {
- RetryExhaustedError,
- CircuitOpenError,
- TimeoutError
- } from '@git-stunts/alfred';
+ import { RetryExhaustedError, CircuitOpenError, TimeoutError } from '@git-stunts/alfred';

  try {
  await resilientOperation();
package/package.json CHANGED
@@ -1,8 +1,7 @@
  {
  "name": "@git-stunts/alfred",
- "version": "0.2.1",
+ "version": "0.3.0",
  "description": "Why do we fall, Bruce? Production-grade resilience patterns for async operations.",
-
  "type": "module",
  "main": "src/index.js",
  "types": "./src/index.d.ts",
package/src/compose.js CHANGED
@@ -33,7 +33,7 @@
  export function compose(...policies) {
  if (policies.length === 0) {
  return {
- execute: (fn) => fn()
+ execute: (fn) => fn(),
  };
  }

@@ -55,7 +55,7 @@ export function compose(...policies) {
  }

  return chain();
- }
+ },
  };
  }

@@ -86,7 +86,7 @@ export function fallback(primary, secondary) {
  } catch {
  return await secondary.execute(fn);
  }
- }
+ },
  };
  }

@@ -148,6 +148,6 @@ export function race(policyA, policyB) {
  policyA.execute(fn).then(handleSuccess, (e) => handleFailure(e, true));
  policyB.execute(fn).then(handleSuccess, (e) => handleFailure(e, false));
  });
- }
+ },
  };
  }
package/src/index.d.ts CHANGED
@@ -2,35 +2,40 @@
  * @module @git-stunts/alfred
  * @description Production-grade resilience patterns for async operations.
  * Includes Retry, Circuit Breaker, Timeout, and Bulkhead policies.
- *
+ *
  * @example
  * ```ts
  * import { compose, retry, circuitBreaker, timeout } from "@git-stunts/alfred";
- *
+ *
  * const policy = compose(
  * retry({ retries: 3 }),
  * circuitBreaker({ threshold: 5, duration: 60000 }),
  * timeout(5000)
  * );
- *
+ *
  * await policy.execute(() => fetch("https://api.example.com"));
  * ```
  */

+ /**
+ * A value that can be either static or resolved dynamically via a function.
+ */
+ export type Resolvable<T> = T | (() => T);
+
  /**
  * Options for the Retry policy.
  */
  export interface RetryOptions {
  /** Maximum number of retry attempts. Default: 3 */
- retries?: number;
+ retries?: Resolvable<number>;
  /** Base delay in milliseconds. Default: 1000 */
- delay?: number;
+ delay?: Resolvable<number>;
  /** Maximum delay cap in milliseconds. Default: 30000 */
- maxDelay?: number;
+ maxDelay?: Resolvable<number>;
  /** Backoff strategy. Default: 'constant' */
- backoff?: 'constant' | 'linear' | 'exponential';
+ backoff?: Resolvable<'constant' | 'linear' | 'exponential'>;
  /** Jitter strategy to prevent thundering herd. Default: 'none' */
- jitter?: 'none' | 'full' | 'equal' | 'decorrelated';
+ jitter?: Resolvable<'none' | 'full' | 'equal' | 'decorrelated'>;
  /** Predicate to determine if an error is retryable. Default: always true */
  shouldRetry?: (error: Error) => boolean;
  /** Callback invoked before each retry. */
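The `Resolvable<T>` alias introduced above is what lets each of these options also be supplied as a zero-argument function that is re-read by the policy. A minimal sketch of the consumer-side effect (the `config` object is a hypothetical stand-in; `retry()` and its option names come from this package):

```javascript
import { retry } from '@git-stunts/alfred';

// Hypothetical mutable settings object.
const config = { retries: 3, delayMs: 100 };

const data = await retry(() => fetch('https://api.example.com/data'), {
  retries: () => config.retries, // re-resolved whenever the policy reads it
  delay: () => config.delayMs,
  backoff: 'exponential',
});
```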
@@ -46,11 +51,11 @@ export interface RetryOptions {
  */
  export interface CircuitBreakerOptions {
  /** Number of failures before opening the circuit. */
- threshold: number;
+ threshold: Resolvable<number>;
  /** Milliseconds to stay open before transitioning to half-open. */
- duration: number;
+ duration: Resolvable<number>;
  /** Consecutive successes required to close the circuit from half-open. Default: 1 */
- successThreshold?: number;
+ successThreshold?: Resolvable<number>;
  /** Predicate to determine if an error counts as a failure. Default: always true */
  shouldTrip?: (error: Error) => boolean;
  /** Callback when circuit opens. */
@@ -80,9 +85,23 @@ export interface TimeoutOptions {
  */
  export interface BulkheadOptions {
  /** Maximum concurrent executions. */
- limit: number;
+ limit: Resolvable<number>;
  /** Maximum pending requests in queue. Default: 0 */
- queueLimit?: number;
+ queueLimit?: Resolvable<number>;
+ /** Telemetry sink for observability. */
+ telemetry?: TelemetrySink;
+ /** Clock implementation for testing. */
+ clock?: any;
+ }
+
+ /**
+ * Options for the Hedge policy.
+ */
+ export interface HedgeOptions {
+ /** Milliseconds to wait before spawning a hedge. */
+ delay: Resolvable<number>;
+ /** Maximum number of hedged attempts to spawn. Default: 1 */
+ maxHedges?: Resolvable<number>;
  /** Telemetry sink for observability. */
  telemetry?: TelemetrySink;
  /** Clock implementation for testing. */
@@ -97,6 +116,8 @@ export interface TelemetryEvent {
  type: string;
  /** Unix timestamp of the event. */
  timestamp: number;
+ /** Metric increments (counters) to be aggregated by MetricsSink. */
+ metrics?: Record<string, number>;
  /** Additional metadata (error, duration, attempts, etc.). */
  [key: string]: any;
  }
@@ -143,6 +164,34 @@ export class MultiSink implements TelemetrySink {
  emit(event: TelemetryEvent): void;
  }

+ /**
+ * Sink that aggregates metrics in memory.
+ */
+ export class MetricsSink implements TelemetrySink {
+ emit(event: TelemetryEvent): void;
+ /** Returns a snapshot of the current metrics. */
+ get stats(): {
+ retries: number;
+ failures: number;
+ successes: number;
+ circuitBreaks: number;
+ circuitRejections: number;
+ bulkheadRejections: number;
+ timeouts: number;
+ hedges: number;
+ latency: {
+ count: number;
+ sum: number;
+ min: number;
+ max: number;
+ avg: number;
+ };
+ [key: string]: number | { count: number; sum: number; min: number; max: number; avg: number };
+ };
+ /** Resets all metrics to zero. */
+ clear(): void;
+ }
+
  /**
  * Error thrown when all retry attempts are exhausted.
  */
@@ -181,7 +230,7 @@ export class BulkheadRejectedError extends Error {

  /**
  * Executes an async function with configurable retry logic.
- *
+ *
  * @param fn The async operation to execute.
  * @param options Retry configuration options.
  * @returns The result of the operation.
@@ -205,19 +254,23 @@ export interface CircuitBreaker {

  /**
  * Creates a Circuit Breaker policy.
- *
+ *
  * @param options Configuration options.
  */
  export function circuitBreaker(options: CircuitBreakerOptions): CircuitBreaker;

  /**
  * Executes a function with a time limit.
- *
+ *
  * @param ms Timeout duration in milliseconds.
  * @param fn The function to execute. Accepts an AbortSignal if defined.
  * @param options Configuration options.
  */
- export function timeout<T>(ms: number, fn: ((signal: AbortSignal) => Promise<T>) | (() => Promise<T>), options?: TimeoutOptions): Promise<T>;
+ export function timeout<T>(
+ ms: Resolvable<number>,
+ fn: ((signal: AbortSignal) => Promise<T>) | (() => Promise<T>),
+ options?: TimeoutOptions
+ ): Promise<T>;

  /**
  * Represents a Bulkhead instance.
@@ -235,15 +288,28 @@ export interface Bulkhead {

  /**
  * Creates a Bulkhead policy for concurrency limiting.
- *
+ *
  * @param options Configuration options.
  */
  export function bulkhead(options: BulkheadOptions): Bulkhead;

+ /**
+ * Represents a Hedge policy instance.
+ */
+ export interface Hedge {
+ execute<T>(fn: (signal?: AbortSignal) => Promise<T>): Promise<T>;
+ }
+
+ /**
+ * Creates a Hedge policy for speculative execution.
+ * @param options Configuration options.
+ */
+ export function hedge(options: HedgeOptions): Hedge;
+
  /**
  * Composes multiple policies into a single executable policy.
  * Policies execute from left to right (outermost to innermost).
- *
+ *
  * @param policies The policies to compose.
  */
  export function compose(...policies: any[]): { execute<T>(fn: () => Promise<T>): Promise<T> };
@@ -251,12 +317,18 @@ export function compose(...policies: any[]): { execute<T>(fn: () => Promise<T>):
  /**
  * Creates a fallback policy. If the primary policy fails, the secondary is executed.
  */
- export function fallback(primary: any, secondary: any): { execute<T>(fn: () => Promise<T>): Promise<T> };
+ export function fallback(
+ primary: any,
+ secondary: any
+ ): { execute<T>(fn: () => Promise<T>): Promise<T> };

  /**
  * Creates a race policy. Executes both policies concurrently; the first to succeed wins.
  */
- export function race(primary: any, secondary: any): { execute<T>(fn: () => Promise<T>): Promise<T> };
+ export function race(
+ primary: any,
+ secondary: any
+ ): { execute<T>(fn: () => Promise<T>): Promise<T> };

  /**
  * Fluent API for building resilience policies.
@@ -268,9 +340,11 @@ export class Policy {
  /** Creates a Circuit Breaker policy wrapper. */
  static circuitBreaker(options: CircuitBreakerOptions): Policy;
  /** Creates a Timeout policy wrapper. */
- static timeout(ms: number, options?: TimeoutOptions): Policy;
+ static timeout(ms: Resolvable<number>, options?: TimeoutOptions): Policy;
  /** Creates a Bulkhead policy wrapper. */
  static bulkhead(options: BulkheadOptions): Policy;
+ /** Creates a Hedge policy wrapper. */
+ static hedge(options: HedgeOptions): Policy;
  /** Creates a pass-through (no-op) policy. */
  static noop(): Policy;

@@ -294,4 +368,4 @@ export class TestClock {
  sleep(ms: number): Promise<void>;
  tick(ms?: number): Promise<void>;
  advance(ms: number): Promise<void>;
- }
+ }
package/src/index.js CHANGED
@@ -6,11 +6,11 @@
  // @ts-self-types="./index.d.ts"

  // Error types
- export {
- RetryExhaustedError,
- CircuitOpenError,
+ export {
+ RetryExhaustedError,
+ CircuitOpenError,
  TimeoutError,
- BulkheadRejectedError
+ BulkheadRejectedError,
  } from './errors.js';

  // Resilience policies
@@ -18,6 +18,7 @@ export { retry } from './policies/retry.js';
  export { circuitBreaker } from './policies/circuit-breaker.js';
  export { timeout } from './policies/timeout.js';
  export { bulkhead } from './policies/bulkhead.js';
+ export { hedge } from './policies/hedge.js';

  // Composition utilities
  export { compose, fallback, race } from './compose.js';
@@ -27,3 +28,6 @@ export { Policy, Policy as default } from './policy.js';

  // Clock utilities
  export { SystemClock, TestClock } from './utils/clock.js';
+
+ // Telemetry
+ export { InMemorySink, ConsoleSink, NoopSink, MultiSink, MetricsSink } from './telemetry.js';
@@ -10,6 +10,7 @@
  import { BulkheadRejectedError } from '../errors.js';
  import { SystemClock } from '../utils/clock.js';
  import { NoopSink } from '../telemetry.js';
+ import { resolve as resolveValue } from '../utils/resolvable.js';

  /**
  * @typedef {Object} BulkheadOptions
@@ -28,11 +29,11 @@ import { NoopSink } from '../telemetry.js';

  class BulkheadPolicy {
  constructor(options) {
- const {
- limit,
- queueLimit = 0,
+ const {
+ limit,
+ queueLimit = 0,
  telemetry = new NoopSink(),
- clock = new SystemClock()
+ clock = new SystemClock(),
  } = options;

  if (limit <= 0) {
@@ -49,24 +50,38 @@ class BulkheadPolicy {
  }

  processQueue() {
- if (this.active < this.limit && this.queue.length > 0) {
- const { fn, resolve, reject } = this.queue.shift();
+ const limit = resolveValue(this.limit);
+ if (this.active < limit && this.queue.length > 0) {
+ const { fn, resolve: promiseResolve, reject } = this.queue.shift();
  this.active++;
-
+
  this.emitEvent('bulkhead.execute', {
  active: this.active,
- pending: this.queue.length
+ pending: this.queue.length,
  });

  Promise.resolve()
  .then(() => fn())
- .then(resolve, reject)
+ .then(
+ (result) => {
+ this.emitEvent('bulkhead.complete', {
+ active: this.active,
+ pending: this.queue.length,
+ metrics: { successes: 1 },
+ });
+ promiseResolve(result);
+ },
+ (error) => {
+ this.emitEvent('bulkhead.complete', {
+ active: this.active,
+ pending: this.queue.length,
+ metrics: { failures: 1 },
+ });
+ reject(error);
+ }
+ )
  .finally(() => {
  this.active--;
- this.emitEvent('bulkhead.complete', {
- active: this.active,
- pending: this.queue.length
- });
  this.processQueue();
  });
  }
@@ -76,36 +91,48 @@ class BulkheadPolicy {
  this.telemetry.emit({
  type,
  timestamp: this.clock.now(),
- ...data
+ ...data,
  });
  }

  async execute(fn) {
- if (this.active < this.limit) {
+ const limit = resolveValue(this.limit);
+ const queueLimit = resolveValue(this.queueLimit);
+
+ if (this.active < limit) {
  this.active++;
  this.emitEvent('bulkhead.execute', {
  active: this.active,
- pending: this.queue.length
+ pending: this.queue.length,
  });

  try {
- return await fn();
- } finally {
- this.active--;
+ const result = await fn();
+ this.emitEvent('bulkhead.complete', {
+ active: this.active,
+ pending: this.queue.length,
+ metrics: { successes: 1 },
+ });
+ return result;
+ } catch (error) {
  this.emitEvent('bulkhead.complete', {
  active: this.active,
- pending: this.queue.length
+ pending: this.queue.length,
+ metrics: { failures: 1 },
  });
+ throw error;
+ } finally {
+ this.active--;
  this.processQueue();
  }
  }

- if (this.queue.length < this.queueLimit) {
+ if (this.queue.length < queueLimit) {
  this.emitEvent('bulkhead.queued', {
  active: this.active,
- pending: this.queue.length + 1
+ pending: this.queue.length + 1,
  });
-
+
  return new Promise((resolve, reject) => {
  this.queue.push({ fn, resolve, reject });
  });
@@ -113,16 +140,17 @@ class BulkheadPolicy {

  this.emitEvent('bulkhead.reject', {
  active: this.active,
- pending: this.queue.length
+ pending: this.queue.length,
+ metrics: { bulkheadRejections: 1 },
  });
- throw new BulkheadRejectedError(this.limit, this.queueLimit);
+ throw new BulkheadRejectedError(limit, queueLimit);
  }

  get stats() {
- return {
- active: this.active,
- pending: this.queue.length,
- available: Math.max(0, this.limit - this.active)
+ return {
+ active: this.active,
+ pending: this.queue.length,
+ available: Math.max(0, resolveValue(this.limit) - this.active),
  };
  }
  }
@@ -135,11 +163,11 @@
  */
  export function bulkhead(options) {
  const policy = new BulkheadPolicy(options);
-
+
  return {
  execute: (fn) => policy.execute(fn),
  get stats() {
  return policy.stats;
- }
+ },
  };
- }
+ }
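Since `limit` and `queueLimit` now pass through `resolveValue()` on every admission decision, the bulkhead ceiling can follow an external signal. An illustrative sketch, assuming a hypothetical `degraded` flag and placeholder `doWork()` that are not part of the package:

```javascript
import { bulkhead } from '@git-stunts/alfred';

let degraded = false; // placeholder application state

const limiter = bulkhead({
  limit: () => (degraded ? 2 : 10), // re-resolved on each call
  queueLimit: 20,
});

await limiter.execute(() => doWork()); // doWork() is a placeholder
console.log(limiter.stats);            // { active, pending, available }
```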
@@ -1,6 +1,7 @@
  import { CircuitOpenError } from '../errors.js';
  import { SystemClock } from '../utils/clock.js';
  import { NoopSink } from '../telemetry.js';
+ import { resolve } from '../utils/resolvable.js';

  /**
  * Circuit breaker states.
@@ -10,7 +11,7 @@ import { NoopSink } from '../telemetry.js';
  const State = {
  CLOSED: 'CLOSED',
  OPEN: 'OPEN',
- HALF_OPEN: 'HALF_OPEN'
+ HALF_OPEN: 'HALF_OPEN',
  };

  /**
@@ -43,7 +44,7 @@
  onClose,
  onHalfOpen,
  clock = new SystemClock(),
- telemetry = new NoopSink()
+ telemetry = new NoopSink(),
  } = options;

  if (threshold === undefined || threshold === null) {
@@ -62,7 +63,7 @@
  onClose,
  onHalfOpen,
  clock,
- telemetry
+ telemetry,
  };

  this._state = State.CLOSED;
@@ -79,7 +80,7 @@
  this.options.telemetry.emit({
  type,
  timestamp: this.options.clock.now(),
- ...data
+ ...data,
  });
  }

@@ -87,7 +88,10 @@
  this._state = State.OPEN;
  this.openedAt = new Date(this.options.clock.now());
  this.options.onOpen?.();
- this.emitEvent('circuit.open', { failureCount: this.failureCount });
+ this.emitEvent('circuit.open', {
+ failureCount: this.failureCount,
+ metrics: { circuitBreaks: 1 },
+ });
  }

  close() {
@@ -111,15 +115,18 @@
  return false;
  }
  const elapsed = this.options.clock.now() - this.openedAt.getTime();
- return elapsed >= this.options.duration;
+ return elapsed >= resolve(this.options.duration);
  }

  recordSuccess() {
- this.emitEvent('circuit.success', { state: this._state });
+ this.emitEvent('circuit.success', {
+ state: this._state,
+ metrics: { successes: 1 },
+ });

  if (this._state === State.HALF_OPEN) {
  this.successCount++;
- if (this.successCount >= this.options.successThreshold) {
+ if (this.successCount >= resolve(this.options.successThreshold)) {
  this.close();
  }
  } else if (this._state === State.CLOSED) {
@@ -134,14 +141,15 @@

  this.emitEvent('circuit.failure', {
  error,
- state: this._state
+ state: this._state,
+ metrics: { failures: 1 },
  });

  if (this._state === State.HALF_OPEN) {
  this.open();
  } else if (this._state === State.CLOSED) {
  this.failureCount++;
- if (this.failureCount >= this.options.threshold) {
+ if (this.failureCount >= resolve(this.options.threshold)) {
  this.open();
  }
  }
@@ -155,7 +163,8 @@
  if (this._state === State.OPEN) {
  this.emitEvent('circuit.reject', {
  openedAt: this.openedAt,
- failureCount: this.failureCount
+ failureCount: this.failureCount,
+ metrics: { circuitRejections: 1 },
  });
  throw new CircuitOpenError(this.openedAt, this.failureCount);
  }
@@ -184,6 +193,6 @@ export function circuitBreaker(options) {
  execute: (fn) => policy.execute(fn),
  get state() {
  return policy.state;
- }
+ },
  };
- }
+ }
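With `threshold`, `duration`, and `successThreshold` routed through `resolve()`, the breaker's tuning can also be supplied as functions. A hedged sketch, where the `strictMode` flag and `callUpstream()` are placeholders rather than package APIs:

```javascript
import { circuitBreaker } from '@git-stunts/alfred';

let strictMode = false; // placeholder runtime flag

const breaker = circuitBreaker({
  threshold: () => (strictMode ? 2 : 5), // failures before opening, resolved at each check
  duration: 30000,                       // ms to stay open before half-open
  onOpen: () => console.log('Circuit opened'),
});

await breaker.execute(() => callUpstream()); // callUpstream() is a placeholder
```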
@@ -0,0 +1,123 @@
+ /**
+ * @fileoverview Hedge policy for speculative execution.
+ *
+ * Starts concurrent "hedged" attempts if the primary attempt takes too long,
+ * helping to reduce tail latency in distributed systems.
+ *
+ * @module @git-stunts/alfred/policies/hedge
+ */
+
+ import { SystemClock } from '../utils/clock.js';
+ import { NoopSink } from '../telemetry.js';
+ import { resolve } from '../utils/resolvable.js';
+
+ /**
+ * @typedef {Object} HedgeOptions
+ * @property {number} delay - Milliseconds to wait before spawning a hedge.
+ * @property {number} [maxHedges=1] - Maximum number of hedged attempts to spawn.
+ * @property {import('../telemetry.js').TelemetrySink} [telemetry] - Telemetry sink.
+ * @property {{ now(): number, sleep(ms: number): Promise<void> }} [clock] - Clock for testing.
+ */
+
+ class HedgeExecutor {
+ constructor(fn, options) {
+ this.fn = fn;
+ this.options = {
+ telemetry: new NoopSink(),
+ clock: new SystemClock(),
+ maxHedges: 1,
+ ...options,
+ };
+ this.abortControllers = [];
+ this._finished = false;
+ }
+
+ async execute() {
+ const delay = resolve(this.options.delay);
+ const maxHedges = resolve(this.options.maxHedges);
+ const attempts = [];
+
+ // Start primary attempt
+ attempts.push(this.createAttempt(0));
+
+ // Schedule hedges
+ for (let i = 1; i <= maxHedges; i++) {
+ attempts.push(this.scheduleHedge(i, delay * i));
+ }
+
+ try {
+ return await Promise.any(attempts);
+ } finally {
+ this.cancelAll();
+ }
+ }
+
+ createAttempt(index) {
+ const controller = new AbortController();
+ this.abortControllers.push(controller);
+ const { clock, telemetry } = this.options;
+
+ const startTime = clock.now();
+ telemetry.emit({
+ type: 'hedge.attempt',
+ timestamp: startTime,
+ index,
+ metrics: index > 0 ? { hedges: 1 } : {},
+ });
+
+ return this.fn(controller.signal)
+ .then((result) => {
+ const endTime = clock.now();
+ telemetry.emit({
+ type: 'hedge.success',
+ timestamp: endTime,
+ index,
+ duration: endTime - startTime,
+ metrics: { successes: 1 },
+ });
+ return result;
+ })
+ .catch((error) => {
+ if (error.name !== 'AbortError') {
+ const endTime = clock.now();
+ telemetry.emit({
+ type: 'hedge.failure',
+ timestamp: endTime,
+ index,
+ error,
+ duration: endTime - startTime,
+ metrics: { failures: 1 },
+ });
+ }
+ throw error;
+ });
+ }
+
+ scheduleHedge(index, delayMs) {
+ return this.options.clock.sleep(delayMs).then(() => {
+ if (this._finished) {
+ return new Promise(() => {}); // Never resolve if we are done
+ }
+ return this.createAttempt(index);
+ });
+ }
+
+ cancelAll() {
+ this._finished = true;
+ for (const controller of this.abortControllers) {
+ controller.abort();
+ }
+ }
+ }
+
+ /**
+ * Creates a Hedge policy.
+ *
+ * @param {HedgeOptions} options - Hedge configuration
+ * @returns {{ execute: <T>(fn: () => Promise<T>) => Promise<T> }}
+ */
+ export function hedge(options) {
+ return {
+ execute: (fn) => new HedgeExecutor(fn, options).execute(),
+ };
+ }
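A usage sketch of the new policy, based only on the options and AbortSignal behaviour defined in the file above (the fetch target is illustrative):

```javascript
import { hedge } from '@git-stunts/alfred';

const hedged = hedge({
  delay: 200,   // wait 200 ms before spawning a hedged attempt
  maxHedges: 2, // at most two extra attempts
});

// The wrapped function receives the AbortSignal that losing attempts are cancelled with.
const result = await hedged.execute((signal) =>
  fetch('https://api.example.com/data', { signal })
);
```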
@@ -11,6 +11,7 @@ import { SystemClock } from '../utils/clock.js';
  import { createJitter } from '../utils/jitter.js';
  import { RetryExhaustedError } from '../errors.js';
  import { NoopSink } from '../telemetry.js';
+ import { resolve } from '../utils/resolvable.js';

  /**
  * @typedef {'constant' | 'linear' | 'exponential'} BackoffStrategy
@@ -38,7 +39,7 @@ const DEFAULT_OPTIONS = {
  delay: 1000,
  maxDelay: 30000,
  backoff: 'constant',
- jitter: 'none'
+ jitter: 'none',
  };

  function calculateBackoff(strategy, baseDelay, attempt) {
@@ -59,34 +60,38 @@ class RetryExecutor {
  this.options = { ...DEFAULT_OPTIONS, ...options };
  this.clock = options.clock || new SystemClock();
  this.telemetry = options.telemetry || new NoopSink();
- this.applyJitter = createJitter(this.options.jitter);
- this.prevDelay = this.options.delay;
+ // this.applyJitter is now created dynamically in calculateDelay
+ this.prevDelay = resolve(this.options.delay);
  }

  calculateDelay(attempt) {
- const { backoff, delay: baseDelay, maxDelay, jitter } = this.options;
+ const backoff = resolve(this.options.backoff);
+ const baseDelay = resolve(this.options.delay);
+ const maxDelay = resolve(this.options.maxDelay);
+ const jitter = resolve(this.options.jitter);
+
  const rawDelay = calculateBackoff(backoff, baseDelay, attempt);
+ const applyJitter = createJitter(jitter);

  if (jitter === 'decorrelated') {
- const actual = this.applyJitter(baseDelay, this.prevDelay, maxDelay);
+ const actual = applyJitter(baseDelay, this.prevDelay, maxDelay);
  this.prevDelay = actual;
  return actual;
  }
-
- return Math.min(this.applyJitter(rawDelay), maxDelay);
+
+ return Math.min(applyJitter(rawDelay), maxDelay);
  }

  async execute() {
- const totalAttempts = this.options.retries + 1;
-
- for (let attempt = 1; attempt <= totalAttempts; attempt++) {
- const shouldStop = await this.tryAttempt(attempt, totalAttempts);
+ // Loop condition: attempt <= (current_retries + 1)
+ // We start at 1.
+ for (let attempt = 1; attempt <= resolve(this.options.retries) + 1; attempt++) {
+ const shouldStop = await this.tryAttempt(attempt);
  if (shouldStop) {
  return shouldStop.result;
  }
  }
-
- // Should be unreachable if logic is correct, but satisfied strict returns
+
  throw new Error('Unexpected retry loop termination');
  }

@@ -102,22 +107,24 @@
  // But we need to calculate delay first
  const delay = this.calculateDelay(attempt);
  this.emitScheduled(attempt, delay, error);
-
+
  if (this.options.onRetry) {
  this.options.onRetry(error, attempt, delay);
  }
-
+
  await this.clock.sleep(delay);
  return null; // Continue loop
  }
  }

  emitSuccess(attempt, startTime) {
+ const endTime = this.clock.now();
  this.telemetry.emit({
  type: 'retry.success',
- timestamp: this.clock.now(),
+ timestamp: endTime,
  attempt,
- duration: this.clock.now() - startTime
+ duration: endTime - startTime,
+ metrics: { successes: 1 },
  });
  }

@@ -127,30 +134,33 @@
  timestamp: this.clock.now(),
  attempt,
  delay,
- error
+ error,
+ metrics: { retries: 1 },
  });
  }

  handleFailure(error, attempt, startTime) {
+ const endTime = this.clock.now();
  this.telemetry.emit({
  type: 'retry.failure',
- timestamp: this.clock.now(),
+ timestamp: endTime,
  attempt,
  error,
- duration: this.clock.now() - startTime
+ duration: endTime - startTime,
+ metrics: { failures: 1 },
  });

  if (this.options.shouldRetry && !this.options.shouldRetry(error)) {
  throw error;
  }

- const totalAttempts = this.options.retries + 1;
+ const totalAttempts = resolve(this.options.retries) + 1;
  if (attempt >= totalAttempts) {
  this.telemetry.emit({
  type: 'retry.exhausted',
- timestamp: this.clock.now(),
+ timestamp: endTime,
  attempts: attempt,
- error
+ error,
  });
  throw new RetryExhaustedError(attempt, error);
  }
@@ -167,4 +177,4 @@
  */
  export async function retry(fn, options = {}) {
  return new RetryExecutor(fn, options).execute();
- }
+ }
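The reworked executor resolves `retries` on every loop iteration and rethrows immediately when `shouldRetry` rejects an error; only genuinely exhausted attempts end in `RetryExhaustedError`. A sketch of that behaviour, with `flakyCall()` as a placeholder:

```javascript
import { retry, RetryExhaustedError } from '@git-stunts/alfred';

try {
  await retry(() => flakyCall(), {
    retries: 2,                                      // 2 retries => at most 3 attempts
    shouldRetry: (err) => err.code === 'ECONNRESET', // other errors rethrow immediately
  });
} catch (err) {
  if (err instanceof RetryExhaustedError) {
    // constructed with the attempt count and the last underlying error
  }
}
```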
@@ -9,6 +9,7 @@

  import { TimeoutError } from '../errors.js';
  import { NoopSink } from '../telemetry.js';
+ import { resolve } from '../utils/resolvable.js';

  /**
  * @typedef {Object} TimeoutOptions
@@ -45,6 +46,7 @@ import { NoopSink } from '../telemetry.js';
  */
  export async function timeout(ms, fn, options = {}) {
  const { onTimeout, telemetry = new NoopSink() } = options;
+ const timeoutMs = resolve(ms);
  const controller = new AbortController();
  const startTime = Date.now();

@@ -58,16 +60,17 @@ export async function timeout(ms, fn, options = {}) {
  if (onTimeout) {
  onTimeout(elapsed);
  }
-
+
  telemetry.emit({
  type: 'timeout',
  timestamp: Date.now(),
- timeout: ms,
- elapsed
+ timeout: timeoutMs,
+ elapsed,
+ metrics: { timeouts: 1, failures: 1 },
  });

- reject(new TimeoutError(ms, elapsed));
- }, ms);
+ reject(new TimeoutError(timeoutMs, elapsed));
+ }, timeoutMs);
  });

  try {
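Because `ms` is now passed through `resolve(ms)`, the deadline itself can be dynamic. A sketch, where `isPeakTraffic()` is a placeholder predicate and not part of the package:

```javascript
import { timeout, TimeoutError } from '@git-stunts/alfred';

const deadlineMs = () => (isPeakTraffic() ? 2000 : 5000); // placeholder predicate

try {
  await timeout(deadlineMs, (signal) => fetch('https://api.example.com', { signal }), {
    onTimeout: (elapsed) => console.log(`gave up after ${elapsed}ms`),
  });
} catch (err) {
  if (err instanceof TimeoutError) {
    // the wrapped call was aborted via the AbortSignal passed to it
  }
}
```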
package/src/policy.js CHANGED
@@ -34,6 +34,7 @@ import { retry } from './policies/retry.js';
  import { circuitBreaker } from './policies/circuit-breaker.js';
  import { timeout } from './policies/timeout.js';
  import { bulkhead } from './policies/bulkhead.js';
+ import { hedge } from './policies/hedge.js';
  import { compose, fallback, race } from './compose.js';

  /**
@@ -134,6 +135,17 @@ export class Policy {
  return new Policy((fn) => limiter.execute(fn));
  }

+ /**
+ * Creates a Policy that speculatively executes hedged attempts.
+ *
+ * @param {import('./policies/hedge.js').HedgeOptions} options
+ * @returns {Policy}
+ */
+ static hedge(options) {
+ const hedger = hedge(options);
+ return new Policy((fn) => hedger.execute(fn));
+ }
+
  /**
  * Creates a no-op Policy that passes through to the function directly.
  *
@@ -181,10 +193,7 @@
  return new Policy((fn) => {
  // Compose: outer wraps inner
  // When outer calls its "fn", that fn is actually inner's execution
- return compose(
- { execute: outer },
- { execute: inner }
- ).execute(fn);
+ return compose({ execute: outer }, { execute: inner }).execute(fn);
  });
  }

@@ -212,10 +221,7 @@
  const secondary = otherPolicy._executor;

  return new Policy((fn) => {
- return fallback(
- { execute: primary },
- { execute: secondary }
- ).execute(fn);
+ return fallback({ execute: primary }, { execute: secondary }).execute(fn);
  });
  }

@@ -243,10 +249,7 @@
  const second = otherPolicy._executor;

  return new Policy((fn) => {
- return race(
- { execute: first },
- { execute: second }
- ).execute(fn);
+ return race({ execute: first }, { execute: second }).execute(fn);
  });
  }

package/src/telemetry.js CHANGED
@@ -5,8 +5,9 @@

  /**
  * @typedef {Object} TelemetryEvent
- * @property {string} type - Event type (e.g. 'retry', 'circuit.open')
+ * @property {string} type - Event type (e.g. 'retry.failure', 'circuit.open')
  * @property {number} timestamp - Event timestamp
+ * @property {Record<string, number>} [metrics] - Metric increments (counters)
  * @property {Object} [metadata] - Additional event data
  */

@@ -73,3 +74,85 @@ export class MultiSink {
  }
  }
  }
+
+ /**
+ * Sink that aggregates metrics in memory based on the `metrics` field in events.
+ * @implements {TelemetrySink}
+ */
+ export class MetricsSink {
+ constructor() {
+ this.clear();
+ }
+
+ /**
+ * Processes a telemetry event and updates internal counters.
+ * @param {TelemetryEvent} event
+ */
+ emit(event) {
+ const { duration, metrics } = event;
+
+ if (metrics && typeof metrics === 'object') {
+ this._updateMetrics(metrics);
+ }
+
+ if (typeof duration === 'number' && Number.isFinite(duration) && duration >= 0) {
+ this._updateLatency(duration);
+ }
+ }
+
+ _updateMetrics(metrics) {
+ for (const [key, value] of Object.entries(metrics)) {
+ if (typeof value !== 'number') {
+ continue;
+ }
+
+ const current = this.metrics[key];
+ if (current === undefined || typeof current === 'number') {
+ this.metrics[key] = (current || 0) + value;
+ }
+ }
+ }
+
+ _updateLatency(ms) {
+ const { latency } = this.metrics;
+ latency.count++;
+ latency.sum += ms;
+ latency.min = Math.min(latency.min, ms);
+ latency.max = Math.max(latency.max, ms);
+ }
+
+ /**
+ * Returns a snapshot of the current metrics.
+ */
+ get stats() {
+ const { latency, ...rest } = this.metrics;
+ const hasData = latency.count > 0;
+
+ return {
+ ...rest,
+ latency: {
+ ...latency,
+ min: hasData ? latency.min : 0,
+ max: hasData ? latency.max : 0,
+ avg: hasData ? latency.sum / latency.count : 0,
+ },
+ };
+ }
+
+ /**
+ * Resets all metrics to zero.
+ */
+ clear() {
+ this.metrics = {
+ retries: 0,
+ failures: 0,
+ successes: 0,
+ circuitBreaks: 0,
+ circuitRejections: 0,
+ bulkheadRejections: 0,
+ timeouts: 0,
+ hedges: 0,
+ latency: { count: 0, sum: 0, min: Infinity, max: 0 },
+ };
+ }
+ }
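A short sketch of how the new sink is meant to be consumed, using only members shown in this diff:

```javascript
import { Policy, MetricsSink } from '@git-stunts/alfred';

const metrics = new MetricsSink();

const policy = Policy.retry({ retries: 3, telemetry: metrics });

// ...after running work through `policy`, inspect the aggregated counters:
console.log(metrics.stats.retries, metrics.stats.failures, metrics.stats.latency.avg);
metrics.clear(); // reset everything back to zero
```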
package/src/testing.d.ts CHANGED
@@ -37,6 +37,7 @@ export interface BulkheadOptions {
  export interface TelemetryEvent {
  type: string;
  timestamp: number;
+ metrics?: Record<string, number>;
  [key: string]: any;
  }

@@ -96,7 +97,11 @@ export interface CircuitBreaker {

  export function circuitBreaker(options: CircuitBreakerOptions): CircuitBreaker;

- export function timeout<T>(ms: number, fn: ((signal: AbortSignal) => Promise<T>) | (() => Promise<T>), options?: TimeoutOptions): Promise<T>;
+ export function timeout<T>(
+ ms: number,
+ fn: ((signal: AbortSignal) => Promise<T>) | (() => Promise<T>),
+ options?: TimeoutOptions
+ ): Promise<T>;

  export interface Bulkhead {
  execute<T>(fn: () => Promise<T>): Promise<T>;
@@ -106,8 +111,14 @@ export interface Bulkhead {
  export function bulkhead(options: BulkheadOptions): Bulkhead;

  export function compose(...policies: any[]): { execute<T>(fn: () => Promise<T>): Promise<T> };
- export function fallback(primary: any, secondary: any): { execute<T>(fn: () => Promise<T>): Promise<T> };
- export function race(primary: any, secondary: any): { execute<T>(fn: () => Promise<T>): Promise<T> };
+ export function fallback(
+ primary: any,
+ secondary: any
+ ): { execute<T>(fn: () => Promise<T>): Promise<T> };
+ export function race(
+ primary: any,
+ secondary: any
+ ): { execute<T>(fn: () => Promise<T>): Promise<T> };

  export class Policy {
  constructor(executor: (fn: () => Promise<any>) => Promise<any>);
@@ -7,7 +7,7 @@ export class SystemClock {
  }

  async sleep(ms) {
- return new Promise(resolve => {
+ return new Promise((resolve) => {
  const timer = setTimeout(resolve, ms);
  if (typeof timer === 'object' && typeof timer.unref === 'function') {
  timer.unref();
@@ -39,10 +39,10 @@ export class TestClock {
  * @returns {Promise<void>}
  */
  sleep(ms) {
- return new Promise(resolve => {
+ return new Promise((resolve) => {
  this._pendingTimers.push({
  triggerAt: this._time + ms,
- resolve
+ resolve,
  });
  // Sort by trigger time
  this._pendingTimers.sort((a, b) => a.triggerAt - b.triggerAt);
@@ -0,0 +1,10 @@
+ /**
+ * Resolves a value that might be dynamic.
+ *
+ * @template T
+ * @param {T | (() => T)} value - The value or a function returning the value.
+ * @returns {T} The resolved value.
+ */
+ export function resolve(value) {
+ return typeof value === 'function' ? value() : value;
+ }
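For reference, this is the helper every policy above calls before reading a tunable value. The snippet below just illustrates its two branches; it is an internal module, so it is imported here by its in-package path rather than from the public entry point:

```javascript
import { resolve } from './utils/resolvable.js'; // internal path, as used by the policies

resolve(42);          // -> 42: plain values pass through
resolve(() => 6 * 7); // -> 42: functions are invoked at read time
```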