@julr/tenace 1.0.0-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1034 -0
- package/build/src/adapters/cache/memory.d.ts +23 -0
- package/build/src/adapters/cache/memory.js +2 -0
- package/build/src/adapters/cache/types.d.ts +56 -0
- package/build/src/adapters/cache/types.js +1 -0
- package/build/src/adapters/lock/types.d.ts +104 -0
- package/build/src/adapters/lock/types.js +1 -0
- package/build/src/adapters/rate_limiter/memory.d.ts +14 -0
- package/build/src/adapters/rate_limiter/memory.js +2 -0
- package/build/src/adapters/rate_limiter/types.d.ts +101 -0
- package/build/src/adapters/rate_limiter/types.js +1 -0
- package/build/src/backoff.d.ts +79 -0
- package/build/src/chaos/manager.d.ts +29 -0
- package/build/src/chaos/policies.d.ts +10 -0
- package/build/src/chaos/types.d.ts +75 -0
- package/build/src/collection.d.ts +81 -0
- package/build/src/config.d.ts +38 -0
- package/build/src/errors/errors.d.ts +79 -0
- package/build/src/errors/main.d.ts +1 -0
- package/build/src/errors/main.js +2 -0
- package/build/src/errors-BODHnryv.js +67 -0
- package/build/src/internal/adapter_policies.d.ts +31 -0
- package/build/src/internal/cockatiel_factories.d.ts +18 -0
- package/build/src/internal/telemetry.d.ts +50 -0
- package/build/src/main.d.ts +176 -0
- package/build/src/main.js +1125 -0
- package/build/src/memory-DWyezb1O.js +37 -0
- package/build/src/memory-DXkg8s6y.js +60 -0
- package/build/src/plugin.d.ts +30 -0
- package/build/src/policy_configurator.d.ts +108 -0
- package/build/src/semaphore.d.ts +71 -0
- package/build/src/tenace_builder.d.ts +22 -0
- package/build/src/tenace_policy.d.ts +41 -0
- package/build/src/types/backoff.d.ts +57 -0
- package/build/src/types/collection.d.ts +46 -0
- package/build/src/types/main.d.ts +5 -0
- package/build/src/types/main.js +1 -0
- package/build/src/types/plugin.d.ts +61 -0
- package/build/src/types/types.d.ts +241 -0
- package/build/src/wait_for.d.ts +23 -0
- package/package.json +135 -0
package/README.md
ADDED
@@ -0,0 +1,1034 @@
# Tenace

A fluent resilience library for Node.js. Make any async operation resilient with timeout, retry, circuit breaker, bulkhead, and more.

## Installation

```bash
pnpm add @julr/tenace
```

### Imports

```ts
// Main entry point
import { Tenace, Semaphore, configStore, backoff } from '@julr/tenace'

// Error classes
import { TimeoutError, CircuitOpenError, RateLimitError, ... } from '@julr/tenace/errors'

// Type definitions
import type { RetryConfig, CircuitState, ... } from '@julr/tenace/types'

// Adapters (for cache, rate limiter, distributed lock)
import { MemoryCacheAdapter, MemoryRateLimiterAdapter } from '@julr/tenace/adapters'
import type { CacheAdapter, RateLimiterAdapter, LockAdapter } from '@julr/tenace/adapters'
```

## Quick Start

```ts
import { Tenace } from '@julr/tenace'

// Without Tenace: hope for the best
const user = await fetch('/api/users/1')

// With Tenace: be resilient
const user = await Tenace.call(() => fetch('/api/users/1'))
  .withTimeout('5s')
  .withRetry({ times: 3 })
  .withFallback(() => ({ id: 1, name: 'Guest' }))
  .execute()
```

## Understanding the Pipeline

Tenace uses a **pipeline** system for policies. **The order you add policies = the order errors flow through them.**

### The Golden Rule

> **First added = Innermost layer (handles errors first)**
> **Last added = Outermost layer (catches errors last)**

Think of it like a pipeline: `fn → timeout → retry → fallback`

```
┌───────────────────────────────────────────────────────┐
│ .withFallback() ← 4th: Catches ALL errors (outermost) │
│ ┌───────────────────────────────────────────────────┐ │
│ │ .withRetry() ← 3rd: Retries timeout errors        │ │
│ │ ┌───────────────────────────────────────────────┐ │ │
│ │ │ .withTimeout() ← 2nd: Timeout per attempt     │ │ │
│ │ │ ┌───────────────────────────────────────────┐ │ │ │
│ │ │ │ .withCircuitBreaker() ← 1st: Tracks       │ │ │ │
│ │ │ │ ┌───────────────────────────────────────┐ │ │ │ │
│ │ │ │ │                                       │ │ │ │ │
│ │ │ │ │             YOUR FUNCTION             │ │ │ │ │
│ │ │ │ │                                       │ │ │ │ │
│ │ │ │ └───────────────────────────────────────┘ │ │ │ │
│ │ │ └───────────────────────────────────────────┘ │ │ │
│ │ └───────────────────────────────────────────────┘ │ │
│ └───────────────────────────────────────────────────┘ │
└───────────────────────────────────────────────────────┘
```

### Order Matters: Common Patterns

#### Pattern 1: Timeout per attempt vs total timeout

```ts
// ✅ 5s timeout PER attempt
// timeout is inner (first), retry is outer (second)
Tenace.call(fn)
  .withTimeout('5s')
  .withRetry({ times: 5 })
  .execute()

// ⚠️ TOTAL timeout of 5s for ALL retries combined
// retry is inner (first), timeout is outer (second)
Tenace.call(fn)
  .withRetry({ times: 5 })
  .withTimeout('5s')
  .execute()
```

#### Pattern 2: Where to place fallback

```ts
// ✅ Fallback catches EVERYTHING (recommended)
// retry first (inner) → fallback second (outer)
Tenace.call(fn)
  .withRetry({ times: 3 })
  .withTimeout('5s')
  .withFallback(() => defaultValue)
  .execute()

// ⚠️ Fallback only catches fn errors, not retry/timeout
// fallback is inner, retry is outer
Tenace.call(fn)
  .withFallback(() => defaultValue)
  .withRetry({ times: 3 })
  .withTimeout('5s')
  .execute()
```

#### Pattern 3: Circuit breaker placement

```ts
// ✅ RECOMMENDED: Circuit breaker inside retry
// CB first (inner) → retry second (outer)
// Each retry attempt is tracked separately
Tenace.call(fn)
  .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })
  .withRetry({ times: 3 })
  .execute()

// ⚠️ Circuit breaker outside retry
// retry is inner, CB is outer
// Only the final result (after all retries) is tracked
Tenace.call(fn)
  .withRetry({ times: 3 })
  .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })
  .execute()
```

### Recommended Order

For most use cases (reading order = error handling order):

```ts
Tenace.call(fn)
  .withCircuitBreaker(...)            // 1. Failure tracking (innermost)
  .withTimeout('5s')                  // 2. Timeout per attempt
  .withRetry({ times: 3 })            // 3. Retry on timeout/error
  .withFallback(() => defaultValue)   // 4. Catch-all safety net (outermost)
  .execute()
```

### Errors That Stop Retries

The retry policy will **NOT** retry these errors (fail-fast behavior):

| Error               | Reason                                        |
| ------------------- | --------------------------------------------- |
| `CircuitOpenError`  | Circuit breaker is open, no point retrying    |
| `BulkheadFullError` | System is overloaded, retrying makes it worse |

All other errors (including `TimeoutError`) **will** be retried.
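
For example, when the circuit breaker is already open, the retry policy gives up immediately instead of hammering a service that is known to be failing. A minimal sketch (`fetchQuote` is a hypothetical flaky call):

```ts
import { Tenace } from '@julr/tenace'
import { CircuitOpenError } from '@julr/tenace/errors'

try {
  await Tenace.call(() => fetchQuote()) // fetchQuote: placeholder for any flaky call
    .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })
    .withRetry({ times: 3, delay: '1s' }) // CircuitOpenError is NOT retried
    .execute()
} catch (error) {
  if (error instanceof CircuitOpenError) {
    // Surfaced immediately: the retry policy did not burn its 3 attempts
    console.log('Service unavailable, circuit is open')
  }
}
```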

## Policies

### Timeout

Set a maximum execution time. Supports two strategies:

```ts
// Cooperative (default): Passes AbortSignal, function should respect it
Tenace.call(({ signal }) => fetch('/api', { signal }))
  .withTimeout('5s')
  .execute()

// Aggressive: Rejects immediately on timeout (function may continue in background)
Tenace.call(() => slowOperation())
  .withTimeout('5s', 'aggressive')
  .execute()
```

### Retry

Retry failed operations with configurable backoff:

```ts
// Simple retry
Tenace.call(fn)
  .withRetry({ times: 3 })
  .execute()

// With fixed delay
Tenace.call(fn)
  .withRetry({ times: 3, delay: '1s' })
  .execute()

// With exponential backoff
Tenace.call(fn)
  .withRetry({
    times: 5,
    delay: (attempt) => Math.min(1000 * 2 ** attempt, 30000),
  })
  .execute()

// Dynamic delay based on error (e.g., rate limit retry-after)
Tenace.call(fn)
  .withRetry({
    times: 5,
    delay: (attempt, error) => {
      if (error instanceof RateLimitError) return error.retryAfterMs
      return 1000 * 2 ** attempt
    },
  })
  .execute()

// Conditional retry
Tenace.call(fn)
  .withRetry({
    times: 3,
    retryIf: (error) => error.status === 503, // Only retry 503s
    abortIf: (error) => error.status === 401, // Never retry 401s
  })
  .execute()
```

#### Backoff Presets

Use the `backoff` helper for common backoff patterns:

```ts
import { Tenace, backoff } from '@julr/tenace'

// Constant delay (1s between each retry)
Tenace.call(fn)
  .withRetry({ times: 3, delay: backoff.constant('1s') })
  .execute()

// Exponential backoff: 100ms, 200ms, 400ms, 800ms... (capped at 30s)
Tenace.call(fn)
  .withRetry({ times: 5, delay: backoff.exponential({ initial: 100, max: 30_000 }) })
  .execute()

// Exponential with jitter (prevents thundering herd)
Tenace.call(fn)
  .withRetry({ times: 5, delay: backoff.exponentialWithJitter({ initial: 100, max: 30_000 }) })
  .execute()

// Linear backoff: 100ms, 200ms, 300ms, 400ms...
Tenace.call(fn)
  .withRetry({ times: 5, delay: backoff.linear({ initial: 100, step: 100, max: 5_000 }) })
  .execute()
```

Available presets:

| Preset                                | Description                 | Options                                  |
| ------------------------------------- | --------------------------- | ---------------------------------------- |
| `backoff.constant(delay)`             | Fixed delay between retries | `Duration`                               |
| `backoff.exponential(opts)`           | Doubles each time (default) | `{ initial?, max?, exponent? }`          |
| `backoff.exponentialWithJitter(opts)` | Exponential + randomization | `{ initial?, max?, exponent?, jitter? }` |
| `backoff.linear(opts)`                | Increases by fixed step     | `{ initial?, step?, max? }`              |

Jitter strategies for `exponentialWithJitter`:

- `'full'` (default): Random delay between 0 and calculated delay
- `'decorrelated'`: AWS-style decorrelated jitter ([recommended](https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/)); see the sketch below
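
A minimal sketch of selecting the decorrelated strategy through the `jitter` option listed in the preset table above:

```ts
import { Tenace, backoff } from '@julr/tenace'

Tenace.call(fn)
  .withRetry({
    times: 5,
    delay: backoff.exponentialWithJitter({ initial: 100, max: 30_000, jitter: 'decorrelated' }),
  })
  .execute()
```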

### Circuit Breaker

Stop calling a failing service. After N failures, the circuit "opens" and all calls fail immediately with `CircuitOpenError`.

```ts
const policy = Tenace.policy()
  .withCircuitBreaker({
    failureThreshold: 5, // Open after 5 consecutive failures
    halfOpenAfter: '30s', // Try again after 30s
    hooks: {
      onOpen: () => console.log('Circuit opened!'),
      onClose: () => console.log('Circuit closed.'),
      onHalfOpen: () => console.log('Circuit half-open, testing...'),
      onStateChange: (state) => metrics.gauge('circuit', state),
    },
  })

// Check circuit breaker state
policy.circuitBreaker?.state // 'closed' | 'open' | 'half-open'
policy.circuitBreaker?.isOpen // boolean

// Manual isolation (maintenance mode)
const handle = policy.circuitBreaker?.isolate()
// All calls now fail with CircuitIsolatedError
handle?.dispose() // Release isolation
```

### Bulkhead

Limit concurrent executions to protect downstream services:

```ts
const policy = Tenace.policy()
  .withBulkhead(10, 100) // max 10 concurrent, 100 in queue

await policy.call(() => heavyOperation()).execute()
// Throws BulkheadFullError if queue is also full
```

### Fallback

Return a default value when everything fails:

```ts
const user = await Tenace.call(() => fetchUser(id))
  .withFallback(() => ({ id, name: 'Unknown', cached: true }))
  .execute()

// Async fallback
const user = await Tenace.call(() => fetchUser(id))
  .withFallback(async () => {
    return cache.get(`user:${id}`)
  })
  .execute()
```

## Adapters

Cache, Rate Limiter, and Distributed Lock are **integrated into the pipeline**. Order matters - just like other policies!

### Cache

Cache successful results to avoid redundant calls:

```ts
// Basic caching
const user = await Tenace.call(() => fetchUser(id))
  .withCache({ key: `user:${id}`, ttl: 60_000 }) // 1 minute TTL
  .execute()
```

#### Order patterns for cache

```ts
// ✅ Fallback BEFORE cache = fallback values ARE cached
// fallback (inner) → cache (outer) = cache stores fallback result
Tenace.call(fn)
  .withFallback(() => defaultValue)
  .withCache({ key: 'x', ttl: 60_000 })
  .execute()

// ⚠️ Cache BEFORE fallback = fallback values NOT cached
// cache (inner) → fallback (outer) = cache doesn't see fallback result
Tenace.call(fn)
  .withCache({ key: 'x', ttl: 60_000 })
  .withFallback(() => defaultValue)
  .execute()

// ✅ RateLimit BEFORE cache = no token consumed on cache hit
// rateLimit (inner) → cache (outer) = cache checks first
Tenace.call(fn)
  .withRateLimit({ key: 'api', maxCalls: 100, windowMs: 60_000 })
  .withCache({ key: 'x', ttl: 60_000 })
  .execute()
```

#### Graceful degradation

```ts
// If Redis is down, continue without cache (don't fail the request)
await Tenace.call(fn)
  .withCache({ key: 'x', ttl: 60_000, optional: true })
  .execute()
```

**Built-in adapter**: `MemoryCacheAdapter` (uses Bentocache under the hood)

```ts
import { MemoryCacheAdapter } from '@julr/tenace/adapters'

// Use globally
import { configStore } from '@julr/tenace'
configStore.configure({
  cache: new MemoryCacheAdapter({ maxSize: '50mb', maxItems: 5000 })
})

// Or per-call
const customCache = new MemoryCacheAdapter()
await Tenace.call(fn)
  .withCache({ key: 'my-key', ttl: 60_000, adapter: customCache })
  .execute()
```

**Custom adapter**: Implement `CacheAdapter` interface

```ts
import type { CacheAdapter } from '@julr/tenace/adapters'

class RedisCacheAdapter implements CacheAdapter {
  async get<T>(key: string): Promise<T | undefined> { /* ... */ }
  async set<T>(key: string, value: T, ttlMs: number): Promise<void> { /* ... */ }
  async delete(key: string): Promise<void> { /* ... */ }
  async has(key: string): Promise<boolean> { /* ... */ }
}
```

### Rate Limiter

Limit call frequency to protect APIs or respect external rate limits:

```ts
// 100 calls per minute
const result = await Tenace.call(() => callExternalApi())
  .withRateLimit({
    key: 'external-api',
    maxCalls: 100,
    windowMs: 60_000,
  })
  .execute()
```

Throws `RateLimitError` when the limit is exceeded. The error includes `retryAfterMs`.
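
If you handle the error yourself instead of retrying, `retryAfterMs` tells you how long to back off. A minimal sketch (reusing the hypothetical `callExternalApi` from above):

```ts
import { RateLimitError } from '@julr/tenace/errors'

try {
  await Tenace.call(() => callExternalApi())
    .withRateLimit({ key: 'external-api', maxCalls: 100, windowMs: 60_000 })
    .execute()
} catch (error) {
  if (error instanceof RateLimitError) {
    console.log(`Rate limited, retry in ${error.retryAfterMs}ms`)
  }
}
```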

#### Order patterns for rate limit

```ts
// ✅ RateLimit BEFORE fallback = fallback catches RateLimitError
// rateLimit (inner) → fallback (outer)
Tenace.call(fn)
  .withRateLimit({ key: 'api', maxCalls: 100, windowMs: 60_000 })
  .withFallback(() => defaultValue)
  .execute()

// ⚠️ Fallback BEFORE rateLimit = RateLimitError NOT caught by fallback
// fallback (inner) → rateLimit (outer)
Tenace.call(fn)
  .withFallback(() => defaultValue)
  .withRateLimit({ key: 'api', maxCalls: 100, windowMs: 60_000 })
  .execute()

// ✅ Retry with error-based delay for rate limit handling
await Tenace.call(fn)
  .withFallback(() => defaultValue)
  .withRetry({
    times: 3,
    delay: (attempt, error) => {
      if (error instanceof RateLimitError) return error.retryAfterMs
      return 1000
    }
  })
  .withRateLimit({ key: 'api', maxCalls: 100, windowMs: 60_000 })
  .execute()
```

#### Graceful degradation

```ts
// If Redis is down, allow the call through (don't enforce rate limit)
await Tenace.call(fn)
  .withRateLimit({ key: 'api', maxCalls: 100, windowMs: 60_000, optional: true })
  .execute()
```

**Built-in adapter**: `MemoryRateLimiterAdapter` (uses rate-limiter-flexible)

```ts
import { MemoryRateLimiterAdapter } from '@julr/tenace/adapters'
import { configStore } from '@julr/tenace'

configStore.configure({
  rateLimiter: new MemoryRateLimiterAdapter()
})
```

**Custom adapter**: Implement `RateLimiterAdapter` interface

```ts
import type { RateLimiterAdapter, RateLimitConfig, RateLimitResult, RateLimitState } from '@julr/tenace/adapters'

class RedisRateLimiterAdapter implements RateLimiterAdapter {
  async acquire(key: string, options: RateLimitConfig): Promise<RateLimitResult> { /* ... */ }
  async getState(key: string): Promise<RateLimitState> { /* ... */ }
  async reset(key: string): Promise<void> { /* ... */ }
}
```

### Distributed Lock

Ensure only one process executes a critical section at a time (useful for distributed systems):

```ts
// Process payment with distributed lock
const result = await Tenace.call(() => processPayment(orderId))
  .withDistributedLock({
    key: `payment:${orderId}`,
    ttl: 30_000, // Lock expires after 30s (prevents deadlocks)
  })
  .execute()
```

Throws `LockNotAcquiredError` if the lock cannot be acquired.
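
If you want to handle that case yourself rather than through a fallback, a minimal sketch (reusing the hypothetical `processPayment` from above):

```ts
import { LockNotAcquiredError } from '@julr/tenace/errors'

try {
  await Tenace.call(() => processPayment(orderId))
    .withDistributedLock({ key: `payment:${orderId}`, ttl: 30_000 })
    .execute()
} catch (error) {
  if (error instanceof LockNotAcquiredError) {
    // Another process already holds the lock for this order
    console.log('Payment already being processed elsewhere')
  }
}
```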

#### Order patterns for lock

```ts
// ✅ Retry BEFORE lock = lock held during ALL retries (atomicity)
// retry (inner) → lock (outer) = lock acquired once, held during all retries
Tenace.call(fn)
  .withRetry({ times: 3 })
  .withDistributedLock({ key: 'payment', ttl: 30_000 })
  .execute()

// ⚠️ Lock BEFORE retry = each retry acquires its own lock
// lock (inner) → retry (outer) = lock released between retries
Tenace.call(fn)
  .withDistributedLock({ key: 'payment', ttl: 10_000 })
  .withRetry({ times: 3 })
  .execute()

// ✅ Lock BEFORE fallback = fallback catches LockNotAcquiredError
// lock (inner) → fallback (outer)
Tenace.call(fn)
  .withDistributedLock({ key: 'payment', ttl: 30_000 })
  .withFallback(() => defaultValue)
  .execute()
```

**No built-in adapter** - You must provide one (e.g., using [@verrou/core](https://github.com/Julien-R44/verrou)):

```ts
import { Verrou } from '@verrou/core'
import { redisStore } from '@verrou/core/drivers/redis'
import { configStore } from '@julr/tenace'
import type { LockAdapter } from '@julr/tenace/adapters'

const verrou = new Verrou({ default: 'redis', stores: { redis: redisStore({ /* ... */ }) } })

class VerrouLockAdapter implements LockAdapter {
  async run<T>(key: string, ttl: number, fn: () => Promise<T>, options?: { retry?: { attempts?: number; delay?: number; timeout?: number } }): Promise<[boolean, T | undefined]> {
    const lock = verrou.createLock(key, ttl)
    return lock.run(fn, { retry: options?.retry })
  }
}

configStore.configure({ lock: new VerrouLockAdapter() })
```

**With retry** for lock acquisition:

```ts
await Tenace.call(fn)
  .withDistributedLock({
    key: 'critical-section',
    ttl: 10_000,
    retry: {
      attempts: 5,
      delay: 100,
      timeout: 5000,
    }
  })
  .execute()
```

## Reusable Policies

Create a policy once, reuse everywhere. **Circuit breaker and bulkhead states are shared across calls.**

```ts
const apiPolicy = Tenace.policy()
  .withTimeout('5s')
  .withRetry({ times: 3, delay: (attempt) => 100 * 2 ** attempt })
  .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })

// All calls share the same circuit breaker state!
await apiPolicy.call(() => fetch('/api/users')).execute()
await apiPolicy.call(() => fetch('/api/posts')).execute()

// If 5 total failures occur, ALL calls will fail with CircuitOpenError
```

### Health Check Example

```ts
class UserService {
  #policy = Tenace.policy()
    .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })

  async getUser(id: string) {
    return this.#policy.call(() => this.api.fetchUser(id)).execute()
  }

  // Expose for Kubernetes probes
  getHealthStatus() {
    if (this.#policy.circuitBreaker?.isOpen) {
      return { status: 'degraded', reason: 'circuit-open' }
    }
    return { status: 'healthy' }
  }
}
```

## Batch Operations

### Process items with concurrency control

```ts
const userIds = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

const users = await Tenace.map(userIds, (id) => fetchUser(id))
  .withConcurrency(3) // max 3 parallel requests
  .withRetryPerTask(2) // retry each task up to 2 times
  .withTimeoutPerTask('5s') // 5s timeout per task
  .execute()
```

### Run multiple tasks in parallel

```ts
const [users, posts, config] = await Tenace.all([
  () => fetchUsers(),
  () => fetchPosts(),
  () => fetchConfig(),
])
  .withConcurrency(3)
  .execute()
```

### Get results even if some fail

```ts
const results = await Tenace.map(urls, (url) => fetch(url))
  .withConcurrency(5)
  .settle() // Never throws

for (const result of results) {
  if (result.status === 'fulfilled') {
    console.log('Success:', result.value)
  } else {
    console.log('Failed:', result.reason.message)
  }
}
```

### Progress tracking

```ts
await Tenace.map(files, (file) => uploadFile(file))
  .withConcurrency(5)
  .onProgress(({ completed, total }) => {
    console.log(`Progress: ${completed}/${total}`)
  })
  .onTaskComplete((value, { index }) => {
    console.log(`File ${index} uploaded`)
  })
  .onTaskError((error, { index }) => {
    console.log(`File ${index} failed: ${error.message}`)
  })
  .execute()
```

## Semaphore

Low-level concurrency control:

```ts
import { Semaphore } from '@julr/tenace'

const sem = new Semaphore(5) // max 5 concurrent

// Run with automatic acquire/release
await sem.run(() => doWork())

// Wrap a function
const limitedFetch = sem.wrap(fetch)
await limitedFetch('/api/data')

// Map with concurrency
const results = await sem.map(items, (item) => process(item))

// Manual acquire/release
const release = await sem.acquire()
try {
  await doWork()
} finally {
  release()
}
```

## Waiting for Conditions

Poll until a condition becomes true. Useful for waiting on eventual consistency or service readiness.

```ts
// Wait for a service to be healthy
await Tenace.waitFor(() => isServiceHealthy(), {
  interval: '1s',
  timeout: '30s',
})

// Wait for a database connection
await Tenace.waitFor(async () => {
  try {
    await db.ping()
    return true
  } catch {
    return false
  }
}, { interval: '500ms', timeout: '1m' })

// Wait for eventual consistency
await orderApi.create(order)
await Tenace.waitFor(
  () => orderApi.exists(order.id),
  { interval: '100ms', timeout: '5s' }
)
```

Options:

- `interval`: Time between checks (default: `100ms`)
- `timeout`: Max wait time (default: `10s`)
- `message`: Custom error message
- `before`: Run the first check immediately instead of waiting one interval (default: `true`)
- `signal`: AbortSignal to cancel the wait (see the sketch below)
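
A minimal sketch combining the `message` and `signal` options with the earlier health check:

```ts
const controller = new AbortController()

await Tenace.waitFor(() => isServiceHealthy(), {
  interval: '1s',
  timeout: '30s',
  message: 'Service did not become healthy within 30s',
  signal: controller.signal, // e.g. abort the wait on shutdown
})
```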

## Chaos Engineering

Test your resilience patterns by injecting failures and latency.

### Global Chaos

Enable chaos globally to affect all Tenace calls:

```ts
import { Tenace } from '@julr/tenace'

// Enable 100% failure rate
Tenace.chaos.enable({ fault: 1 })

// Enable 500ms latency
Tenace.chaos.enable({ latency: 500 })

// Combined with configuration
Tenace.chaos.enable({
  fault: { rate: 0.1, error: new Error('Random failure') },
  latency: { rate: 0.2, delay: { min: 100, max: 2000 } },
})

// Disable
Tenace.chaos.disable()

// Check status
Tenace.chaos.isEnabled()
```

### Testing Example

```ts
import { afterEach, expect, test } from 'vitest'

afterEach(() => Tenace.chaos.disable())

test('returns cached user when API fails', async () => {
  await cache.set('user:123', { id: 123, name: 'John' })
  Tenace.chaos.enable({ fault: 1 })

  const user = await userService.getUser(123)
  expect(user.name).toBe('John')
})

test('handles slow responses gracefully', async () => {
  Tenace.chaos.enable({ latency: 2000 })

  const start = Date.now()
  const result = await userService.getUser(123) // Has 1s timeout + fallback

  expect(Date.now() - start).toBeLessThan(1500)
  expect(result.cached).toBe(true)
})
```

### Per-call Chaos

Add chaos to specific calls:

```ts
await Tenace.call(() => fetchUser(id))
  .withChaosLatency({ rate: 1, delay: 2000 })
  .execute()

await Tenace.call(() => sendEmail(to, body))
  .withChaosFault({ rate: 0.5, error: new Error('SMTP timeout') })
  .execute()
```

## Hooks

Tenace provides two hook systems for observing and reacting to resilience events.

### Config Hooks

Attach hooks directly to a specific policy:

```ts
// Retry hooks
Tenace.call(fn)
  .withRetry({
    times: 3,
    onRetry: (e) => console.log(`Retry ${e.attempt}: ${e.error.message}`),
    onRetryExhausted: (e) => console.log('All retries failed:', e.error),
  })
  .execute()

// Timeout hook
Tenace.call(fn)
  .withTimeout('5s', { strategy: 'aggressive', onTimeout: () => console.log('Timed out!') })
  .execute()

// Bulkhead hook
Tenace.call(fn)
  .withBulkhead(10, { queue: 5, onRejected: () => console.log('Bulkhead full') })
  .execute()

// Fallback hook
Tenace.call(fn)
  .withFallback(() => 'default', { onFallback: () => console.log('Using fallback') })
  .execute()

// Cache hooks
Tenace.call(fn)
  .withCache({
    key: 'user:123',
    ttl: 5000,
    onHit: ({ key }) => console.log(`Cache hit: ${key}`),
    onMiss: ({ key }) => console.log(`Cache miss: ${key}`),
  })
  .execute()

// RateLimit hook
Tenace.call(fn)
  .withRateLimit({
    key: 'api',
    maxCalls: 10,
    windowMs: 1000,
    onRejected: ({ key, retryAfterMs }) => console.log(`Rate limited: ${key}`),
  })
  .execute()

// Lock hooks
Tenace.call(fn)
  .withDistributedLock({
    key: 'resource',
    ttl: 5000,
    onAcquired: ({ key }) => console.log(`Lock acquired: ${key}`),
    onRejected: ({ key }) => console.log(`Lock failed: ${key}`),
  })
  .execute()
```

### Global Hooks

Register hooks that apply to ALL Tenace operations:

```ts
// Register a global hook (returns unsubscribe function)
const unsubscribe = Tenace.hooks.onRetry((e) => {
  console.log(`Retry ${e.attempt}: ${e.error.message}`)
})

// Later, unsubscribe
unsubscribe()
```

Available global hooks:

| Hook                                    | Description               |
| --------------------------------------- | ------------------------- |
| `Tenace.hooks.onRetry(fn)`              | Retry triggered           |
| `Tenace.hooks.onRetryExhausted(fn)`     | All retries failed        |
| `Tenace.hooks.onTimeout(fn)`            | Operation timed out       |
| `Tenace.hooks.onCircuitOpened(fn)`      | Circuit breaker opened    |
| `Tenace.hooks.onCircuitClosed(fn)`      | Circuit breaker closed    |
| `Tenace.hooks.onCircuitHalfOpened(fn)`  | Circuit breaker half-open |
| `Tenace.hooks.onBulkheadRejected(fn)`   | Bulkhead full             |
| `Tenace.hooks.onCacheHit(fn)`           | Cache hit                 |
| `Tenace.hooks.onCacheMiss(fn)`          | Cache miss                |
| `Tenace.hooks.onRateLimitRejected(fn)`  | Rate limit exceeded       |
| `Tenace.hooks.onFallback(fn)`           | Fallback triggered        |
| `Tenace.hooks.onLockAcquired(fn)`       | Lock acquired             |
| `Tenace.hooks.onLockRejected(fn)`       | Lock not acquired         |
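
For instance, a few of these could be wired to a metrics client at application startup. A minimal sketch, assuming each registration returns an unsubscribe function like `onRetry` above (`metrics` is a placeholder for whatever client you use):

```ts
const unsubscribeTimeout = Tenace.hooks.onTimeout(() => metrics.increment('tenace.timeout'))
const unsubscribeCircuit = Tenace.hooks.onCircuitOpened(() => metrics.increment('tenace.circuit_open'))

// On shutdown, stop observing
unsubscribeTimeout()
unsubscribeCircuit()
```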

### Plugin API

For more advanced use cases (e.g., telemetry packages), use the plugin API:

```ts
import { use, type TenacePlugin } from '@julr/tenace'

const myPlugin: TenacePlugin = {
  onRetry: (e) => metrics.increment('retry', { attempt: e.attempt }),
  onCircuitOpened: () => metrics.increment('circuit_open'),
}

use(myPlugin)
```

## Error Types

All errors extend `TenaceError`:

```ts
import {
  TenaceError, // Base class
  TimeoutError, // Operation timed out
  CancelledError, // Operation cancelled via AbortSignal
  CircuitOpenError, // Circuit breaker is open
  CircuitIsolatedError, // Circuit breaker manually isolated
  BulkheadFullError, // Bulkhead capacity exceeded
  AbortError, // AbortSignal triggered
  RateLimitError, // Rate limit exceeded (has retryAfterMs)
  LockNotAcquiredError, // Distributed lock not acquired
  WaitForTimeoutError, // waitFor() timed out
} from '@julr/tenace/errors'
```

### Usage

```ts
import { TimeoutError, CircuitOpenError } from '@julr/tenace/errors'

try {
  await Tenace.call(() => slowOperation())
    .withTimeout('1s')
    .withCircuitBreaker({ failureThreshold: 5, halfOpenAfter: '30s' })
    .execute()
} catch (error) {
  if (error instanceof TimeoutError) {
    console.log('Operation timed out')
  } else if (error instanceof CircuitOpenError) {
    console.log('Circuit breaker is open, service is down')
  }
}
```

## Durations

All duration parameters accept numbers (milliseconds) or human-readable strings:

```ts
.withTimeout('5s') // 5 seconds
.withTimeout('500ms') // 500 milliseconds
.withTimeout('1m') // 1 minute
.withTimeout('1h') // 1 hour
.withTimeout(5000) // 5000 ms
```

## API Reference

### `Tenace.call(fn)`

Execute a single operation with resilience.

```ts
Tenace.call(fn)
  .withTimeout(duration, strategy?) // 'cooperative' | 'aggressive'
  .withRetry(options?) // { times, delay, retryIf, abortIf } - use backoff.* for delay
  .withCircuitBreaker(options) // { failureThreshold, halfOpenAfter, hooks? }
  .withBulkhead(limit, queue?) // Concurrency limiter
  .withFallback(fn) // Default value on failure
  .withCache(options) // { key, ttl, adapter? }
  .withRateLimit(options) // { key, maxCalls, windowMs, adapter? }
  .withDistributedLock(options) // { key, ttl, retry?, adapter? }
  .withSpan(name, attributes?) // OpenTelemetry tracing
  .withChaosFault(options) // { rate, error?, errors? }
  .withChaosLatency(options) // { rate, delay }
  .execute() // Run and return result
```

### `Tenace.policy()`

Create a reusable policy with shared state for circuit breaker and bulkhead.

```ts
const policy = Tenace.policy()
  .withTimeout(duration)
  .withRetry(options)
  .withCircuitBreaker(options)
  .withBulkhead(limit, queue?)

await policy.call(fn).execute()

// Access circuit breaker state
policy.circuitBreaker?.state
policy.circuitBreaker?.isolate()
```

### `Tenace.all(tasks)` / `Tenace.map(items, fn)`

Batch operations with concurrency control.

```ts
Tenace.all(tasks) // or Tenace.map(items, fn)
  .withConcurrency(limit)
  .withRetryPerTask(times, options?) // options: { delay, retryIf, abortIf } - use backoff.* for delay
  .withTimeoutPerTask(duration, strategy?)
  .withSignal(signal)
  .stopOnError(boolean)
  .onProgress(fn)
  .onTaskComplete(fn)
  .onTaskError(fn)
  .execute() // Throws on first error
  .settle() // Returns all results (never throws)
```

### `Tenace.waitFor(condition, options?)`

Wait for a condition to become true.

```ts
await Tenace.waitFor(
  () => boolean | Promise<boolean>,
  {
    interval?: Duration, // Default: 100ms
    timeout?: Duration, // Default: 10s
    message?: string,
    before?: boolean, // Default: true
    signal?: AbortSignal,
  }
)
```

### `Tenace.chaos`

Global chaos injection for testing.

```ts
Tenace.chaos.enable({ fault?, latency? })
Tenace.chaos.disable()
Tenace.chaos.isEnabled()
```

## License

MIT