flowx-control 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112) hide show
  1. package/README.md +443 -0
  2. package/dist/batch.d.mts +37 -0
  3. package/dist/batch.d.ts +37 -0
  4. package/dist/batch.js +75 -0
  5. package/dist/batch.js.map +1 -0
  6. package/dist/batch.mjs +73 -0
  7. package/dist/batch.mjs.map +1 -0
  8. package/dist/bulkhead.d.mts +30 -0
  9. package/dist/bulkhead.d.ts +30 -0
  10. package/dist/bulkhead.js +85 -0
  11. package/dist/bulkhead.js.map +1 -0
  12. package/dist/bulkhead.mjs +83 -0
  13. package/dist/bulkhead.mjs.map +1 -0
  14. package/dist/circuit-breaker.d.mts +44 -0
  15. package/dist/circuit-breaker.d.ts +44 -0
  16. package/dist/circuit-breaker.js +132 -0
  17. package/dist/circuit-breaker.js.map +1 -0
  18. package/dist/circuit-breaker.mjs +130 -0
  19. package/dist/circuit-breaker.mjs.map +1 -0
  20. package/dist/debounce.d.mts +30 -0
  21. package/dist/debounce.d.ts +30 -0
  22. package/dist/debounce.js +96 -0
  23. package/dist/debounce.js.map +1 -0
  24. package/dist/debounce.mjs +94 -0
  25. package/dist/debounce.mjs.map +1 -0
  26. package/dist/deferred.d.mts +27 -0
  27. package/dist/deferred.d.ts +27 -0
  28. package/dist/deferred.js +42 -0
  29. package/dist/deferred.js.map +1 -0
  30. package/dist/deferred.mjs +40 -0
  31. package/dist/deferred.mjs.map +1 -0
  32. package/dist/fallback.d.mts +33 -0
  33. package/dist/fallback.d.ts +33 -0
  34. package/dist/fallback.js +43 -0
  35. package/dist/fallback.js.map +1 -0
  36. package/dist/fallback.mjs +40 -0
  37. package/dist/fallback.mjs.map +1 -0
  38. package/dist/hedge.d.mts +18 -0
  39. package/dist/hedge.d.ts +18 -0
  40. package/dist/hedge.js +47 -0
  41. package/dist/hedge.js.map +1 -0
  42. package/dist/hedge.mjs +45 -0
  43. package/dist/hedge.mjs.map +1 -0
  44. package/dist/index.d.mts +18 -0
  45. package/dist/index.d.ts +18 -0
  46. package/dist/index.js +1151 -0
  47. package/dist/index.js.map +1 -0
  48. package/dist/index.mjs +1122 -0
  49. package/dist/index.mjs.map +1 -0
  50. package/dist/memo.d.mts +35 -0
  51. package/dist/memo.d.ts +35 -0
  52. package/dist/memo.js +74 -0
  53. package/dist/memo.js.map +1 -0
  54. package/dist/memo.mjs +72 -0
  55. package/dist/memo.mjs.map +1 -0
  56. package/dist/mutex.d.mts +24 -0
  57. package/dist/mutex.d.ts +24 -0
  58. package/dist/mutex.js +46 -0
  59. package/dist/mutex.js.map +1 -0
  60. package/dist/mutex.mjs +44 -0
  61. package/dist/mutex.mjs.map +1 -0
  62. package/dist/pipeline.d.mts +42 -0
  63. package/dist/pipeline.d.ts +42 -0
  64. package/dist/pipeline.js +30 -0
  65. package/dist/pipeline.js.map +1 -0
  66. package/dist/pipeline.mjs +27 -0
  67. package/dist/pipeline.mjs.map +1 -0
  68. package/dist/poll.d.mts +35 -0
  69. package/dist/poll.d.ts +35 -0
  70. package/dist/poll.js +111 -0
  71. package/dist/poll.js.map +1 -0
  72. package/dist/poll.mjs +109 -0
  73. package/dist/poll.mjs.map +1 -0
  74. package/dist/queue.d.mts +47 -0
  75. package/dist/queue.d.ts +47 -0
  76. package/dist/queue.js +121 -0
  77. package/dist/queue.js.map +1 -0
  78. package/dist/queue.mjs +119 -0
  79. package/dist/queue.mjs.map +1 -0
  80. package/dist/rate-limit.d.mts +28 -0
  81. package/dist/rate-limit.d.ts +28 -0
  82. package/dist/rate-limit.js +94 -0
  83. package/dist/rate-limit.js.map +1 -0
  84. package/dist/rate-limit.mjs +92 -0
  85. package/dist/rate-limit.mjs.map +1 -0
  86. package/dist/retry.d.mts +45 -0
  87. package/dist/retry.d.ts +45 -0
  88. package/dist/retry.js +111 -0
  89. package/dist/retry.js.map +1 -0
  90. package/dist/retry.mjs +108 -0
  91. package/dist/retry.mjs.map +1 -0
  92. package/dist/semaphore.d.mts +27 -0
  93. package/dist/semaphore.d.ts +27 -0
  94. package/dist/semaphore.js +47 -0
  95. package/dist/semaphore.js.map +1 -0
  96. package/dist/semaphore.mjs +45 -0
  97. package/dist/semaphore.mjs.map +1 -0
  98. package/dist/throttle.d.mts +25 -0
  99. package/dist/throttle.d.ts +25 -0
  100. package/dist/throttle.js +97 -0
  101. package/dist/throttle.js.map +1 -0
  102. package/dist/throttle.mjs +95 -0
  103. package/dist/throttle.mjs.map +1 -0
  104. package/dist/timeout.d.mts +19 -0
  105. package/dist/timeout.d.ts +19 -0
  106. package/dist/timeout.js +79 -0
  107. package/dist/timeout.js.map +1 -0
  108. package/dist/timeout.mjs +77 -0
  109. package/dist/timeout.mjs.map +1 -0
  110. package/dist/types-BsCO2J40.d.mts +35 -0
  111. package/dist/types-BsCO2J40.d.ts +35 -0
  112. package/package.json +167 -0
package/README.md ADDED
@@ -0,0 +1,443 @@
1
+ <p align="center">
2
+ <img src="https://img.shields.io/npm/v/flowx-control?style=flat-square&color=blue" alt="npm version" />
3
+ <img src="https://img.shields.io/badge/coverage-100%25-brightgreen?style=flat-square" alt="coverage" />
4
+ <img src="https://img.shields.io/npm/l/flowx-control?style=flat-square" alt="license" />
5
+ <img src="https://img.shields.io/badge/dependencies-0-brightgreen?style=flat-square" alt="zero deps" />
6
+ <img src="https://img.shields.io/badge/TypeScript-first-blue?style=flat-square&logo=typescript&logoColor=white" alt="typescript" />
7
+ <img src="https://img.shields.io/npm/dm/flowx-control?style=flat-square" alt="downloads" />
8
+ </p>
9
+
10
+ # FlowX
11
+
12
+ **Production-grade resilience & async flow control for TypeScript/JavaScript.**
13
+
14
+ > Stop shipping fragile async code. FlowX gives you battle-tested patterns — retry, circuit breaker, rate limiter, bulkhead, queue, and 12 more — in a single, zero-dependency, tree-shakable package with **100% test coverage**.
15
+
16
+ ---
17
+
18
+ ## Why FlowX?
19
+
20
+ | | FlowX | Others |
21
+ |--|-------|--------|
22
+ | **Dependencies** | 0 | 3–15+ |
23
+ | **Test Coverage** | 100% statements, branches, functions, lines | Partial |
24
+ | **TypeScript** | Native `.d.ts` + `.d.mts` | Bolted-on types |
25
+ | **Tree-shaking** | Per-module deep imports | Monolithic bundle |
26
+ | **Module Support** | ESM + CJS + Types | Usually one |
27
+ | **Patterns** | 17 resilience & flow primitives | 2–5 |
28
+
29
+ ---
30
+
31
+ ## Install
32
+
33
+ ```bash
34
+ npm install flowx-control
35
+ ```
36
+
37
+ ```bash
38
+ yarn add flowx-control
39
+ ```
40
+
41
+ ```bash
42
+ pnpm add flowx-control
43
+ ```
44
+
45
+ ---
46
+
47
+ ## Quick Start
48
+
49
+ ```ts
50
+ import { retry, createCircuitBreaker, withTimeout, rateLimit } from 'flowx-control';
51
+
52
+ // Retry with exponential backoff
53
+ const data = await retry(() => fetch('/api/data'), {
54
+ maxAttempts: 5,
55
+ delay: 1000,
56
+ backoff: 'exponential',
57
+ });
58
+
59
+ // Circuit breaker — stop cascading failures
60
+ const breaker = createCircuitBreaker(fetchUser, {
61
+ failureThreshold: 5,
62
+ resetTimeout: 30_000,
63
+ });
64
+ const user = await breaker.fire(userId);
65
+
66
+ // Timeout — never wait forever
67
+ const result = await withTimeout(() => fetch('/slow'), 5000, {
68
+ fallback: () => cachedResponse,
69
+ });
70
+
71
+ // Rate limiter — respect API limits
72
+ const limiter = createRateLimiter({ limit: 10, interval: 1000 });
73
+ await limiter.execute(() => callExternalApi());
74
+ ```
75
+
76
+ ---
77
+
78
+ ## All 17 Modules
79
+
80
+ ### 🛡️ Resilience
81
+
82
+ <details>
83
+ <summary><strong>retry</strong> — Retry with backoff & jitter</summary>
84
+
85
+ ```ts
86
+ import { retry } from 'flowx-control/retry';
87
+
88
+ const data = await retry(() => fetch('/api'), {
89
+ maxAttempts: 5,
90
+ delay: 1000,
91
+ backoff: 'exponential', // 'fixed' | 'linear' | 'exponential' | custom fn
92
+ jitter: true,
93
+ retryIf: (err) => err.status !== 404,
94
+ onRetry: (err, attempt) => console.log(`Attempt ${attempt}`),
95
+ signal: abortController.signal,
96
+ });
97
+ ```
98
+ </details>
99
+
100
+ <details>
101
+ <summary><strong>circuitBreaker</strong> — Stop cascading failures</summary>
102
+
103
+ ```ts
104
+ import { createCircuitBreaker } from 'flowx-control/circuit-breaker';
105
+
106
+ const breaker = createCircuitBreaker(callApi, {
107
+ failureThreshold: 5,
108
+ resetTimeout: 30000,
109
+ halfOpenLimit: 1,
110
+ successThreshold: 2,
111
+ shouldTrip: (err) => err.status >= 500,
112
+ onStateChange: (from, to) => log(`${from} → ${to}`),
113
+ });
114
+
115
+ const result = await breaker.fire(args);
116
+ console.log(breaker.state); // 'closed' | 'open' | 'half-open'
117
+ console.log(breaker.failureCount);
118
+ breaker.reset();
119
+ ```
120
+ </details>
121
+
122
+ <details>
123
+ <summary><strong>fallback</strong> — Graceful degradation</summary>
124
+
125
+ ```ts
126
+ import { withFallback, fallbackChain } from 'flowx-control/fallback';
127
+
128
+ const data = await withFallback(
129
+ () => fetchFromPrimary(),
130
+ 'default-value',
131
+ { onFallback: (err, idx) => console.warn(err) }
132
+ );
133
+
134
+ const result = await fallbackChain([
135
+ () => fetchFromPrimary(),
136
+ () => fetchFromCache(),
137
+ () => fetchFromFallback(),
138
+ ]);
139
+ ```
140
+ </details>
141
+
142
+ <details>
143
+ <summary><strong>timeout</strong> — Never wait forever</summary>
144
+
145
+ ```ts
146
+ import { withTimeout } from 'flowx-control/timeout';
147
+
148
+ const result = await withTimeout(() => fetch('/slow-api'), 5000, {
149
+ fallback: () => cachedData,
150
+ message: 'API took too long',
151
+ signal: controller.signal,
152
+ });
153
+ ```
154
+ </details>
155
+
156
+ ### 🚦 Concurrency
157
+
158
+ <details>
159
+ <summary><strong>bulkhead</strong> — Isolate concurrent operations</summary>
160
+
161
+ ```ts
162
+ import { createBulkhead } from 'flowx-control/bulkhead';
163
+
164
+ const bulkhead = createBulkhead({
165
+ maxConcurrent: 10,
166
+ maxQueue: 100,
167
+ queueTimeout: 5000,
168
+ });
169
+
170
+ const result = await bulkhead.execute(() => processRequest());
171
+ console.log(bulkhead.activeCount, bulkhead.queueSize);
172
+ ```
173
+ </details>
174
+
175
+ <details>
176
+ <summary><strong>queue</strong> — Priority async task queue</summary>
177
+
178
+ ```ts
179
+ import { createQueue } from 'flowx-control/queue';
180
+
181
+ const queue = createQueue({ concurrency: 5, timeout: 10000 });
182
+
183
+ const result = await queue.add(() => processJob(), { priority: 1 });
184
+ const results = await queue.addAll(tasks.map(t => () => process(t)));
185
+
186
+ await queue.onIdle(); // wait until all done
187
+ queue.pause();
188
+ queue.resume();
189
+ ```
190
+ </details>
191
+
192
+ <details>
193
+ <summary><strong>semaphore</strong> — Counting resource lock</summary>
194
+
195
+ ```ts
196
+ import { createSemaphore } from 'flowx-control/semaphore';
197
+
198
+ const sem = createSemaphore(3); // max 3 concurrent
199
+ const release = await sem.acquire();
200
+ try {
201
+ await doWork();
202
+ } finally {
203
+ release();
204
+ }
205
+ ```
206
+ </details>
207
+
208
+ <details>
209
+ <summary><strong>mutex</strong> — Mutual exclusion lock</summary>
210
+
211
+ ```ts
212
+ import { createMutex } from 'flowx-control/mutex';
213
+
214
+ const mutex = createMutex();
215
+ const release = await mutex.acquire();
216
+ try {
217
+ await criticalSection();
218
+ } finally {
219
+ release();
220
+ }
221
+ ```
222
+ </details>
223
+
224
+ ### 🎛️ Flow Control
225
+
226
+ <details>
227
+ <summary><strong>rateLimit</strong> — Token bucket rate limiting</summary>
228
+
229
+ ```ts
230
+ import { createRateLimiter } from 'flowx-control/rate-limit';
231
+
232
+ const limiter = createRateLimiter({
233
+ limit: 100,
234
+ interval: 60_000,
235
+ strategy: 'queue', // 'queue' | 'reject'
236
+ });
237
+
238
+ await limiter.execute(() => callApi());
239
+ console.log(limiter.remaining);
240
+ limiter.reset();
241
+ ```
242
+ </details>
243
+
244
+ <details>
245
+ <summary><strong>throttle</strong> — Rate-limit function calls</summary>
246
+
247
+ ```ts
248
+ import { throttle } from 'flowx-control/throttle';
249
+
250
+ const save = throttle(saveToDb, 1000, {
251
+ leading: true,
252
+ trailing: true,
253
+ });
254
+
255
+ await save(data);
256
+ save.cancel();
257
+ ```
258
+ </details>
259
+
260
+ <details>
261
+ <summary><strong>debounce</strong> — Delay until activity pauses</summary>
262
+
263
+ ```ts
264
+ import { debounce } from 'flowx-control/debounce';
265
+
266
+ const search = debounce(searchApi, 300, {
267
+ leading: false,
268
+ trailing: true,
269
+ maxWait: 1000,
270
+ });
271
+
272
+ await search(query);
273
+ await search.flush();
274
+ search.cancel();
275
+ ```
276
+ </details>
277
+
278
+ <details>
279
+ <summary><strong>batch</strong> — Process collections in chunks</summary>
280
+
281
+ ```ts
282
+ import { batch } from 'flowx-control/batch';
283
+
284
+ const result = await batch(urls, async (url, i) => {
285
+ return fetch(url).then(r => r.json());
286
+ }, {
287
+ batchSize: 10,
288
+ concurrency: 3,
289
+ onProgress: (done, total) => console.log(`${done}/${total}`),
290
+ signal: controller.signal,
291
+ });
292
+
293
+ console.log(result.succeeded, result.failed, result.errors);
294
+ ```
295
+ </details>
296
+
297
+ <details>
298
+ <summary><strong>pipeline</strong> — Compose async operations</summary>
299
+
300
+ ```ts
301
+ import { pipeline, pipe } from 'flowx-control/pipeline';
302
+
303
+ const transform = pipe(
304
+ (input: string) => input.trim(),
305
+ (str) => str.toUpperCase(),
306
+ async (str) => await translate(str),
307
+ );
308
+
309
+ const result = await transform(' hello world ');
310
+ ```
311
+ </details>
312
+
313
+ ### 🛠️ Utilities
314
+
315
+ <details>
316
+ <summary><strong>poll</strong> — Repeated polling with backoff</summary>
317
+
318
+ ```ts
319
+ import { poll } from 'flowx-control/poll';
320
+
321
+ const { result, stop } = poll(() => checkJobStatus(jobId), {
322
+ interval: 2000,
323
+ until: (status) => status === 'complete',
324
+ maxAttempts: 30,
325
+ backoff: 'exponential',
326
+ signal: controller.signal,
327
+ });
328
+
329
+ const finalStatus = await result;
330
+ ```
331
+ </details>
332
+
333
+ <details>
334
+ <summary><strong>hedge</strong> — Hedged/speculative requests</summary>
335
+
336
+ ```ts
337
+ import { hedge } from 'flowx-control/hedge';
338
+
339
+ // If primary doesn't respond in 200ms, fire a parallel request
340
+ const data = await hedge(() => fetch('/api'), {
341
+ delay: 200,
342
+ maxHedges: 2,
343
+ });
344
+ ```
345
+ </details>
346
+
347
+ <details>
348
+ <summary><strong>memo</strong> — Async memoization with TTL</summary>
349
+
350
+ ```ts
351
+ import { memo } from 'flowx-control/memo';
352
+
353
+ const cachedFetch = memo(fetchUserById, {
354
+ ttl: 60_000,
355
+ maxSize: 1000,
356
+ key: (id) => `user:${id}`,
357
+ });
358
+
359
+ const user = await cachedFetch(123);
360
+ cachedFetch.clear();
361
+ ```
362
+ </details>
363
+
364
+ <details>
365
+ <summary><strong>deferred</strong> — Externally resolvable promise</summary>
366
+
367
+ ```ts
368
+ import { deferred } from 'flowx-control/deferred';
369
+
370
+ const d = deferred<string>();
371
+ setTimeout(() => d.resolve('hello'), 1000);
372
+ const value = await d.promise; // 'hello'
373
+ ```
374
+ </details>
375
+
376
+ ---
377
+
378
+ ## Deep Imports (Tree-shaking)
379
+
380
+ Import only what you need — zero unused code in your bundle:
381
+
382
+ ```ts
383
+ // Only pulls in ~2KB instead of the full 28KB
384
+ import { retry } from 'flowx-control/retry';
385
+ import { createQueue } from 'flowx-control/queue';
386
+ ```
387
+
388
+ ---
389
+
390
+ ## Error Hierarchy
391
+
392
+ All errors extend `FlowXError` with a machine-readable `code`:
393
+
394
+ | Error | Code | Thrown by |
395
+ |-------|------|----------|
396
+ | `TimeoutError` | `ERR_TIMEOUT` | `withTimeout` |
397
+ | `CircuitBreakerError` | `ERR_CIRCUIT_OPEN` | `circuitBreaker` |
398
+ | `BulkheadError` | `ERR_BULKHEAD_FULL` | `bulkhead` |
399
+ | `AbortError` | `ERR_ABORTED` | `poll`, `batch`, `timeout` |
400
+ | `RateLimitError` | `ERR_RATE_LIMIT` | `rateLimit` |
401
+
402
+ ```ts
403
+ import { TimeoutError, FlowXError } from 'flowx-control';
404
+
405
+ try {
406
+ await withTimeout(fn, 1000);
407
+ } catch (err) {
408
+ if (err instanceof TimeoutError) {
409
+ console.log(err.code); // 'ERR_TIMEOUT'
410
+ }
411
+ }
412
+ ```
413
+
414
+ ---
415
+
416
+ ## Compatibility
417
+
418
+ | Environment | Support |
419
+ |-------------|---------|
420
+ | Node.js | ≥ 16 |
421
+ | Bun | ✅ |
422
+ | Deno | ✅ |
423
+ | Browsers | ✅ (ESM) |
424
+ | TypeScript | ≥ 4.7 |
425
+
426
+ ---
427
+
428
+ ## Contributing
429
+
430
+ ```bash
431
+ git clone https://github.com/Avinashvelu03/FlowX.git
432
+ cd FlowX
433
+ npm install
434
+ npm test # Run tests with 100% coverage
435
+ npm run lint # ESLint
436
+ npm run build # Build ESM + CJS + DTS
437
+ ```
438
+
439
+ ---
440
+
441
+ ## License
442
+
443
+ MIT © [Avinash](https://github.com/Avinashvelu03)
@@ -0,0 +1,37 @@
/** Options accepted by {@link batch}. */
interface BatchOptions {
  /** Maximum concurrent batch operations (default: Infinity); must be >= 1 */
  concurrency?: number;
  /** Number of items per batch (default: 1); must be >= 1 */
  batchSize?: number;
  /** Progress callback, invoked after each item settles (success or failure) */
  onProgress?: (completed: number, total: number) => void;
  /** AbortSignal for cancellation */
  signal?: AbortSignal;
}
/** Aggregate outcome returned by {@link batch}. */
interface BatchResult<T> {
  /** All results in order; positions that failed hold `undefined` */
  results: T[];
  /** Total items processed */
  total: number;
  /** Number of succeeded items */
  succeeded: number;
  /** Number of failed items */
  failed: number;
  /** Errors indexed by position */
  errors: Map<number, Error>;
}
/**
 * Process an array of items in batches with concurrency control.
 *
 * Per-item failures are collected in `errors` (keyed by item index) rather
 * than rejecting the whole run; aborting via `signal` rejects the returned
 * promise instead.
 *
 * @example
 * ```ts
 * const results = await batch(
 *   urls,
 *   async (url) => fetch(url).then(r => r.json()),
 *   { concurrency: 5, batchSize: 10 },
 * );
 * ```
 */
declare function batch<TItem, TResult>(items: TItem[], fn: (item: TItem, index: number) => Promise<TResult>, options?: BatchOptions): Promise<BatchResult<TResult>>;

export { type BatchOptions, type BatchResult, batch };
@@ -0,0 +1,37 @@
/** Options accepted by {@link batch}. */
interface BatchOptions {
  /** Maximum concurrent batch operations (default: Infinity); must be >= 1 */
  concurrency?: number;
  /** Number of items per batch (default: 1); must be >= 1 */
  batchSize?: number;
  /** Progress callback, invoked after each item settles (success or failure) */
  onProgress?: (completed: number, total: number) => void;
  /** AbortSignal for cancellation */
  signal?: AbortSignal;
}
/** Aggregate outcome returned by {@link batch}. */
interface BatchResult<T> {
  /** All results in order; positions that failed hold `undefined` */
  results: T[];
  /** Total items processed */
  total: number;
  /** Number of succeeded items */
  succeeded: number;
  /** Number of failed items */
  failed: number;
  /** Errors indexed by position */
  errors: Map<number, Error>;
}
/**
 * Process an array of items in batches with concurrency control.
 *
 * Per-item failures are collected in `errors` (keyed by item index) rather
 * than rejecting the whole run; aborting via `signal` rejects the returned
 * promise instead.
 *
 * @example
 * ```ts
 * const results = await batch(
 *   urls,
 *   async (url) => fetch(url).then(r => r.json()),
 *   { concurrency: 5, batchSize: 10 },
 * );
 * ```
 */
declare function batch<TItem, TResult>(items: TItem[], fn: (item: TItem, index: number) => Promise<TResult>, options?: BatchOptions): Promise<BatchResult<TResult>>;

export { type BatchOptions, type BatchResult, batch };
package/dist/batch.js ADDED
@@ -0,0 +1,75 @@
'use strict';

// src/types.ts
/** Base error for every FlowX failure; carries a machine-readable `code`. */
class FlowXError extends Error {
  constructor(message, code) {
    super(message);
    this.name = "FlowXError";
    this.code = code;
    // Re-anchor the prototype so `instanceof` survives transpiled subclassing.
    Object.setPrototypeOf(this, new.target.prototype);
  }
}

/** Thrown when an operation is cancelled through an AbortSignal. */
class AbortError extends FlowXError {
  constructor(message = "Operation aborted") {
    super(message, "ERR_ABORTED");
    this.name = "AbortError";
  }
}
// src/batch.ts
/**
 * Run `fn` over every item, with the input pre-sliced into fixed-size chunks
 * that are consumed by a bounded pool of workers.
 *
 * Resolves with `{ results, total, succeeded, failed, errors }`; per-item
 * failures are recorded in `errors` (keyed by original index) and leave an
 * `undefined` hole in `results`. Aborting `signal` rejects with AbortError.
 */
async function batch(items, fn, options) {
  const { concurrency = Infinity, batchSize = 1, onProgress, signal } = options ?? {};
  if (batchSize < 1) throw new RangeError("batchSize must be >= 1");
  if (concurrency < 1) throw new RangeError("concurrency must be >= 1");

  const results = new Array(items.length);
  const errors = new Map();
  let completed = 0;
  let succeeded = 0;
  let failed = 0;

  // Pre-slice the input, remembering each chunk's offset into `items`.
  const chunks = [];
  for (let offset = 0; offset < items.length; offset += batchSize) {
    chunks.push({ items: items.slice(offset, offset + batchSize), startIndex: offset });
  }

  // Shared cursor: each worker claims the next unprocessed chunk.
  let cursor = 0;
  async function drain() {
    while (cursor < chunks.length) {
      if (signal?.aborted) throw new AbortError();
      const chunk = chunks[cursor++];
      for (let i = 0; i < chunk.items.length; i++) {
        if (signal?.aborted) throw new AbortError();
        const idx = chunk.startIndex + i;
        try {
          results[idx] = await fn(chunk.items[i], idx);
          succeeded++;
        } catch (error) {
          // Normalise non-Error throwables so `errors` always holds Error objects.
          errors.set(idx, error instanceof Error ? error : new Error(String(error)));
          results[idx] = undefined;
          failed++;
        }
        completed++;
        onProgress?.(completed, items.length);
      }
    }
  }

  // Never spawn more workers than there are chunks to process.
  const poolSize = Math.min(concurrency, chunks.length);
  const pool = [];
  for (let w = 0; w < poolSize; w++) {
    pool.push(drain());
  }
  await Promise.all(pool);

  return {
    results,
    total: items.length,
    succeeded,
    failed,
    errors
  };
}
exports.batch = batch;
// Single source-map pointer: the build emitted this comment twice, which is
// redundant — tooling honours only the last occurrence.
//# sourceMappingURL=batch.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/types.ts","../src/batch.ts"],"names":[],"mappings":";;;AAmBO,IAAM,UAAA,GAAN,cAAyB,KAAA,CAAM;AAAA,EAEpC,WAAA,CAAY,SAAiB,IAAA,EAAc;AACzC,IAAA,KAAA,CAAM,OAAO,CAAA;AACb,IAAA,IAAA,CAAK,IAAA,GAAO,YAAA;AACZ,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AACZ,IAAA,MAAA,CAAO,cAAA,CAAe,IAAA,EAAM,GAAA,CAAA,MAAA,CAAW,SAAS,CAAA;AAAA,EAClD;AACF,CAAA;AA2BO,IAAM,UAAA,GAAN,cAAyB,UAAA,CAAW;AAAA,EACzC,WAAA,CAAY,UAAU,mBAAA,EAAqB;AACzC,IAAA,KAAA,CAAM,SAAS,aAAa,CAAA;AAC5B,IAAA,IAAA,CAAK,IAAA,GAAO,YAAA;AAAA,EACd;AACF,CAAA;;;AClBA,eAAsB,KAAA,CACpB,KAAA,EACA,EAAA,EACA,OAAA,EAC+B;AAC/B,EAAA,MAAM,EAAE,cAAc,QAAA,EAAU,SAAA,GAAY,GAAG,UAAA,EAAY,MAAA,EAAO,GAAI,OAAA,IAAW,EAAC;AAElF,EAAA,IAAI,SAAA,GAAY,CAAA,EAAG,MAAM,IAAI,WAAW,wBAAwB,CAAA;AAChE,EAAA,IAAI,WAAA,GAAc,CAAA,EAAG,MAAM,IAAI,WAAW,0BAA0B,CAAA;AAEpE,EAAA,MAAM,OAAA,GAAqB,IAAI,KAAA,CAAM,KAAA,CAAM,MAAM,CAAA;AACjD,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAmB;AACtC,EAAA,IAAI,SAAA,GAAY,CAAA;AAChB,EAAA,IAAI,SAAA,GAAY,CAAA;AAChB,EAAA,IAAI,MAAA,GAAS,CAAA;AAGb,EAAA,MAAM,UAAyD,EAAC;AAChE,EAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,KAAA,CAAM,MAAA,EAAQ,KAAK,SAAA,EAAW;AAChD,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,KAAA,EAAO,KAAA,CAAM,KAAA,CAAM,CAAA,EAAG,IAAI,SAAS,CAAA;AAAA,MACnC,UAAA,EAAY;AAAA,KACb,CAAA;AAAA,EACH;AAGA,EAAA,IAAI,UAAA,GAAa,CAAA;AAEjB,EAAA,eAAe,YAAA,GAA8B;AAC3C,IAAA,OAAO,UAAA,GAAa,QAAQ,MAAA,EAAQ;AAClC,MAAA,IAAI,MAAA,EAAQ,OAAA,EAAS,MAAM,IAAI,UAAA,EAAW;AAE1C,MAAA,MAAM,YAAA,GAAe,QAAQ,UAAA,EAAY,CAAA;AAEzC,MAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,YAAA,CAAa,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK;AAClD,QAAA,IAAI,MAAA,EAAQ,OAAA,EAAS,MAAM,IAAI,UAAA,EAAW;AAE1C,QAAA,MAAM,WAAA,GAAc,aAAa,UAAA,GAAa,CAAA;AAC9C,QAAA,IAAI;AACF,UAAA,OAAA,CAAQ,WAAW,IAAI,MAAM,EAAA,CAAG,aAAa,KAAA,CAAM,CAAC,GAAG,WAAW,CAAA;AAClE,UAAA,SAAA,EAAA;AAAA,QACF,SAAS,KAAA,EAAO;AACd,UAAA,MAAM,GAAA,GAAM,iBAAiB,KAAA,GAAQ,KAAA,GAAQ,IAAI,KAAA,CAAM,MAAA,CAAO,KAAK,CAAC,CAAA;AACpE,UAAA,MAAA,CAAO,GAAA,CAAI,aAAa,GAAG,CAAA;AAC3B,UAAA,OAAA,CAAQ,WAAW,CAAA,GAAI,MAAA;AACvB,UAAA,MAAA,EAAA;AAAA,QACF;AACA,QAAA,SAAA,EAAA;AAC
A,QAAA,UAAA,GAAa,SAAA,EAAW,MAAM,MAAM,CAAA;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAEA,EAAA,MAAM,UAA2B,EAAC;AAClC,EAAA,MAAM,WAAA,GAAc,IAAA,CAAK,GAAA,CAAI,WAAA,EAAa,QAAQ,MAAM,CAAA;AAExD,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,WAAA,EAAa,CAAA,EAAA,EAAK;AACpC,IAAA,OAAA,CAAQ,IAAA,CAAK,cAAc,CAAA;AAAA,EAC7B;AAEA,EAAA,MAAM,OAAA,CAAQ,IAAI,OAAO,CAAA;AAEzB,EAAA,OAAO;AAAA,IACL,OAAA;AAAA,IACA,OAAO,KAAA,CAAM,MAAA;AAAA,IACb,SAAA;AAAA,IACA,MAAA;AAAA,IACA;AAAA,GACF;AACF","file":"batch.js","sourcesContent":["// ============================================================================\n// FlowX — Types & Error Hierarchy\n// ============================================================================\n\n/** Generic async function signature */\nexport type AsyncFn<TArgs extends any[] = any[], TReturn = any> = (\n ...args: TArgs\n) => Promise<TReturn>;\n\n/** Backoff strategy for retry/poll operations */\nexport type BackoffStrategy =\n | 'fixed'\n | 'linear'\n | 'exponential'\n | ((attempt: number, delay: number) => number);\n\n// ── Error Classes ───────────────────────────────────────────────────────────\n\n/** Base error class for all FlowX errors */\nexport class FlowXError extends Error {\n public readonly code: string;\n constructor(message: string, code: string) {\n super(message);\n this.name = 'FlowXError';\n this.code = code;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\n\n/** Thrown when a promise exceeds its timeout */\nexport class TimeoutError extends FlowXError {\n constructor(message = 'Operation timed out') {\n super(message, 'ERR_TIMEOUT');\n this.name = 'TimeoutError';\n }\n}\n\n/** Thrown when a circuit breaker is open */\nexport class CircuitBreakerError extends FlowXError {\n constructor(message = 'Circuit breaker is open') {\n super(message, 'ERR_CIRCUIT_OPEN');\n this.name = 'CircuitBreakerError';\n }\n}\n\n/** Thrown when a bulkhead rejects due to capacity */\nexport class BulkheadError extends FlowXError {\n constructor(message = 'Bulkhead 
capacity exceeded') {\n super(message, 'ERR_BULKHEAD_FULL');\n this.name = 'BulkheadError';\n }\n}\n\n/** Thrown when an operation is aborted */\nexport class AbortError extends FlowXError {\n constructor(message = 'Operation aborted') {\n super(message, 'ERR_ABORTED');\n this.name = 'AbortError';\n }\n}\n\n/** Thrown when rate limit is exceeded */\nexport class RateLimitError extends FlowXError {\n constructor(message = 'Rate limit exceeded') {\n super(message, 'ERR_RATE_LIMIT');\n this.name = 'RateLimitError';\n }\n}\n\n// ── Utility Helpers ─────────────────────────────────────────────────────────\n\n/** Sleep for the specified duration, respecting AbortSignal */\nexport function sleep(ms: number, signal?: AbortSignal): Promise<void> {\n return new Promise<void>((resolve, reject) => {\n if (signal?.aborted) {\n reject(new AbortError());\n return;\n }\n\n let onAbort: (() => void) | undefined;\n\n const timer = setTimeout(() => {\n if (signal && onAbort) {\n signal.removeEventListener('abort', onAbort);\n }\n resolve();\n }, ms);\n\n if (signal) {\n onAbort = () => {\n clearTimeout(timer);\n reject(new AbortError());\n };\n signal.addEventListener('abort', onAbort, { once: true });\n }\n });\n}\n\n/** Calculate delay based on backoff strategy */\nexport function calculateDelay(\n attempt: number,\n baseDelay: number,\n strategy: BackoffStrategy,\n jitter: boolean | number = false,\n): number {\n let delay: number;\n\n if (typeof strategy === 'function') {\n delay = strategy(attempt, baseDelay);\n } else {\n switch (strategy) {\n case 'fixed':\n delay = baseDelay;\n break;\n case 'linear':\n delay = baseDelay * attempt;\n break;\n case 'exponential':\n delay = baseDelay * Math.pow(2, attempt - 1);\n break;\n }\n }\n\n if (jitter) {\n const factor = typeof jitter === 'number' ? 
jitter : 1;\n delay = delay * (1 - factor * 0.5 + Math.random() * factor);\n }\n\n return Math.max(0, Math.floor(delay));\n}\n","// ============================================================================\n// FlowX — Batch Processing with Concurrency\n// ============================================================================\nimport { AbortError } from './types';\n\nexport interface BatchOptions {\n /** Maximum concurrent batch operations (default: Infinity) */\n concurrency?: number;\n /** Number of items per batch (default: 1) */\n batchSize?: number;\n /** Progress callback */\n onProgress?: (completed: number, total: number) => void;\n /** AbortSignal for cancellation */\n signal?: AbortSignal;\n}\n\nexport interface BatchResult<T> {\n /** All results in order */\n results: T[];\n /** Total items processed */\n total: number;\n /** Number of succeeded items */\n succeeded: number;\n /** Number of failed items */\n failed: number;\n /** Errors indexed by position */\n errors: Map<number, Error>;\n}\n\n/**\n * Process an array of items in batches with concurrency control.\n *\n * @example\n * ```ts\n * const results = await batch(\n * urls,\n * async (url) => fetch(url).then(r => r.json()),\n * { concurrency: 5, batchSize: 10 },\n * );\n * ```\n */\nexport async function batch<TItem, TResult>(\n items: TItem[],\n fn: (item: TItem, index: number) => Promise<TResult>,\n options?: BatchOptions,\n): Promise<BatchResult<TResult>> {\n const { concurrency = Infinity, batchSize = 1, onProgress, signal } = options ?? 
{};\n\n if (batchSize < 1) throw new RangeError('batchSize must be >= 1');\n if (concurrency < 1) throw new RangeError('concurrency must be >= 1');\n\n const results: TResult[] = new Array(items.length);\n const errors = new Map<number, Error>();\n let completed = 0;\n let succeeded = 0;\n let failed = 0;\n\n // Split items into batches\n const batches: Array<{ items: TItem[]; startIndex: number }> = [];\n for (let i = 0; i < items.length; i += batchSize) {\n batches.push({\n items: items.slice(i, i + batchSize),\n startIndex: i,\n });\n }\n\n // Process batches with concurrency control\n let batchIndex = 0;\n\n async function processBatch(): Promise<void> {\n while (batchIndex < batches.length) {\n if (signal?.aborted) throw new AbortError();\n\n const currentBatch = batches[batchIndex++];\n\n for (let i = 0; i < currentBatch.items.length; i++) {\n if (signal?.aborted) throw new AbortError();\n\n const globalIndex = currentBatch.startIndex + i;\n try {\n results[globalIndex] = await fn(currentBatch.items[i], globalIndex);\n succeeded++;\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n errors.set(globalIndex, err);\n results[globalIndex] = undefined as TResult;\n failed++;\n }\n completed++;\n onProgress?.(completed, items.length);\n }\n }\n }\n\n const workers: Promise<void>[] = [];\n const workerCount = Math.min(concurrency, batches.length);\n\n for (let i = 0; i < workerCount; i++) {\n workers.push(processBatch());\n }\n\n await Promise.all(workers);\n\n return {\n results,\n total: items.length,\n succeeded,\n failed,\n errors,\n };\n}\n"]}
package/dist/batch.mjs ADDED
@@ -0,0 +1,73 @@
// src/types.ts
/** Base error for every FlowX failure; carries a machine-readable `code`. */
class FlowXError extends Error {
  constructor(message, code) {
    super(message);
    this.name = "FlowXError";
    this.code = code;
    // Re-anchor the prototype so `instanceof` survives transpiled subclassing.
    Object.setPrototypeOf(this, new.target.prototype);
  }
}

/** Thrown when an operation is cancelled through an AbortSignal. */
class AbortError extends FlowXError {
  constructor(message = "Operation aborted") {
    super(message, "ERR_ABORTED");
    this.name = "AbortError";
  }
}
// src/batch.ts
/**
 * Run `fn` over every item, with the input pre-sliced into fixed-size chunks
 * that are consumed by a bounded pool of workers.
 *
 * Resolves with `{ results, total, succeeded, failed, errors }`; per-item
 * failures are recorded in `errors` (keyed by original index) and leave an
 * `undefined` hole in `results`. Aborting `signal` rejects with AbortError.
 */
async function batch(items, fn, options) {
  const { concurrency = Infinity, batchSize = 1, onProgress, signal } = options ?? {};
  if (batchSize < 1) throw new RangeError("batchSize must be >= 1");
  if (concurrency < 1) throw new RangeError("concurrency must be >= 1");

  const results = new Array(items.length);
  const errors = new Map();
  let completed = 0;
  let succeeded = 0;
  let failed = 0;

  // Pre-slice the input, remembering each chunk's offset into `items`.
  const chunks = [];
  for (let offset = 0; offset < items.length; offset += batchSize) {
    chunks.push({ items: items.slice(offset, offset + batchSize), startIndex: offset });
  }

  // Shared cursor: each worker claims the next unprocessed chunk.
  let cursor = 0;
  async function drain() {
    while (cursor < chunks.length) {
      if (signal?.aborted) throw new AbortError();
      const chunk = chunks[cursor++];
      for (let i = 0; i < chunk.items.length; i++) {
        if (signal?.aborted) throw new AbortError();
        const idx = chunk.startIndex + i;
        try {
          results[idx] = await fn(chunk.items[i], idx);
          succeeded++;
        } catch (error) {
          // Normalise non-Error throwables so `errors` always holds Error objects.
          errors.set(idx, error instanceof Error ? error : new Error(String(error)));
          results[idx] = undefined;
          failed++;
        }
        completed++;
        onProgress?.(completed, items.length);
      }
    }
  }

  // Never spawn more workers than there are chunks to process.
  const poolSize = Math.min(concurrency, chunks.length);
  const pool = [];
  for (let w = 0; w < poolSize; w++) {
    pool.push(drain());
  }
  await Promise.all(pool);

  return {
    results,
    total: items.length,
    succeeded,
    failed,
    errors
  };
}
70
+
71
+ export { batch };
72
+ //# sourceMappingURL=batch.mjs.map
73
+ //# sourceMappingURL=batch.mjs.map