flowfn 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +1305 -0
- package/dist/index.d.ts +1305 -0
- package/dist/index.js +3180 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +3088 -0
- package/dist/index.mjs.map +1 -0
- package/docs/API.md +801 -0
- package/docs/USAGE.md +619 -0
- package/package.json +75 -0
- package/src/adapters/base.ts +46 -0
- package/src/adapters/memory.ts +183 -0
- package/src/adapters/postgres/index.ts +383 -0
- package/src/adapters/postgres/postgres.test.ts +100 -0
- package/src/adapters/postgres/schema.ts +110 -0
- package/src/adapters/redis.test.ts +124 -0
- package/src/adapters/redis.ts +331 -0
- package/src/core/flow-fn.test.ts +70 -0
- package/src/core/flow-fn.ts +198 -0
- package/src/core/metrics.ts +198 -0
- package/src/core/scheduler.test.ts +80 -0
- package/src/core/scheduler.ts +154 -0
- package/src/index.ts +57 -0
- package/src/monitoring/health.ts +261 -0
- package/src/patterns/backoff.ts +30 -0
- package/src/patterns/batching.ts +248 -0
- package/src/patterns/circuit-breaker.test.ts +52 -0
- package/src/patterns/circuit-breaker.ts +52 -0
- package/src/patterns/priority.ts +146 -0
- package/src/patterns/rate-limit.ts +290 -0
- package/src/patterns/retry.test.ts +62 -0
- package/src/queue/batch.test.ts +35 -0
- package/src/queue/dependencies.test.ts +33 -0
- package/src/queue/dlq.ts +222 -0
- package/src/queue/job.ts +67 -0
- package/src/queue/queue.ts +243 -0
- package/src/queue/types.ts +153 -0
- package/src/queue/worker.ts +66 -0
- package/src/storage/event-log.ts +205 -0
- package/src/storage/job-storage.ts +206 -0
- package/src/storage/workflow-storage.ts +182 -0
- package/src/stream/stream.ts +194 -0
- package/src/stream/types.ts +81 -0
- package/src/utils/hashing.ts +29 -0
- package/src/utils/id-generator.ts +109 -0
- package/src/utils/serialization.ts +142 -0
- package/src/utils/time.ts +167 -0
- package/src/workflow/advanced.test.ts +43 -0
- package/src/workflow/events.test.ts +39 -0
- package/src/workflow/types.ts +132 -0
- package/src/workflow/workflow.test.ts +55 -0
- package/src/workflow/workflow.ts +422 -0
- package/tests/dlq.test.ts +205 -0
- package/tests/health.test.ts +228 -0
- package/tests/integration.test.ts +253 -0
- package/tests/stream.test.ts +233 -0
- package/tests/workflow.test.ts +286 -0
- package/tsconfig.json +17 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +15 -0
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Monitoring and health check system for FlowFn
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
/**
 * Aggregate result of running all registered health checks.
 */
export interface HealthStatus {
  // True when no individual check reported "fail"; a "warn" does not
  // flip this flag (see HealthCheckerImpl.check)
  healthy: boolean;
  // Epoch milliseconds at which this status was assembled
  timestamp: number;
  // One entry per registered check, in registration order
  checks: HealthCheck[];
  // Optional free-form extra data
  details?: Record<string, any>;
}
|
|
11
|
+
|
|
12
|
+
/**
 * Result of a single named health check.
 */
export interface HealthCheck {
  // Name the check was registered under
  name: string;
  // "fail" marks the overall status unhealthy; "warn" is informational
  status: "pass" | "fail" | "warn";
  // Optional human-readable explanation (e.g. why a warn/fail occurred)
  message?: string;
  // Milliseconds the check took; overwritten by the runner with measured
  // wall-clock time (see HealthCheckerImpl.check)
  responseTime?: number;
  // Optional free-form extra data
  details?: Record<string, any>;
}
|
|
19
|
+
|
|
20
|
+
/**
 * Snapshot of runtime metrics across the queue, stream, workflow and
 * system subsystems. Every section is optional so a producer can report
 * only what it knows about.
 */
export interface MonitoringMetrics {
  // Queue metrics, keyed by queue name
  queues?: {
    [queueName: string]: {
      waiting: number;
      active: number;
      completed: number;
      failed: number;
      delayed: number;
      throughput: number; // jobs/min
      avgDuration: number; // ms
      errorRate: number; // %
    };
  };

  // Stream metrics, keyed by stream name
  streams?: {
    [streamName: string]: {
      messageCount: number;
      publishRate: number; // msgs/sec
      consumerCount: number;
      lag: number;
    };
  };

  // Workflow metrics, keyed by workflow name
  workflows?: {
    [workflowName: string]: {
      totalExecutions: number;
      running: number;
      completed: number;
      failed: number;
      successRate: number;
      avgDuration: number;
    };
  };

  // Process-level system metrics
  system?: {
    uptime: number;
    memoryUsage: number;
    cpuUsage?: number;
  };
}
|
|
64
|
+
|
|
65
|
+
/**
 * Registry of named, async health checks that can be executed together
 * to produce an aggregate HealthStatus.
 */
export interface HealthChecker {
  /**
   * Perform health check: run every registered check and aggregate the
   * results into a single status.
   */
  check(): Promise<HealthStatus>;

  /**
   * Add custom health check. Registering under an existing name replaces
   * the previous checker.
   */
  addCheck(name: string, checker: () => Promise<HealthCheck>): void;

  /**
   * Remove health check by its registered name.
   */
  removeCheck(name: string): void;
}
|
|
81
|
+
|
|
82
|
+
/**
|
|
83
|
+
* Health checker implementation
|
|
84
|
+
*/
|
|
85
|
+
export class HealthCheckerImpl implements HealthChecker {
|
|
86
|
+
private checks: Map<string, () => Promise<HealthCheck>> = new Map();
|
|
87
|
+
private startTime = Date.now();
|
|
88
|
+
|
|
89
|
+
constructor() {
|
|
90
|
+
// Add default system checks
|
|
91
|
+
this.addCheck("uptime", async () => ({
|
|
92
|
+
name: "uptime",
|
|
93
|
+
status: "pass",
|
|
94
|
+
responseTime: 0,
|
|
95
|
+
details: {
|
|
96
|
+
uptime: Date.now() - this.startTime,
|
|
97
|
+
startTime: this.startTime,
|
|
98
|
+
},
|
|
99
|
+
}));
|
|
100
|
+
|
|
101
|
+
this.addCheck("memory", async () => {
|
|
102
|
+
if (typeof process !== "undefined" && process.memoryUsage) {
|
|
103
|
+
const mem = process.memoryUsage();
|
|
104
|
+
const usedMB = mem.heapUsed / 1024 / 1024;
|
|
105
|
+
const totalMB = mem.heapTotal / 1024 / 1024;
|
|
106
|
+
const usagePercent = (usedMB / totalMB) * 100;
|
|
107
|
+
|
|
108
|
+
return {
|
|
109
|
+
name: "memory",
|
|
110
|
+
status: usagePercent > 90 ? "warn" : "pass",
|
|
111
|
+
message: usagePercent > 90 ? "High memory usage" : undefined,
|
|
112
|
+
responseTime: 0,
|
|
113
|
+
details: {
|
|
114
|
+
heapUsedMB: Math.round(usedMB),
|
|
115
|
+
heapTotalMB: Math.round(totalMB),
|
|
116
|
+
usagePercent: Math.round(usagePercent),
|
|
117
|
+
},
|
|
118
|
+
};
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
return {
|
|
122
|
+
name: "memory",
|
|
123
|
+
status: "pass",
|
|
124
|
+
responseTime: 0,
|
|
125
|
+
};
|
|
126
|
+
});
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
addCheck(name: string, checker: () => Promise<HealthCheck>): void {
|
|
130
|
+
this.checks.set(name, checker);
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
removeCheck(name: string): void {
|
|
134
|
+
this.checks.delete(name);
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
async check(): Promise<HealthStatus> {
|
|
138
|
+
const results: HealthCheck[] = [];
|
|
139
|
+
let allHealthy = true;
|
|
140
|
+
|
|
141
|
+
for (const [name, checker] of this.checks.entries()) {
|
|
142
|
+
try {
|
|
143
|
+
const start = Date.now();
|
|
144
|
+
const result = await checker();
|
|
145
|
+
result.responseTime = Date.now() - start;
|
|
146
|
+
results.push(result);
|
|
147
|
+
|
|
148
|
+
if (result.status === "fail") {
|
|
149
|
+
allHealthy = false;
|
|
150
|
+
}
|
|
151
|
+
} catch (error) {
|
|
152
|
+
results.push({
|
|
153
|
+
name,
|
|
154
|
+
status: "fail",
|
|
155
|
+
message: (error as Error).message,
|
|
156
|
+
responseTime: 0,
|
|
157
|
+
});
|
|
158
|
+
allHealthy = false;
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
return {
|
|
163
|
+
healthy: allHealthy,
|
|
164
|
+
timestamp: Date.now(),
|
|
165
|
+
checks: results,
|
|
166
|
+
};
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
/**
 * Event tracking for monitoring
 */
export interface TrackedEvent {
  // Generated unique id (see MemoryEventTracker.track)
  id: string;
  // Application-defined event type label
  type: string;
  // Which subsystem produced the event
  category: "queue" | "stream" | "workflow" | "system";
  severity: "info" | "warn" | "error";
  message: string;
  // Epoch milliseconds when the event was tracked
  timestamp: number;
  metadata?: Record<string, any>;
}
|
|
182
|
+
|
|
183
|
+
/**
 * Sink for monitoring events with filtered retrieval and age-based
 * cleanup.
 */
export interface EventTracker {
  /**
   * Track an event. The implementation assigns the `id` and `timestamp`.
   */
  track(event: Omit<TrackedEvent, "id" | "timestamp">): void;

  /**
   * Get events, optionally filtered. `since` is an epoch-ms lower bound
   * on timestamp; `limit` keeps only the most recent N matches.
   */
  getEvents(filter?: {
    category?: string;
    severity?: string;
    since?: number;
    limit?: number;
  }): TrackedEvent[];

  /**
   * Clear events older than `maxAge` milliseconds; returns the number of
   * events removed.
   */
  cleanup(maxAge: number): number;
}
|
|
204
|
+
|
|
205
|
+
export class MemoryEventTracker implements EventTracker {
|
|
206
|
+
private events: TrackedEvent[] = [];
|
|
207
|
+
private maxEvents = 10000;
|
|
208
|
+
|
|
209
|
+
track(event: Omit<TrackedEvent, "id" | "timestamp">): void {
|
|
210
|
+
const trackedEvent: TrackedEvent = {
|
|
211
|
+
...event,
|
|
212
|
+
id: `evt_${Date.now()}_${Math.random().toString(36).substring(7)}`,
|
|
213
|
+
timestamp: Date.now(),
|
|
214
|
+
};
|
|
215
|
+
|
|
216
|
+
this.events.push(trackedEvent);
|
|
217
|
+
|
|
218
|
+
// Trim if exceeds max
|
|
219
|
+
if (this.events.length > this.maxEvents) {
|
|
220
|
+
this.events = this.events.slice(-this.maxEvents);
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
getEvents(filter?: {
|
|
225
|
+
category?: string;
|
|
226
|
+
severity?: string;
|
|
227
|
+
since?: number;
|
|
228
|
+
limit?: number;
|
|
229
|
+
}): TrackedEvent[] {
|
|
230
|
+
let filtered = [...this.events];
|
|
231
|
+
|
|
232
|
+
if (filter?.category) {
|
|
233
|
+
filtered = filtered.filter((e) => e.category === filter.category);
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
if (filter?.severity) {
|
|
237
|
+
filtered = filtered.filter((e) => e.severity === filter.severity);
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
if (filter?.since !== undefined) {
|
|
241
|
+
filtered = filtered.filter((e) => e.timestamp >= filter.since!);
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
if (filter?.limit) {
|
|
245
|
+
filtered = filtered.slice(-filter.limit);
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
return filtered;
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
cleanup(maxAge: number): number {
|
|
252
|
+
const now = Date.now();
|
|
253
|
+
const before = this.events.length;
|
|
254
|
+
this.events = this.events.filter((e) => now - e.timestamp <= maxAge);
|
|
255
|
+
return before - this.events.length;
|
|
256
|
+
}
|
|
257
|
+
|
|
258
|
+
clear(): void {
|
|
259
|
+
this.events = [];
|
|
260
|
+
}
|
|
261
|
+
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { BackoffOptions } from '../queue/types.js';
|
|
2
|
+
|
|
3
|
+
export function calculateBackoff(attemptsMade: number, options: BackoffOptions): number {
|
|
4
|
+
const { type, delay, maxDelay } = options;
|
|
5
|
+
|
|
6
|
+
let resultDelay: number;
|
|
7
|
+
|
|
8
|
+
switch (type) {
|
|
9
|
+
case 'fixed':
|
|
10
|
+
resultDelay = delay;
|
|
11
|
+
break;
|
|
12
|
+
case 'exponential':
|
|
13
|
+
resultDelay = delay * Math.pow(2, attemptsMade - 1);
|
|
14
|
+
break;
|
|
15
|
+
case 'custom':
|
|
16
|
+
// Custom should probably be a function passed in opts,
|
|
17
|
+
// but for serialization we might need a registry.
|
|
18
|
+
// Default to fixed for now if not handled.
|
|
19
|
+
resultDelay = delay;
|
|
20
|
+
break;
|
|
21
|
+
default:
|
|
22
|
+
resultDelay = delay;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
if (maxDelay && resultDelay > maxDelay) {
|
|
26
|
+
return maxDelay;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
return resultDelay;
|
|
30
|
+
}
|
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Batching utilities for FlowFn
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
export interface BatchOptions {
  /**
   * Maximum number of items per batch
   */
  maxSize: number;

  /**
   * Maximum time to wait before flushing batch (ms) (default: 1000)
   */
  maxWait?: number;

  /**
   * Minimum size before processing (default: 1)
   */
  minSize?: number;
}
|
|
21
|
+
|
|
22
|
+
export interface BatchProcessor<T, R> {
  /**
   * Process a batch of items. Must return one result per input item,
   * index-aligned with `items` (result[i] settles the caller that added
   * items[i] — see BatchAccumulator).
   */
  (items: T[]): Promise<R[]>;
}
|
|
28
|
+
|
|
29
|
+
/**
|
|
30
|
+
* Batch accumulator for collecting and processing items in batches
|
|
31
|
+
*/
|
|
32
|
+
export class BatchAccumulator<T, R = any> {
|
|
33
|
+
private batch: T[] = [];
|
|
34
|
+
private timer: NodeJS.Timeout | null = null;
|
|
35
|
+
private options: Required<BatchOptions>;
|
|
36
|
+
private processor: BatchProcessor<T, R>;
|
|
37
|
+
private pending: Array<{
|
|
38
|
+
resolve: (value: R) => void;
|
|
39
|
+
reject: (error: Error) => void;
|
|
40
|
+
}> = [];
|
|
41
|
+
|
|
42
|
+
constructor(processor: BatchProcessor<T, R>, options: BatchOptions) {
|
|
43
|
+
this.processor = processor;
|
|
44
|
+
this.options = {
|
|
45
|
+
minSize: 1,
|
|
46
|
+
maxWait: 1000,
|
|
47
|
+
...options,
|
|
48
|
+
};
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Add an item to the batch
|
|
53
|
+
*/
|
|
54
|
+
async add(item: T): Promise<R> {
|
|
55
|
+
return new Promise<R>((resolve, reject) => {
|
|
56
|
+
this.batch.push(item);
|
|
57
|
+
this.pending.push({ resolve, reject });
|
|
58
|
+
|
|
59
|
+
// Flush if batch is full
|
|
60
|
+
if (this.batch.length >= this.options.maxSize) {
|
|
61
|
+
this.flush();
|
|
62
|
+
} else if (!this.timer) {
|
|
63
|
+
// Set timer for max wait
|
|
64
|
+
this.timer = setTimeout(() => {
|
|
65
|
+
this.flush();
|
|
66
|
+
}, this.options.maxWait);
|
|
67
|
+
}
|
|
68
|
+
});
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* Manually flush the current batch
|
|
73
|
+
*/
|
|
74
|
+
async flush(): Promise<void> {
|
|
75
|
+
if (this.timer) {
|
|
76
|
+
clearTimeout(this.timer);
|
|
77
|
+
this.timer = null;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
if (this.batch.length < this.options.minSize) {
|
|
81
|
+
return;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
const currentBatch = this.batch;
|
|
85
|
+
const currentPending = this.pending;
|
|
86
|
+
|
|
87
|
+
this.batch = [];
|
|
88
|
+
this.pending = [];
|
|
89
|
+
|
|
90
|
+
try {
|
|
91
|
+
const results = await this.processor(currentBatch);
|
|
92
|
+
|
|
93
|
+
// Resolve all pending promises
|
|
94
|
+
for (let i = 0; i < currentPending.length; i++) {
|
|
95
|
+
currentPending[i].resolve(results[i]);
|
|
96
|
+
}
|
|
97
|
+
} catch (error) {
|
|
98
|
+
// Reject all pending promises
|
|
99
|
+
for (const pending of currentPending) {
|
|
100
|
+
pending.reject(error as Error);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
/**
|
|
106
|
+
* Get current batch size
|
|
107
|
+
*/
|
|
108
|
+
size(): number {
|
|
109
|
+
return this.batch.length;
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
/**
|
|
113
|
+
* Clear the batch without processing
|
|
114
|
+
*/
|
|
115
|
+
clear(): void {
|
|
116
|
+
if (this.timer) {
|
|
117
|
+
clearTimeout(this.timer);
|
|
118
|
+
this.timer = null;
|
|
119
|
+
}
|
|
120
|
+
this.batch = [];
|
|
121
|
+
|
|
122
|
+
// Reject all pending
|
|
123
|
+
for (const pending of this.pending) {
|
|
124
|
+
pending.reject(new Error("Batch cleared"));
|
|
125
|
+
}
|
|
126
|
+
this.pending = [];
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
/**
|
|
131
|
+
* Create a batched version of a function
|
|
132
|
+
*/
|
|
133
|
+
export function batch<T, R>(
|
|
134
|
+
fn: (items: T[]) => Promise<R[]>,
|
|
135
|
+
options: BatchOptions
|
|
136
|
+
): (item: T) => Promise<R> {
|
|
137
|
+
const accumulator = new BatchAccumulator(fn, options);
|
|
138
|
+
return (item: T) => accumulator.add(item);
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
/**
|
|
142
|
+
* Batch array into chunks
|
|
143
|
+
*/
|
|
144
|
+
export function chunk<T>(array: T[], size: number): T[][] {
|
|
145
|
+
const chunks: T[][] = [];
|
|
146
|
+
for (let i = 0; i < array.length; i += size) {
|
|
147
|
+
chunks.push(array.slice(i, i + size));
|
|
148
|
+
}
|
|
149
|
+
return chunks;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
/**
|
|
153
|
+
* Process array in batches with delay between batches
|
|
154
|
+
*/
|
|
155
|
+
export async function processBatches<T, R>(
|
|
156
|
+
items: T[],
|
|
157
|
+
processor: (batch: T[]) => Promise<R[]>,
|
|
158
|
+
options: { batchSize: number; delayMs?: number }
|
|
159
|
+
): Promise<R[]> {
|
|
160
|
+
const batches = chunk(items, options.batchSize);
|
|
161
|
+
const results: R[] = [];
|
|
162
|
+
|
|
163
|
+
for (let i = 0; i < batches.length; i++) {
|
|
164
|
+
const batchResults = await processor(batches[i]);
|
|
165
|
+
results.push(...batchResults);
|
|
166
|
+
|
|
167
|
+
// Add delay between batches (except after last batch)
|
|
168
|
+
if (i < batches.length - 1 && options.delayMs) {
|
|
169
|
+
await new Promise((resolve) => setTimeout(resolve, options.delayMs));
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
return results;
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
/**
|
|
177
|
+
* Batch write operations with auto-flush
|
|
178
|
+
*/
|
|
179
|
+
export class BatchWriter<T> {
|
|
180
|
+
private accumulator: BatchAccumulator<T, void>;
|
|
181
|
+
|
|
182
|
+
constructor(writer: (items: T[]) => Promise<void>, options: BatchOptions) {
|
|
183
|
+
this.accumulator = new BatchAccumulator(async (items) => {
|
|
184
|
+
await writer(items);
|
|
185
|
+
return new Array(items.length).fill(undefined);
|
|
186
|
+
}, options);
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
async write(item: T): Promise<void> {
|
|
190
|
+
return this.accumulator.add(item);
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
async flush(): Promise<void> {
|
|
194
|
+
return this.accumulator.flush();
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
size(): number {
|
|
198
|
+
return this.accumulator.size();
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
clear(): void {
|
|
202
|
+
this.accumulator.clear();
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
/**
|
|
207
|
+
* Group items by a key function and process in batches
|
|
208
|
+
*/
|
|
209
|
+
export async function batchByKey<T, R>(
|
|
210
|
+
items: T[],
|
|
211
|
+
keyFn: (item: T) => string,
|
|
212
|
+
processor: (key: string, items: T[]) => Promise<R[]>,
|
|
213
|
+
options?: { concurrency?: number }
|
|
214
|
+
): Promise<R[]> {
|
|
215
|
+
const grouped = new Map<string, T[]>();
|
|
216
|
+
|
|
217
|
+
// Group items by key
|
|
218
|
+
for (const item of items) {
|
|
219
|
+
const key = keyFn(item);
|
|
220
|
+
if (!grouped.has(key)) {
|
|
221
|
+
grouped.set(key, []);
|
|
222
|
+
}
|
|
223
|
+
grouped.get(key)!.push(item);
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
// Process each group
|
|
227
|
+
const results: R[] = [];
|
|
228
|
+
const entries = Array.from(grouped.entries());
|
|
229
|
+
|
|
230
|
+
if (options?.concurrency) {
|
|
231
|
+
// Process with concurrency limit
|
|
232
|
+
for (let i = 0; i < entries.length; i += options.concurrency) {
|
|
233
|
+
const batch = entries.slice(i, i + options.concurrency);
|
|
234
|
+
const batchResults = await Promise.all(
|
|
235
|
+
batch.map(([key, items]) => processor(key, items))
|
|
236
|
+
);
|
|
237
|
+
results.push(...batchResults.flat());
|
|
238
|
+
}
|
|
239
|
+
} else {
|
|
240
|
+
// Process sequentially
|
|
241
|
+
for (const [key, keyItems] of entries) {
|
|
242
|
+
const batchResults = await processor(key, keyItems);
|
|
243
|
+
results.push(...batchResults);
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
return results;
|
|
248
|
+
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { describe, it, expect, vi } from 'vitest';
import { circuitBreaker } from './circuit-breaker.js';

describe('Circuit Breaker', () => {
  // The breaker must trip OPEN after `threshold` consecutive failures and
  // then reject further calls without invoking the wrapped handler.
  it('should open after threshold is reached', async () => {
    let calls = 0;
    // Handler that always fails, counting invocations
    const handler = async () => {
      calls++;
      throw new Error('fail');
    };

    const cb = circuitBreaker({ threshold: 2, timeout: 1000 }, handler);

    // First failure
    await expect(cb({} as any)).rejects.toThrow('fail');
    expect(calls).toBe(1);

    // Second failure -> Should open
    await expect(cb({} as any)).rejects.toThrow('fail');
    expect(calls).toBe(2);

    // Third call -> Should be rejected immediately without calling handler
    await expect(cb({} as any)).rejects.toThrow('Circuit Breaker is OPEN');
    expect(calls).toBe(2);
  });

  // After `timeout` ms in the OPEN state, the next call is allowed
  // through as a HALF_OPEN trial; its success closes the breaker.
  it('should transition to half-open after timeout', async () => {
    vi.useFakeTimers();
    let calls = 0;
    // Fails twice (tripping the breaker), then succeeds
    const handler = async () => {
      calls++;
      if (calls > 2) return 'success';
      throw new Error('fail');
    };

    const cb = circuitBreaker({ threshold: 2, timeout: 1000 }, handler);

    await expect(cb({} as any)).rejects.toThrow('fail');
    await expect(cb({} as any)).rejects.toThrow('fail');
    await expect(cb({} as any)).rejects.toThrow('Circuit Breaker is OPEN');

    // Fast forward past the cool-down window (fake timers also mock Date.now)
    await vi.advanceTimersByTimeAsync(1001);

    // Should call handler again (half-open)
    const result = await cb({} as any);
    expect(result).toBe('success');
    expect(calls).toBe(3);

    vi.useRealTimers();
  });
});
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { Job } from '../queue/types.js';
|
|
2
|
+
|
|
3
|
+
/**
 * Configuration for the circuit breaker wrapper.
 */
export interface CircuitBreakerOptions {
  // Number of failures required to trip the breaker OPEN
  threshold: number;
  // How long (ms) the breaker stays OPEN before allowing a trial call
  timeout: number;
  // NOTE(review): declared but never read by circuitBreaker() — confirm
  // whether it should drive an additional reset behavior or be removed
  resetTimeout?: number;
}

// Breaker lifecycle: CLOSED (normal), OPEN (rejecting), HALF_OPEN (trial call)
export type CircuitState = 'CLOSED' | 'OPEN' | 'HALF_OPEN';
|
|
10
|
+
|
|
11
|
+
export function circuitBreaker<T>(
|
|
12
|
+
options: CircuitBreakerOptions,
|
|
13
|
+
handler: (job: Job<T>) => Promise<any>
|
|
14
|
+
) {
|
|
15
|
+
let state: CircuitState = 'CLOSED';
|
|
16
|
+
let failures = 0;
|
|
17
|
+
let lastFailureTime = 0;
|
|
18
|
+
let lastSuccessTime = 0;
|
|
19
|
+
|
|
20
|
+
return async (job: Job<T>) => {
|
|
21
|
+
const now = Date.now();
|
|
22
|
+
|
|
23
|
+
if (state === 'OPEN') {
|
|
24
|
+
if (now - lastFailureTime > options.timeout) {
|
|
25
|
+
state = 'HALF_OPEN';
|
|
26
|
+
} else {
|
|
27
|
+
throw new Error('Circuit Breaker is OPEN');
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
try {
|
|
32
|
+
const result = await handler(job);
|
|
33
|
+
|
|
34
|
+
if (state === 'HALF_OPEN') {
|
|
35
|
+
state = 'CLOSED';
|
|
36
|
+
failures = 0;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
lastSuccessTime = now;
|
|
40
|
+
return result;
|
|
41
|
+
} catch (err) {
|
|
42
|
+
failures++;
|
|
43
|
+
lastFailureTime = now;
|
|
44
|
+
|
|
45
|
+
if (state === 'HALF_OPEN' || failures >= options.threshold) {
|
|
46
|
+
state = 'OPEN';
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
throw err;
|
|
50
|
+
}
|
|
51
|
+
};
|
|
52
|
+
}
|