flowfn 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +1305 -0
- package/dist/index.d.ts +1305 -0
- package/dist/index.js +3180 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +3088 -0
- package/dist/index.mjs.map +1 -0
- package/docs/API.md +801 -0
- package/docs/USAGE.md +619 -0
- package/package.json +75 -0
- package/src/adapters/base.ts +46 -0
- package/src/adapters/memory.ts +183 -0
- package/src/adapters/postgres/index.ts +383 -0
- package/src/adapters/postgres/postgres.test.ts +100 -0
- package/src/adapters/postgres/schema.ts +110 -0
- package/src/adapters/redis.test.ts +124 -0
- package/src/adapters/redis.ts +331 -0
- package/src/core/flow-fn.test.ts +70 -0
- package/src/core/flow-fn.ts +198 -0
- package/src/core/metrics.ts +198 -0
- package/src/core/scheduler.test.ts +80 -0
- package/src/core/scheduler.ts +154 -0
- package/src/index.ts +57 -0
- package/src/monitoring/health.ts +261 -0
- package/src/patterns/backoff.ts +30 -0
- package/src/patterns/batching.ts +248 -0
- package/src/patterns/circuit-breaker.test.ts +52 -0
- package/src/patterns/circuit-breaker.ts +52 -0
- package/src/patterns/priority.ts +146 -0
- package/src/patterns/rate-limit.ts +290 -0
- package/src/patterns/retry.test.ts +62 -0
- package/src/queue/batch.test.ts +35 -0
- package/src/queue/dependencies.test.ts +33 -0
- package/src/queue/dlq.ts +222 -0
- package/src/queue/job.ts +67 -0
- package/src/queue/queue.ts +243 -0
- package/src/queue/types.ts +153 -0
- package/src/queue/worker.ts +66 -0
- package/src/storage/event-log.ts +205 -0
- package/src/storage/job-storage.ts +206 -0
- package/src/storage/workflow-storage.ts +182 -0
- package/src/stream/stream.ts +194 -0
- package/src/stream/types.ts +81 -0
- package/src/utils/hashing.ts +29 -0
- package/src/utils/id-generator.ts +109 -0
- package/src/utils/serialization.ts +142 -0
- package/src/utils/time.ts +167 -0
- package/src/workflow/advanced.test.ts +43 -0
- package/src/workflow/events.test.ts +39 -0
- package/src/workflow/types.ts +132 -0
- package/src/workflow/workflow.test.ts +55 -0
- package/src/workflow/workflow.ts +422 -0
- package/tests/dlq.test.ts +205 -0
- package/tests/health.test.ts +228 -0
- package/tests/integration.test.ts +253 -0
- package/tests/stream.test.ts +233 -0
- package/tests/workflow.test.ts +286 -0
- package/tsconfig.json +17 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +15 -0
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Priority queue pattern for FlowFn
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
export interface PriorityItem<T> {
|
|
6
|
+
value: T;
|
|
7
|
+
priority: number;
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Priority queue implementation using binary heap
|
|
12
|
+
*/
|
|
13
|
+
export class PriorityQueue<T> {
|
|
14
|
+
private heap: PriorityItem<T>[] = [];
|
|
15
|
+
private compareFn: (a: number, b: number) => number;
|
|
16
|
+
|
|
17
|
+
constructor(mode: "min" | "max" = "max") {
|
|
18
|
+
// Max heap by default (higher priority first)
|
|
19
|
+
this.compareFn =
|
|
20
|
+
mode === "max"
|
|
21
|
+
? (a, b) => a - b // Max heap: parent >= children
|
|
22
|
+
: (a, b) => b - a; // Min heap: parent <= children
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Add item with priority
|
|
27
|
+
*/
|
|
28
|
+
enqueue(value: T, priority: number): void {
|
|
29
|
+
this.heap.push({ value, priority });
|
|
30
|
+
this.bubbleUp(this.heap.length - 1);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/**
|
|
34
|
+
* Remove and return highest priority item
|
|
35
|
+
*/
|
|
36
|
+
dequeue(): T | undefined {
|
|
37
|
+
if (this.heap.length === 0) return undefined;
|
|
38
|
+
if (this.heap.length === 1) return this.heap.pop()!.value;
|
|
39
|
+
|
|
40
|
+
const result = this.heap[0];
|
|
41
|
+
this.heap[0] = this.heap.pop()!;
|
|
42
|
+
this.bubbleDown(0);
|
|
43
|
+
|
|
44
|
+
return result.value;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Peek at highest priority item without removing
|
|
49
|
+
*/
|
|
50
|
+
peek(): T | undefined {
|
|
51
|
+
return this.heap[0]?.value;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Get queue size
|
|
56
|
+
*/
|
|
57
|
+
size(): number {
|
|
58
|
+
return this.heap.length;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* Check if queue is empty
|
|
63
|
+
*/
|
|
64
|
+
isEmpty(): boolean {
|
|
65
|
+
return this.heap.length === 0;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Clear the queue
|
|
70
|
+
*/
|
|
71
|
+
clear(): void {
|
|
72
|
+
this.heap = [];
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
/**
|
|
76
|
+
* Convert to array (sorted by priority)
|
|
77
|
+
*/
|
|
78
|
+
toArray(): T[] {
|
|
79
|
+
const copy = [...this.heap];
|
|
80
|
+
const result: T[] = [];
|
|
81
|
+
|
|
82
|
+
while (this.heap.length > 0) {
|
|
83
|
+
result.push(this.dequeue()!);
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
this.heap = copy;
|
|
87
|
+
return result;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
private bubbleUp(index: number): void {
|
|
91
|
+
while (index > 0) {
|
|
92
|
+
const parentIndex = Math.floor((index - 1) / 2);
|
|
93
|
+
|
|
94
|
+
if (
|
|
95
|
+
this.compareFn(
|
|
96
|
+
this.heap[index].priority,
|
|
97
|
+
this.heap[parentIndex].priority
|
|
98
|
+
) <= 0
|
|
99
|
+
) {
|
|
100
|
+
break;
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
[this.heap[index], this.heap[parentIndex]] = [
|
|
104
|
+
this.heap[parentIndex],
|
|
105
|
+
this.heap[index],
|
|
106
|
+
];
|
|
107
|
+
index = parentIndex;
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
private bubbleDown(index: number): void {
|
|
112
|
+
while (true) {
|
|
113
|
+
const leftChild = 2 * index + 1;
|
|
114
|
+
const rightChild = 2 * index + 2;
|
|
115
|
+
let largest = index;
|
|
116
|
+
|
|
117
|
+
if (
|
|
118
|
+
leftChild < this.heap.length &&
|
|
119
|
+
this.compareFn(
|
|
120
|
+
this.heap[leftChild].priority,
|
|
121
|
+
this.heap[largest].priority
|
|
122
|
+
) > 0
|
|
123
|
+
) {
|
|
124
|
+
largest = leftChild;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
if (
|
|
128
|
+
rightChild < this.heap.length &&
|
|
129
|
+
this.compareFn(
|
|
130
|
+
this.heap[rightChild].priority,
|
|
131
|
+
this.heap[largest].priority
|
|
132
|
+
) > 0
|
|
133
|
+
) {
|
|
134
|
+
largest = rightChild;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
if (largest === index) break;
|
|
138
|
+
|
|
139
|
+
[this.heap[index], this.heap[largest]] = [
|
|
140
|
+
this.heap[largest],
|
|
141
|
+
this.heap[index],
|
|
142
|
+
];
|
|
143
|
+
index = largest;
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
}
|
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Rate limiting pattern for FlowFn
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
export interface RateLimitOptions {
|
|
6
|
+
/**
|
|
7
|
+
* Maximum number of requests allowed
|
|
8
|
+
*/
|
|
9
|
+
limit: number;
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Time window in milliseconds
|
|
13
|
+
*/
|
|
14
|
+
window: number;
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Strategy for handling rate limit exceeded
|
|
18
|
+
*/
|
|
19
|
+
strategy?: "throw" | "delay" | "drop";
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* Custom key generator for partitioning rate limits
|
|
23
|
+
*/
|
|
24
|
+
keyGenerator?: (...args: any[]) => string;
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export interface RateLimitResult {
|
|
28
|
+
allowed: boolean;
|
|
29
|
+
remaining: number;
|
|
30
|
+
resetAt: number;
|
|
31
|
+
retryAfter?: number;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export class RateLimiter {
|
|
35
|
+
private counters: Map<string, { count: number; resetAt: number }> = new Map();
|
|
36
|
+
private options: RateLimitOptions;
|
|
37
|
+
|
|
38
|
+
constructor(options: RateLimitOptions) {
|
|
39
|
+
this.options = {
|
|
40
|
+
strategy: "throw",
|
|
41
|
+
...options,
|
|
42
|
+
};
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* Check if request is allowed
|
|
47
|
+
*/
|
|
48
|
+
async check(key: string = "default"): Promise<RateLimitResult> {
|
|
49
|
+
const now = Date.now();
|
|
50
|
+
let counter = this.counters.get(key);
|
|
51
|
+
|
|
52
|
+
// Reset counter if window has passed
|
|
53
|
+
if (!counter || now >= counter.resetAt) {
|
|
54
|
+
counter = {
|
|
55
|
+
count: 0,
|
|
56
|
+
resetAt: now + this.options.window,
|
|
57
|
+
};
|
|
58
|
+
this.counters.set(key, counter);
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
const allowed = counter.count < this.options.limit;
|
|
62
|
+
const remaining = Math.max(0, this.options.limit - counter.count);
|
|
63
|
+
const retryAfter = allowed ? undefined : counter.resetAt - now;
|
|
64
|
+
|
|
65
|
+
if (allowed) {
|
|
66
|
+
counter.count++;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
return {
|
|
70
|
+
allowed,
|
|
71
|
+
remaining,
|
|
72
|
+
resetAt: counter.resetAt,
|
|
73
|
+
retryAfter,
|
|
74
|
+
};
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Execute a function with rate limiting
|
|
79
|
+
*/
|
|
80
|
+
async execute<T>(fn: () => Promise<T>, key: string = "default"): Promise<T> {
|
|
81
|
+
const result = await this.check(key);
|
|
82
|
+
|
|
83
|
+
if (!result.allowed) {
|
|
84
|
+
switch (this.options.strategy) {
|
|
85
|
+
case "throw":
|
|
86
|
+
throw new Error(
|
|
87
|
+
`Rate limit exceeded. Retry after ${result.retryAfter}ms`
|
|
88
|
+
);
|
|
89
|
+
|
|
90
|
+
case "delay":
|
|
91
|
+
await new Promise((resolve) =>
|
|
92
|
+
setTimeout(resolve, result.retryAfter)
|
|
93
|
+
);
|
|
94
|
+
return this.execute(fn, key);
|
|
95
|
+
|
|
96
|
+
case "drop":
|
|
97
|
+
throw new Error("Request dropped due to rate limit");
|
|
98
|
+
|
|
99
|
+
default:
|
|
100
|
+
throw new Error("Unknown rate limit strategy");
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
return fn();
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Clear all rate limit counters
|
|
109
|
+
*/
|
|
110
|
+
reset(): void {
|
|
111
|
+
this.counters.clear();
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* Clear rate limit counter for specific key
|
|
116
|
+
*/
|
|
117
|
+
resetKey(key: string): void {
|
|
118
|
+
this.counters.delete(key);
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
/**
|
|
122
|
+
* Get current limit info for a key
|
|
123
|
+
*/
|
|
124
|
+
getInfo(key: string = "default"): RateLimitResult {
|
|
125
|
+
const now = Date.now();
|
|
126
|
+
const counter = this.counters.get(key);
|
|
127
|
+
|
|
128
|
+
if (!counter || now >= counter.resetAt) {
|
|
129
|
+
return {
|
|
130
|
+
allowed: true,
|
|
131
|
+
remaining: this.options.limit,
|
|
132
|
+
resetAt: now + this.options.window,
|
|
133
|
+
};
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
const allowed = counter.count < this.options.limit;
|
|
137
|
+
const remaining = Math.max(0, this.options.limit - counter.count);
|
|
138
|
+
|
|
139
|
+
return {
|
|
140
|
+
allowed,
|
|
141
|
+
remaining,
|
|
142
|
+
resetAt: counter.resetAt,
|
|
143
|
+
retryAfter: allowed ? undefined : counter.resetAt - now,
|
|
144
|
+
};
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
/**
|
|
149
|
+
* Create a rate limiter
|
|
150
|
+
*/
|
|
151
|
+
export function createRateLimiter(options: RateLimitOptions): RateLimiter {
|
|
152
|
+
return new RateLimiter(options);
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
/**
|
|
156
|
+
* Sliding window rate limiter (more accurate)
|
|
157
|
+
*/
|
|
158
|
+
export class SlidingWindowRateLimiter {
|
|
159
|
+
private requests: Map<string, number[]> = new Map();
|
|
160
|
+
private options: RateLimitOptions;
|
|
161
|
+
|
|
162
|
+
constructor(options: RateLimitOptions) {
|
|
163
|
+
this.options = options;
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
async check(key: string = "default"): Promise<RateLimitResult> {
|
|
167
|
+
const now = Date.now();
|
|
168
|
+
const windowStart = now - this.options.window;
|
|
169
|
+
|
|
170
|
+
let requests = this.requests.get(key) || [];
|
|
171
|
+
|
|
172
|
+
// Remove old requests outside the window
|
|
173
|
+
requests = requests.filter((timestamp) => timestamp > windowStart);
|
|
174
|
+
this.requests.set(key, requests);
|
|
175
|
+
|
|
176
|
+
const allowed = requests.length < this.options.limit;
|
|
177
|
+
const remaining = Math.max(0, this.options.limit - requests.length);
|
|
178
|
+
|
|
179
|
+
if (allowed) {
|
|
180
|
+
requests.push(now);
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
const oldestRequest = requests[0];
|
|
184
|
+
const resetAt = oldestRequest
|
|
185
|
+
? oldestRequest + this.options.window
|
|
186
|
+
: now + this.options.window;
|
|
187
|
+
const retryAfter = allowed ? undefined : resetAt - now;
|
|
188
|
+
|
|
189
|
+
return {
|
|
190
|
+
allowed,
|
|
191
|
+
remaining,
|
|
192
|
+
resetAt,
|
|
193
|
+
retryAfter,
|
|
194
|
+
};
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
async execute<T>(fn: () => Promise<T>, key: string = "default"): Promise<T> {
|
|
198
|
+
const result = await this.check(key);
|
|
199
|
+
|
|
200
|
+
if (!result.allowed) {
|
|
201
|
+
switch (this.options.strategy || "throw") {
|
|
202
|
+
case "throw":
|
|
203
|
+
throw new Error(
|
|
204
|
+
`Rate limit exceeded. Retry after ${result.retryAfter}ms`
|
|
205
|
+
);
|
|
206
|
+
case "delay":
|
|
207
|
+
await new Promise((resolve) =>
|
|
208
|
+
setTimeout(resolve, result.retryAfter)
|
|
209
|
+
);
|
|
210
|
+
return this.execute(fn, key);
|
|
211
|
+
case "drop":
|
|
212
|
+
throw new Error("Request dropped due to rate limit");
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
return fn();
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
reset(): void {
|
|
220
|
+
this.requests.clear();
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
/**
|
|
225
|
+
* Token bucket rate limiter
|
|
226
|
+
*/
|
|
227
|
+
export class TokenBucketRateLimiter {
|
|
228
|
+
private buckets: Map<string, { tokens: number; lastRefill: number }> =
|
|
229
|
+
new Map();
|
|
230
|
+
private capacity: number;
|
|
231
|
+
private refillRate: number; // tokens per second
|
|
232
|
+
private refillInterval: number;
|
|
233
|
+
|
|
234
|
+
constructor(options: {
|
|
235
|
+
capacity: number;
|
|
236
|
+
refillRate: number;
|
|
237
|
+
refillInterval?: number;
|
|
238
|
+
}) {
|
|
239
|
+
this.capacity = options.capacity;
|
|
240
|
+
this.refillRate = options.refillRate;
|
|
241
|
+
this.refillInterval = options.refillInterval || 1000; // default 1 second
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
private refill(key: string): void {
|
|
245
|
+
const now = Date.now();
|
|
246
|
+
let bucket = this.buckets.get(key);
|
|
247
|
+
|
|
248
|
+
if (!bucket) {
|
|
249
|
+
bucket = { tokens: this.capacity, lastRefill: now };
|
|
250
|
+
this.buckets.set(key, bucket);
|
|
251
|
+
return;
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
const timePassed = now - bucket.lastRefill;
|
|
255
|
+
const intervals = timePassed / this.refillInterval;
|
|
256
|
+
const tokensToAdd = intervals * this.refillRate;
|
|
257
|
+
|
|
258
|
+
bucket.tokens = Math.min(this.capacity, bucket.tokens + tokensToAdd);
|
|
259
|
+
bucket.lastRefill = now;
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
async check(
|
|
263
|
+
key: string = "default",
|
|
264
|
+
cost: number = 1
|
|
265
|
+
): Promise<RateLimitResult> {
|
|
266
|
+
this.refill(key);
|
|
267
|
+
const bucket = this.buckets.get(key)!;
|
|
268
|
+
|
|
269
|
+
const allowed = bucket.tokens >= cost;
|
|
270
|
+
|
|
271
|
+
if (allowed) {
|
|
272
|
+
bucket.tokens -= cost;
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
return {
|
|
276
|
+
allowed,
|
|
277
|
+
remaining: Math.floor(bucket.tokens),
|
|
278
|
+
resetAt: bucket.lastRefill + this.refillInterval,
|
|
279
|
+
retryAfter: allowed
|
|
280
|
+
? undefined
|
|
281
|
+
: Math.ceil(
|
|
282
|
+
((cost - bucket.tokens) / this.refillRate) * this.refillInterval
|
|
283
|
+
),
|
|
284
|
+
};
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
reset(): void {
|
|
288
|
+
this.buckets.clear();
|
|
289
|
+
}
|
|
290
|
+
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { createFlow } from '../core/flow-fn.js';

describe('Retry & Backoff', () => {
  // Fresh in-memory flow per test; closed again in afterEach.
  let flow;

  beforeEach(() => {
    flow = createFlow({ adapter: 'memory' });
  });

  afterEach(async () => {
    await flow.close();
  });

  it('should retry a failed job with backoff', async () => {
    const queue = flow.queue('retry-test');
    let attempts = 0;

    // attempts: 3 allows the job to run up to three times; the fixed
    // backoff keeps a constant 10ms delay between retries.
    await queue.add('test-job', { foo: 'bar' }, {
      attempts: 3,
      backoff: { type: 'fixed', delay: 10 }
    });

    await new Promise<void>((resolve) => {
      queue.process(async (job) => {
        attempts++;
        // Fail the first two runs; the third succeeds and unblocks the test.
        if (attempts < 3) {
          throw new Error('Transient failure');
        }
        resolve();
      });
    });

    expect(attempts).toBe(3);
  });

  it('should move to failed after max attempts', async () => {
    const queue = flow.queue('fail-test');
    let attempts = 0;

    // Only 2 attempts permitted; the handler below always throws, so the
    // job should end up failed after the second run.
    await queue.add('test-job', { foo: 'bar' }, {
      attempts: 2,
      backoff: { type: 'fixed', delay: 10 }
    });

    // Resolves once the 'failed' event reports the job exhausted both
    // attempts (attemptsMade is presumably incremented per run — confirm
    // against the Job implementation).
    const failedPromise = new Promise<void>((resolve) => {
      queue.on('failed', (job) => {
        if (job.attemptsMade === 2) {
          resolve();
        }
      });
    });

    queue.process(async (job) => {
      attempts++;
      throw new Error('Permanent failure');
    });

    await failedPromise;
    expect(attempts).toBe(2);
  });
});
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { createFlow } from '../core/flow-fn.js';

describe('Batch Processing', () => {
  // Fresh in-memory flow per test; closed again in afterEach.
  let flow;

  beforeEach(() => {
    flow = createFlow({ adapter: 'memory' });
  });

  afterEach(async () => {
    await flow.close();
  });

  it('should process jobs in batches', async () => {
    const queue = flow.queue('batch-test');
    let processedBatches = 0;

    // Three jobs with a batch size of 2 should yield two batches:
    // [j1, j2] and the partial [j3] — presumably processBatch flushes a
    // final partial batch; verify against its implementation.
    await queue.addBulk([
      { name: 'j1', data: 1 },
      { name: 'j2', data: 2 },
      { name: 'j3', data: 3 }
    ]);

    await new Promise<void>((resolve) => {
      queue.processBatch('test', 2, async (jobs) => {
        processedBatches++;
        // Resolve once the second (final) batch arrives.
        if (processedBatches === 2) resolve();
        return jobs.map(j => j.data * 2);
      });
    });

    expect(processedBatches).toBe(2); // One batch of 2, one batch of 1
  });
});
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { createFlow } from '../core/flow-fn.js';

describe('Job Dependencies', () => {
  // Fresh in-memory flow per test; closed again in afterEach.
  let flow;

  beforeEach(() => {
    flow = createFlow({ adapter: 'memory' });
  });

  afterEach(async () => {
    await flow.close();
  });

  it('should wait for dependencies to complete', async () => {
    const queue = flow.queue('dep-test');
    let order: string[] = [];

    // job2 declares a dependency on job1 via waitFor, so it must not be
    // processed until job1 has completed.
    const job1 = await queue.add('job1', { id: 1 });
    const job2 = await queue.add('job2', { id: 2 }, { waitFor: [job1.id] });

    // Start processing
    queue.process(async (job) => {
      order.push(job.name);
      return { done: true };
    });

    // Wait for both to complete
    // NOTE(review): a fixed 500ms sleep is timing-based and could be flaky
    // on slow machines — prefer awaiting a completion event from the queue
    // if one is emitted.
    await new Promise(r => setTimeout(r, 500));

    expect(order).toEqual(['job1', 'job2']);
  });
});
|