flowfn 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +1305 -0
- package/dist/index.d.ts +1305 -0
- package/dist/index.js +3180 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +3088 -0
- package/dist/index.mjs.map +1 -0
- package/docs/API.md +801 -0
- package/docs/USAGE.md +619 -0
- package/package.json +75 -0
- package/src/adapters/base.ts +46 -0
- package/src/adapters/memory.ts +183 -0
- package/src/adapters/postgres/index.ts +383 -0
- package/src/adapters/postgres/postgres.test.ts +100 -0
- package/src/adapters/postgres/schema.ts +110 -0
- package/src/adapters/redis.test.ts +124 -0
- package/src/adapters/redis.ts +331 -0
- package/src/core/flow-fn.test.ts +70 -0
- package/src/core/flow-fn.ts +198 -0
- package/src/core/metrics.ts +198 -0
- package/src/core/scheduler.test.ts +80 -0
- package/src/core/scheduler.ts +154 -0
- package/src/index.ts +57 -0
- package/src/monitoring/health.ts +261 -0
- package/src/patterns/backoff.ts +30 -0
- package/src/patterns/batching.ts +248 -0
- package/src/patterns/circuit-breaker.test.ts +52 -0
- package/src/patterns/circuit-breaker.ts +52 -0
- package/src/patterns/priority.ts +146 -0
- package/src/patterns/rate-limit.ts +290 -0
- package/src/patterns/retry.test.ts +62 -0
- package/src/queue/batch.test.ts +35 -0
- package/src/queue/dependencies.test.ts +33 -0
- package/src/queue/dlq.ts +222 -0
- package/src/queue/job.ts +67 -0
- package/src/queue/queue.ts +243 -0
- package/src/queue/types.ts +153 -0
- package/src/queue/worker.ts +66 -0
- package/src/storage/event-log.ts +205 -0
- package/src/storage/job-storage.ts +206 -0
- package/src/storage/workflow-storage.ts +182 -0
- package/src/stream/stream.ts +194 -0
- package/src/stream/types.ts +81 -0
- package/src/utils/hashing.ts +29 -0
- package/src/utils/id-generator.ts +109 -0
- package/src/utils/serialization.ts +142 -0
- package/src/utils/time.ts +167 -0
- package/src/workflow/advanced.test.ts +43 -0
- package/src/workflow/events.test.ts +39 -0
- package/src/workflow/types.ts +132 -0
- package/src/workflow/workflow.test.ts +55 -0
- package/src/workflow/workflow.ts +422 -0
- package/tests/dlq.test.ts +205 -0
- package/tests/health.test.ts +228 -0
- package/tests/integration.test.ts +253 -0
- package/tests/stream.test.ts +233 -0
- package/tests/workflow.test.ts +286 -0
- package/tsconfig.json +17 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +15 -0
package/docs/USAGE.md
ADDED
|
@@ -0,0 +1,619 @@
|
|
|
1
|
+
# FlowFn Usage Guide
|
|
2
|
+
|
|
3
|
+
Practical guide to building with FlowFn.
|
|
4
|
+
|
|
5
|
+
## Quick Start
|
|
6
|
+
|
|
7
|
+
### Installation
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install flowfn
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
### Basic Setup
|
|
14
|
+
|
|
15
|
+
```typescript
|
|
16
|
+
import { createFlow } from "flowfn";
|
|
17
|
+
|
|
18
|
+
const flow = createFlow({
|
|
19
|
+
adapter: "memory", // or 'redis', 'postgres'
|
|
20
|
+
});
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
---
|
|
24
|
+
|
|
25
|
+
## Common Patterns
|
|
26
|
+
|
|
27
|
+
### 1. Background Job Processing
|
|
28
|
+
|
|
29
|
+
```typescript
|
|
30
|
+
const emailQueue = flow.queue("emails");
|
|
31
|
+
|
|
32
|
+
// Producer: Add jobs
|
|
33
|
+
await emailQueue.add("send-welcome", {
|
|
34
|
+
to: "user@example.com",
|
|
35
|
+
template: "welcome",
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
// Consumer: Process jobs
|
|
39
|
+
emailQueue.process(async (job) => {
|
|
40
|
+
await sendEmail(job.data.to, job.data.template);
|
|
41
|
+
return { sent: true };
|
|
42
|
+
});
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
### 2. Job with Retries
|
|
46
|
+
|
|
47
|
+
```typescript
|
|
48
|
+
await queue.add(
|
|
49
|
+
"api-call",
|
|
50
|
+
{ url: "/api/data" },
|
|
51
|
+
{
|
|
52
|
+
attempts: 5,
|
|
53
|
+
backoff: {
|
|
54
|
+
type: "exponential",
|
|
55
|
+
delay: 1000,
|
|
56
|
+
maxDelay: 60000,
|
|
57
|
+
},
|
|
58
|
+
}
|
|
59
|
+
);
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
### 3. Scheduled Jobs
|
|
63
|
+
|
|
64
|
+
```typescript
|
|
65
|
+
await queue.add(
|
|
66
|
+
"daily-report",
|
|
67
|
+
{},
|
|
68
|
+
{
|
|
69
|
+
delay: calculateDelayUntilMidnight(),
|
|
70
|
+
}
|
|
71
|
+
);
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
### 4. Job Dependencies
|
|
75
|
+
|
|
76
|
+
```typescript
|
|
77
|
+
const job1 = await queue.add("fetch-data", {});
|
|
78
|
+
const job2 = await queue.add(
|
|
79
|
+
"process-data",
|
|
80
|
+
{},
|
|
81
|
+
{
|
|
82
|
+
waitFor: [job1.id], // Waits for job1 to complete
|
|
83
|
+
}
|
|
84
|
+
);
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
### 5. Batch Processing
|
|
88
|
+
|
|
89
|
+
```typescript
|
|
90
|
+
queue.processBatch(
|
|
91
|
+
"bulk-import",
|
|
92
|
+
{
|
|
93
|
+
batchSize: 100,
|
|
94
|
+
maxWait: 5000,
|
|
95
|
+
},
|
|
96
|
+
async (jobs) => {
|
|
97
|
+
const data = jobs.map((j) => j.data);
|
|
98
|
+
await bulkInsert(data);
|
|
99
|
+
return jobs.map(() => ({ success: true }));
|
|
100
|
+
}
|
|
101
|
+
);
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
---
|
|
105
|
+
|
|
106
|
+
## Event Streaming
|
|
107
|
+
|
|
108
|
+
### 1. Pub/Sub Pattern
|
|
109
|
+
|
|
110
|
+
```typescript
|
|
111
|
+
const events = flow.stream("user-events");
|
|
112
|
+
|
|
113
|
+
// Publisher
|
|
114
|
+
await events.publish({
|
|
115
|
+
type: "user.created",
|
|
116
|
+
userId: "123",
|
|
117
|
+
timestamp: Date.now(),
|
|
118
|
+
});
|
|
119
|
+
|
|
120
|
+
// Subscriber
|
|
121
|
+
await events.subscribe(async (message) => {
|
|
122
|
+
console.log("Event:", message.data);
|
|
123
|
+
await message.ack();
|
|
124
|
+
});
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
### 2. Consumer Groups
|
|
128
|
+
|
|
129
|
+
```typescript
|
|
130
|
+
// Multiple workers processing same stream
|
|
131
|
+
const consumer1 = events.createConsumer("worker-1", {
|
|
132
|
+
groupId: "processors",
|
|
133
|
+
});
|
|
134
|
+
|
|
135
|
+
const consumer2 = events.createConsumer("worker-2", {
|
|
136
|
+
groupId: "processors",
|
|
137
|
+
});
|
|
138
|
+
|
|
139
|
+
// Each message goes to only one worker
|
|
140
|
+
await consumer1.subscribe(handleEvent);
|
|
141
|
+
await consumer2.subscribe(handleEvent);
|
|
142
|
+
```
|
|
143
|
+
|
|
144
|
+
### 3. Event Replay
|
|
145
|
+
|
|
146
|
+
```typescript
|
|
147
|
+
// Replay all historical events
|
|
148
|
+
const consumer = stream.createConsumer("replayer", {
|
|
149
|
+
groupId: "replay-group",
|
|
150
|
+
fromBeginning: true, // Start from first message
|
|
151
|
+
});
|
|
152
|
+
|
|
153
|
+
await consumer.subscribe(async (msg) => {
|
|
154
|
+
await reprocessEvent(msg.data);
|
|
155
|
+
await msg.ack();
|
|
156
|
+
});
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
### 4. Stream Retention
|
|
160
|
+
|
|
161
|
+
```typescript
|
|
162
|
+
const stream = flow.stream("events", {
|
|
163
|
+
maxLength: 10000, // Keep only newest 10k messages
|
|
164
|
+
retention: 86400000, // 24 hours in ms
|
|
165
|
+
});
|
|
166
|
+
|
|
167
|
+
// Manual trim
|
|
168
|
+
await stream.trim({
|
|
169
|
+
maxLength: 5000,
|
|
170
|
+
maxAgeSeconds: 3600,
|
|
171
|
+
});
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
---
|
|
175
|
+
|
|
176
|
+
## Workflow Orchestration
|
|
177
|
+
|
|
178
|
+
### 1. Simple Workflow
|
|
179
|
+
|
|
180
|
+
```typescript
|
|
181
|
+
const orderFlow = flow
|
|
182
|
+
.workflow<Order>("process-order")
|
|
183
|
+
.step("validate", async (ctx) => {
|
|
184
|
+
const valid = await validateOrder(ctx.input);
|
|
185
|
+
if (!valid) throw new Error("Invalid order");
|
|
186
|
+
ctx.set("validated", true);
|
|
187
|
+
})
|
|
188
|
+
.step("charge", async (ctx) => {
|
|
189
|
+
const charge = await chargePayment(ctx.input);
|
|
190
|
+
ctx.set("chargeId", charge.id);
|
|
191
|
+
})
|
|
192
|
+
.step("fulfill", async (ctx) => {
|
|
193
|
+
await fulfillOrder(ctx.input, ctx.get("chargeId"));
|
|
194
|
+
})
|
|
195
|
+
.build();
|
|
196
|
+
|
|
197
|
+
const execution = await orderFlow.execute({
|
|
198
|
+
orderId: "123",
|
|
199
|
+
amount: 99.99,
|
|
200
|
+
});
|
|
201
|
+
```
|
|
202
|
+
|
|
203
|
+
### 2. Parallel Steps
|
|
204
|
+
|
|
205
|
+
```typescript
|
|
206
|
+
const workflow = flow
|
|
207
|
+
.workflow("notify-user")
|
|
208
|
+
.parallel([
|
|
209
|
+
async (ctx) => await sendEmail(ctx.input),
|
|
210
|
+
async (ctx) => await sendSMS(ctx.input),
|
|
211
|
+
async (ctx) => await sendPushNotification(ctx.input),
|
|
212
|
+
])
|
|
213
|
+
.build();
|
|
214
|
+
```
|
|
215
|
+
|
|
216
|
+
### 3. Conditional Logic
|
|
217
|
+
|
|
218
|
+
```typescript
|
|
219
|
+
const premiumBranch = flow
|
|
220
|
+
.workflow("premium")
|
|
221
|
+
.step("premium-features", async (ctx) => {});
|
|
222
|
+
|
|
223
|
+
const standardBranch = flow
|
|
224
|
+
.workflow("standard")
|
|
225
|
+
.step("standard-features", async (ctx) => {});
|
|
226
|
+
|
|
227
|
+
const workflow = flow
|
|
228
|
+
.workflow("user-onboarding")
|
|
229
|
+
.step("create-account", async (ctx) => {})
|
|
230
|
+
.branch({
|
|
231
|
+
condition: (ctx) => ctx.input.subscription === "premium",
|
|
232
|
+
then: premiumBranch,
|
|
233
|
+
else: standardBranch,
|
|
234
|
+
})
|
|
235
|
+
.build();
|
|
236
|
+
```
|
|
237
|
+
|
|
238
|
+
### 4. Saga Pattern (with Compensation)
|
|
239
|
+
|
|
240
|
+
```typescript
|
|
241
|
+
const workflow = flow
|
|
242
|
+
.workflow("booking")
|
|
243
|
+
.saga("reserve-flight", {
|
|
244
|
+
execute: async (ctx) => {
|
|
245
|
+
const booking = await reserveFlight(ctx.input);
|
|
246
|
+
ctx.set("flightBooking", booking.id);
|
|
247
|
+
},
|
|
248
|
+
compensate: async (ctx) => {
|
|
249
|
+
await cancelFlight(ctx.get("flightBooking"));
|
|
250
|
+
},
|
|
251
|
+
})
|
|
252
|
+
.saga("reserve-hotel", {
|
|
253
|
+
execute: async (ctx) => {
|
|
254
|
+
const booking = await reserveHotel(ctx.input);
|
|
255
|
+
ctx.set("hotelBooking", booking.id);
|
|
256
|
+
},
|
|
257
|
+
compensate: async (ctx) => {
|
|
258
|
+
await cancelHotel(ctx.get("hotelBooking"));
|
|
259
|
+
},
|
|
260
|
+
})
|
|
261
|
+
.saga("charge-payment", {
|
|
262
|
+
execute: async (ctx) => {
|
|
263
|
+
const charge = await chargeCard(ctx.input);
|
|
264
|
+
ctx.set("chargeId", charge.id);
|
|
265
|
+
},
|
|
266
|
+
compensate: async (ctx) => {
|
|
267
|
+
await refundCharge(ctx.get("chargeId"));
|
|
268
|
+
},
|
|
269
|
+
})
|
|
270
|
+
.build();
|
|
271
|
+
|
|
272
|
+
// If any step fails, compensations run in reverse order
|
|
273
|
+
```
|
|
274
|
+
|
|
275
|
+
### 5. Workflow Management
|
|
276
|
+
|
|
277
|
+
```typescript
|
|
278
|
+
// List executions
|
|
279
|
+
const executions = await workflow.listExecutions({
|
|
280
|
+
status: "running",
|
|
281
|
+
limit: 10,
|
|
282
|
+
});
|
|
283
|
+
|
|
284
|
+
// Cancel execution
|
|
285
|
+
await workflow.cancelExecution(executionId);
|
|
286
|
+
|
|
287
|
+
// Retry failed execution
|
|
288
|
+
const retried = await workflow.retryExecution(failedExecutionId);
|
|
289
|
+
|
|
290
|
+
// Get history
|
|
291
|
+
const history = await workflow.getExecutionHistory(executionId);
|
|
292
|
+
|
|
293
|
+
// Get metrics
|
|
294
|
+
const metrics = await workflow.getMetrics();
|
|
295
|
+
// { totalExecutions, successRate, avgDuration }
|
|
296
|
+
```
|
|
297
|
+
|
|
298
|
+
---
|
|
299
|
+
|
|
300
|
+
## Advanced Patterns
|
|
301
|
+
|
|
302
|
+
### 1. Queue → Workflow Integration
|
|
303
|
+
|
|
304
|
+
```typescript
|
|
305
|
+
const queue = flow.queue("orders");
|
|
306
|
+
const workflow = flow
|
|
307
|
+
.workflow("process-order")
|
|
308
|
+
.step("validate", async (ctx) => {})
|
|
309
|
+
.step("process", async (ctx) => {})
|
|
310
|
+
.build();
|
|
311
|
+
|
|
312
|
+
queue.process(async (job) => {
|
|
313
|
+
await workflow.execute(job.data);
|
|
314
|
+
return { workflowStarted: true };
|
|
315
|
+
});
|
|
316
|
+
```
|
|
317
|
+
|
|
318
|
+
### 2. Workflow → Stream Integration
|
|
319
|
+
|
|
320
|
+
```typescript
|
|
321
|
+
const stream = flow.stream("order-events");
|
|
322
|
+
|
|
323
|
+
const workflow = flow
|
|
324
|
+
.workflow("order")
|
|
325
|
+
.step("validate", async (ctx) => {
|
|
326
|
+
await stream.publish({ status: "validating", order: ctx.input });
|
|
327
|
+
// ... validation logic
|
|
328
|
+
})
|
|
329
|
+
.step("process", async (ctx) => {
|
|
330
|
+
await stream.publish({ status: "processing", order: ctx.input });
|
|
331
|
+
// ... processing logic
|
|
332
|
+
})
|
|
333
|
+
.step("complete", async (ctx) => {
|
|
334
|
+
await stream.publish({ status: "completed", order: ctx.input });
|
|
335
|
+
})
|
|
336
|
+
.build();
|
|
337
|
+
```
|
|
338
|
+
|
|
339
|
+
### 3. Dead Letter Queue
|
|
340
|
+
|
|
341
|
+
```typescript
|
|
342
|
+
import { MemoryDLQManager } from "flowfn";
|
|
343
|
+
|
|
344
|
+
const dlq = new MemoryDLQManager({
|
|
345
|
+
maxRetries: 3,
|
|
346
|
+
onDLQ: async (job, reason) => {
|
|
347
|
+
await notifyTeam(`Job ${job.id} failed: ${reason}`);
|
|
348
|
+
},
|
|
349
|
+
});
|
|
350
|
+
|
|
351
|
+
queue.process(async (job) => {
|
|
352
|
+
try {
|
|
353
|
+
await processJob(job.data);
|
|
354
|
+
} catch (error) {
|
|
355
|
+
if (job.attemptsMade >= 3) {
|
|
356
|
+
await dlq.moveToDLQ(job, error.message);
|
|
357
|
+
}
|
|
358
|
+
throw error;
|
|
359
|
+
}
|
|
360
|
+
});
|
|
361
|
+
|
|
362
|
+
// Later: retry from DLQ
|
|
363
|
+
const stats = await dlq.getStats();
|
|
364
|
+
console.log(`DLQ has ${stats.total} jobs`);
|
|
365
|
+
|
|
366
|
+
const retriedCount = await dlq.retryAll("my-queue");
|
|
367
|
+
```
|
|
368
|
+
|
|
369
|
+
### 4. Rate Limiting
|
|
370
|
+
|
|
371
|
+
```typescript
|
|
372
|
+
import { TokenBucketRateLimiter } from "flowfn";
|
|
373
|
+
|
|
374
|
+
const limiter = new TokenBucketRateLimiter({
|
|
375
|
+
capacity: 100,
|
|
376
|
+
refillRate: 10,
|
|
377
|
+
refillInterval: 1000,
|
|
378
|
+
});
|
|
379
|
+
|
|
380
|
+
queue.process(async (job) => {
|
|
381
|
+
await limiter.acquire(); // Wait if rate limit exceeded
|
|
382
|
+
await callExternalAPI(job.data);
|
|
383
|
+
});
|
|
384
|
+
```
|
|
385
|
+
|
|
386
|
+
### 5. Priority Processing
|
|
387
|
+
|
|
388
|
+
```typescript
|
|
389
|
+
// Add with priorities
|
|
390
|
+
await queue.add("urgent", data, { priority: 1 });
|
|
391
|
+
await queue.add("normal", data, { priority: 5 });
|
|
392
|
+
await queue.add("low", data, { priority: 10 });
|
|
393
|
+
|
|
394
|
+
// Lower number = higher priority
|
|
395
|
+
// Jobs processed in priority order
|
|
396
|
+
```
|
|
397
|
+
|
|
398
|
+
---
|
|
399
|
+
|
|
400
|
+
## Monitoring & Observability
|
|
401
|
+
|
|
402
|
+
### 1. Health Checks
|
|
403
|
+
|
|
404
|
+
```typescript
|
|
405
|
+
const health = await flow.healthCheck();
|
|
406
|
+
|
|
407
|
+
if (!health.healthy) {
|
|
408
|
+
console.error("System unhealthy:", health.checks);
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
// Check individual systems
|
|
412
|
+
const queueHealth = health.checks.find((c) => c.name === "queues");
|
|
413
|
+
```
|
|
414
|
+
|
|
415
|
+
### 2. Metrics Collection
|
|
416
|
+
|
|
417
|
+
```typescript
|
|
418
|
+
// Record custom metrics
|
|
419
|
+
flow.metrics.record("orders.processed", 1, {
|
|
420
|
+
status: "success",
|
|
421
|
+
region: "us-east",
|
|
422
|
+
});
|
|
423
|
+
|
|
424
|
+
// Get time series
|
|
425
|
+
const metrics = flow.metrics.getTimeSeries("orders.processed", {
|
|
426
|
+
tags: { status: "success" },
|
|
427
|
+
since: Date.now() - 3600000, // Last hour
|
|
428
|
+
});
|
|
429
|
+
|
|
430
|
+
console.log("Average:", metrics?.avg);
|
|
431
|
+
console.log("P95:", metrics?.p95);
|
|
432
|
+
```
|
|
433
|
+
|
|
434
|
+
### 3. Event Tracking
|
|
435
|
+
|
|
436
|
+
```typescript
|
|
437
|
+
const tracker = flow.getEventTracker();
|
|
438
|
+
|
|
439
|
+
// Track events
|
|
440
|
+
tracker.track({
|
|
441
|
+
type: "order.failed",
|
|
442
|
+
category: "queue",
|
|
443
|
+
severity: "error",
|
|
444
|
+
message: "Payment declined",
|
|
445
|
+
metadata: { orderId: "123", reason: "insufficient_funds" },
|
|
446
|
+
});
|
|
447
|
+
|
|
448
|
+
// Query events
|
|
449
|
+
const errors = tracker.getEvents({
|
|
450
|
+
severity: "error",
|
|
451
|
+
since: Date.now() - 86400000, // Last 24h
|
|
452
|
+
limit: 100,
|
|
453
|
+
});
|
|
454
|
+
```
|
|
455
|
+
|
|
456
|
+
---
|
|
457
|
+
|
|
458
|
+
## Production Deployment
|
|
459
|
+
|
|
460
|
+
### 1. Redis Adapter
|
|
461
|
+
|
|
462
|
+
```typescript
|
|
463
|
+
import { createFlow } from "flowfn";
|
|
464
|
+
import { RedisAdapter } from "flowfn";
|
|
465
|
+
import Redis from "ioredis";
|
|
466
|
+
|
|
467
|
+
const redis = new Redis({
|
|
468
|
+
host: "localhost",
|
|
469
|
+
port: 6379,
|
|
470
|
+
});
|
|
471
|
+
|
|
472
|
+
const flow = createFlow({
|
|
473
|
+
adapter: new RedisAdapter(redis),
|
|
474
|
+
});
|
|
475
|
+
```
|
|
476
|
+
|
|
477
|
+
### 2. Postgres Adapter
|
|
478
|
+
|
|
479
|
+
```typescript
|
|
480
|
+
import { PostgresAdapter } from "flowfn";
|
|
481
|
+
import { drizzle } from "drizzle-orm/node-postgres";
|
|
482
|
+
import { Pool } from "pg";
|
|
483
|
+
|
|
484
|
+
const pool = new Pool({
|
|
485
|
+
connectionString: process.env.DATABASE_URL,
|
|
486
|
+
});
|
|
487
|
+
|
|
488
|
+
const db = drizzle(pool);
|
|
489
|
+
const flow = createFlow({
|
|
490
|
+
adapter: new PostgresAdapter(db),
|
|
491
|
+
});
|
|
492
|
+
```
|
|
493
|
+
|
|
494
|
+
### 3. Graceful Shutdown
|
|
495
|
+
|
|
496
|
+
```typescript
|
|
497
|
+
async function shutdown() {
|
|
498
|
+
console.log("Shutting down...");
|
|
499
|
+
await flow.close();
|
|
500
|
+
process.exit(0);
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
process.on("SIGTERM", shutdown);
|
|
504
|
+
process.on("SIGINT", shutdown);
|
|
505
|
+
```
|
|
506
|
+
|
|
507
|
+
### 4. Error Handling
|
|
508
|
+
|
|
509
|
+
```typescript
|
|
510
|
+
const flow = createFlow({
|
|
511
|
+
adapter: "redis",
|
|
512
|
+
onError: (error, context) => {
|
|
513
|
+
console.error("FlowFn error:", error, context);
|
|
514
|
+
// Send to error tracking service
|
|
515
|
+
},
|
|
516
|
+
});
|
|
517
|
+
```
|
|
518
|
+
|
|
519
|
+
---
|
|
520
|
+
|
|
521
|
+
## Best Practices
|
|
522
|
+
|
|
523
|
+
### 1. Job Design
|
|
524
|
+
|
|
525
|
+
✅ **DO:**
|
|
526
|
+
|
|
527
|
+
- Keep jobs idempotent
|
|
528
|
+
- Use job IDs for deduplication
|
|
529
|
+
- Store minimal data in jobs
|
|
530
|
+
- Use timeouts for long-running jobs
|
|
531
|
+
|
|
532
|
+
❌ **DON'T:**
|
|
533
|
+
|
|
534
|
+
- Store large payloads
|
|
535
|
+
- Assume job execution order
|
|
536
|
+
- Ignore retry logic
|
|
537
|
+
- Skip error handling
|
|
538
|
+
|
|
539
|
+
### 2. Stream Design
|
|
540
|
+
|
|
541
|
+
✅ **DO:**
|
|
542
|
+
|
|
543
|
+
- Set retention policies
|
|
544
|
+
- Use consumer groups for scaling
|
|
545
|
+
- Implement proper ack/nack
|
|
546
|
+
- Monitor lag
|
|
547
|
+
|
|
548
|
+
❌ **DON'T:**
|
|
549
|
+
|
|
550
|
+
- Let streams grow unbounded
|
|
551
|
+
- Process same message twice
|
|
552
|
+
- Skip message acknowledgment
|
|
553
|
+
|
|
554
|
+
### 3. Workflow Design
|
|
555
|
+
|
|
556
|
+
✅ **DO:**
|
|
557
|
+
|
|
558
|
+
- Keep steps small and focused
|
|
559
|
+
- Use sagas for distributed transactions
|
|
560
|
+
- Store intermediate state
|
|
561
|
+
- Add proper error handling
|
|
562
|
+
|
|
563
|
+
❌ **DON'T:**
|
|
564
|
+
|
|
565
|
+
- Make workflows too complex
|
|
566
|
+
- Skip compensation logic
|
|
567
|
+
- Ignore timeouts
|
|
568
|
+
- Store sensitive data in state
|
|
569
|
+
|
|
570
|
+
---
|
|
571
|
+
|
|
572
|
+
## Troubleshooting
|
|
573
|
+
|
|
574
|
+
### Jobs Not Processing
|
|
575
|
+
|
|
576
|
+
```typescript
|
|
577
|
+
// Check queue status
|
|
578
|
+
const stats = await queue.getJobCounts();
|
|
579
|
+
console.log(stats); // { waiting, active, completed, failed }
|
|
580
|
+
|
|
581
|
+
// Check specific job
|
|
582
|
+
const job = await queue.getJob(jobId);
|
|
583
|
+
console.log(job?.state, job?.failedReason);
|
|
584
|
+
```
|
|
585
|
+
|
|
586
|
+
### High Memory Usage
|
|
587
|
+
|
|
588
|
+
```typescript
|
|
589
|
+
// Clean old jobs
|
|
590
|
+
await queue.clean(86400000, "completed"); // 24h grace
|
|
591
|
+
await queue.clean(604800000, "failed"); // 7d grace
|
|
592
|
+
|
|
593
|
+
// Trim streams
|
|
594
|
+
await stream.trim({ maxLength: 10000 });
|
|
595
|
+
```
|
|
596
|
+
|
|
597
|
+
### Workflow Not Completing
|
|
598
|
+
|
|
599
|
+
```typescript
|
|
600
|
+
// Check execution status
|
|
601
|
+
const execution = await workflow.getExecution(executionId);
|
|
602
|
+
console.log(execution.status, execution.error);
|
|
603
|
+
|
|
604
|
+
// Check history
|
|
605
|
+
const history = await workflow.getExecutionHistory(executionId);
|
|
606
|
+
console.log(history);
|
|
607
|
+
```
|
|
608
|
+
|
|
609
|
+
---
|
|
610
|
+
|
|
611
|
+
## Examples Repository
|
|
612
|
+
|
|
613
|
+
Full working examples available at: [github.com/flowfn/examples](https://github.com/flowfn/examples)
|
|
614
|
+
|
|
615
|
+
- E-commerce order processing
|
|
616
|
+
- Email notification system
|
|
617
|
+
- Data pipeline with ETL
|
|
618
|
+
- Microservices orchestration
|
|
619
|
+
- Event-driven architecture
|
package/package.json
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "flowfn",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Self-hosted flow control platform for developers - Queues, Streams, and Workflows",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"module": "dist/index.mjs",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"scripts": {
|
|
9
|
+
"build": "tsup",
|
|
10
|
+
"dev": "tsup --watch",
|
|
11
|
+
"test": "vitest",
|
|
12
|
+
"lint": "tsc --noEmit"
|
|
13
|
+
},
|
|
14
|
+
"keywords": [
|
|
15
|
+
"queue",
|
|
16
|
+
"stream",
|
|
17
|
+
"workflow",
|
|
18
|
+
"redis",
|
|
19
|
+
"kafka",
|
|
20
|
+
"sqs",
|
|
21
|
+
"postgres",
|
|
22
|
+
"superfunctions"
|
|
23
|
+
],
|
|
24
|
+
"author": "21n",
|
|
25
|
+
"license": "MIT",
|
|
26
|
+
"bugs": {
|
|
27
|
+
"url": "https://github.com/21nCo/super-functions/issues"
|
|
28
|
+
},
|
|
29
|
+
"repository": {
|
|
30
|
+
"type": "git",
|
|
31
|
+
"url": "git+https://github.com/21nCo/super-functions.git",
|
|
32
|
+
"directory": "flowfn/typescript"
|
|
33
|
+
},
|
|
34
|
+
"publishConfig": {
|
|
35
|
+
"access": "public"
|
|
36
|
+
},
|
|
37
|
+
"superfunctions": {
|
|
38
|
+
"initFunction": "flowFn",
|
|
39
|
+
"schemaVersion": 1,
|
|
40
|
+
"namespace": "flowfn"
|
|
41
|
+
},
|
|
42
|
+
"dependencies": {
|
|
43
|
+
"uuid": "^9.0.0",
|
|
44
|
+
"zod": "^3.22.0",
|
|
45
|
+
"eventemitter3": "^5.0.1",
|
|
46
|
+
"@superfunctions/http": "*",
|
|
47
|
+
"ioredis": "^5.3.2",
|
|
48
|
+
"drizzle-orm": "^0.29.0",
|
|
49
|
+
"cron-parser": "^4.9.0"
|
|
50
|
+
},
|
|
51
|
+
"devDependencies": {
|
|
52
|
+
"ioredis-mock": "^8.9.0",
|
|
53
|
+
"@types/node": "^20.11.0",
|
|
54
|
+
"@types/uuid": "^9.0.0",
|
|
55
|
+
"tsup": "^8.0.0",
|
|
56
|
+
"typescript": "^5.3.0",
|
|
57
|
+
"vitest": "^2.0.0"
|
|
58
|
+
},
|
|
59
|
+
"peerDependencies": {
|
|
60
|
+
"ioredis": "^5.0.0",
|
|
61
|
+
"kafkajs": "^2.0.0",
|
|
62
|
+
"@aws-sdk/client-sqs": "^3.0.0"
|
|
63
|
+
},
|
|
64
|
+
"peerDependenciesMeta": {
|
|
65
|
+
"ioredis": {
|
|
66
|
+
"optional": true
|
|
67
|
+
},
|
|
68
|
+
"kafkajs": {
|
|
69
|
+
"optional": true
|
|
70
|
+
},
|
|
71
|
+
"@aws-sdk/client-sqs": {
|
|
72
|
+
"optional": true
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
}
|