flowfn 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +1305 -0
- package/dist/index.d.ts +1305 -0
- package/dist/index.js +3180 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +3088 -0
- package/dist/index.mjs.map +1 -0
- package/docs/API.md +801 -0
- package/docs/USAGE.md +619 -0
- package/package.json +75 -0
- package/src/adapters/base.ts +46 -0
- package/src/adapters/memory.ts +183 -0
- package/src/adapters/postgres/index.ts +383 -0
- package/src/adapters/postgres/postgres.test.ts +100 -0
- package/src/adapters/postgres/schema.ts +110 -0
- package/src/adapters/redis.test.ts +124 -0
- package/src/adapters/redis.ts +331 -0
- package/src/core/flow-fn.test.ts +70 -0
- package/src/core/flow-fn.ts +198 -0
- package/src/core/metrics.ts +198 -0
- package/src/core/scheduler.test.ts +80 -0
- package/src/core/scheduler.ts +154 -0
- package/src/index.ts +57 -0
- package/src/monitoring/health.ts +261 -0
- package/src/patterns/backoff.ts +30 -0
- package/src/patterns/batching.ts +248 -0
- package/src/patterns/circuit-breaker.test.ts +52 -0
- package/src/patterns/circuit-breaker.ts +52 -0
- package/src/patterns/priority.ts +146 -0
- package/src/patterns/rate-limit.ts +290 -0
- package/src/patterns/retry.test.ts +62 -0
- package/src/queue/batch.test.ts +35 -0
- package/src/queue/dependencies.test.ts +33 -0
- package/src/queue/dlq.ts +222 -0
- package/src/queue/job.ts +67 -0
- package/src/queue/queue.ts +243 -0
- package/src/queue/types.ts +153 -0
- package/src/queue/worker.ts +66 -0
- package/src/storage/event-log.ts +205 -0
- package/src/storage/job-storage.ts +206 -0
- package/src/storage/workflow-storage.ts +182 -0
- package/src/stream/stream.ts +194 -0
- package/src/stream/types.ts +81 -0
- package/src/utils/hashing.ts +29 -0
- package/src/utils/id-generator.ts +109 -0
- package/src/utils/serialization.ts +142 -0
- package/src/utils/time.ts +167 -0
- package/src/workflow/advanced.test.ts +43 -0
- package/src/workflow/events.test.ts +39 -0
- package/src/workflow/types.ts +132 -0
- package/src/workflow/workflow.test.ts +55 -0
- package/src/workflow/workflow.ts +422 -0
- package/tests/dlq.test.ts +205 -0
- package/tests/health.test.ts +228 -0
- package/tests/integration.test.ts +253 -0
- package/tests/stream.test.ts +233 -0
- package/tests/workflow.test.ts +286 -0
- package/tsconfig.json +17 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +15 -0
package/docs/API.md
ADDED

# FlowFn API Reference

Complete API reference for the FlowFn TypeScript SDK v1.0.

---

## Table of Contents

- [Core API](#core-api)
- [Queue API](#queue-api)
- [Stream API](#stream-api)
- [Workflow API](#workflow-api)
- [Monitoring API](#monitoring-api)
- [Patterns](#patterns)
- [Utilities](#utilities)
- [Storage](#storage)
- [Error Handling](#error-handling)
- [TypeScript Types](#typescript-types)

---

## Core API

### `createFlow(config)`

Creates and initializes a FlowFn instance.

**Parameters:**

- `config: FlowFnConfig` - Configuration object

**Returns:** `FlowFn` instance

**Example:**

```typescript
import { createFlow } from "@flowfn/core";

const flow = createFlow({
  adapter: "memory",
  namespace: "my-app",
});
```

**FlowFnConfig:**

```typescript
interface FlowFnConfig {
  adapter: FlowAdapter | "memory" | "redis" | "postgres";
  namespace?: string;
  defaultJobOptions?: JobOptions;
  defaultQueueOptions?: QueueOptions;
  defaultStreamOptions?: StreamOptions;
  telemetry?: {
    enabled: boolean;
    provider?: "opentelemetry" | "custom";
  };
  onError?: (error: Error, context: any) => void;
}
```

### FlowFn Methods

#### `flow.queue<T>(name, options?)`

Creates or gets a queue.

**Parameters:**

- `name: string` - Queue name
- `options?: QueueOptions` - Queue configuration

**Returns:** `Queue<T>`

#### `flow.stream<T>(name, options?)`

Creates or gets a stream.

**Parameters:**

- `name: string` - Stream name
- `options?: StreamOptions` - Stream configuration

**Returns:** `Stream<T>`

#### `flow.workflow<T>(name)`

Creates a workflow builder.

**Parameters:**

- `name: string` - Workflow name

**Returns:** `WorkflowBuilder<T>`

#### `flow.healthCheck()`

Performs a system health check.

**Returns:** `Promise<HealthStatus>`

#### `flow.close()`

Closes all connections and releases resources.

**Returns:** `Promise<void>`
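
A minimal end-to-end sketch that ties these methods together; the payload types and the `namespace` value are illustrative, not part of the SDK:

```typescript
import { createFlow } from "@flowfn/core";

const flow = createFlow({ adapter: "memory", namespace: "my-app" });

// Create (or look up) the three primitives
const emails = flow.queue<{ to: string; subject: string }>("emails");
const events = flow.stream<{ type: string }>("events");
const onboarding = flow.workflow<{ userId: string }>("onboarding");

// Check overall health, then shut everything down cleanly
const health = await flow.healthCheck();
console.log(health);

await flow.close();
```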

---

## Queue API

### Queue Methods

#### `queue.add(name, data, options?)`

Adds a job to the queue.

**Parameters:**

- `name: string` - Job name
- `data: T` - Job payload
- `options?: JobOptions` - Job configuration

**Returns:** `Promise<Job<T>>`

**Example:**

```typescript
const job = await queue.add(
  "process-order",
  {
    orderId: "123",
    amount: 100,
  },
  {
    priority: 1,
    delay: 5000,
    attempts: 3,
  }
);
```

#### `queue.addBulk(jobs)`

Adds multiple jobs at once.

**Parameters:**

- `jobs: Array<{name, data, opts?}>` - Array of jobs

**Returns:** `Promise<Job<T>[]>`
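
For example, enqueueing several jobs in one call (the job names, payloads, and options below are illustrative):

```typescript
const jobs = await queue.addBulk([
  { name: "process-order", data: { orderId: "1" } },
  { name: "process-order", data: { orderId: "2" } },
  { name: "process-order", data: { orderId: "3" }, opts: { priority: 1 } },
]);

console.log(jobs.length); // 3
```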

#### `queue.process(handler)` / `queue.process(concurrency, handler)`

Starts processing queue jobs.

**Parameters:**

- `concurrency?: number` - Max concurrent jobs (default: 1)
- `handler: JobHandler<T>` - Job processor function

**Example:**

```typescript
queue.process(5, async (job) => {
  console.log("Processing", job.data);
  return { result: "success" };
});
```

#### `queue.processBatch(name, options, handler)`

Processes jobs in batches.

**Parameters:**

- `name: string` - Batch processor name
- `options: BatchOptions` - Batch configuration
- `handler: BatchHandler<T>` - Batch processor

**Example:**

```typescript
queue.processBatch(
  "bulk-process",
  {
    batchSize: 10,
    maxWait: 5000,
  },
  async (jobs) => {
    return await processBatch(jobs.map((j) => j.data));
  }
);
```

#### `queue.getJob(jobId)`

Retrieves a specific job.

**Returns:** `Promise<Job<T> | null>`

#### `queue.getJobs(status)`

Retrieves jobs by status.

**Parameters:**

- `status: JobStatus` - One of `'waiting'`, `'active'`, `'completed'`, `'failed'`, `'delayed'`, or `'paused'`

**Returns:** `Promise<Job<T>[]>`

#### `queue.clean(grace, status)`

Removes old jobs.

**Parameters:**

- `grace: number` - Grace period in ms
- `status: JobStatus` - Status to clean

**Returns:** `Promise<number>` - Number of jobs cleaned
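
A short sketch of inspecting and cleaning a queue with the three methods above (the job ID and thresholds are illustrative):

```typescript
// Look up a single job by ID
const job = await queue.getJob("job-123");
if (job) {
  console.log(job.data);
}

// List everything that has failed
const failed = await queue.getJobs("failed");
console.log(`${failed.length} failed jobs`);

// Remove completed jobs older than 24 hours
const removed = await queue.clean(24 * 60 * 60 * 1000, "completed");
console.log(`Cleaned ${removed} jobs`);
```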

### JobOptions

```typescript
interface JobOptions {
  priority?: number; // Higher = higher priority
  delay?: number; // Delay in ms before processing
  attempts?: number; // Max retry attempts
  backoff?: BackoffOptions; // Retry backoff strategy
  timeout?: number; // Job timeout in ms
  removeOnComplete?: boolean | number;
  removeOnFail?: boolean | number;
  jobId?: string; // Custom job ID
  preventDuplicates?: boolean;
  deduplicationKey?: string;
  waitFor?: string[]; // Job dependencies
}
```
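
For illustration, a single `queue.add` call that combines several of these options (the job name, payload, and dependency job ID are made up):

```typescript
const job = await queue.add(
  "send-invoice",
  { invoiceId: "inv-42" },
  {
    attempts: 5,
    delay: 10000, // wait 10s before the first attempt
    timeout: 30000, // fail an attempt after 30s
    removeOnComplete: 100, // keep only the last 100 completed jobs
    preventDuplicates: true,
    deduplicationKey: "invoice:inv-42",
    waitFor: ["charge-card-job-id"], // run only after this job finishes
  }
);
```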

### QueueOptions

```typescript
interface QueueOptions {
  defaultJobOptions?: JobOptions;
  limiter?: {
    max: number;
    duration: number;
  };
  dlq?: {
    enabled: boolean;
    maxRetries?: number;
    queueName?: string;
  };
}
```
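
For instance, a rate-limited queue with its dead letter queue enabled might be declared like this (the names and limits are illustrative):

```typescript
const orders = flow.queue<{ orderId: string }>("orders", {
  defaultJobOptions: { attempts: 3, timeout: 30000 },
  limiter: { max: 100, duration: 60000 }, // at most 100 jobs per minute
  dlq: { enabled: true, maxRetries: 3, queueName: "orders-dlq" },
});
```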

---

## Stream API

### Stream Methods

#### `stream.publish(data, options?)`

Publishes a message to the stream.

**Parameters:**

- `data: T` - Message payload
- `options?: PublishOptions` - Publish configuration

**Returns:** `Promise<string>` - Message ID

**Example:**

```typescript
const messageId = await stream.publish(
  {
    event: "user.created",
    userId: "123",
  },
  {
    key: "user-123",
    headers: { source: "api" },
  }
);
```

#### `stream.publishBatch(messages)`

Publishes multiple messages.

**Returns:** `Promise<string[]>` - Message IDs
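
A sketch of batch publishing; the per-message shape is not spelled out above, so the plain-payload array below is an assumption:

```typescript
// Assumes publishBatch accepts an array of payloads; adapt this if your
// version expects { data, options } entries instead.
const ids = await stream.publishBatch([
  { event: "user.created", userId: "1" },
  { event: "user.created", userId: "2" },
]);

console.log(ids.length); // one message ID per payload
```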

#### `stream.subscribe(handler, options?)`

Subscribes to stream messages.

**Parameters:**

- `handler: MessageHandler<T>` - Message handler
- `options?: SubscribeOptions` - Subscribe options

**Returns:** `Promise<Subscription>`

**Example:**

```typescript
await stream.subscribe(async (message) => {
  console.log("Received:", message.data);
  await message.ack();
});
```

#### `stream.createConsumer(consumerId, options)`

Creates a consumer that belongs to a consumer group.

**Parameters:**

- `consumerId: string` - Unique consumer ID
- `options: ConsumerOptions` - Consumer configuration

**Returns:** `Consumer<T>`

**Example:**

```typescript
const consumer = stream.createConsumer("worker-1", {
  groupId: "processors",
  fromBeginning: true,
  autoCommit: true,
});

await consumer.subscribe(async (msg) => {
  await processMessage(msg.data);
  await msg.ack();
});
```

#### `stream.trim(strategy)`

Trims old messages from the stream.

**Parameters:**

- `strategy: TrimStrategy` - Trim configuration

**Returns:** `Promise<number>` - Messages trimmed

**Example:**

```typescript
// Keep only the newest 1000 messages
await stream.trim({ maxLength: 1000 });

// Remove messages older than 1 hour
await stream.trim({ maxAgeSeconds: 3600 });
```

#### `stream.getMessages(start, end, count?)`

Retrieves messages by ID range.

**Returns:** `Promise<Message<T>[]>`

#### `stream.replay(fromTimestamp, handler)`

Replays messages from a timestamp.

**Returns:** `Promise<number>` - Messages replayed
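
A sketch of reading back and replaying stream history; the `"-"`/`"+"` range markers are an assumption borrowed from Redis-style streams, and `handleMessage` is a placeholder:

```typescript
// Fetch up to 100 messages across the full ID range
const history = await stream.getMessages("-", "+", 100);

// Re-deliver everything from the last hour to a handler
const replayed = await stream.replay(Date.now() - 3600000, async (message) => {
  await handleMessage(message.data); // placeholder handler
});

console.log(`Replayed ${replayed} messages`);
```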

### StreamOptions

```typescript
interface StreamOptions {
  maxLength?: number; // Auto-trim to this length
  retention?: number; // Retention period in ms
  partitions?: number; // Number of partitions
}
```
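
For example, a capped, partitioned stream could be declared like this (the numbers are arbitrary):

```typescript
const audit = flow.stream<{ action: string }>("audit-log", {
  maxLength: 10000, // auto-trim beyond 10k messages
  retention: 7 * 24 * 60 * 60 * 1000, // keep messages for 7 days
  partitions: 4,
});
```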

---

## Workflow API

### WorkflowBuilder Methods

#### `workflow.step(name, handler)`

Adds a sequential step.

**Example:**

```typescript
workflow
  .step("validate", async (ctx) => {
    const valid = await validate(ctx.input);
    ctx.set("isValid", valid);
  })
  .step("process", async (ctx) => {
    if (ctx.get("isValid")) {
      await process(ctx.input);
    }
  });
```

#### `workflow.parallel(steps)`

Executes steps in parallel.

**Example:**

```typescript
workflow.parallel([
  async (ctx) => {
    await sendEmail(ctx.input);
  },
  async (ctx) => {
    await sendSMS(ctx.input);
  },
  async (ctx) => {
    await logEvent(ctx.input);
  },
]);
```

#### `workflow.branch(options)`

Adds conditional branching.

**Example:**

```typescript
const thenBranch = flow
  .workflow("premium")
  .step("premium-processing", async (ctx) => {});

const elseBranch = flow
  .workflow("standard")
  .step("standard-processing", async (ctx) => {});

workflow.branch({
  condition: (ctx) => ctx.input.tier === "premium",
  then: thenBranch,
  else: elseBranch,
});
```

#### `workflow.saga(name, saga)`

Adds a saga step with a compensation handler.

**Example:**

```typescript
workflow.saga("payment", {
  execute: async (ctx) => {
    const charge = await chargeCard(ctx.input);
    ctx.set("chargeId", charge.id);
  },
  compensate: async (ctx) => {
    await refundCharge(ctx.get("chargeId"));
  },
});
```

#### `workflow.delay(duration)`

Adds a delay step.

**Example:**

```typescript
workflow.delay({ ms: 60000 }); // 1 minute delay
```

#### `workflow.build()`

Builds and returns the workflow.

**Returns:** `Workflow<T>`

### Workflow Methods

#### `workflow.execute(input)`

Executes the workflow.

**Returns:** `Promise<WorkflowExecution>`

#### `workflow.getExecution(executionId)`

Retrieves an execution.

**Returns:** `Promise<WorkflowExecution>`

#### `workflow.listExecutions(options?)`

Lists all executions.

**Returns:** `Promise<WorkflowExecution[]>`

#### `workflow.cancelExecution(executionId)`

Cancels a running execution.

**Returns:** `Promise<void>`

#### `workflow.retryExecution(executionId)`

Retries a failed execution.

**Returns:** `Promise<WorkflowExecution>`

#### `workflow.getExecutionHistory(executionId)`

Gets the event history of an execution.

**Returns:** `Promise<WorkflowEvent[]>`

#### `workflow.getMetrics()`

Gets workflow metrics.

**Returns:** `Promise<WorkflowMetrics>`
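
Putting the builder and execution methods together, a minimal sketch; the step bodies are placeholders, and it assumes the returned `WorkflowExecution` exposes `id` and `status` fields:

```typescript
const onboarding = flow
  .workflow<{ userId: string }>("onboarding")
  .step("create-account", async (ctx) => {
    const accountId = await createAccount(ctx.input.userId); // placeholder helper
    ctx.set("accountId", accountId);
  })
  .step("send-welcome", async (ctx) => {
    await sendWelcome(ctx.get("accountId")); // placeholder helper
  })
  .build();

const execution = await onboarding.execute({ userId: "user-1" });

// Inspect the execution later and cancel it if it is still running
const current = await onboarding.getExecution(execution.id);
if (current.status === "running") {
  await onboarding.cancelExecution(execution.id);
}
```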

---

## Monitoring API

### Metrics

#### `flow.metrics.record(name, value, tags?)`

Records a metric data point.

**Example:**

```typescript
flow.metrics.record("orders.processed", 1, {
  region: "us-east",
  status: "success",
});
```

#### `flow.metrics.getTimeSeries(name, options?)`

Retrieves time-series data.

**Returns:** `TimeSeriesMetrics | null`

#### `flow.metrics.getQueueMetrics(name)`

Gets queue-specific metrics.

**Returns:** `Promise<QueueMetrics>`
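
A sketch of reading the recorded data back; the option passed to `getTimeSeries` and the exact shape of `QueueMetrics` are assumptions, since their definitions live in the typings rather than in this document:

```typescript
// Time-series data for a custom metric
const series = flow.metrics.getTimeSeries("orders.processed", {
  since: Date.now() - 3600000, // assumed option name
});

if (series) {
  console.log(series);
}

// Built-in per-queue metrics
const queueMetrics = await flow.metrics.getQueueMetrics("orders");
console.log(queueMetrics);
```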

### Health Checks

#### Custom Health Checks

```typescript
const health = await flow.healthCheck();

// Add a custom check
health.addCheck("database", async () => ({
  name: "database",
  status: (await db.ping()) ? "pass" : "fail",
  message: "Database connection",
}));
```

### Event Tracking

```typescript
const tracker = flow.getEventTracker();

tracker.track({
  type: "order.created",
  category: "queue",
  severity: "info",
  message: "New order received",
  metadata: { orderId: "123" },
});

const events = tracker.getEvents({
  category: "queue",
  severity: "error",
  since: Date.now() - 3600000,
});
```

---

## Patterns

### Rate Limiting

```typescript
import { RateLimiter } from "@flowfn/core";

const limiter = new RateLimiter({
  max: 100,
  window: 60000, // 1 minute
  strategy: "throw", // or 'delay' or 'drop'
});

await limiter.acquire();
```

### Token Bucket

```typescript
import { TokenBucketRateLimiter } from "@flowfn/core";

const limiter = new TokenBucketRateLimiter({
  capacity: 100,
  refillRate: 10,
  refillInterval: 1000,
});
```

### Batching

```typescript
import { BatchAccumulator } from "@flowfn/core";

const batch = new BatchAccumulator({
  maxSize: 10,
  maxWait: 5000,
  processor: async (items) => {
    await processBatch(items);
  },
});

await batch.add(item);
```

### Priority Queue

```typescript
import { PriorityQueue } from "@flowfn/core";

const pq = new PriorityQueue<Task>({
  comparator: (a, b) => a.priority - b.priority,
});

pq.enqueue({ priority: 1, task: "high" });
pq.enqueue({ priority: 5, task: "low" });

const next = pq.dequeue(); // Gets the highest-priority item
```

---

## Utilities

### Hashing

```typescript
import { hashJob, generateDeduplicationKey } from "@flowfn/core";

const hash = hashJob({ userId: "123", action: "process" });
const dedupKey = generateDeduplicationKey("queue-name", hash);
```

### ID Generation

```typescript
import { generateId, generateJobId } from "@flowfn/core";

const jobId = generateJobId(); // UUID v4
const execId = generateId("nanoid"); // Nanoid
const detId = generateId("uuid-v5", "namespace", "name"); // Deterministic
```

### Time Utilities

```typescript
import { toMilliseconds, sleep, parseDuration } from "@flowfn/core";

const ms = toMilliseconds({ hours: 1, minutes: 30 });
await sleep(1000);
const duration = parseDuration("1h30m");
```

### Serialization

```typescript
import { serialize, deserialize, serializeCompressed } from "@flowfn/core";

const json = serialize({ date: new Date(), value: 123 });
const obj = deserialize(json);
const compressed = await serializeCompressed(largeObject);
```

---

## Storage

### Job Storage

```typescript
import { MemoryJobStorage } from "@flowfn/core";

const storage = new MemoryJobStorage();

await storage.save(job);
const stored = await storage.get(jobId);
const jobs = await storage.list({ status: "completed" });
await storage.cleanup({ olderThan: Date.now() - 86400000 });
```

### Workflow Storage

```typescript
import { MemoryWorkflowStorage } from "@flowfn/core";

const storage = new MemoryWorkflowStorage();

await storage.save(workflowId, execution);
const executions = await storage.list(workflowId, {
  status: "running",
  limit: 10,
});
```

### Event Log

```typescript
import { MemoryEventLog } from "@flowfn/core";

const log = new MemoryEventLog();

await log.append({
  type: "order.created",
  aggregateId: "order-123",
  aggregateType: "order",
  data: { amount: 100 },
});

const events = await log.getAggregateEvents("order-123");
```

---

## Error Handling

### Job Errors

```typescript
queue.process(async (job) => {
  try {
    await processJob(job.data);
  } catch (error) {
    await job.log(`Error: ${error.message}`);
    throw error; // Will retry based on attempts
  }
});
```

### DLQ (Dead Letter Queue)

```typescript
import { MemoryDLQManager } from "@flowfn/core";

const dlq = new MemoryDLQManager({
  maxRetries: 3,
  onDLQ: async (job, reason) => {
    console.error("Job moved to DLQ:", job.id, reason);
  },
});

// Job automatically moves to DLQ after max retries
const dlqJob = await dlq.moveToDLQ(failedJob, "Max retries exceeded");

// Retry from DLQ
const retriedJob = await dlq.retry(jobId);
```

---

## TypeScript Types

FlowFn is fully typed; the core types can be imported directly:

```typescript
import {
  FlowFn,
  Queue,
  Stream,
  Workflow,
  Job,
  Message,
  WorkflowExecution,
  HealthStatus,
  MetricsManager,
} from "@flowfn/core";
```

Full type definitions are available in the package's `.d.ts` files.