flowfn 0.0.1
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +1305 -0
- package/dist/index.d.ts +1305 -0
- package/dist/index.js +3180 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +3088 -0
- package/dist/index.mjs.map +1 -0
- package/docs/API.md +801 -0
- package/docs/USAGE.md +619 -0
- package/package.json +75 -0
- package/src/adapters/base.ts +46 -0
- package/src/adapters/memory.ts +183 -0
- package/src/adapters/postgres/index.ts +383 -0
- package/src/adapters/postgres/postgres.test.ts +100 -0
- package/src/adapters/postgres/schema.ts +110 -0
- package/src/adapters/redis.test.ts +124 -0
- package/src/adapters/redis.ts +331 -0
- package/src/core/flow-fn.test.ts +70 -0
- package/src/core/flow-fn.ts +198 -0
- package/src/core/metrics.ts +198 -0
- package/src/core/scheduler.test.ts +80 -0
- package/src/core/scheduler.ts +154 -0
- package/src/index.ts +57 -0
- package/src/monitoring/health.ts +261 -0
- package/src/patterns/backoff.ts +30 -0
- package/src/patterns/batching.ts +248 -0
- package/src/patterns/circuit-breaker.test.ts +52 -0
- package/src/patterns/circuit-breaker.ts +52 -0
- package/src/patterns/priority.ts +146 -0
- package/src/patterns/rate-limit.ts +290 -0
- package/src/patterns/retry.test.ts +62 -0
- package/src/queue/batch.test.ts +35 -0
- package/src/queue/dependencies.test.ts +33 -0
- package/src/queue/dlq.ts +222 -0
- package/src/queue/job.ts +67 -0
- package/src/queue/queue.ts +243 -0
- package/src/queue/types.ts +153 -0
- package/src/queue/worker.ts +66 -0
- package/src/storage/event-log.ts +205 -0
- package/src/storage/job-storage.ts +206 -0
- package/src/storage/workflow-storage.ts +182 -0
- package/src/stream/stream.ts +194 -0
- package/src/stream/types.ts +81 -0
- package/src/utils/hashing.ts +29 -0
- package/src/utils/id-generator.ts +109 -0
- package/src/utils/serialization.ts +142 -0
- package/src/utils/time.ts +167 -0
- package/src/workflow/advanced.test.ts +43 -0
- package/src/workflow/events.test.ts +39 -0
- package/src/workflow/types.ts +132 -0
- package/src/workflow/workflow.test.ts +55 -0
- package/src/workflow/workflow.ts +422 -0
- package/tests/dlq.test.ts +205 -0
- package/tests/health.test.ts +228 -0
- package/tests/integration.test.ts +253 -0
- package/tests/stream.test.ts +233 -0
- package/tests/workflow.test.ts +286 -0
- package/tsconfig.json +17 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +15 -0
package/tests/dlq.test.ts
@@ -0,0 +1,205 @@
/**
 * Tests for Dead Letter Queue (DLQ)
 */

import { describe, it, expect, beforeEach } from "vitest";
import { MemoryDLQManager, DLQJob } from "../src/queue/dlq.js";
import { Job } from "../src/queue/types.js";

describe("DLQ Manager", () => {
  let dlq: MemoryDLQManager;

  beforeEach(() => {
    dlq = new MemoryDLQManager({
      maxRetries: 3,
      queueName: "test-dlq",
    });
  });

  it("should move job to DLQ", async () => {
    const job: Partial<Job> = {
      id: "job-1",
      name: "test-job",
      data: { foo: "bar" },
      state: "failed",
      failedReason: "Test failure",
      attemptsMade: 3,
    } as Job;

    const dlqJob = await dlq.moveToDLQ(job as Job, "Max retries exceeded");

    expect(dlqJob.id).toBe("job-1");
    expect(dlqJob.originalQueue).toBe("test-job");
    expect(dlqJob.dlqReason).toBe("Max retries exceeded");
    expect(dlqJob.dlqTimestamp).toBeGreaterThan(0);
  });

  it("should get all DLQ jobs", async () => {
    const job1: Partial<Job> = {
      id: "job-1",
      name: "queue-1",
      data: {},
      state: "failed",
    } as Job;

    const job2: Partial<Job> = {
      id: "job-2",
      name: "queue-2",
      data: {},
      state: "failed",
    } as Job;

    await dlq.moveToDLQ(job1 as Job, "reason1");
    await dlq.moveToDLQ(job2 as Job, "reason2");

    const all = await dlq.getAll();
    expect(all).toHaveLength(2);
  });

  it("should get jobs by queue", async () => {
    const job1: Partial<Job> = {
      id: "job-1",
      name: "queue-1",
      data: {},
      state: "failed",
    } as Job;

    const job2: Partial<Job> = {
      id: "job-2",
      name: "queue-2",
      data: {},
      state: "failed",
    } as Job;

    await dlq.moveToDLQ(job1 as Job, "reason");
    await dlq.moveToDLQ(job2 as Job, "reason");

    const queue1Jobs = await dlq.getByQueue("queue-1");
    expect(queue1Jobs).toHaveLength(1);
    expect(queue1Jobs[0].id).toBe("job-1");
  });

  it("should retry DLQ job", async () => {
    const job: Partial<Job> = {
      id: "job-1",
      name: "test-queue",
      data: { value: 123 },
      state: "failed",
      attemptsMade: 3,
    } as Job;

    await dlq.moveToDLQ(job as Job, "Test failure");

    const retriedJob = await dlq.retry("job-1");

    expect(retriedJob.state).toBe("waiting");
    expect(retriedJob.attemptsMade).toBe(0);
    expect(retriedJob.failedReason).toBeUndefined();

    // Should be removed from DLQ
    const remaining = await dlq.getAll();
    expect(remaining).toHaveLength(0);
  });

  it("should retry all jobs from a queue", async () => {
    const job1: Partial<Job> = {
      id: "job-1",
      name: "queue-1",
      data: {},
      state: "failed",
    } as Job;

    const job2: Partial<Job> = {
      id: "job-2",
      name: "queue-1",
      data: {},
      state: "failed",
    } as Job;

    await dlq.moveToDLQ(job1 as Job, "reason");
    await dlq.moveToDLQ(job2 as Job, "reason");

    const count = await dlq.retryAll("queue-1");
    expect(count).toBe(2);

    const remaining = await dlq.getAll();
    expect(remaining).toHaveLength(0);
  });

  it("should clean expired jobs", async () => {
    const job: Partial<Job> = {
      id: "job-1",
      name: "test-queue",
      data: {},
      state: "failed",
    } as Job;

    await dlq.moveToDLQ(job as Job, "reason");

    // Clean jobs older than 0ms (should clean all)
    const count = await dlq.clean(0);
    expect(count).toBe(1);

    const remaining = await dlq.getAll();
    expect(remaining).toHaveLength(0);
  });

  it("should get stats", async () => {
    const job1: Partial<Job> = {
      id: "job-1",
      name: "queue-1",
      data: {},
      state: "failed",
    } as Job;

    const job2: Partial<Job> = {
      id: "job-2",
      name: "queue-2",
      data: {},
      state: "failed",
    } as Job;

    const job3: Partial<Job> = {
      id: "job-3",
      name: "queue-1",
      data: {},
      state: "failed",
    } as Job;

    await dlq.moveToDLQ(job1 as Job, "reason");
    await dlq.moveToDLQ(job2 as Job, "reason");
    await dlq.moveToDLQ(job3 as Job, "reason");

    const stats = await dlq.getStats();
    expect(stats.total).toBe(3);
    expect(stats.byQueue["queue-1"]).toBe(2);
    expect(stats.byQueue["queue-2"]).toBe(1);
  });

  it("should call onDLQ callback", async () => {
    let callbackCalled = false;
    let callbackJob: Job | null = null;
    let callbackReason = "";

    const dlqWithCallback = new MemoryDLQManager({
      onDLQ: (job, reason) => {
        callbackCalled = true;
        callbackJob = job;
        callbackReason = reason;
      },
    });

    const job: Partial<Job> = {
      id: "job-1",
      name: "test-queue",
      data: {},
      state: "failed",
    } as Job;

    await dlqWithCallback.moveToDLQ(job as Job, "Test reason");

    expect(callbackCalled).toBe(true);
    expect(callbackJob?.id).toBe("job-1");
    expect(callbackReason).toBe("Test reason");
  });
});
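The tests above pin down the MemoryDLQManager surface: moveToDLQ, getAll, getByQueue, retry, retryAll, clean, getStats, and the onDLQ hook. A minimal sketch of how a consumer inside this repo might wire that surface into failure handling, using only the calls exercised above; the alert logging, queue name, and seven-day retention window are illustrative choices, not part of the package:

import { MemoryDLQManager } from "../src/queue/dlq.js";
import { Job } from "../src/queue/types.js";

const dlq = new MemoryDLQManager({
  maxRetries: 3,
  queueName: "orders-dlq",
  // Invoked whenever a job is parked, per the onDLQ callback test.
  onDLQ: (job, reason) => {
    console.warn(`job ${job.id} dead-lettered: ${reason}`);
  },
});

// Park a job that has exhausted its retries instead of dropping it.
async function parkExhaustedJob(job: Job): Promise<void> {
  await dlq.moveToDLQ(job, "Max retries exceeded");
}

// Operator-style maintenance: inspect, replay one queue, purge stale entries.
async function maintain(queueName: string) {
  const stats = await dlq.getStats();
  console.log(`DLQ total: ${stats.total}`, stats.byQueue);

  // Per the retry tests, replayed jobs go back to "waiting" with attemptsMade reset.
  const retried = await dlq.retryAll(queueName);

  // clean(maxAgeMs) removes entries older than the given age, as in the expiry test.
  const purged = await dlq.clean(7 * 24 * 60 * 60 * 1000);

  return { retried, purged };
}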
package/tests/health.test.ts
@@ -0,0 +1,228 @@
/**
 * Tests for Health Monitoring
 */

import { describe, it, expect, beforeEach } from "vitest";
import {
  HealthCheckerImpl,
  MemoryEventTracker,
} from "../src/monitoring/health.js";

describe("Health Checker", () => {
  let healthChecker: HealthCheckerImpl;

  beforeEach(() => {
    healthChecker = new HealthCheckerImpl();
  });

  it("should perform default health checks", async () => {
    const health = await healthChecker.check();

    expect(health.healthy).toBe(true);
    expect(health.timestamp).toBeGreaterThan(0);
    expect(health.checks.length).toBeGreaterThan(0);

    // Should have uptime and memory checks
    const checkNames = health.checks.map((c) => c.name);
    expect(checkNames).toContain("uptime");
    expect(checkNames).toContain("memory");
  });

  it("should add custom health check", async () => {
    healthChecker.addCheck("custom", async () => ({
      name: "custom",
      status: "pass",
      message: "Custom check passed",
    }));

    const health = await healthChecker.check();
    const customCheck = health.checks.find((c) => c.name === "custom");

    expect(customCheck).toBeDefined();
    expect(customCheck?.status).toBe("pass");
    expect(customCheck?.message).toBe("Custom check passed");
  });

  it("should handle failing health checks", async () => {
    healthChecker.addCheck("failing", async () => ({
      name: "failing",
      status: "fail",
      message: "This check failed",
    }));

    const health = await healthChecker.check();

    expect(health.healthy).toBe(false);
    const failingCheck = health.checks.find((c) => c.name === "failing");
    expect(failingCheck?.status).toBe("fail");
  });

  it("should remove health check", async () => {
    healthChecker.addCheck("temp", async () => ({
      name: "temp",
      status: "pass",
    }));

    let health = await healthChecker.check();
    expect(health.checks.some((c) => c.name === "temp")).toBe(true);

    healthChecker.removeCheck("temp");

    health = await healthChecker.check();
    expect(health.checks.some((c) => c.name === "temp")).toBe(false);
  });

  it("should track response time", async () => {
    healthChecker.addCheck("slow", async () => {
      await new Promise((r) => setTimeout(r, 10));
      return {
        name: "slow",
        status: "pass",
      };
    });

    const health = await healthChecker.check();
    const slowCheck = health.checks.find((c) => c.name === "slow");

    expect(slowCheck?.responseTime).toBeGreaterThan(0);
  });

  it("should handle check errors", async () => {
    healthChecker.addCheck("error", async () => {
      throw new Error("Check error");
    });

    const health = await healthChecker.check();

    expect(health.healthy).toBe(false);
    const errorCheck = health.checks.find((c) => c.name === "error");
    expect(errorCheck?.status).toBe("fail");
    expect(errorCheck?.message).toBe("Check error");
  });
});

describe("Event Tracker", () => {
  let tracker: MemoryEventTracker;

  beforeEach(() => {
    tracker = new MemoryEventTracker();
  });

  it("should track events", () => {
    tracker.track({
      type: "test.event",
      category: "system",
      severity: "info",
      message: "Test event",
    });

    const events = tracker.getEvents();
    expect(events).toHaveLength(1);
    expect(events[0].type).toBe("test.event");
    expect(events[0].id).toBeDefined();
    expect(events[0].timestamp).toBeGreaterThan(0);
  });

  it("should filter by category", () => {
    tracker.track({
      type: "queue.event",
      category: "queue",
      severity: "info",
      message: "Queue event",
    });

    tracker.track({
      type: "stream.event",
      category: "stream",
      severity: "info",
      message: "Stream event",
    });

    const queueEvents = tracker.getEvents({ category: "queue" });
    expect(queueEvents).toHaveLength(1);
    expect(queueEvents[0].category).toBe("queue");
  });

  it("should filter by severity", () => {
    tracker.track({
      type: "info.event",
      category: "system",
      severity: "info",
      message: "Info",
    });

    tracker.track({
      type: "error.event",
      category: "system",
      severity: "error",
      message: "Error",
    });

    const errors = tracker.getEvents({ severity: "error" });
    expect(errors).toHaveLength(1);
    expect(errors[0].severity).toBe("error");
  });

  it("should filter by timestamp", () => {
    const now = Date.now();

    tracker.track({
      type: "event1",
      category: "system",
      severity: "info",
      message: "Event 1",
    });

    const events = tracker.getEvents({ since: now });
    expect(events.length).toBeGreaterThan(0);
    expect(events[0].timestamp).toBeGreaterThanOrEqual(now);
  });

  it("should limit results", () => {
    for (let i = 0; i < 10; i++) {
      tracker.track({
        type: `event${i}`,
        category: "system",
        severity: "info",
        message: `Event ${i}`,
      });
    }

    const limited = tracker.getEvents({ limit: 5 });
    expect(limited).toHaveLength(5);
  });

  it("should cleanup old events", () => {
    tracker.track({
      type: "old.event",
      category: "system",
      severity: "info",
      message: "Old event",
    });

    const cleaned = tracker.cleanup(0); // Clean all
    expect(cleaned).toBeGreaterThan(0);

    const remaining = tracker.getEvents();
    expect(remaining).toHaveLength(0);
  });

  it("should track metadata", () => {
    tracker.track({
      type: "meta.event",
      category: "system",
      severity: "info",
      message: "Event with metadata",
      metadata: {
        key: "value",
        count: 42,
      },
    });

    const events = tracker.getEvents();
    expect(events[0].metadata).toEqual({
      key: "value",
      count: 42,
    });
  });
});
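HealthCheckerImpl aggregates named async checks into one pass/fail report, and MemoryEventTracker keeps a queryable in-memory event log. A sketch combining the two, restricted to the API asserted above; the "database" check and its pingDatabase probe are hypothetical stand-ins, not part of flowfn:

import {
  HealthCheckerImpl,
  MemoryEventTracker,
} from "../src/monitoring/health.js";

// Hypothetical dependency probe; replace with a real connectivity check.
const pingDatabase = async (): Promise<boolean> => true;

const checker = new HealthCheckerImpl();
const tracker = new MemoryEventTracker();

checker.addCheck("database", async () => {
  const ok = await pingDatabase().catch(() => false);
  return {
    name: "database",
    status: ok ? "pass" : "fail",
    message: ok ? "reachable" : "unreachable",
  };
});

async function pollHealth() {
  const health = await checker.check();

  if (!health.healthy) {
    // Record each failed check so it is visible via getEvents({ severity: "error" }).
    for (const failed of health.checks.filter((c) => c.status === "fail")) {
      tracker.track({
        type: "health.fail",
        category: "system",
        severity: "error",
        message: failed.message ?? failed.name,
      });
    }
  }

  tracker.cleanup(24 * 60 * 60 * 1000); // drop events older than one day
  return health;
}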
package/tests/integration.test.ts
@@ -0,0 +1,253 @@
/**
 * Integration tests for FlowFn
 * Tests the complete stack: queues, streams, workflows together
 */

import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { createFlow, FlowFn } from "../src/core/flow-fn.js";

describe("FlowFn Integration Tests", () => {
  let flow: FlowFn;

  beforeEach(() => {
    flow = createFlow({ adapter: "memory" });
  });

  afterEach(async () => {
    await flow.close();
  });

  describe("Queue + Stream Integration", () => {
    it("should process queue jobs and publish to stream", async () => {
      const queue = flow.queue("processing");
      const stream = flow.stream("events");
      const results: any[] = [];

      // Subscribe to stream
      await stream.subscribe(async (msg) => {
        results.push(msg.data);
        await msg.ack();
      });

      // Process queue and publish to stream
      queue.process(async (job) => {
        await stream.publish({ processed: job.data });
        return { success: true };
      });

      // Add jobs
      await queue.add("task", { value: 1 });
      await queue.add("task", { value: 2 });

      // Wait for processing
      await new Promise((r) => setTimeout(r, 100));

      expect(results.length).toBeGreaterThan(0);
    }, 10000);
  });

  describe("Workflow + Queue Integration", () => {
    it("should trigger workflow from queue job", async () => {
      const queue = flow.queue("triggers");
      let workflowExecuted = false;

      const workflow = flow
        .workflow("process-order")
        .step("validate", async (ctx) => {
          ctx.set("validated", true);
        })
        .step("execute", async (ctx) => {
          workflowExecuted = true;
          ctx.set("executed", true);
        })
        .build();

      queue.process(async (job) => {
        await workflow.execute(job.data);
        return { done: true };
      });

      await queue.add("order", { orderId: "123" });

      // Wait for processing
      await new Promise((r) => setTimeout(r, 200));

      expect(workflowExecuted).toBe(true);
    }, 10000);
  });

  describe("Health Check Integration", () => {
    it("should report healthy system", async () => {
      const health = await flow.healthCheck();

      expect(health.healthy).toBe(true);
      expect(health.timestamp).toBeGreaterThan(0);
      expect(health.checks.length).toBeGreaterThan(0);

      // Should have default checks
      const checkNames = health.checks.map((c) => c.name);
      expect(checkNames).toContain("uptime");
      expect(checkNames).toContain("memory");
    });

    it("should track health check events", async () => {
      await flow.healthCheck();

      const tracker = flow.getEventTracker();
      const events = tracker.getEvents({ type: "health.check" });

      expect(events.length).toBeGreaterThan(0);
      expect(events[0].category).toBe("system");
    });
  });

  describe("Metrics Integration", () => {
    it("should collect queue metrics", async () => {
      const queue = flow.queue("metrics-test");

      // Record some metrics
      flow.metrics.record("queue.throughput", 10, { queue: "metrics-test" });
      flow.metrics.record("queue.throughput", 20, { queue: "metrics-test" });

      const metrics = await flow.metrics.getQueueMetrics("metrics-test");

      expect(metrics).toBeDefined();
      expect(metrics.waiting).toBeDefined();
    });
  });

  describe("Complete E2E Flow", () => {
    it("should handle queue → workflow → stream flow", async () => {
      const queue = flow.queue("orders");
      const stream = flow.stream("order-events");
      const results: string[] = [];

      // Subscribe to events
      await stream.subscribe(async (msg) => {
        results.push(msg.data.status);
        await msg.ack();
      });

      // Workflow for order processing
      const workflow = flow
        .workflow("order-processing")
        .step("validate", async (ctx) => {
          await stream.publish({ status: "validating", order: ctx.input });
          ctx.set("valid", true);
        })
        .step("process", async (ctx) => {
          await stream.publish({ status: "processing", order: ctx.input });
          ctx.set("processed", true);
        })
        .step("complete", async (ctx) => {
          await stream.publish({ status: "completed", order: ctx.input });
        })
        .build();

      // Queue processor triggers workflow
      queue.process(async (job) => {
        await workflow.execute(job.data);
        return { success: true };
      });

      // Add order
      await queue.add("new-order", { id: "order-123", amount: 100 });

      // Wait for complete flow
      await new Promise((r) => setTimeout(r, 300));

      // Should have all events
      expect(results).toContain("validating");
      expect(results).toContain("processing");
      expect(results).toContain("completed");
    }, 10000);
  });

  describe("Error Handling Integration", () => {
    it("should handle workflow failures gracefully", async () => {
      const workflow = flow
        .workflow("failing-workflow")
        .step("step1", async (ctx) => {
          ctx.set("step1", true);
        })
        .step("step2", async (ctx) => {
          throw new Error("Intentional failure");
        })
        .step("step3", async (ctx) => {
          ctx.set("step3", true); // Should not execute
        })
        .build();

      const execution = await workflow.execute({ test: true });

      // Wait for execution
      await new Promise((r) => setTimeout(r, 100));

      const result = await workflow.getExecution(execution.id);
      expect(result.status).toBe("failed");
      expect(result.error).toBeDefined();
    }, 10000);
  });

  describe("Concurrent Operations", () => {
    it("should handle multiple queues concurrently", async () => {
      const queue1 = flow.queue("concurrent-1");
      const queue2 = flow.queue("concurrent-2");

      let count1 = 0;
      let count2 = 0;

      queue1.process(async () => {
        count1++;
      });
      queue2.process(async () => {
        count2++;
      });

      // Add jobs to both queues
      await Promise.all([
        queue1.add("job", { id: 1 }),
        queue1.add("job", { id: 2 }),
        queue2.add("job", { id: 3 }),
        queue2.add("job", { id: 4 }),
      ]);

      await new Promise((r) => setTimeout(r, 200));

      expect(count1).toBe(2);
      expect(count2).toBe(2);
    }, 10000);

    it("should handle multiple streams concurrently", async () => {
      const stream1 = flow.stream("stream-1");
      const stream2 = flow.stream("stream-2");

      const results1: any[] = [];
      const results2: any[] = [];

      await stream1.subscribe(async (msg) => {
        results1.push(msg.data);
        await msg.ack();
      });

      await stream2.subscribe(async (msg) => {
        results2.push(msg.data);
        await msg.ack();
      });

      // Publish to both streams
      await Promise.all([
        stream1.publish({ stream: 1, value: "a" }),
        stream2.publish({ stream: 2, value: "b" }),
        stream1.publish({ stream: 1, value: "c" }),
        stream2.publish({ stream: 2, value: "d" }),
      ]);

      await new Promise((r) => setTimeout(r, 100));

      expect(results1.length).toBe(2);
      expect(results2.length).toBe(2);
    }, 10000);
  });
});
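Stripped of the vitest scaffolding, the end-to-end pattern these tests exercise reduces to a short program. A sketch using the same in-repo imports the tests use (a published consumer would presumably import from flowfn instead; that mapping is an assumption), with only calls demonstrated above:

import { createFlow } from "../src/core/flow-fn.js";

async function main() {
  const flow = createFlow({ adapter: "memory" });
  const queue = flow.queue("orders");
  const events = flow.stream("order-events");

  // Consumers see every lifecycle event the workflow publishes.
  await events.subscribe(async (msg) => {
    console.log("order event:", msg.data);
    await msg.ack();
  });

  const pipeline = flow
    .workflow("order-processing")
    .step("validate", async (ctx) => {
      ctx.set("valid", true);
    })
    .step("complete", async (ctx) => {
      await events.publish({ status: "completed", order: ctx.input });
    })
    .build();

  // Each queued job drives one workflow execution.
  queue.process(async (job) => {
    await pipeline.execute(job.data);
    return { success: true };
  });

  await queue.add("new-order", { id: "order-123", amount: 100 });

  // Give the in-memory adapter time to drain, mirroring the settle delays in the tests.
  await new Promise((r) => setTimeout(r, 300));
  await flow.close();
}

main().catch(console.error);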