@nicnocquee/dataqueue 1.24.0 → 1.26.0-beta.20260223195940
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -0
- package/ai/build-docs-content.ts +96 -0
- package/ai/build-llms-full.ts +42 -0
- package/ai/docs-content.json +278 -0
- package/ai/rules/advanced.md +132 -0
- package/ai/rules/basic.md +159 -0
- package/ai/rules/react-dashboard.md +83 -0
- package/ai/skills/dataqueue-advanced/SKILL.md +320 -0
- package/ai/skills/dataqueue-core/SKILL.md +234 -0
- package/ai/skills/dataqueue-react/SKILL.md +189 -0
- package/dist/cli.cjs +1149 -14
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.d.cts +66 -1
- package/dist/cli.d.ts +66 -1
- package/dist/cli.js +1146 -13
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +4630 -928
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1033 -15
- package/dist/index.d.ts +1033 -15
- package/dist/index.js +4626 -929
- package/dist/index.js.map +1 -1
- package/dist/mcp-server.cjs +186 -0
- package/dist/mcp-server.cjs.map +1 -0
- package/dist/mcp-server.d.cts +32 -0
- package/dist/mcp-server.d.ts +32 -0
- package/dist/mcp-server.js +175 -0
- package/dist/mcp-server.js.map +1 -0
- package/migrations/1751131910825_add_timeout_seconds_to_job_queue.sql +2 -2
- package/migrations/1751186053000_add_job_events_table.sql +12 -8
- package/migrations/1751984773000_add_tags_to_job_queue.sql +1 -1
- package/migrations/1765809419000_add_force_kill_on_timeout_to_job_queue.sql +1 -1
- package/migrations/1771100000000_add_idempotency_key_to_job_queue.sql +7 -0
- package/migrations/1781200000000_add_wait_support.sql +12 -0
- package/migrations/1781200000001_create_waitpoints_table.sql +18 -0
- package/migrations/1781200000002_add_performance_indexes.sql +34 -0
- package/migrations/1781200000003_add_progress_to_job_queue.sql +7 -0
- package/migrations/1781200000004_create_cron_schedules_table.sql +33 -0
- package/migrations/1781200000005_add_retry_config_to_job_queue.sql +17 -0
- package/package.json +40 -23
- package/src/backend.ts +328 -0
- package/src/backends/postgres.ts +2040 -0
- package/src/backends/redis-scripts.ts +865 -0
- package/src/backends/redis.test.ts +1906 -0
- package/src/backends/redis.ts +1792 -0
- package/src/cli.test.ts +82 -6
- package/src/cli.ts +73 -10
- package/src/cron.test.ts +126 -0
- package/src/cron.ts +40 -0
- package/src/db-util.ts +4 -2
- package/src/index.test.ts +688 -1
- package/src/index.ts +277 -39
- package/src/init-command.test.ts +449 -0
- package/src/init-command.ts +709 -0
- package/src/install-mcp-command.test.ts +216 -0
- package/src/install-mcp-command.ts +185 -0
- package/src/install-rules-command.test.ts +218 -0
- package/src/install-rules-command.ts +233 -0
- package/src/install-skills-command.test.ts +176 -0
- package/src/install-skills-command.ts +124 -0
- package/src/mcp-server.test.ts +162 -0
- package/src/mcp-server.ts +231 -0
- package/src/processor.test.ts +559 -18
- package/src/processor.ts +456 -49
- package/src/queue.test.ts +682 -6
- package/src/queue.ts +135 -944
- package/src/supervisor.test.ts +340 -0
- package/src/supervisor.ts +162 -0
- package/src/test-util.ts +32 -0
- package/src/types.ts +726 -17
- package/src/wait.test.ts +698 -0
- package/LICENSE +0 -21
|
@@ -0,0 +1,1906 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
|
2
|
+
import { initJobQueue } from '../index.js';
|
|
3
|
+
import { createRedisTestPrefix, cleanupRedisPrefix } from '../test-util.js';
|
|
4
|
+
import type { RedisJobQueueConfig } from '../types.js';
|
|
5
|
+
|
|
6
|
+
interface TestPayloadMap {
|
|
7
|
+
email: { to: string };
|
|
8
|
+
sms: { to: string };
|
|
9
|
+
test: { foo: string };
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
const REDIS_URL = process.env.REDIS_TEST_URL || 'redis://localhost:6379';
|
|
13
|
+
|
|
14
|
+
describe('Redis backend integration', () => {
|
|
15
|
+
let prefix: string;
|
|
16
|
+
let jobQueue: ReturnType<typeof initJobQueue<TestPayloadMap>>;
|
|
17
|
+
let redisClient: any;
|
|
18
|
+
|
|
19
|
+
beforeEach(async () => {
|
|
20
|
+
prefix = createRedisTestPrefix();
|
|
21
|
+
const config: RedisJobQueueConfig = {
|
|
22
|
+
backend: 'redis',
|
|
23
|
+
redisConfig: {
|
|
24
|
+
url: REDIS_URL,
|
|
25
|
+
keyPrefix: prefix,
|
|
26
|
+
},
|
|
27
|
+
};
|
|
28
|
+
jobQueue = initJobQueue<TestPayloadMap>(config);
|
|
29
|
+
redisClient = jobQueue.getRedisClient();
|
|
30
|
+
});
|
|
31
|
+
|
|
32
|
+
afterEach(async () => {
|
|
33
|
+
await cleanupRedisPrefix(redisClient, prefix);
|
|
34
|
+
await redisClient.quit();
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
it('should add a job and retrieve it', async () => {
|
|
38
|
+
const jobId = await jobQueue.addJob({
|
|
39
|
+
jobType: 'email',
|
|
40
|
+
payload: { to: 'test@example.com' },
|
|
41
|
+
});
|
|
42
|
+
expect(typeof jobId).toBe('number');
|
|
43
|
+
const job = await jobQueue.getJob(jobId);
|
|
44
|
+
expect(job).not.toBeNull();
|
|
45
|
+
expect(job?.jobType).toBe('email');
|
|
46
|
+
expect(job?.payload).toEqual({ to: 'test@example.com' });
|
|
47
|
+
expect(job?.status).toBe('pending');
|
|
48
|
+
});
|
|
49
|
+
|
|
50
|
+
it('should get jobs by status', async () => {
|
|
51
|
+
const jobId1 = await jobQueue.addJob({
|
|
52
|
+
jobType: 'email',
|
|
53
|
+
payload: { to: 'a@example.com' },
|
|
54
|
+
});
|
|
55
|
+
const jobId2 = await jobQueue.addJob({
|
|
56
|
+
jobType: 'sms',
|
|
57
|
+
payload: { to: 'b@example.com' },
|
|
58
|
+
});
|
|
59
|
+
const jobs = await jobQueue.getJobsByStatus('pending');
|
|
60
|
+
const ids = jobs.map((j) => j.id);
|
|
61
|
+
expect(ids).toContain(jobId1);
|
|
62
|
+
expect(ids).toContain(jobId2);
|
|
63
|
+
});
|
|
64
|
+
|
|
65
|
+
it('should get all jobs', async () => {
|
|
66
|
+
await jobQueue.addJob({ jobType: 'email', payload: { to: 'a@b.com' } });
|
|
67
|
+
await jobQueue.addJob({ jobType: 'sms', payload: { to: 'c@d.com' } });
|
|
68
|
+
const jobs = await jobQueue.getAllJobs();
|
|
69
|
+
expect(jobs.length).toBe(2);
|
|
70
|
+
});
|
|
71
|
+
|
|
72
|
+
it('should process a job with a registered handler', async () => {
|
|
73
|
+
const handler = vi.fn(async (_payload: any, _signal: any) => {});
|
|
74
|
+
const jobId = await jobQueue.addJob({
|
|
75
|
+
jobType: 'test',
|
|
76
|
+
payload: { foo: 'bar' },
|
|
77
|
+
});
|
|
78
|
+
const processor = jobQueue.createProcessor(
|
|
79
|
+
{
|
|
80
|
+
email: vi.fn(async () => {}),
|
|
81
|
+
sms: vi.fn(async () => {}),
|
|
82
|
+
test: handler,
|
|
83
|
+
},
|
|
84
|
+
{ pollInterval: 100 },
|
|
85
|
+
);
|
|
86
|
+
await processor.start();
|
|
87
|
+
expect(handler).toHaveBeenCalledWith(
|
|
88
|
+
{ foo: 'bar' },
|
|
89
|
+
expect.any(Object),
|
|
90
|
+
expect.any(Object),
|
|
91
|
+
);
|
|
92
|
+
const job = await jobQueue.getJob(jobId);
|
|
93
|
+
expect(job?.status).toBe('completed');
|
|
94
|
+
});
|
|
95
|
+
|
|
96
|
+
it('should retry a failed job', async () => {
|
|
97
|
+
const jobId = await jobQueue.addJob({
|
|
98
|
+
jobType: 'email',
|
|
99
|
+
payload: { to: 'fail@example.com' },
|
|
100
|
+
});
|
|
101
|
+
// Use a handler that fails
|
|
102
|
+
const processor = jobQueue.createProcessor(
|
|
103
|
+
{
|
|
104
|
+
email: async () => {
|
|
105
|
+
throw new Error('boom');
|
|
106
|
+
},
|
|
107
|
+
sms: vi.fn(async () => {}),
|
|
108
|
+
test: vi.fn(async () => {}),
|
|
109
|
+
},
|
|
110
|
+
{ pollInterval: 100 },
|
|
111
|
+
);
|
|
112
|
+
await processor.start();
|
|
113
|
+
let job = await jobQueue.getJob(jobId);
|
|
114
|
+
expect(job?.status).toBe('failed');
|
|
115
|
+
|
|
116
|
+
await jobQueue.retryJob(jobId);
|
|
117
|
+
job = await jobQueue.getJob(jobId);
|
|
118
|
+
expect(job?.status).toBe('pending');
|
|
119
|
+
});
|
|
120
|
+
|
|
121
|
+
it('should cancel a pending job', async () => {
|
|
122
|
+
const jobId = await jobQueue.addJob({
|
|
123
|
+
jobType: 'email',
|
|
124
|
+
payload: { to: 'cancelme@example.com' },
|
|
125
|
+
});
|
|
126
|
+
await jobQueue.cancelJob(jobId);
|
|
127
|
+
const job = await jobQueue.getJob(jobId);
|
|
128
|
+
expect(job?.status).toBe('cancelled');
|
|
129
|
+
});
|
|
130
|
+
|
|
131
|
+
it('should not cancel a non-pending job', async () => {
|
|
132
|
+
const jobId = await jobQueue.addJob({
|
|
133
|
+
jobType: 'test',
|
|
134
|
+
payload: { foo: 'done' },
|
|
135
|
+
});
|
|
136
|
+
// Process it first
|
|
137
|
+
const processor = jobQueue.createProcessor(
|
|
138
|
+
{
|
|
139
|
+
email: vi.fn(async () => {}),
|
|
140
|
+
sms: vi.fn(async () => {}),
|
|
141
|
+
test: vi.fn(async () => {}),
|
|
142
|
+
},
|
|
143
|
+
{ pollInterval: 100 },
|
|
144
|
+
);
|
|
145
|
+
await processor.start();
|
|
146
|
+
const completedJob = await jobQueue.getJob(jobId);
|
|
147
|
+
expect(completedJob?.status).toBe('completed');
|
|
148
|
+
|
|
149
|
+
await jobQueue.cancelJob(jobId);
|
|
150
|
+
const job = await jobQueue.getJob(jobId);
|
|
151
|
+
expect(job?.status).toBe('completed'); // unchanged
|
|
152
|
+
});
|
|
153
|
+
|
|
154
|
+
it('should cancel all upcoming jobs', async () => {
|
|
155
|
+
const jobId1 = await jobQueue.addJob({
|
|
156
|
+
jobType: 'email',
|
|
157
|
+
payload: { to: 'a@example.com' },
|
|
158
|
+
});
|
|
159
|
+
const jobId2 = await jobQueue.addJob({
|
|
160
|
+
jobType: 'email',
|
|
161
|
+
payload: { to: 'b@example.com' },
|
|
162
|
+
});
|
|
163
|
+
const cancelled = await jobQueue.cancelAllUpcomingJobs();
|
|
164
|
+
expect(cancelled).toBe(2);
|
|
165
|
+
const job1 = await jobQueue.getJob(jobId1);
|
|
166
|
+
const job2 = await jobQueue.getJob(jobId2);
|
|
167
|
+
expect(job1?.status).toBe('cancelled');
|
|
168
|
+
expect(job2?.status).toBe('cancelled');
|
|
169
|
+
});
|
|
170
|
+
|
|
171
|
+
it('should cancel all upcoming jobs by jobType', async () => {
|
|
172
|
+
const jobId1 = await jobQueue.addJob({
|
|
173
|
+
jobType: 'email',
|
|
174
|
+
payload: { to: 'a@example.com' },
|
|
175
|
+
});
|
|
176
|
+
const jobId2 = await jobQueue.addJob({
|
|
177
|
+
jobType: 'sms',
|
|
178
|
+
payload: { to: 'b@example.com' },
|
|
179
|
+
});
|
|
180
|
+
const cancelled = await jobQueue.cancelAllUpcomingJobs({
|
|
181
|
+
jobType: 'email',
|
|
182
|
+
});
|
|
183
|
+
expect(cancelled).toBe(1);
|
|
184
|
+
expect((await jobQueue.getJob(jobId1))?.status).toBe('cancelled');
|
|
185
|
+
expect((await jobQueue.getJob(jobId2))?.status).toBe('pending');
|
|
186
|
+
});
|
|
187
|
+
|
|
188
|
+
it('should cancel all upcoming jobs by priority', async () => {
|
|
189
|
+
const jobId1 = await jobQueue.addJob({
|
|
190
|
+
jobType: 'email',
|
|
191
|
+
payload: { to: 'a@example.com' },
|
|
192
|
+
priority: 1,
|
|
193
|
+
});
|
|
194
|
+
const jobId2 = await jobQueue.addJob({
|
|
195
|
+
jobType: 'email',
|
|
196
|
+
payload: { to: 'b@example.com' },
|
|
197
|
+
priority: 2,
|
|
198
|
+
});
|
|
199
|
+
const cancelled = await jobQueue.cancelAllUpcomingJobs({ priority: 2 });
|
|
200
|
+
expect(cancelled).toBe(1);
|
|
201
|
+
expect((await jobQueue.getJob(jobId1))?.status).toBe('pending');
|
|
202
|
+
expect((await jobQueue.getJob(jobId2))?.status).toBe('cancelled');
|
|
203
|
+
});
|
|
204
|
+
|
|
205
|
+
it('should edit a pending job', async () => {
|
|
206
|
+
const jobId = await jobQueue.addJob({
|
|
207
|
+
jobType: 'email',
|
|
208
|
+
payload: { to: 'original@example.com' },
|
|
209
|
+
priority: 0,
|
|
210
|
+
maxAttempts: 3,
|
|
211
|
+
});
|
|
212
|
+
|
|
213
|
+
await jobQueue.editJob(jobId, {
|
|
214
|
+
payload: { to: 'updated@example.com' },
|
|
215
|
+
priority: 10,
|
|
216
|
+
maxAttempts: 5,
|
|
217
|
+
});
|
|
218
|
+
|
|
219
|
+
const job = await jobQueue.getJob(jobId);
|
|
220
|
+
expect(job?.payload).toEqual({ to: 'updated@example.com' });
|
|
221
|
+
expect(job?.priority).toBe(10);
|
|
222
|
+
expect(job?.maxAttempts).toBe(5);
|
|
223
|
+
});
|
|
224
|
+
|
|
225
|
+
it('should edit all pending jobs', async () => {
|
|
226
|
+
await jobQueue.addJob({
|
|
227
|
+
jobType: 'email',
|
|
228
|
+
payload: { to: 'a@example.com' },
|
|
229
|
+
priority: 0,
|
|
230
|
+
});
|
|
231
|
+
await jobQueue.addJob({
|
|
232
|
+
jobType: 'email',
|
|
233
|
+
payload: { to: 'b@example.com' },
|
|
234
|
+
priority: 0,
|
|
235
|
+
});
|
|
236
|
+
const smsId = await jobQueue.addJob({
|
|
237
|
+
jobType: 'sms',
|
|
238
|
+
payload: { to: 'c@example.com' },
|
|
239
|
+
priority: 0,
|
|
240
|
+
});
|
|
241
|
+
|
|
242
|
+
const edited = await jobQueue.editAllPendingJobs(
|
|
243
|
+
{ jobType: 'email' },
|
|
244
|
+
{ priority: 5 },
|
|
245
|
+
);
|
|
246
|
+
expect(edited).toBe(2);
|
|
247
|
+
const smsJob = await jobQueue.getJob(smsId);
|
|
248
|
+
expect(smsJob?.priority).toBe(0); // unchanged
|
|
249
|
+
});
|
|
250
|
+
|
|
251
|
+
it('should record and retrieve job events', async () => {
|
|
252
|
+
const jobId = await jobQueue.addJob({
|
|
253
|
+
jobType: 'email',
|
|
254
|
+
payload: { to: 'events@example.com' },
|
|
255
|
+
});
|
|
256
|
+
const events = await jobQueue.getJobEvents(jobId);
|
|
257
|
+
expect(events.length).toBeGreaterThanOrEqual(1);
|
|
258
|
+
expect(events[0].eventType).toBe('added');
|
|
259
|
+
});
|
|
260
|
+
|
|
261
|
+
it('should record edited event', async () => {
|
|
262
|
+
const jobId = await jobQueue.addJob({
|
|
263
|
+
jobType: 'email',
|
|
264
|
+
payload: { to: 'original@example.com' },
|
|
265
|
+
});
|
|
266
|
+
await jobQueue.editJob(jobId, {
|
|
267
|
+
payload: { to: 'updated@example.com' },
|
|
268
|
+
priority: 10,
|
|
269
|
+
});
|
|
270
|
+
const events = await jobQueue.getJobEvents(jobId);
|
|
271
|
+
const editEvent = events.find((e) => e.eventType === 'edited');
|
|
272
|
+
expect(editEvent).not.toBeUndefined();
|
|
273
|
+
expect(editEvent?.metadata).toMatchObject({
|
|
274
|
+
payload: { to: 'updated@example.com' },
|
|
275
|
+
priority: 10,
|
|
276
|
+
});
|
|
277
|
+
});
|
|
278
|
+
|
|
279
|
+
it('should support idempotency keys', async () => {
|
|
280
|
+
const jobId1 = await jobQueue.addJob({
|
|
281
|
+
jobType: 'email',
|
|
282
|
+
payload: { to: 'idem@example.com' },
|
|
283
|
+
idempotencyKey: 'unique-key-123',
|
|
284
|
+
});
|
|
285
|
+
const jobId2 = await jobQueue.addJob({
|
|
286
|
+
jobType: 'email',
|
|
287
|
+
payload: { to: 'idem@example.com' },
|
|
288
|
+
idempotencyKey: 'unique-key-123',
|
|
289
|
+
});
|
|
290
|
+
expect(jobId1).toBe(jobId2);
|
|
291
|
+
});
|
|
292
|
+
|
|
293
|
+
it('should support tags and getJobsByTags', async () => {
|
|
294
|
+
await jobQueue.addJob({
|
|
295
|
+
jobType: 'email',
|
|
296
|
+
payload: { to: 'tagged1@example.com' },
|
|
297
|
+
tags: ['foo', 'bar'],
|
|
298
|
+
});
|
|
299
|
+
await jobQueue.addJob({
|
|
300
|
+
jobType: 'email',
|
|
301
|
+
payload: { to: 'tagged2@example.com' },
|
|
302
|
+
tags: ['foo'],
|
|
303
|
+
});
|
|
304
|
+
await jobQueue.addJob({
|
|
305
|
+
jobType: 'email',
|
|
306
|
+
payload: { to: 'tagged3@example.com' },
|
|
307
|
+
tags: ['baz'],
|
|
308
|
+
});
|
|
309
|
+
|
|
310
|
+
// mode: 'all' - has both foo AND bar
|
|
311
|
+
const allJobs = await jobQueue.getJobsByTags(['foo', 'bar'], 'all');
|
|
312
|
+
expect(allJobs.length).toBe(1);
|
|
313
|
+
expect(allJobs[0].payload).toEqual({ to: 'tagged1@example.com' });
|
|
314
|
+
|
|
315
|
+
// mode: 'any' - has foo OR bar
|
|
316
|
+
const anyJobs = await jobQueue.getJobsByTags(['foo', 'bar'], 'any');
|
|
317
|
+
expect(anyJobs.length).toBe(2);
|
|
318
|
+
|
|
319
|
+
// mode: 'exact' - exactly ['foo', 'bar']
|
|
320
|
+
const exactJobs = await jobQueue.getJobsByTags(['foo', 'bar'], 'exact');
|
|
321
|
+
expect(exactJobs.length).toBe(1);
|
|
322
|
+
|
|
323
|
+
// mode: 'none' - neither foo nor bar
|
|
324
|
+
const noneJobs = await jobQueue.getJobsByTags(['foo', 'bar'], 'none');
|
|
325
|
+
expect(noneJobs.length).toBe(1);
|
|
326
|
+
expect(noneJobs[0].payload).toEqual({ to: 'tagged3@example.com' });
|
|
327
|
+
});
|
|
328
|
+
|
|
329
|
+
it('should support priority ordering in processing', async () => {
|
|
330
|
+
const processed: string[] = [];
|
|
331
|
+
const jobId1 = await jobQueue.addJob({
|
|
332
|
+
jobType: 'email',
|
|
333
|
+
payload: { to: 'low@example.com' },
|
|
334
|
+
priority: 1,
|
|
335
|
+
});
|
|
336
|
+
const jobId2 = await jobQueue.addJob({
|
|
337
|
+
jobType: 'email',
|
|
338
|
+
payload: { to: 'high@example.com' },
|
|
339
|
+
priority: 10,
|
|
340
|
+
});
|
|
341
|
+
const processor = jobQueue.createProcessor(
|
|
342
|
+
{
|
|
343
|
+
email: async (payload: any) => {
|
|
344
|
+
processed.push(payload.to);
|
|
345
|
+
},
|
|
346
|
+
sms: vi.fn(async () => {}),
|
|
347
|
+
test: vi.fn(async () => {}),
|
|
348
|
+
},
|
|
349
|
+
{ batchSize: 10, concurrency: 1 },
|
|
350
|
+
);
|
|
351
|
+
await processor.start();
|
|
352
|
+
// Higher priority should be first
|
|
353
|
+
expect(processed[0]).toBe('high@example.com');
|
|
354
|
+
expect(processed[1]).toBe('low@example.com');
|
|
355
|
+
});
|
|
356
|
+
|
|
357
|
+
it('should cleanup old completed jobs', async () => {
|
|
358
|
+
const jobId = await jobQueue.addJob({
|
|
359
|
+
jobType: 'test',
|
|
360
|
+
payload: { foo: 'cleanup' },
|
|
361
|
+
});
|
|
362
|
+
// Complete it
|
|
363
|
+
const processor = jobQueue.createProcessor({
|
|
364
|
+
email: vi.fn(async () => {}),
|
|
365
|
+
sms: vi.fn(async () => {}),
|
|
366
|
+
test: vi.fn(async () => {}),
|
|
367
|
+
});
|
|
368
|
+
await processor.start();
|
|
369
|
+
const completedJob = await jobQueue.getJob(jobId);
|
|
370
|
+
expect(completedJob?.status).toBe('completed');
|
|
371
|
+
|
|
372
|
+
// Manually set updatedAt to 31 days ago
|
|
373
|
+
const oldMs = Date.now() - 31 * 24 * 60 * 60 * 1000;
|
|
374
|
+
await redisClient.hset(
|
|
375
|
+
`${prefix}job:${jobId}`,
|
|
376
|
+
'updatedAt',
|
|
377
|
+
oldMs.toString(),
|
|
378
|
+
);
|
|
379
|
+
|
|
380
|
+
const deleted = await jobQueue.cleanupOldJobs(30);
|
|
381
|
+
expect(deleted).toBe(1);
|
|
382
|
+
const job = await jobQueue.getJob(jobId);
|
|
383
|
+
expect(job).toBeNull();
|
|
384
|
+
});
|
|
385
|
+
|
|
386
|
+
it('should cleanup old completed jobs in batches', async () => {
|
|
387
|
+
const ids: number[] = [];
|
|
388
|
+
for (let i = 0; i < 5; i++) {
|
|
389
|
+
const jobId = await jobQueue.addJob({
|
|
390
|
+
jobType: 'test',
|
|
391
|
+
payload: { foo: `batch-${i}` },
|
|
392
|
+
});
|
|
393
|
+
ids.push(jobId);
|
|
394
|
+
}
|
|
395
|
+
// Complete all jobs
|
|
396
|
+
const processor = jobQueue.createProcessor({
|
|
397
|
+
email: vi.fn(async () => {}),
|
|
398
|
+
sms: vi.fn(async () => {}),
|
|
399
|
+
test: vi.fn(async () => {}),
|
|
400
|
+
});
|
|
401
|
+
await processor.start();
|
|
402
|
+
for (const id of ids) {
|
|
403
|
+
const job = await jobQueue.getJob(id);
|
|
404
|
+
expect(job?.status).toBe('completed');
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
// Backdate all to 31 days ago
|
|
408
|
+
const oldMs = Date.now() - 31 * 24 * 60 * 60 * 1000;
|
|
409
|
+
for (const id of ids) {
|
|
410
|
+
await redisClient.hset(
|
|
411
|
+
`${prefix}job:${id}`,
|
|
412
|
+
'updatedAt',
|
|
413
|
+
oldMs.toString(),
|
|
414
|
+
);
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
// Cleanup with small batchSize to force multiple SSCAN iterations
|
|
418
|
+
const deleted = await jobQueue.cleanupOldJobs(30, 2);
|
|
419
|
+
expect(deleted).toBe(5);
|
|
420
|
+
for (const id of ids) {
|
|
421
|
+
const job = await jobQueue.getJob(id);
|
|
422
|
+
expect(job).toBeNull();
|
|
423
|
+
}
|
|
424
|
+
});
|
|
425
|
+
|
|
426
|
+
it('should reclaim stuck jobs', async () => {
|
|
427
|
+
const jobId = await jobQueue.addJob({
|
|
428
|
+
jobType: 'email',
|
|
429
|
+
payload: { to: 'stuck@example.com' },
|
|
430
|
+
});
|
|
431
|
+
// Manually set to processing with old lockedAt
|
|
432
|
+
const oldMs = Date.now() - 15 * 60 * 1000; // 15 minutes ago
|
|
433
|
+
await redisClient.hmset(
|
|
434
|
+
`${prefix}job:${jobId}`,
|
|
435
|
+
'status',
|
|
436
|
+
'processing',
|
|
437
|
+
'lockedAt',
|
|
438
|
+
oldMs.toString(),
|
|
439
|
+
'lockedBy',
|
|
440
|
+
'dead-worker',
|
|
441
|
+
);
|
|
442
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
443
|
+
await redisClient.sadd(`${prefix}status:processing`, jobId.toString());
|
|
444
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
445
|
+
|
|
446
|
+
const reclaimed = await jobQueue.reclaimStuckJobs(10);
|
|
447
|
+
expect(reclaimed).toBe(1);
|
|
448
|
+
const job = await jobQueue.getJob(jobId);
|
|
449
|
+
expect(job?.status).toBe('pending');
|
|
450
|
+
expect(job?.lockedAt).toBeNull();
|
|
451
|
+
});
|
|
452
|
+
|
|
453
|
+
it('should not reclaim a job whose timeoutMs exceeds the reclaim threshold', async () => {
|
|
454
|
+
const jobId = await jobQueue.addJob({
|
|
455
|
+
jobType: 'email',
|
|
456
|
+
payload: { to: 'long-timeout@example.com' },
|
|
457
|
+
timeoutMs: 30 * 60 * 1000, // 30 minutes
|
|
458
|
+
});
|
|
459
|
+
// Simulate: processing for 15 minutes (exceeds 10-min global threshold but within 30-min job timeout)
|
|
460
|
+
const oldMs = Date.now() - 15 * 60 * 1000;
|
|
461
|
+
await redisClient.hmset(
|
|
462
|
+
`${prefix}job:${jobId}`,
|
|
463
|
+
'status',
|
|
464
|
+
'processing',
|
|
465
|
+
'lockedAt',
|
|
466
|
+
oldMs.toString(),
|
|
467
|
+
'lockedBy',
|
|
468
|
+
'some-worker',
|
|
469
|
+
);
|
|
470
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
471
|
+
await redisClient.sadd(`${prefix}status:processing`, jobId.toString());
|
|
472
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
473
|
+
|
|
474
|
+
const reclaimed = await jobQueue.reclaimStuckJobs(10);
|
|
475
|
+
expect(reclaimed).toBe(0);
|
|
476
|
+
const job = await jobQueue.getJob(jobId);
|
|
477
|
+
expect(job?.status).toBe('processing');
|
|
478
|
+
});
|
|
479
|
+
|
|
480
|
+
it('should reclaim a job whose timeoutMs has also been exceeded', async () => {
|
|
481
|
+
const jobId = await jobQueue.addJob({
|
|
482
|
+
jobType: 'email',
|
|
483
|
+
payload: { to: 'expired-timeout@example.com' },
|
|
484
|
+
timeoutMs: 20 * 60 * 1000, // 20 minutes
|
|
485
|
+
});
|
|
486
|
+
// Simulate: processing for 25 minutes (exceeds both 10-min threshold and 20-min job timeout)
|
|
487
|
+
const oldMs = Date.now() - 25 * 60 * 1000;
|
|
488
|
+
await redisClient.hmset(
|
|
489
|
+
`${prefix}job:${jobId}`,
|
|
490
|
+
'status',
|
|
491
|
+
'processing',
|
|
492
|
+
'lockedAt',
|
|
493
|
+
oldMs.toString(),
|
|
494
|
+
'lockedBy',
|
|
495
|
+
'some-worker',
|
|
496
|
+
);
|
|
497
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
498
|
+
await redisClient.sadd(`${prefix}status:processing`, jobId.toString());
|
|
499
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
500
|
+
|
|
501
|
+
const reclaimed = await jobQueue.reclaimStuckJobs(10);
|
|
502
|
+
expect(reclaimed).toBe(1);
|
|
503
|
+
const job = await jobQueue.getJob(jobId);
|
|
504
|
+
expect(job?.status).toBe('pending');
|
|
505
|
+
});
|
|
506
|
+
|
|
507
|
+
it('getPool should throw for Redis backend', () => {
|
|
508
|
+
expect(() => jobQueue.getPool()).toThrow(
|
|
509
|
+
'getPool() is only available with the PostgreSQL backend',
|
|
510
|
+
);
|
|
511
|
+
});
|
|
512
|
+
|
|
513
|
+
it('getRedisClient should return the Redis client', () => {
|
|
514
|
+
const client = jobQueue.getRedisClient() as { get: unknown };
|
|
515
|
+
expect(client).toBeDefined();
|
|
516
|
+
expect(typeof client.get).toBe('function');
|
|
517
|
+
});
|
|
518
|
+
|
|
519
|
+
it('should get jobs with filters', async () => {
|
|
520
|
+
await jobQueue.addJob({
|
|
521
|
+
jobType: 'email',
|
|
522
|
+
payload: { to: 'a@example.com' },
|
|
523
|
+
priority: 1,
|
|
524
|
+
});
|
|
525
|
+
await jobQueue.addJob({
|
|
526
|
+
jobType: 'sms',
|
|
527
|
+
payload: { to: 'b@example.com' },
|
|
528
|
+
priority: 2,
|
|
529
|
+
});
|
|
530
|
+
await jobQueue.addJob({
|
|
531
|
+
jobType: 'email',
|
|
532
|
+
payload: { to: 'c@example.com' },
|
|
533
|
+
priority: 3,
|
|
534
|
+
});
|
|
535
|
+
|
|
536
|
+
const emailJobs = await jobQueue.getJobs({ jobType: 'email' });
|
|
537
|
+
expect(emailJobs.length).toBe(2);
|
|
538
|
+
|
|
539
|
+
const priorityJobs = await jobQueue.getJobs({ priority: 2 });
|
|
540
|
+
expect(priorityJobs.length).toBe(1);
|
|
541
|
+
expect(priorityJobs[0].jobType).toBe('sms');
|
|
542
|
+
});
|
|
543
|
+
|
|
544
|
+
it('should cancel all upcoming jobs by tags', async () => {
|
|
545
|
+
const jobId1 = await jobQueue.addJob({
|
|
546
|
+
jobType: 'email',
|
|
547
|
+
payload: { to: 'tag1@example.com' },
|
|
548
|
+
tags: ['foo', 'bar'],
|
|
549
|
+
});
|
|
550
|
+
const jobId2 = await jobQueue.addJob({
|
|
551
|
+
jobType: 'email',
|
|
552
|
+
payload: { to: 'tag2@example.com' },
|
|
553
|
+
tags: ['baz'],
|
|
554
|
+
});
|
|
555
|
+
const cancelled = await jobQueue.cancelAllUpcomingJobs({
|
|
556
|
+
tags: { values: ['foo'], mode: 'all' },
|
|
557
|
+
});
|
|
558
|
+
expect(cancelled).toBe(1);
|
|
559
|
+
expect((await jobQueue.getJob(jobId1))?.status).toBe('cancelled');
|
|
560
|
+
expect((await jobQueue.getJob(jobId2))?.status).toBe('pending');
|
|
561
|
+
});
|
|
562
|
+
|
|
563
|
+
it('should handle scheduled jobs (runAt in the future)', async () => {
|
|
564
|
+
const futureDate = new Date(Date.now() + 60 * 60 * 1000); // 1 hour later
|
|
565
|
+
const jobId = await jobQueue.addJob({
|
|
566
|
+
jobType: 'email',
|
|
567
|
+
payload: { to: 'scheduled@example.com' },
|
|
568
|
+
runAt: futureDate,
|
|
569
|
+
});
|
|
570
|
+
|
|
571
|
+
// Should not be picked up immediately
|
|
572
|
+
const processor = jobQueue.createProcessor({
|
|
573
|
+
email: vi.fn(async () => {}),
|
|
574
|
+
sms: vi.fn(async () => {}),
|
|
575
|
+
test: vi.fn(async () => {}),
|
|
576
|
+
});
|
|
577
|
+
const processed = await processor.start();
|
|
578
|
+
expect(processed).toBe(0);
|
|
579
|
+
|
|
580
|
+
const job = await jobQueue.getJob(jobId);
|
|
581
|
+
expect(job?.status).toBe('pending');
|
|
582
|
+
});
|
|
583
|
+
|
|
584
|
+
// ── Configurable retry strategy tests ────────────────────────────────
|
|
585
|
+
|
|
586
|
+
it('stores retry config on a job', async () => {
|
|
587
|
+
const jobId = await jobQueue.addJob({
|
|
588
|
+
jobType: 'email',
|
|
589
|
+
payload: { to: 'retry-config@example.com' },
|
|
590
|
+
retryDelay: 30,
|
|
591
|
+
retryBackoff: false,
|
|
592
|
+
retryDelayMax: 120,
|
|
593
|
+
});
|
|
594
|
+
|
|
595
|
+
const job = await jobQueue.getJob(jobId);
|
|
596
|
+
expect(job?.retryDelay).toBe(30);
|
|
597
|
+
expect(job?.retryBackoff).toBe(false);
|
|
598
|
+
expect(job?.retryDelayMax).toBe(120);
|
|
599
|
+
});
|
|
600
|
+
|
|
601
|
+
it('returns null retry config for jobs without it', async () => {
|
|
602
|
+
const jobId = await jobQueue.addJob({
|
|
603
|
+
jobType: 'email',
|
|
604
|
+
payload: { to: 'no-retry-config@example.com' },
|
|
605
|
+
});
|
|
606
|
+
|
|
607
|
+
const job = await jobQueue.getJob(jobId);
|
|
608
|
+
expect(job?.retryDelay).toBeNull();
|
|
609
|
+
expect(job?.retryBackoff).toBeNull();
|
|
610
|
+
expect(job?.retryDelayMax).toBeNull();
|
|
611
|
+
});
|
|
612
|
+
|
|
613
|
+
it('uses legacy backoff when no retry config is set', async () => {
|
|
614
|
+
const jobId = await jobQueue.addJob({
|
|
615
|
+
jobType: 'email',
|
|
616
|
+
payload: { to: 'legacy-retry@example.com' },
|
|
617
|
+
maxAttempts: 3,
|
|
618
|
+
});
|
|
619
|
+
|
|
620
|
+
const handler = vi.fn(async () => {
|
|
621
|
+
throw new Error('fail');
|
|
622
|
+
});
|
|
623
|
+
const processor = jobQueue.createProcessor({
|
|
624
|
+
email: handler,
|
|
625
|
+
sms: vi.fn(async () => {}),
|
|
626
|
+
test: vi.fn(async () => {}),
|
|
627
|
+
});
|
|
628
|
+
await processor.start();
|
|
629
|
+
|
|
630
|
+
const job = await jobQueue.getJob(jobId);
|
|
631
|
+
expect(job?.status).toBe('failed');
|
|
632
|
+
expect(job?.nextAttemptAt).not.toBeNull();
|
|
633
|
+
const delayMs =
|
|
634
|
+
job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime();
|
|
635
|
+
// Legacy: 2^1 * 60s = 120s = 120000ms
|
|
636
|
+
expect(delayMs).toBeGreaterThanOrEqual(115000);
|
|
637
|
+
expect(delayMs).toBeLessThanOrEqual(125000);
|
|
638
|
+
});
|
|
639
|
+
|
|
640
|
+
it('uses fixed delay when retryBackoff is false', async () => {
|
|
641
|
+
const jobId = await jobQueue.addJob({
|
|
642
|
+
jobType: 'email',
|
|
643
|
+
payload: { to: 'fixed-retry@example.com' },
|
|
644
|
+
maxAttempts: 3,
|
|
645
|
+
retryDelay: 10,
|
|
646
|
+
retryBackoff: false,
|
|
647
|
+
});
|
|
648
|
+
|
|
649
|
+
const handler = vi.fn(async () => {
|
|
650
|
+
throw new Error('fail');
|
|
651
|
+
});
|
|
652
|
+
const processor = jobQueue.createProcessor({
|
|
653
|
+
email: handler,
|
|
654
|
+
sms: vi.fn(async () => {}),
|
|
655
|
+
test: vi.fn(async () => {}),
|
|
656
|
+
});
|
|
657
|
+
await processor.start();
|
|
658
|
+
|
|
659
|
+
const job = await jobQueue.getJob(jobId);
|
|
660
|
+
expect(job?.status).toBe('failed');
|
|
661
|
+
expect(job?.nextAttemptAt).not.toBeNull();
|
|
662
|
+
const delaySec =
|
|
663
|
+
(job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime()) / 1000;
|
|
664
|
+
expect(delaySec).toBeGreaterThanOrEqual(9);
|
|
665
|
+
expect(delaySec).toBeLessThanOrEqual(11);
|
|
666
|
+
});
|
|
667
|
+
|
|
668
|
+
it('uses exponential backoff with custom retryDelay', async () => {
|
|
669
|
+
const jobId = await jobQueue.addJob({
|
|
670
|
+
jobType: 'email',
|
|
671
|
+
payload: { to: 'expo-retry@example.com' },
|
|
672
|
+
maxAttempts: 3,
|
|
673
|
+
retryDelay: 5,
|
|
674
|
+
retryBackoff: true,
|
|
675
|
+
});
|
|
676
|
+
|
|
677
|
+
const handler = vi.fn(async () => {
|
|
678
|
+
throw new Error('fail');
|
|
679
|
+
});
|
|
680
|
+
const processor = jobQueue.createProcessor({
|
|
681
|
+
email: handler,
|
|
682
|
+
sms: vi.fn(async () => {}),
|
|
683
|
+
test: vi.fn(async () => {}),
|
|
684
|
+
});
|
|
685
|
+
await processor.start();
|
|
686
|
+
|
|
687
|
+
const job = await jobQueue.getJob(jobId);
|
|
688
|
+
expect(job?.status).toBe('failed');
|
|
689
|
+
expect(job?.nextAttemptAt).not.toBeNull();
|
|
690
|
+
// 5 * 2^1 = 10s, with jitter [5, 10]
|
|
691
|
+
const delaySec =
|
|
692
|
+
(job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime()) / 1000;
|
|
693
|
+
expect(delaySec).toBeGreaterThanOrEqual(4);
|
|
694
|
+
expect(delaySec).toBeLessThanOrEqual(11);
|
|
695
|
+
});
|
|
696
|
+
|
|
697
|
+
it('caps exponential backoff with retryDelayMax', async () => {
|
|
698
|
+
const jobId = await jobQueue.addJob({
|
|
699
|
+
jobType: 'email',
|
|
700
|
+
payload: { to: 'capped-retry@example.com' },
|
|
701
|
+
maxAttempts: 5,
|
|
702
|
+
retryDelay: 100,
|
|
703
|
+
retryBackoff: true,
|
|
704
|
+
retryDelayMax: 30,
|
|
705
|
+
});
|
|
706
|
+
|
|
707
|
+
const handler = vi.fn(async () => {
|
|
708
|
+
throw new Error('fail');
|
|
709
|
+
});
|
|
710
|
+
const processor = jobQueue.createProcessor({
|
|
711
|
+
email: handler,
|
|
712
|
+
sms: vi.fn(async () => {}),
|
|
713
|
+
test: vi.fn(async () => {}),
|
|
714
|
+
});
|
|
715
|
+
await processor.start();
|
|
716
|
+
|
|
717
|
+
const job = await jobQueue.getJob(jobId);
|
|
718
|
+
expect(job?.status).toBe('failed');
|
|
719
|
+
expect(job?.nextAttemptAt).not.toBeNull();
|
|
720
|
+
// 100 * 2^1 = 200 capped to 30, with jitter [15, 30]
|
|
721
|
+
const delaySec =
|
|
722
|
+
(job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime()) / 1000;
|
|
723
|
+
expect(delaySec).toBeGreaterThanOrEqual(14);
|
|
724
|
+
expect(delaySec).toBeLessThanOrEqual(31);
|
|
725
|
+
});
|
|
726
|
+
|
|
727
|
+
it('allows editing retry config via editJob', async () => {
|
|
728
|
+
const jobId = await jobQueue.addJob({
|
|
729
|
+
jobType: 'email',
|
|
730
|
+
payload: { to: 'edit-retry@example.com' },
|
|
731
|
+
});
|
|
732
|
+
|
|
733
|
+
await jobQueue.editJob(jobId, {
|
|
734
|
+
retryDelay: 15,
|
|
735
|
+
retryBackoff: false,
|
|
736
|
+
retryDelayMax: 60,
|
|
737
|
+
});
|
|
738
|
+
|
|
739
|
+
const job = await jobQueue.getJob(jobId);
|
|
740
|
+
expect(job?.retryDelay).toBe(15);
|
|
741
|
+
expect(job?.retryBackoff).toBe(false);
|
|
742
|
+
expect(job?.retryDelayMax).toBe(60);
|
|
743
|
+
});
|
|
744
|
+
});
|
|
745
|
+
|
|
746
|
+
describe('Redis cron schedules integration', () => {
|
|
747
|
+
let prefix: string;
|
|
748
|
+
let jobQueue: ReturnType<typeof initJobQueue<TestPayloadMap>>;
|
|
749
|
+
let redisClient: any;
|
|
750
|
+
|
|
751
|
+
beforeEach(async () => {
|
|
752
|
+
prefix = createRedisTestPrefix();
|
|
753
|
+
const config: RedisJobQueueConfig = {
|
|
754
|
+
backend: 'redis',
|
|
755
|
+
redisConfig: {
|
|
756
|
+
url: REDIS_URL,
|
|
757
|
+
keyPrefix: prefix,
|
|
758
|
+
},
|
|
759
|
+
};
|
|
760
|
+
jobQueue = initJobQueue<TestPayloadMap>(config);
|
|
761
|
+
redisClient = jobQueue.getRedisClient();
|
|
762
|
+
});
|
|
763
|
+
|
|
764
|
+
afterEach(async () => {
|
|
765
|
+
vi.restoreAllMocks();
|
|
766
|
+
await cleanupRedisPrefix(redisClient, prefix);
|
|
767
|
+
await redisClient.quit();
|
|
768
|
+
});
|
|
769
|
+
|
|
770
|
+
it('creates a cron schedule and retrieves it by ID', async () => {
|
|
771
|
+
// Act
|
|
772
|
+
const id = await jobQueue.addCronJob({
|
|
773
|
+
scheduleName: 'every-5-min-email',
|
|
774
|
+
cronExpression: '*/5 * * * *',
|
|
775
|
+
jobType: 'email',
|
|
776
|
+
payload: { to: 'cron@example.com' },
|
|
777
|
+
});
|
|
778
|
+
|
|
779
|
+
// Assert
|
|
780
|
+
const schedule = await jobQueue.getCronJob(id);
|
|
781
|
+
expect(schedule).not.toBeNull();
|
|
782
|
+
expect(schedule!.scheduleName).toBe('every-5-min-email');
|
|
783
|
+
expect(schedule!.cronExpression).toBe('*/5 * * * *');
|
|
784
|
+
expect(schedule!.jobType).toBe('email');
|
|
785
|
+
expect(schedule!.payload).toEqual({ to: 'cron@example.com' });
|
|
786
|
+
expect(schedule!.status).toBe('active');
|
|
787
|
+
expect(schedule!.allowOverlap).toBe(false);
|
|
788
|
+
expect(schedule!.timezone).toBe('UTC');
|
|
789
|
+
expect(schedule!.nextRunAt).toBeInstanceOf(Date);
|
|
790
|
+
});
|
|
791
|
+
|
|
792
|
+
it('retrieves a cron schedule by name', async () => {
|
|
793
|
+
// Setup
|
|
794
|
+
await jobQueue.addCronJob({
|
|
795
|
+
scheduleName: 'my-schedule',
|
|
796
|
+
cronExpression: '0 * * * *',
|
|
797
|
+
jobType: 'email',
|
|
798
|
+
payload: { to: 'test@example.com' },
|
|
799
|
+
});
|
|
800
|
+
|
|
801
|
+
// Act
|
|
802
|
+
const schedule = await jobQueue.getCronJobByName('my-schedule');
|
|
803
|
+
|
|
804
|
+
// Assert
|
|
805
|
+
expect(schedule).not.toBeNull();
|
|
806
|
+
expect(schedule!.scheduleName).toBe('my-schedule');
|
|
807
|
+
});
|
|
808
|
+
|
|
809
|
+
it('returns null for nonexistent schedule', async () => {
|
|
810
|
+
// Act
|
|
811
|
+
const byId = await jobQueue.getCronJob(99999);
|
|
812
|
+
const byName = await jobQueue.getCronJobByName('nonexistent');
|
|
813
|
+
|
|
814
|
+
// Assert
|
|
815
|
+
expect(byId).toBeNull();
|
|
816
|
+
expect(byName).toBeNull();
|
|
817
|
+
});
|
|
818
|
+
|
|
819
|
+
it('rejects duplicate schedule names', async () => {
|
|
820
|
+
// Setup
|
|
821
|
+
await jobQueue.addCronJob({
|
|
822
|
+
scheduleName: 'unique-name',
|
|
823
|
+
cronExpression: '* * * * *',
|
|
824
|
+
jobType: 'email',
|
|
825
|
+
payload: { to: 'a@example.com' },
|
|
826
|
+
});
|
|
827
|
+
|
|
828
|
+
// Act & Assert
|
|
829
|
+
await expect(
|
|
830
|
+
jobQueue.addCronJob({
|
|
831
|
+
scheduleName: 'unique-name',
|
|
832
|
+
cronExpression: '*/5 * * * *',
|
|
833
|
+
jobType: 'sms',
|
|
834
|
+
payload: { to: 'b@example.com' },
|
|
835
|
+
}),
|
|
836
|
+
).rejects.toThrow();
|
|
837
|
+
});
|
|
838
|
+
|
|
839
|
+
it('rejects invalid cron expressions', async () => {
|
|
840
|
+
// Act & Assert
|
|
841
|
+
await expect(
|
|
842
|
+
jobQueue.addCronJob({
|
|
843
|
+
scheduleName: 'bad-cron',
|
|
844
|
+
cronExpression: 'not a cron',
|
|
845
|
+
jobType: 'email',
|
|
846
|
+
payload: { to: 'a@example.com' },
|
|
847
|
+
}),
|
|
848
|
+
).rejects.toThrow('Invalid cron expression');
|
|
849
|
+
});
|
|
850
|
+
|
|
851
|
+
it('lists active and paused schedules', async () => {
|
|
852
|
+
// Setup
|
|
853
|
+
const id1 = await jobQueue.addCronJob({
|
|
854
|
+
scheduleName: 'schedule-1',
|
|
855
|
+
cronExpression: '* * * * *',
|
|
856
|
+
jobType: 'email',
|
|
857
|
+
payload: { to: 'a@example.com' },
|
|
858
|
+
});
|
|
859
|
+
await jobQueue.addCronJob({
|
|
860
|
+
scheduleName: 'schedule-2',
|
|
861
|
+
cronExpression: '*/5 * * * *',
|
|
862
|
+
jobType: 'sms',
|
|
863
|
+
payload: { to: 'b@example.com' },
|
|
864
|
+
});
|
|
865
|
+
await jobQueue.pauseCronJob(id1);
|
|
866
|
+
|
|
867
|
+
// Act
|
|
868
|
+
const all = await jobQueue.listCronJobs();
|
|
869
|
+
const active = await jobQueue.listCronJobs('active');
|
|
870
|
+
const paused = await jobQueue.listCronJobs('paused');
|
|
871
|
+
|
|
872
|
+
// Assert
|
|
873
|
+
expect(all).toHaveLength(2);
|
|
874
|
+
expect(active).toHaveLength(1);
|
|
875
|
+
expect(active[0].scheduleName).toBe('schedule-2');
|
|
876
|
+
expect(paused).toHaveLength(1);
|
|
877
|
+
expect(paused[0].scheduleName).toBe('schedule-1');
|
|
878
|
+
});
|
|
879
|
+
|
|
880
|
+
it('pauses and resumes a schedule', async () => {
|
|
881
|
+
// Setup
|
|
882
|
+
const id = await jobQueue.addCronJob({
|
|
883
|
+
scheduleName: 'pausable',
|
|
884
|
+
cronExpression: '* * * * *',
|
|
885
|
+
jobType: 'email',
|
|
886
|
+
payload: { to: 'a@example.com' },
|
|
887
|
+
});
|
|
888
|
+
|
|
889
|
+
// Act — pause
|
|
890
|
+
await jobQueue.pauseCronJob(id);
|
|
891
|
+
const paused = await jobQueue.getCronJob(id);
|
|
892
|
+
|
|
893
|
+
// Assert
|
|
894
|
+
expect(paused!.status).toBe('paused');
|
|
895
|
+
|
|
896
|
+
// Act — resume
|
|
897
|
+
await jobQueue.resumeCronJob(id);
|
|
898
|
+
const resumed = await jobQueue.getCronJob(id);
|
|
899
|
+
|
|
900
|
+
// Assert
|
|
901
|
+
expect(resumed!.status).toBe('active');
|
|
902
|
+
});
|
|
903
|
+
|
|
904
|
+
it('edits a schedule and recalculates nextRunAt when expression changes', async () => {
|
|
905
|
+
// Setup
|
|
906
|
+
const id = await jobQueue.addCronJob({
|
|
907
|
+
scheduleName: 'editable',
|
|
908
|
+
cronExpression: '* * * * *',
|
|
909
|
+
jobType: 'email',
|
|
910
|
+
payload: { to: 'old@example.com' },
|
|
911
|
+
});
|
|
912
|
+
const before = await jobQueue.getCronJob(id);
|
|
913
|
+
|
|
914
|
+
// Act
|
|
915
|
+
await jobQueue.editCronJob(id, {
|
|
916
|
+
cronExpression: '0 0 * * *',
|
|
917
|
+
payload: { to: 'new@example.com' },
|
|
918
|
+
});
|
|
919
|
+
|
|
920
|
+
// Assert
|
|
921
|
+
const after = await jobQueue.getCronJob(id);
|
|
922
|
+
expect(after!.cronExpression).toBe('0 0 * * *');
|
|
923
|
+
expect(after!.payload).toEqual({ to: 'new@example.com' });
|
|
924
|
+
expect(after!.nextRunAt!.getTime()).not.toBe(before!.nextRunAt!.getTime());
|
|
925
|
+
});
|
|
926
|
+
|
|
927
|
+
it('removes a schedule', async () => {
|
|
928
|
+
// Setup
|
|
929
|
+
const id = await jobQueue.addCronJob({
|
|
930
|
+
scheduleName: 'removable',
|
|
931
|
+
cronExpression: '* * * * *',
|
|
932
|
+
jobType: 'email',
|
|
933
|
+
payload: { to: 'a@example.com' },
|
|
934
|
+
});
|
|
935
|
+
|
|
936
|
+
// Act
|
|
937
|
+
await jobQueue.removeCronJob(id);
|
|
938
|
+
|
|
939
|
+
// Assert
|
|
940
|
+
const removed = await jobQueue.getCronJob(id);
|
|
941
|
+
expect(removed).toBeNull();
|
|
942
|
+
});
|
|
943
|
+
|
|
944
|
+
it('enqueueDueCronJobs enqueues a job when nextRunAt is due', async () => {
|
|
945
|
+
// Setup — create schedule then force nextRunAt into the past
|
|
946
|
+
const id = await jobQueue.addCronJob({
|
|
947
|
+
scheduleName: 'due-now',
|
|
948
|
+
cronExpression: '* * * * *',
|
|
949
|
+
jobType: 'email',
|
|
950
|
+
payload: { to: 'due@example.com' },
|
|
951
|
+
});
|
|
952
|
+
const pastMs = (Date.now() - 60_000).toString();
|
|
953
|
+
await redisClient.hset(`${prefix}cron:${id}`, 'nextRunAt', pastMs);
|
|
954
|
+
await redisClient.zadd(`${prefix}cron_due`, Number(pastMs), id.toString());
|
|
955
|
+
|
|
956
|
+
// Act
|
|
957
|
+
const count = await jobQueue.enqueueDueCronJobs();
|
|
958
|
+
|
|
959
|
+
// Assert
|
|
960
|
+
expect(count).toBe(1);
|
|
961
|
+
const jobs = await jobQueue.getJobsByStatus('pending');
|
|
962
|
+
const cronJob = jobs.find(
|
|
963
|
+
(j) =>
|
|
964
|
+
j.jobType === 'email' && (j.payload as any).to === 'due@example.com',
|
|
965
|
+
);
|
|
966
|
+
expect(cronJob).toBeDefined();
|
|
967
|
+
});
|
|
968
|
+
|
|
969
|
+
it('enqueueDueCronJobs advances nextRunAt and sets lastJobId', async () => {
|
|
970
|
+
// Setup
|
|
971
|
+
const id = await jobQueue.addCronJob({
|
|
972
|
+
scheduleName: 'advance-test',
|
|
973
|
+
cronExpression: '* * * * *',
|
|
974
|
+
jobType: 'email',
|
|
975
|
+
payload: { to: 'advance@example.com' },
|
|
976
|
+
});
|
|
977
|
+
const pastMs = (Date.now() - 60_000).toString();
|
|
978
|
+
await redisClient.hset(`${prefix}cron:${id}`, 'nextRunAt', pastMs);
|
|
979
|
+
await redisClient.zadd(`${prefix}cron_due`, Number(pastMs), id.toString());
|
|
980
|
+
|
|
981
|
+
// Act
|
|
982
|
+
await jobQueue.enqueueDueCronJobs();
|
|
983
|
+
|
|
984
|
+
// Assert
|
|
985
|
+
const schedule = await jobQueue.getCronJob(id);
|
|
986
|
+
expect(schedule!.lastJobId).not.toBeNull();
|
|
987
|
+
expect(schedule!.lastEnqueuedAt).toBeInstanceOf(Date);
|
|
988
|
+
expect(schedule!.nextRunAt).toBeInstanceOf(Date);
|
|
989
|
+
expect(schedule!.nextRunAt!.getTime()).toBeGreaterThan(Date.now() - 5000);
|
|
990
|
+
});
|
|
991
|
+
|
|
992
|
+
it('enqueueDueCronJobs skips paused schedules', async () => {
|
|
993
|
+
// Setup
|
|
994
|
+
const id = await jobQueue.addCronJob({
|
|
995
|
+
scheduleName: 'paused-skip',
|
|
996
|
+
cronExpression: '* * * * *',
|
|
997
|
+
jobType: 'email',
|
|
998
|
+
payload: { to: 'paused@example.com' },
|
|
999
|
+
});
|
|
1000
|
+
const pastMs = (Date.now() - 60_000).toString();
|
|
1001
|
+
await redisClient.hset(`${prefix}cron:${id}`, 'nextRunAt', pastMs);
|
|
1002
|
+
await redisClient.zadd(`${prefix}cron_due`, Number(pastMs), id.toString());
|
|
1003
|
+
await jobQueue.pauseCronJob(id);
|
|
1004
|
+
|
|
1005
|
+
// Act
|
|
1006
|
+
const count = await jobQueue.enqueueDueCronJobs();
|
|
1007
|
+
|
|
1008
|
+
// Assert
|
|
1009
|
+
expect(count).toBe(0);
|
|
1010
|
+
});
|
|
1011
|
+
|
|
1012
|
+
it('enqueueDueCronJobs skips schedules not yet due', async () => {
|
|
1013
|
+
// Setup — nextRunAt is in the future by default
|
|
1014
|
+
await jobQueue.addCronJob({
|
|
1015
|
+
scheduleName: 'future-schedule',
|
|
1016
|
+
cronExpression: '0 0 1 1 *',
|
|
1017
|
+
jobType: 'email',
|
|
1018
|
+
payload: { to: 'future@example.com' },
|
|
1019
|
+
});
|
|
1020
|
+
|
|
1021
|
+
// Act
|
|
1022
|
+
const count = await jobQueue.enqueueDueCronJobs();
|
|
1023
|
+
|
|
1024
|
+
// Assert
|
|
1025
|
+
expect(count).toBe(0);
|
|
1026
|
+
});
|
|
1027
|
+
|
|
1028
|
+
it('enqueueDueCronJobs skips when allowOverlap=false and last job is still active', async () => {
|
|
1029
|
+
// Setup
|
|
1030
|
+
const id = await jobQueue.addCronJob({
|
|
1031
|
+
scheduleName: 'no-overlap',
|
|
1032
|
+
cronExpression: '* * * * *',
|
|
1033
|
+
jobType: 'email',
|
|
1034
|
+
payload: { to: 'overlap@example.com' },
|
|
1035
|
+
allowOverlap: false,
|
|
1036
|
+
});
|
|
1037
|
+
const pastMs = (Date.now() - 60_000).toString();
|
|
1038
|
+
await redisClient.hset(`${prefix}cron:${id}`, 'nextRunAt', pastMs);
|
|
1039
|
+
await redisClient.zadd(`${prefix}cron_due`, Number(pastMs), id.toString());
|
|
1040
|
+
|
|
1041
|
+
// First enqueue should succeed
|
|
1042
|
+
const count1 = await jobQueue.enqueueDueCronJobs();
|
|
1043
|
+
expect(count1).toBe(1);
|
|
1044
|
+
|
|
1045
|
+
// Force nextRunAt into the past again
|
|
1046
|
+
const pastMs2 = (Date.now() - 60_000).toString();
|
|
1047
|
+
await redisClient.hset(`${prefix}cron:${id}`, 'nextRunAt', pastMs2);
|
|
1048
|
+
await redisClient.zadd(`${prefix}cron_due`, Number(pastMs2), id.toString());
|
|
1049
|
+
|
|
1050
|
+
// Act — second enqueue should be skipped because previous job is pending
|
|
1051
|
+
const count2 = await jobQueue.enqueueDueCronJobs();
|
|
1052
|
+
|
|
1053
|
+
// Assert
|
|
1054
|
+
expect(count2).toBe(0);
|
|
1055
|
+
});
|
|
1056
|
+
|
|
1057
|
+
it('enqueueDueCronJobs enqueues when allowOverlap=true even if last job is still active', async () => {
|
|
1058
|
+
// Setup
|
|
1059
|
+
const id = await jobQueue.addCronJob({
|
|
1060
|
+
scheduleName: 'with-overlap',
|
|
1061
|
+
cronExpression: '* * * * *',
|
|
1062
|
+
jobType: 'email',
|
|
1063
|
+
payload: { to: 'overlap@example.com' },
|
|
1064
|
+
allowOverlap: true,
|
|
1065
|
+
});
|
|
1066
|
+
const pastMs = (Date.now() - 60_000).toString();
|
|
1067
|
+
await redisClient.hset(`${prefix}cron:${id}`, 'nextRunAt', pastMs);
|
|
1068
|
+
await redisClient.zadd(`${prefix}cron_due`, Number(pastMs), id.toString());
|
|
1069
|
+
|
|
1070
|
+
// First enqueue
|
|
1071
|
+
const count1 = await jobQueue.enqueueDueCronJobs();
|
|
1072
|
+
expect(count1).toBe(1);
|
|
1073
|
+
|
|
1074
|
+
// Force nextRunAt into the past again
|
|
1075
|
+
const pastMs2 = (Date.now() - 60_000).toString();
|
|
1076
|
+
await redisClient.hset(`${prefix}cron:${id}`, 'nextRunAt', pastMs2);
|
|
1077
|
+
await redisClient.zadd(`${prefix}cron_due`, Number(pastMs2), id.toString());
|
|
1078
|
+
|
|
1079
|
+
// Act — second enqueue should succeed because allowOverlap=true
|
|
1080
|
+
const count2 = await jobQueue.enqueueDueCronJobs();
|
|
1081
|
+
|
|
1082
|
+
// Assert
|
|
1083
|
+
expect(count2).toBe(1);
|
|
1084
|
+
|
|
1085
|
+
// Verify two pending jobs
|
|
1086
|
+
const jobs = await jobQueue.getJobsByStatus('pending');
|
|
1087
|
+
const cronJobs = jobs.filter(
|
|
1088
|
+
(j) =>
|
|
1089
|
+
j.jobType === 'email' &&
|
|
1090
|
+
(j.payload as any).to === 'overlap@example.com',
|
|
1091
|
+
);
|
|
1092
|
+
expect(cronJobs).toHaveLength(2);
|
|
1093
|
+
});
|
|
1094
|
+
});
|
|
1095
|
+
|
|
1096
|
+
describe('Redis parity features', () => {
|
|
1097
|
+
let prefix: string;
|
|
1098
|
+
let jobQueue: ReturnType<typeof initJobQueue<TestPayloadMap>>;
|
|
1099
|
+
let redisClient: any;
|
|
1100
|
+
|
|
1101
|
+
beforeEach(async () => {
|
|
1102
|
+
prefix = createRedisTestPrefix();
|
|
1103
|
+
const config: RedisJobQueueConfig = {
|
|
1104
|
+
backend: 'redis',
|
|
1105
|
+
redisConfig: {
|
|
1106
|
+
url: REDIS_URL,
|
|
1107
|
+
keyPrefix: prefix,
|
|
1108
|
+
},
|
|
1109
|
+
};
|
|
1110
|
+
jobQueue = initJobQueue<TestPayloadMap>(config);
|
|
1111
|
+
redisClient = jobQueue.getRedisClient();
|
|
1112
|
+
});
|
|
1113
|
+
|
|
1114
|
+
afterEach(async () => {
|
|
1115
|
+
vi.restoreAllMocks();
|
|
1116
|
+
await cleanupRedisPrefix(redisClient, prefix);
|
|
1117
|
+
await redisClient.quit();
|
|
1118
|
+
});
|
|
1119
|
+
|
|
1120
|
+
// ── Cursor-based pagination ─────────────────────────────────────────
|
|
1121
|
+
|
|
1122
|
+
it('getJobs supports cursor-based pagination', async () => {
|
|
1123
|
+
// Setup
|
|
1124
|
+
const id1 = await jobQueue.addJob({
|
|
1125
|
+
jobType: 'email',
|
|
1126
|
+
payload: { to: 'a@example.com' },
|
|
1127
|
+
});
|
|
1128
|
+
const id2 = await jobQueue.addJob({
|
|
1129
|
+
jobType: 'email',
|
|
1130
|
+
payload: { to: 'b@example.com' },
|
|
1131
|
+
});
|
|
1132
|
+
const id3 = await jobQueue.addJob({
|
|
1133
|
+
jobType: 'email',
|
|
1134
|
+
payload: { to: 'c@example.com' },
|
|
1135
|
+
});
|
|
1136
|
+
|
|
1137
|
+
// Act — first page (no cursor, limit 2)
|
|
1138
|
+
const page1 = await jobQueue.getJobs({}, 2);
|
|
1139
|
+
|
|
1140
|
+
// Assert
|
|
1141
|
+
expect(page1).toHaveLength(2);
|
|
1142
|
+
// Descending by id: id3, id2
|
|
1143
|
+
expect(page1[0].id).toBe(id3);
|
|
1144
|
+
expect(page1[1].id).toBe(id2);
|
|
1145
|
+
|
|
1146
|
+
// Act — second page using cursor
|
|
1147
|
+
const page2 = await jobQueue.getJobs({ cursor: page1[1].id }, 2);
|
|
1148
|
+
|
|
1149
|
+
// Assert
|
|
1150
|
+
expect(page2).toHaveLength(1);
|
|
1151
|
+
expect(page2[0].id).toBe(id1);
|
|
1152
|
+
});
|
|
1153
|
+
|
|
1154
|
+
// ── retryJob status validation ──────────────────────────────────────
|
|
1155
|
+
|
|
1156
|
+
it('retryJob only retries failed or processing jobs', async () => {
|
|
1157
|
+
// Setup — completed job
|
|
1158
|
+
const jobId = await jobQueue.addJob({
|
|
1159
|
+
jobType: 'test',
|
|
1160
|
+
payload: { foo: 'retry-test' },
|
|
1161
|
+
});
|
|
1162
|
+
const processor = jobQueue.createProcessor({
|
|
1163
|
+
email: vi.fn(async () => {}),
|
|
1164
|
+
sms: vi.fn(async () => {}),
|
|
1165
|
+
test: vi.fn(async () => {}),
|
|
1166
|
+
});
|
|
1167
|
+
await processor.start();
|
|
1168
|
+
const completedJob = await jobQueue.getJob(jobId);
|
|
1169
|
+
expect(completedJob?.status).toBe('completed');
|
|
1170
|
+
|
|
1171
|
+
// Act — retry a completed job (should be a no-op)
|
|
1172
|
+
await jobQueue.retryJob(jobId);
|
|
1173
|
+
|
|
1174
|
+
// Assert — still completed
|
|
1175
|
+
const job = await jobQueue.getJob(jobId);
|
|
1176
|
+
expect(job?.status).toBe('completed');
|
|
1177
|
+
});
|
|
1178
|
+
|
|
1179
|
+
it('retryJob retries a failed job', async () => {
|
|
1180
|
+
// Setup
|
|
1181
|
+
const jobId = await jobQueue.addJob({
|
|
1182
|
+
jobType: 'email',
|
|
1183
|
+
payload: { to: 'fail-retry@example.com' },
|
|
1184
|
+
});
|
|
1185
|
+
const processor = jobQueue.createProcessor({
|
|
1186
|
+
email: async () => {
|
|
1187
|
+
throw new Error('boom');
|
|
1188
|
+
},
|
|
1189
|
+
sms: vi.fn(async () => {}),
|
|
1190
|
+
test: vi.fn(async () => {}),
|
|
1191
|
+
});
|
|
1192
|
+
await processor.start();
|
|
1193
|
+
const failedJob = await jobQueue.getJob(jobId);
|
|
1194
|
+
expect(failedJob?.status).toBe('failed');
|
|
1195
|
+
|
|
1196
|
+
// Act
|
|
1197
|
+
await jobQueue.retryJob(jobId);
|
|
1198
|
+
|
|
1199
|
+
// Assert
|
|
1200
|
+
const job = await jobQueue.getJob(jobId);
|
|
1201
|
+
expect(job?.status).toBe('pending');
|
|
1202
|
+
});
|
|
1203
|
+
|
|
1204
|
+
// ── cancelJob with waiting status ───────────────────────────────────
|
|
1205
|
+
|
|
1206
|
+
it('cancelJob cancels a waiting job', async () => {
|
|
1207
|
+
// Setup — add a job and manually set it to waiting
|
|
1208
|
+
const jobId = await jobQueue.addJob({
|
|
1209
|
+
jobType: 'email',
|
|
1210
|
+
payload: { to: 'waiting-cancel@example.com' },
|
|
1211
|
+
});
|
|
1212
|
+
const futureMs = Date.now() + 60_000;
|
|
1213
|
+
await redisClient.hmset(
|
|
1214
|
+
`${prefix}job:${jobId}`,
|
|
1215
|
+
'status',
|
|
1216
|
+
'waiting',
|
|
1217
|
+
'waitUntil',
|
|
1218
|
+
futureMs.toString(),
|
|
1219
|
+
);
|
|
1220
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
1221
|
+
await redisClient.sadd(`${prefix}status:waiting`, jobId.toString());
|
|
1222
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
1223
|
+
|
|
1224
|
+
// Act
|
|
1225
|
+
await jobQueue.cancelJob(jobId);
|
|
1226
|
+
|
|
1227
|
+
// Assert
|
|
1228
|
+
const job = await jobQueue.getJob(jobId);
|
|
1229
|
+
expect(job?.status).toBe('cancelled');
|
|
1230
|
+
expect(job?.waitUntil).toBeNull();
|
|
1231
|
+
expect(job?.waitTokenId).toBeNull();
|
|
1232
|
+
});
|
|
1233
|
+
|
|
1234
|
+
// ── completeJob clears wait fields ──────────────────────────────────
|
|
1235
|
+
|
|
1236
|
+
it('completeJob clears wait-related fields', async () => {
|
|
1237
|
+
// Setup
|
|
1238
|
+
const jobId = await jobQueue.addJob({
|
|
1239
|
+
jobType: 'test',
|
|
1240
|
+
payload: { foo: 'wait-clear' },
|
|
1241
|
+
});
|
|
1242
|
+
// Manually set wait fields
|
|
1243
|
+
await redisClient.hmset(
|
|
1244
|
+
`${prefix}job:${jobId}`,
|
|
1245
|
+
'stepData',
|
|
1246
|
+
JSON.stringify({ step1: { __completed: true, result: 42 } }),
|
|
1247
|
+
'waitUntil',
|
|
1248
|
+
(Date.now() + 60000).toString(),
|
|
1249
|
+
'waitTokenId',
|
|
1250
|
+
'wp_test',
|
|
1251
|
+
);
|
|
1252
|
+
|
|
1253
|
+
// Process the job to completion
|
|
1254
|
+
const processor = jobQueue.createProcessor({
|
|
1255
|
+
email: vi.fn(async () => {}),
|
|
1256
|
+
sms: vi.fn(async () => {}),
|
|
1257
|
+
test: vi.fn(async () => {}),
|
|
1258
|
+
});
|
|
1259
|
+
await processor.start();
|
|
1260
|
+
|
|
1261
|
+
// Assert
|
|
1262
|
+
const job = await jobQueue.getJob(jobId);
|
|
1263
|
+
expect(job?.status).toBe('completed');
|
|
1264
|
+
expect(job?.stepData).toBeUndefined();
|
|
1265
|
+
expect(job?.waitUntil).toBeNull();
|
|
1266
|
+
expect(job?.waitTokenId).toBeNull();
|
|
1267
|
+
});
|
|
1268
|
+
|
|
1269
|
+
// ── cleanupOldJobEvents ─────────────────────────────────────────────
|
|
1270
|
+
|
|
1271
|
+
it('cleanupOldJobEvents removes old events', async () => {
|
|
1272
|
+
// Setup
|
|
1273
|
+
const jobId = await jobQueue.addJob({
|
|
1274
|
+
jobType: 'email',
|
|
1275
|
+
payload: { to: 'events-cleanup@example.com' },
|
|
1276
|
+
});
|
|
1277
|
+
|
|
1278
|
+
// Create an old event (31 days ago)
|
|
1279
|
+
const oldMs = Date.now() - 31 * 24 * 60 * 60 * 1000;
|
|
1280
|
+
const oldEvent = JSON.stringify({
|
|
1281
|
+
id: 999,
|
|
1282
|
+
jobId,
|
|
1283
|
+
eventType: 'added',
|
|
1284
|
+
createdAt: oldMs,
|
|
1285
|
+
metadata: null,
|
|
1286
|
+
});
|
|
1287
|
+
await redisClient.rpush(`${prefix}events:${jobId}`, oldEvent);
|
|
1288
|
+
|
|
1289
|
+
// Get events before cleanup
|
|
1290
|
+
const eventsBefore = await jobQueue.getJobEvents(jobId);
|
|
1291
|
+
const countBefore = eventsBefore.length;
|
|
1292
|
+
expect(countBefore).toBeGreaterThanOrEqual(2); // at least the original 'added' + our old event
|
|
1293
|
+
|
|
1294
|
+
// Act
|
|
1295
|
+
const deleted = await jobQueue.cleanupOldJobEvents(30);
|
|
1296
|
+
|
|
1297
|
+
// Assert
|
|
1298
|
+
expect(deleted).toBeGreaterThanOrEqual(1);
|
|
1299
|
+
const eventsAfter = await jobQueue.getJobEvents(jobId);
|
|
1300
|
+
expect(eventsAfter.length).toBeLessThan(countBefore);
|
|
1301
|
+
});
|
|
1302
|
+
|
|
1303
|
+
it('cleanupOldJobEvents removes orphaned event lists', async () => {
|
|
1304
|
+
// Setup — create events for a non-existent job
|
|
1305
|
+
const orphanEvent = JSON.stringify({
|
|
1306
|
+
id: 888,
|
|
1307
|
+
jobId: 99999,
|
|
1308
|
+
eventType: 'added',
|
|
1309
|
+
createdAt: Date.now(),
|
|
1310
|
+
metadata: null,
|
|
1311
|
+
});
|
|
1312
|
+
await redisClient.rpush(`${prefix}events:99999`, orphanEvent);
|
|
1313
|
+
|
|
1314
|
+
// Act
|
|
1315
|
+
const deleted = await jobQueue.cleanupOldJobEvents(30);
|
|
1316
|
+
|
|
1317
|
+
// Assert
|
|
1318
|
+
expect(deleted).toBe(1);
|
|
1319
|
+
const remaining = await redisClient.llen(`${prefix}events:99999`);
|
|
1320
|
+
expect(remaining).toBe(0);
|
|
1321
|
+
});
|
|
1322
|
+
|
|
1323
|
+
// ── Waiting system ──────────────────────────────────────────────────
|
|
1324
|
+
|
|
1325
|
+
it('createToken and getToken work via the public API', async () => {
|
|
1326
|
+
// Act
|
|
1327
|
+
const token = await jobQueue.createToken({ timeout: '10m' });
|
|
1328
|
+
|
|
1329
|
+
// Assert
|
|
1330
|
+
expect(token.id).toMatch(/^wp_/);
|
|
1331
|
+
const record = await jobQueue.getToken(token.id);
|
|
1332
|
+
expect(record).not.toBeNull();
|
|
1333
|
+
expect(record!.status).toBe('waiting');
|
|
1334
|
+
expect(record!.timeoutAt).toBeInstanceOf(Date);
|
|
1335
|
+
});
|
|
1336
|
+
|
|
1337
|
+
it('completeToken completes the token and provides data', async () => {
|
|
1338
|
+
// Setup
|
|
1339
|
+
const token = await jobQueue.createToken();
|
|
1340
|
+
|
|
1341
|
+
// Act
|
|
1342
|
+
await jobQueue.completeToken(token.id, { result: 'success' });
|
|
1343
|
+
|
|
1344
|
+
// Assert
|
|
1345
|
+
const record = await jobQueue.getToken(token.id);
|
|
1346
|
+
expect(record!.status).toBe('completed');
|
|
1347
|
+
expect(record!.output).toEqual({ result: 'success' });
|
|
1348
|
+
});
|
|
1349
|
+
|
|
1350
|
+
it('completeToken resumes a waiting job', async () => {
|
|
1351
|
+
// Setup — add a job, process it to create a token, then manually put it in waiting
|
|
1352
|
+
const jobId = await jobQueue.addJob({
|
|
1353
|
+
jobType: 'email',
|
|
1354
|
+
payload: { to: 'token-resume@example.com' },
|
|
1355
|
+
});
|
|
1356
|
+
|
|
1357
|
+
// Create a token associated with this job
|
|
1358
|
+
// We need to use the backend directly since createToken from public API uses null jobId
|
|
1359
|
+
const backend = jobQueue as any; // accessing the backend is tricky from the public API
|
|
1360
|
+
// Instead, create a token, then manually associate it
|
|
1361
|
+
const token = await jobQueue.createToken();
|
|
1362
|
+
|
|
1363
|
+
// Manually update the token's jobId and put the job in waiting state
|
|
1364
|
+
await redisClient.hset(
|
|
1365
|
+
`${prefix}waitpoint:${token.id}`,
|
|
1366
|
+
'jobId',
|
|
1367
|
+
jobId.toString(),
|
|
1368
|
+
);
|
|
1369
|
+
await redisClient.hmset(
|
|
1370
|
+
`${prefix}job:${jobId}`,
|
|
1371
|
+
'status',
|
|
1372
|
+
'waiting',
|
|
1373
|
+
'waitTokenId',
|
|
1374
|
+
token.id,
|
|
1375
|
+
);
|
|
1376
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
1377
|
+
await redisClient.sadd(`${prefix}status:waiting`, jobId.toString());
|
|
1378
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
1379
|
+
|
|
1380
|
+
// Act
|
|
1381
|
+
await jobQueue.completeToken(token.id, { data: 42 });
|
|
1382
|
+
|
|
1383
|
+
// Assert
|
|
1384
|
+
const job = await jobQueue.getJob(jobId);
|
|
1385
|
+
expect(job?.status).toBe('pending');
|
|
1386
|
+
expect(job?.waitTokenId).toBeNull();
|
|
1387
|
+
});
|
|
1388
|
+
|
|
1389
|
+
it('expireTimedOutTokens expires tokens past their timeout', async () => {
|
|
1390
|
+
// Setup — create a token with a very short timeout, then backdate it
|
|
1391
|
+
const token = await jobQueue.createToken({ timeout: '1s' });
|
|
1392
|
+
// Force the timeout to be in the past
|
|
1393
|
+
const pastMs = Date.now() - 10_000;
|
|
1394
|
+
await redisClient.hset(
|
|
1395
|
+
`${prefix}waitpoint:${token.id}`,
|
|
1396
|
+
'timeoutAt',
|
|
1397
|
+
pastMs.toString(),
|
|
1398
|
+
);
|
|
1399
|
+
await redisClient.zadd(`${prefix}waitpoint_timeout`, pastMs, token.id);
|
|
1400
|
+
|
|
1401
|
+
// Act
|
|
1402
|
+
const expired = await jobQueue.expireTimedOutTokens();
|
|
1403
|
+
|
|
1404
|
+
// Assert
|
|
1405
|
+
expect(expired).toBe(1);
|
|
1406
|
+
const record = await jobQueue.getToken(token.id);
|
|
1407
|
+
expect(record!.status).toBe('timed_out');
|
|
1408
|
+
});
|
|
1409
|
+
|
|
1410
|
+
it('expireTimedOutTokens resumes a waiting job when its token times out', async () => {
|
|
1411
|
+
// Setup
|
|
1412
|
+
const jobId = await jobQueue.addJob({
|
|
1413
|
+
jobType: 'email',
|
|
1414
|
+
payload: { to: 'timeout-resume@example.com' },
|
|
1415
|
+
});
|
|
1416
|
+
const token = await jobQueue.createToken({ timeout: '1s' });
|
|
1417
|
+
|
|
1418
|
+
// Associate token with job and put job in waiting
|
|
1419
|
+
await redisClient.hset(
|
|
1420
|
+
`${prefix}waitpoint:${token.id}`,
|
|
1421
|
+
'jobId',
|
|
1422
|
+
jobId.toString(),
|
|
1423
|
+
);
|
|
1424
|
+
await redisClient.hmset(
|
|
1425
|
+
`${prefix}job:${jobId}`,
|
|
1426
|
+
'status',
|
|
1427
|
+
'waiting',
|
|
1428
|
+
'waitTokenId',
|
|
1429
|
+
token.id,
|
|
1430
|
+
);
|
|
1431
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
1432
|
+
await redisClient.sadd(`${prefix}status:waiting`, jobId.toString());
|
|
1433
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
1434
|
+
|
|
1435
|
+
// Force the timeout to be in the past
|
|
1436
|
+
const pastMs = Date.now() - 10_000;
|
|
1437
|
+
await redisClient.hset(
|
|
1438
|
+
`${prefix}waitpoint:${token.id}`,
|
|
1439
|
+
'timeoutAt',
|
|
1440
|
+
pastMs.toString(),
|
|
1441
|
+
);
|
|
1442
|
+
await redisClient.zadd(`${prefix}waitpoint_timeout`, pastMs, token.id);
|
|
1443
|
+
|
|
1444
|
+
// Act
|
|
1445
|
+
await jobQueue.expireTimedOutTokens();
|
|
1446
|
+
|
|
1447
|
+
// Assert
|
|
1448
|
+
const job = await jobQueue.getJob(jobId);
|
|
1449
|
+
expect(job?.status).toBe('pending');
|
|
1450
|
+
expect(job?.waitTokenId).toBeNull();
|
|
1451
|
+
});
|
|
1452
|
+
|
|
1453
|
+
it('getNextBatch promotes time-based waiting jobs', async () => {
|
|
1454
|
+
// Setup — add a job and manually set it to waiting with a past waitUntil
|
|
1455
|
+
const jobId = await jobQueue.addJob({
|
|
1456
|
+
jobType: 'test',
|
|
1457
|
+
payload: { foo: 'wait-promote' },
|
|
1458
|
+
});
|
|
1459
|
+
const pastMs = Date.now() - 5000;
|
|
1460
|
+
await redisClient.hmset(
|
|
1461
|
+
`${prefix}job:${jobId}`,
|
|
1462
|
+
'status',
|
|
1463
|
+
'waiting',
|
|
1464
|
+
'waitUntil',
|
|
1465
|
+
pastMs.toString(),
|
|
1466
|
+
'waitTokenId',
|
|
1467
|
+
'null',
|
|
1468
|
+
);
|
|
1469
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
1470
|
+
await redisClient.sadd(`${prefix}status:waiting`, jobId.toString());
|
|
1471
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
1472
|
+
await redisClient.zadd(`${prefix}waiting`, pastMs, jobId.toString());
|
|
1473
|
+
|
|
1474
|
+
// Act — process jobs, the waiting job should get promoted and processed
|
|
1475
|
+
const handler = vi.fn(async () => {});
|
|
1476
|
+
const processor = jobQueue.createProcessor({
|
|
1477
|
+
email: vi.fn(async () => {}),
|
|
1478
|
+
sms: vi.fn(async () => {}),
|
|
1479
|
+
test: handler,
|
|
1480
|
+
});
|
|
1481
|
+
const processed = await processor.start();
|
|
1482
|
+
|
|
1483
|
+
// Assert
|
|
1484
|
+
expect(processed).toBe(1);
|
|
1485
|
+
expect(handler).toHaveBeenCalled();
|
|
1486
|
+
const job = await jobQueue.getJob(jobId);
|
|
1487
|
+
expect(job?.status).toBe('completed');
|
|
1488
|
+
});
|
|
1489
|
+
|
|
1490
|
+
it('getNextBatch does NOT promote token-based waiting jobs', async () => {
|
|
1491
|
+
// Setup — add a job waiting for a token
|
|
1492
|
+
const jobId = await jobQueue.addJob({
|
|
1493
|
+
jobType: 'test',
|
|
1494
|
+
payload: { foo: 'token-wait-nopromote' },
|
|
1495
|
+
});
|
|
1496
|
+
const pastMs = Date.now() - 5000;
|
|
1497
|
+
await redisClient.hmset(
|
|
1498
|
+
`${prefix}job:${jobId}`,
|
|
1499
|
+
'status',
|
|
1500
|
+
'waiting',
|
|
1501
|
+
'waitUntil',
|
|
1502
|
+
pastMs.toString(),
|
|
1503
|
+
'waitTokenId',
|
|
1504
|
+
'wp_some_token',
|
|
1505
|
+
);
|
|
1506
|
+
await redisClient.srem(`${prefix}status:pending`, jobId.toString());
|
|
1507
|
+
await redisClient.sadd(`${prefix}status:waiting`, jobId.toString());
|
|
1508
|
+
await redisClient.zrem(`${prefix}queue`, jobId.toString());
|
|
1509
|
+
await redisClient.zadd(`${prefix}waiting`, pastMs, jobId.toString());
|
|
1510
|
+
|
|
1511
|
+
// Act
|
|
1512
|
+
const processor = jobQueue.createProcessor({
|
|
1513
|
+
email: vi.fn(async () => {}),
|
|
1514
|
+
sms: vi.fn(async () => {}),
|
|
1515
|
+
test: vi.fn(async () => {}),
|
|
1516
|
+
});
|
|
1517
|
+
const processed = await processor.start();
|
|
1518
|
+
|
|
1519
|
+
// Assert — should not pick up the token-based waiting job
|
|
1520
|
+
expect(processed).toBe(0);
|
|
1521
|
+
const job = await jobQueue.getJob(jobId);
|
|
1522
|
+
expect(job?.status).toBe('waiting');
|
|
1523
|
+
});
|
|
1524
|
+
|
|
1525
|
+
it('waitFor pauses a job and resumes after time elapses', async () => {
|
|
1526
|
+
// Setup
|
|
1527
|
+
let invocationCount = 0;
|
|
1528
|
+
const jobId = await jobQueue.addJob({
|
|
1529
|
+
jobType: 'test',
|
|
1530
|
+
payload: { foo: 'waitfor-test' },
|
|
1531
|
+
});
|
|
1532
|
+
|
|
1533
|
+
// First invocation: handler calls ctx.waitFor
|
|
1534
|
+
const handler = vi.fn(async (_payload: any, _signal: any, ctx: any) => {
|
|
1535
|
+
invocationCount++;
|
|
1536
|
+
if (invocationCount === 1) {
|
|
1537
|
+
await ctx.waitFor({ seconds: 1 });
|
|
1538
|
+
}
|
|
1539
|
+
});
|
|
1540
|
+
|
|
1541
|
+
const processor = jobQueue.createProcessor({
|
|
1542
|
+
email: vi.fn(async () => {}),
|
|
1543
|
+
sms: vi.fn(async () => {}),
|
|
1544
|
+
test: handler,
|
|
1545
|
+
});
|
|
1546
|
+
await processor.start();
|
|
1547
|
+
|
|
1548
|
+
// Assert — job should be in waiting state
|
|
1549
|
+
let job = await jobQueue.getJob(jobId);
|
|
1550
|
+
expect(job?.status).toBe('waiting');
|
|
1551
|
+
expect(job?.waitUntil).toBeInstanceOf(Date);
|
|
1552
|
+
expect(job?.stepData).toBeDefined();
|
|
1553
|
+
|
|
1554
|
+
// Manually advance: set waitUntil to past and add to waiting sorted set
|
|
1555
|
+
const pastMs = Date.now() - 5000;
|
|
1556
|
+
await redisClient.hset(
|
|
1557
|
+
`${prefix}job:${jobId}`,
|
|
1558
|
+
'waitUntil',
|
|
1559
|
+
pastMs.toString(),
|
|
1560
|
+
);
|
|
1561
|
+
await redisClient.zadd(`${prefix}waiting`, pastMs, jobId.toString());
|
|
1562
|
+
|
|
1563
|
+
// Second invocation: job resumes and completes
|
|
1564
|
+
await processor.start();
|
|
1565
|
+
|
|
1566
|
+
// Assert
|
|
1567
|
+
job = await jobQueue.getJob(jobId);
|
|
1568
|
+
expect(job?.status).toBe('completed');
|
|
1569
|
+
expect(invocationCount).toBe(2);
|
|
1570
|
+
});
|
|
1571
|
+
|
|
1572
|
+
it('ctx.run memoizes step results across re-invocations', async () => {
|
|
1573
|
+
// Setup
|
|
1574
|
+
let invocationCount = 0;
|
|
1575
|
+
let stepCallCount = 0;
|
|
1576
|
+
const jobId = await jobQueue.addJob({
|
|
1577
|
+
jobType: 'test',
|
|
1578
|
+
payload: { foo: 'memoize-test' },
|
|
1579
|
+
});
|
|
1580
|
+
|
|
1581
|
+
const handler = vi.fn(async (_payload: any, _signal: any, ctx: any) => {
|
|
1582
|
+
invocationCount++;
|
|
1583
|
+
const result = await ctx.run('step1', async () => {
|
|
1584
|
+
stepCallCount++;
|
|
1585
|
+
return 42;
|
|
1586
|
+
});
|
|
1587
|
+
expect(result).toBe(42);
|
|
1588
|
+
|
|
1589
|
+
if (invocationCount === 1) {
|
|
1590
|
+
await ctx.waitFor({ seconds: 1 });
|
|
1591
|
+
}
|
|
1592
|
+
});
|
|
1593
|
+
|
|
1594
|
+
const processor = jobQueue.createProcessor({
|
|
1595
|
+
email: vi.fn(async () => {}),
|
|
1596
|
+
sms: vi.fn(async () => {}),
|
|
1597
|
+
test: handler,
|
|
1598
|
+
});
|
|
1599
|
+
|
|
1600
|
+
// First invocation
|
|
1601
|
+
await processor.start();
|
|
1602
|
+
let job = await jobQueue.getJob(jobId);
|
|
1603
|
+
expect(job?.status).toBe('waiting');
|
|
1604
|
+
expect(stepCallCount).toBe(1);
|
|
1605
|
+
|
|
1606
|
+
// Advance time
|
|
1607
|
+
const pastMs = Date.now() - 5000;
|
|
1608
|
+
await redisClient.hset(
|
|
1609
|
+
`${prefix}job:${jobId}`,
|
|
1610
|
+
'waitUntil',
|
|
1611
|
+
pastMs.toString(),
|
|
1612
|
+
);
|
|
1613
|
+
await redisClient.zadd(`${prefix}waiting`, pastMs, jobId.toString());
|
|
1614
|
+
|
|
1615
|
+
// Second invocation
|
|
1616
|
+
await processor.start();
|
|
1617
|
+
|
|
1618
|
+
// Assert — step1 should NOT have been called again (memoized)
|
|
1619
|
+
job = await jobQueue.getJob(jobId);
|
|
1620
|
+
expect(job?.status).toBe('completed');
|
|
1621
|
+
expect(stepCallCount).toBe(1);
|
|
1622
|
+
expect(invocationCount).toBe(2);
|
|
1623
|
+
});
|
|
1624
|
+
|
|
1625
|
+
it('waitForToken pauses and resumes on token completion', async () => {
  // Setup
  let invocationCount = 0;
  // Captured inside the handler so the test body can complete the token
  // externally between the two processor passes.
  let tokenId: string;
  const jobId = await jobQueue.addJob({
    jobType: 'test',
    payload: { foo: 'token-wait-test' },
  });

  const handler = vi.fn(async (_payload: any, _signal: any, ctx: any) => {
    invocationCount++;
    if (invocationCount === 1) {
      const token = await ctx.createToken({ timeout: '1h' });
      tokenId = token.id;
      const result = await ctx.waitForToken(token.id);
      // Should not reach here on first invocation (throws WaitSignal)
      expect(result.ok).toBe(true);
    } else {
      // Second invocation: token should be completed
      // The step data should have the result cached
    }
  });

  const processor = jobQueue.createProcessor({
    email: vi.fn(async () => {}),
    sms: vi.fn(async () => {}),
    test: handler,
  });

  // First invocation — should pause on waitForToken
  await processor.start();

  // Job is parked in 'waiting' and records which token it waits on.
  let job = await jobQueue.getJob(jobId);
  expect(job?.status).toBe('waiting');
  expect(job?.waitTokenId).toBe(tokenId!);

  // Complete the token externally
  await jobQueue.completeToken(tokenId!, { answer: 'yes' });

  // Verify job is back to pending
  job = await jobQueue.getJob(jobId);
  expect(job?.status).toBe('pending');

  // Second invocation — should complete
  await processor.start();

  job = await jobQueue.getJob(jobId);
  expect(job?.status).toBe('completed');
  expect(invocationCount).toBe(2);
});
|
|
1675
|
+
});
|
|
1676
|
+
|
|
1677
|
+
// ── BYOC (Bring Your Own Connection) tests for Redis ────────────────────
|
|
1678
|
+
|
|
1679
|
+
describe('Redis BYOC: init with external client', () => {
|
|
1680
|
+
let prefix: string;
|
|
1681
|
+
let externalClient: any;
|
|
1682
|
+
let jobQueue: ReturnType<typeof initJobQueue<TestPayloadMap>>;
|
|
1683
|
+
|
|
1684
|
+
beforeEach(async () => {
|
|
1685
|
+
prefix = createRedisTestPrefix();
|
|
1686
|
+
const { default: IORedis } = await import('ioredis');
|
|
1687
|
+
externalClient = new (IORedis as any)(REDIS_URL);
|
|
1688
|
+
jobQueue = initJobQueue<TestPayloadMap>({
|
|
1689
|
+
backend: 'redis',
|
|
1690
|
+
client: externalClient,
|
|
1691
|
+
keyPrefix: prefix,
|
|
1692
|
+
});
|
|
1693
|
+
});
|
|
1694
|
+
|
|
1695
|
+
afterEach(async () => {
|
|
1696
|
+
await cleanupRedisPrefix(externalClient, prefix);
|
|
1697
|
+
await externalClient.quit();
|
|
1698
|
+
});
|
|
1699
|
+
|
|
1700
|
+
it('uses the provided client for addJob and getJob', async () => {
|
|
1701
|
+
// Act
|
|
1702
|
+
const jobId = await jobQueue.addJob({
|
|
1703
|
+
jobType: 'email',
|
|
1704
|
+
payload: { to: 'byoc-redis@example.com' },
|
|
1705
|
+
});
|
|
1706
|
+
|
|
1707
|
+
// Assert
|
|
1708
|
+
const job = await jobQueue.getJob(jobId);
|
|
1709
|
+
expect(job).not.toBeNull();
|
|
1710
|
+
expect(job?.jobType).toBe('email');
|
|
1711
|
+
expect(job?.payload).toEqual({ to: 'byoc-redis@example.com' });
|
|
1712
|
+
});
|
|
1713
|
+
|
|
1714
|
+
it('returns the same client instance from getRedisClient()', () => {
|
|
1715
|
+
// Act
|
|
1716
|
+
const returned = jobQueue.getRedisClient();
|
|
1717
|
+
|
|
1718
|
+
// Assert
|
|
1719
|
+
expect(returned).toBe(externalClient);
|
|
1720
|
+
});
|
|
1721
|
+
});
|
|
1722
|
+
|
|
1723
|
+
describe('Redis BYOC: addJob with db option throws', () => {
|
|
1724
|
+
let prefix: string;
|
|
1725
|
+
let jobQueue: ReturnType<typeof initJobQueue<TestPayloadMap>>;
|
|
1726
|
+
let redisClient: any;
|
|
1727
|
+
|
|
1728
|
+
beforeEach(async () => {
|
|
1729
|
+
prefix = createRedisTestPrefix();
|
|
1730
|
+
jobQueue = initJobQueue<TestPayloadMap>({
|
|
1731
|
+
backend: 'redis',
|
|
1732
|
+
redisConfig: { url: REDIS_URL, keyPrefix: prefix },
|
|
1733
|
+
});
|
|
1734
|
+
redisClient = jobQueue.getRedisClient();
|
|
1735
|
+
});
|
|
1736
|
+
|
|
1737
|
+
afterEach(async () => {
|
|
1738
|
+
await cleanupRedisPrefix(redisClient, prefix);
|
|
1739
|
+
await redisClient.quit();
|
|
1740
|
+
});
|
|
1741
|
+
|
|
1742
|
+
it('throws a clear error when db option is provided', async () => {
|
|
1743
|
+
// Setup — fake db client
|
|
1744
|
+
const fakeDb = { query: async () => ({ rows: [], rowCount: 0 }) };
|
|
1745
|
+
|
|
1746
|
+
// Act & Assert
|
|
1747
|
+
await expect(
|
|
1748
|
+
jobQueue.addJob(
|
|
1749
|
+
{ jobType: 'email', payload: { to: 'fail@example.com' } },
|
|
1750
|
+
{ db: fakeDb },
|
|
1751
|
+
),
|
|
1752
|
+
).rejects.toThrow('The db option is not supported with the Redis backend.');
|
|
1753
|
+
});
|
|
1754
|
+
});
|
|
1755
|
+
|
|
1756
|
+
describe('Redis addJobs batch insert', () => {
|
|
1757
|
+
let prefix: string;
|
|
1758
|
+
let jobQueue: ReturnType<typeof initJobQueue<TestPayloadMap>>;
|
|
1759
|
+
let redisClient: any;
|
|
1760
|
+
|
|
1761
|
+
beforeEach(async () => {
|
|
1762
|
+
prefix = createRedisTestPrefix();
|
|
1763
|
+
jobQueue = initJobQueue<TestPayloadMap>({
|
|
1764
|
+
backend: 'redis',
|
|
1765
|
+
redisConfig: { url: REDIS_URL, keyPrefix: prefix },
|
|
1766
|
+
});
|
|
1767
|
+
redisClient = jobQueue.getRedisClient();
|
|
1768
|
+
});
|
|
1769
|
+
|
|
1770
|
+
afterEach(async () => {
|
|
1771
|
+
await cleanupRedisPrefix(redisClient, prefix);
|
|
1772
|
+
await redisClient.quit();
|
|
1773
|
+
});
|
|
1774
|
+
|
|
1775
|
+
it('inserts multiple jobs and returns IDs in order', async () => {
|
|
1776
|
+
// Act
|
|
1777
|
+
const ids = await jobQueue.addJobs([
|
|
1778
|
+
{ jobType: 'email', payload: { to: 'a@test.com' } },
|
|
1779
|
+
{ jobType: 'sms', payload: { to: '+1234' } },
|
|
1780
|
+
{ jobType: 'email', payload: { to: 'b@test.com' } },
|
|
1781
|
+
]);
|
|
1782
|
+
|
|
1783
|
+
// Assert
|
|
1784
|
+
expect(ids).toHaveLength(3);
|
|
1785
|
+
|
|
1786
|
+
const job0 = await jobQueue.getJob(ids[0]);
|
|
1787
|
+
expect(job0?.jobType).toBe('email');
|
|
1788
|
+
expect(job0?.payload).toEqual({ to: 'a@test.com' });
|
|
1789
|
+
|
|
1790
|
+
const job1 = await jobQueue.getJob(ids[1]);
|
|
1791
|
+
expect(job1?.jobType).toBe('sms');
|
|
1792
|
+
expect(job1?.payload).toEqual({ to: '+1234' });
|
|
1793
|
+
|
|
1794
|
+
const job2 = await jobQueue.getJob(ids[2]);
|
|
1795
|
+
expect(job2?.jobType).toBe('email');
|
|
1796
|
+
expect(job2?.payload).toEqual({ to: 'b@test.com' });
|
|
1797
|
+
});
|
|
1798
|
+
|
|
1799
|
+
it('returns empty array for empty input', async () => {
|
|
1800
|
+
// Act
|
|
1801
|
+
const ids = await jobQueue.addJobs([]);
|
|
1802
|
+
|
|
1803
|
+
// Assert
|
|
1804
|
+
expect(ids).toEqual([]);
|
|
1805
|
+
});
|
|
1806
|
+
|
|
1807
|
+
it('handles idempotency keys for new jobs', async () => {
|
|
1808
|
+
// Act
|
|
1809
|
+
const ids = await jobQueue.addJobs([
|
|
1810
|
+
{
|
|
1811
|
+
jobType: 'email',
|
|
1812
|
+
payload: { to: 'a@test.com' },
|
|
1813
|
+
idempotencyKey: 'r-key-a',
|
|
1814
|
+
},
|
|
1815
|
+
{
|
|
1816
|
+
jobType: 'email',
|
|
1817
|
+
payload: { to: 'b@test.com' },
|
|
1818
|
+
idempotencyKey: 'r-key-b',
|
|
1819
|
+
},
|
|
1820
|
+
]);
|
|
1821
|
+
|
|
1822
|
+
// Assert
|
|
1823
|
+
expect(ids).toHaveLength(2);
|
|
1824
|
+
expect(ids[0]).not.toBe(ids[1]);
|
|
1825
|
+
|
|
1826
|
+
const job0 = await jobQueue.getJob(ids[0]);
|
|
1827
|
+
expect(job0?.idempotencyKey).toBe('r-key-a');
|
|
1828
|
+
});
|
|
1829
|
+
|
|
1830
|
+
it('returns existing IDs for conflicting idempotency keys', async () => {
|
|
1831
|
+
// Setup
|
|
1832
|
+
const existingId = await jobQueue.addJob({
|
|
1833
|
+
jobType: 'email',
|
|
1834
|
+
payload: { to: 'existing@test.com' },
|
|
1835
|
+
idempotencyKey: 'r-dup',
|
|
1836
|
+
});
|
|
1837
|
+
|
|
1838
|
+
// Act
|
|
1839
|
+
const ids = await jobQueue.addJobs([
|
|
1840
|
+
{ jobType: 'email', payload: { to: 'new@test.com' } },
|
|
1841
|
+
{
|
|
1842
|
+
jobType: 'email',
|
|
1843
|
+
payload: { to: 'dup@test.com' },
|
|
1844
|
+
idempotencyKey: 'r-dup',
|
|
1845
|
+
},
|
|
1846
|
+
]);
|
|
1847
|
+
|
|
1848
|
+
// Assert
|
|
1849
|
+
expect(ids).toHaveLength(2);
|
|
1850
|
+
expect(ids[1]).toBe(existingId);
|
|
1851
|
+
expect(ids[0]).not.toBe(existingId);
|
|
1852
|
+
});
|
|
1853
|
+
|
|
1854
|
+
it('records added events for each inserted job', async () => {
|
|
1855
|
+
// Act
|
|
1856
|
+
const ids = await jobQueue.addJobs([
|
|
1857
|
+
{ jobType: 'email', payload: { to: 'a@test.com' } },
|
|
1858
|
+
{ jobType: 'sms', payload: { to: '+999' } },
|
|
1859
|
+
]);
|
|
1860
|
+
|
|
1861
|
+
// Assert
|
|
1862
|
+
const events0 = await jobQueue.getJobEvents(ids[0]);
|
|
1863
|
+
expect(events0.filter((e) => e.eventType === 'added')).toHaveLength(1);
|
|
1864
|
+
|
|
1865
|
+
const events1 = await jobQueue.getJobEvents(ids[1]);
|
|
1866
|
+
expect(events1.filter((e) => e.eventType === 'added')).toHaveLength(1);
|
|
1867
|
+
});
|
|
1868
|
+
|
|
1869
|
+
it('throws when db option is used with addJobs', async () => {
|
|
1870
|
+
// Setup
|
|
1871
|
+
const fakeDb = { query: async () => ({ rows: [], rowCount: 0 }) };
|
|
1872
|
+
|
|
1873
|
+
// Act & Assert
|
|
1874
|
+
await expect(
|
|
1875
|
+
jobQueue.addJobs(
|
|
1876
|
+
[{ jobType: 'email', payload: { to: 'fail@test.com' } }],
|
|
1877
|
+
{ db: fakeDb },
|
|
1878
|
+
),
|
|
1879
|
+
).rejects.toThrow('The db option is not supported with the Redis backend.');
|
|
1880
|
+
});
|
|
1881
|
+
|
|
1882
|
+
it('stores tags and priority correctly per job', async () => {
|
|
1883
|
+
// Act
|
|
1884
|
+
const ids = await jobQueue.addJobs([
|
|
1885
|
+
{
|
|
1886
|
+
jobType: 'email',
|
|
1887
|
+
payload: { to: 'a@test.com' },
|
|
1888
|
+
tags: ['urgent'],
|
|
1889
|
+
priority: 10,
|
|
1890
|
+
},
|
|
1891
|
+
{ jobType: 'sms', payload: { to: '+1' }, priority: 5 },
|
|
1892
|
+
{ jobType: 'email', payload: { to: 'c@test.com' }, tags: ['low'] },
|
|
1893
|
+
]);
|
|
1894
|
+
|
|
1895
|
+
// Assert
|
|
1896
|
+
const job0 = await jobQueue.getJob(ids[0]);
|
|
1897
|
+
expect(job0?.tags).toEqual(['urgent']);
|
|
1898
|
+
expect(job0?.priority).toBe(10);
|
|
1899
|
+
|
|
1900
|
+
const job1 = await jobQueue.getJob(ids[1]);
|
|
1901
|
+
expect(job1?.priority).toBe(5);
|
|
1902
|
+
|
|
1903
|
+
const job2 = await jobQueue.getJob(ids[2]);
|
|
1904
|
+
expect(job2?.tags).toEqual(['low']);
|
|
1905
|
+
});
|
|
1906
|
+
});
|