@pingpolls/redisq 0.1.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- package/.env.example +2 -0
- package/.prototools +1 -0
- package/LICENSE +201 -0
- package/README.md +784 -0
- package/app.test.ts +529 -0
- package/app.ts +774 -0
- package/benchmark/stress-worker.ts +50 -0
- package/benchmark/stress.ts +359 -0
- package/biome.json +80 -0
- package/compose.yml +20 -0
- package/package.json +31 -0
- package/redis.conf +1 -0
- package/tsconfig.json +29 -0
package/app.ts
ADDED
@@ -0,0 +1,774 @@

```ts
import { RedisClient as BunRedisClient } from "bun";
import { Cron } from "croner";

export type QueueOptions =
  | {
      redis: BunRedisClient;
    }
  | {
      host: string;
      port: string;
      user?: string;
      password?: string;
      namespace?: string;
      tls?: boolean;
    };

export type CreateQueueOptions<QueueName extends string = string> =
  QueueName extends `${string}:batch`
    ? {
        qname: QueueName;
        maxsize?: number;
        maxRetries?: number;
        maxBackoffSeconds?: number;
        every?: number;
      }
    : {
        qname: QueueName;
        maxsize?: number;
        maxRetries?: number;
        maxBackoffSeconds?: number;
      };

export interface QueueAttributes {
  maxsize: number;
  created: number;
  msgs: number;
  isBatch: boolean;
  maxRetries: number;
  maxBackoffSeconds: number;
  every?: number;
}

export interface SendMessageOptions {
  qname: string;
  message: string;
  delay?: number;
}

export interface SendBatchMessageOptions {
  qname: string;
  batchId: string;
  message: string;
}

export interface Message {
  id: string;
  message: string;
  sent: number;
  attempt: number;
}

export interface BatchMessage {
  batchId: string;
  messages: Omit<Message, "attempt">[];
  sent: number;
  attempt: number;
}

interface StoredMessage {
  id: string;
  message: string;
  sent: number;
  attempt: number;
}

interface StoredBatchMeta {
  batchId: string;
  sent: number;
  attempt: number;
}

export class RedisQueue {
  private redis: BunRedisClient;
  private redisUrl: string;
  private ns: string;
  private workers: Map<string, AbortController> = new Map();
  private batchJobs: Map<string, Cron> = new Map();
  private isClosing = false;

  constructor(options: QueueOptions) {
    if ("redis" in options) {
      this.redis = options.redis;
      this.redisUrl = "redis://localhost:6379";
      this.ns = "rsmq";
    } else {
      const { host, port, user, password, namespace, tls } = options;
      const protocol = tls ? "rediss" : "redis";

      let auth = "";
      if (user && password) {
        auth = `${user}:${password}@`;
      } else if (password) {
        auth = `:${password}@`;
      }

      this.redisUrl = `${protocol}://${auth}${host}:${port}`;
      this.redis = new BunRedisClient(this.redisUrl);
      this.ns = namespace || "rsmq";
    }
  }

  private getKey(qname: string, suffix = ""): string {
    return suffix ? `${this.ns}:${qname}:${suffix}` : `${this.ns}:${qname}`;
  }

  private isBatchQueue(qname: string): boolean {
    return qname.endsWith(":batch");
  }

  async createQueue<QueueName extends string>(
    options: CreateQueueOptions<QueueName>,
  ): Promise<boolean> {
    const {
      qname,
      maxsize = 65536,
      maxRetries = 0,
      maxBackoffSeconds = 30,
    } = options;

    if (!/^[a-zA-Z0-9_:-]{1,160}$/.test(qname)) {
      throw new Error("Invalid queue name");
    }

    const key = this.getKey(qname);
    const exists = await this.redis.exists(key);

    if (exists) {
      return false;
    }

    const isBatch = this.isBatchQueue(qname);

    const attrs: Record<string, string | number> = {
      created: Date.now(),
      maxBackoffSeconds,
      maxRetries,
      maxsize,
      type: isBatch ? "batch" : "single",
    };

    if (isBatch && "every" in options) {
      attrs.every = options.every ?? 60;
    }

    await this.redis.hset(key, attrs);
    return true;
  }

  async listQueues(): Promise<string[]> {
    const pattern = `${this.ns}:*`;
    const keys = await this.redis.keys(pattern);

    return keys
      .map((k) => k.replace(`${this.ns}:`, ""))
      .filter((k) => !k.includes(":") || k.endsWith(":batch"));
  }

  async getQueue(qname: string): Promise<QueueAttributes | null> {
    const key = this.getKey(qname);
    const attrs = await this.redis.hgetall(key);

    if (!attrs || Object.keys(attrs).length === 0) {
      return null;
    }

    const isBatch = attrs.type === "batch";
    const messagesKey = this.getKey(qname, "messages");
    const delayedKey = this.getKey(qname, "delayed");

    const [msgCount, delayedCount] = await Promise.all([
      this.redis.send("HLEN", [messagesKey]),
      this.redis.send("ZCARD", [delayedKey]),
    ]);

    const result: QueueAttributes = {
      created: Number(attrs.created),
      isBatch,
      maxBackoffSeconds: Number(attrs.maxBackoffSeconds),
      maxRetries: Number(attrs.maxRetries),
      maxsize: Number(attrs.maxsize),
      msgs: (msgCount as number) + (delayedCount as number),
    };

    if (isBatch && attrs.every) {
      result.every = Number(attrs.every);
    }

    return result;
  }

  async deleteQueue(qname: string): Promise<boolean> {
    const pattern = `${this.ns}:${qname}*`;
    const keys = await this.redis.keys(pattern);

    if (keys.length === 0) {
      return false;
    }

    await this.redis.del(...keys);
    return true;
  }

  async sendMessage(options: SendMessageOptions): Promise<string> {
    const { qname, message, delay } = options;

    if (this.isBatchQueue(qname)) {
      throw new Error("Use sendBatchMessage for batch queues");
    }

    const key = this.getKey(qname);
    const attrs = await this.redis.hgetall(key);

    if (!attrs || Object.keys(attrs).length === 0) {
      throw new Error("Queue not found");
    }

    const maxsize = Number(attrs.maxsize);
    if (message.length > maxsize) {
      throw new Error(`Message too large. Max size: ${maxsize}`);
    }

    const now = Date.now();
    const id = Bun.randomUUIDv7("hex", now);

    const storedMsg: StoredMessage = {
      attempt: 1,
      id,
      message,
      sent: now,
    };

    const messagesKey = this.getKey(qname, "messages");
    const encoded = this.encodeMessage(storedMsg);

    // Use pipeline for atomic operations
    if (delay && delay > 0) {
      const delayedKey = this.getKey(qname, "delayed");
      const deliverAt = now + delay;
      await Promise.all([
        this.redis.hset(messagesKey, { [id]: encoded }),
        this.redis.send("ZADD", [delayedKey, deliverAt.toString(), id]),
      ]);
    } else {
      const queueKey = this.getKey(qname, "queue");
      await Promise.all([
        this.redis.hset(messagesKey, { [id]: encoded }),
        this.redis.send("LPUSH", [queueKey, id]),
      ]);
    }

    return id;
  }

  async sendBatchMessage(options: SendBatchMessageOptions): Promise<string> {
    const { qname, batchId, message } = options;

    if (!this.isBatchQueue(qname)) {
      throw new Error("Use sendMessage for non-batch queues");
    }

    const key = this.getKey(qname);
    const attrs = await this.redis.hgetall(key);

    if (!attrs || Object.keys(attrs).length === 0) {
      throw new Error("Queue not found");
    }

    const maxsize = Number(attrs.maxsize);
    if (message.length > maxsize) {
      throw new Error(`Message too large. Max size: ${maxsize}`);
    }

    const now = Date.now();
    const id = Bun.randomUUIDv7("hex", now);

    const storedMsg: StoredMessage = {
      attempt: 1,
      id,
      message,
      sent: now,
    };

    const messagesKey = this.getKey(qname, "messages");
    const encoded = this.encodeMessage(storedMsg);
    const batchKey = this.getKey(qname, `batch:${batchId}`);
    const batchMetaKey = this.getKey(qname, "batch-meta");

    await this.redis.hset(messagesKey, { [id]: encoded });
    await this.redis.sadd(batchKey, id);

    const batchExists = await this.redis.hexists(batchMetaKey, batchId);

    if (!batchExists) {
      const batchMeta: StoredBatchMeta = {
        attempt: 1,
        batchId,
        sent: now,
      };
      const encodedMeta = this.encodeBatchMeta(batchMeta);
      const pendingBatchesKey = this.getKey(qname, "pending-batches");

      await Promise.all([
        this.redis.hset(batchMetaKey, { [batchId]: encodedMeta }),
        this.redis.sadd(pendingBatchesKey, batchId),
      ]);
    }

    return id;
  }

  private encodeMessage(msg: StoredMessage): string {
    return JSON.stringify({
      attempt: msg.attempt,
      id: msg.id,
      message: msg.message,
      sent: msg.sent,
    });
  }

  private decodeMessage(encoded: string): StoredMessage {
    const parsed = JSON.parse(encoded);
    return {
      attempt: parsed.attempt,
      id: parsed.id,
      message: parsed.message,
      sent: parsed.sent,
    };
  }

  private encodeBatchMeta(meta: StoredBatchMeta): string {
    return JSON.stringify(meta);
  }

  private decodeBatchMeta(encoded: string): StoredBatchMeta {
    return JSON.parse(encoded);
  }

  private async fetchMessage(
    qname: string,
    id: string,
  ): Promise<Message | null> {
    const messagesKey = this.getKey(qname, "messages");
    const encoded = await this.redis.hget(messagesKey, id);

    if (!encoded) {
      return null;
    }

    const stored = this.decodeMessage(encoded);
    return {
      attempt: stored.attempt,
      id: stored.id,
      message: stored.message,
      sent: stored.sent,
    };
  }

  // OPTIMIZED: Batch fetch multiple messages at once
  private async fetchMessages(
    qname: string,
    ids: string[],
  ): Promise<Message[]> {
    if (ids.length === 0) return [];

    const messagesKey = this.getKey(qname, "messages");
    const encoded = await this.redis.hmget(messagesKey, ...ids);

    const messages: Message[] = [];
    for (const encodedMsg of encoded) {
      if (encodedMsg) {
        const stored = this.decodeMessage(encodedMsg);
        messages.push({
          attempt: stored.attempt,
          id: stored.id,
          message: stored.message,
          sent: stored.sent,
        });
      }
    }

    return messages;
  }

  private async fetchBatchMessage(
    qname: string,
    batchId: string,
  ): Promise<BatchMessage | null> {
    const batchKey = this.getKey(qname, `batch:${batchId}`);
    const batchMetaKey = this.getKey(qname, "batch-meta");

    const [messageIds, metaEncoded] = await Promise.all([
      this.redis.send("SMEMBERS", [batchKey]),
      this.redis.hget(batchMetaKey, batchId),
    ]);

    if (
      !messageIds ||
      (messageIds as string[]).length === 0 ||
      !metaEncoded
    ) {
      return null;
    }

    const meta = this.decodeBatchMeta(metaEncoded);
    const messages = await this.fetchMessages(
      qname,
      messageIds as string[],
    );

    return {
      attempt: meta.attempt,
      batchId: meta.batchId,
      messages: messages.map(({ attempt, ...rest }) => rest),
      sent: meta.sent,
    };
  }

  async deleteMessage(qname: string, id: string): Promise<boolean> {
    const messagesKey = this.getKey(qname, "messages");
    const deleted = await this.redis.hdel(messagesKey, id);
    return deleted > 0;
  }

  private async deleteBatch(qname: string, batchId: string): Promise<void> {
    const batchKey = this.getKey(qname, `batch:${batchId}`);
    const batchMetaKey = this.getKey(qname, "batch-meta");

    const messageIds = (await this.redis.send("SMEMBERS", [batchKey])) as [
      string,
    ];

    if (messageIds && messageIds.length > 0) {
      const messagesKey = this.getKey(qname, "messages");
      await this.redis.hdel(messagesKey, ...messageIds);
    }

    await Promise.all([
      this.redis.del(batchKey),
      this.redis.hdel(batchMetaKey, batchId),
    ]);
  }

  private async retryMessage(qname: string, id: string): Promise<void> {
    if (this.isClosing) return;

    const key = this.getKey(qname);
    const attrs = await this.redis.hgetall(key);

    if (!attrs || Object.keys(attrs).length === 0) {
      return;
    }

    const maxRetries = Number(attrs.maxRetries);
    const maxBackoffSeconds = Number(attrs.maxBackoffSeconds);

    const messagesKey = this.getKey(qname, "messages");
    const encoded = await this.redis.hget(messagesKey, id);

    if (!encoded) {
      return;
    }

    const msg = this.decodeMessage(encoded);
    const { attempt } = msg;

    if (maxRetries === 0 || (maxRetries !== -1 && attempt > maxRetries)) {
      await this.redis.hdel(messagesKey, id);
      return;
    }

    const randomMs = Math.floor(Math.random() * 1000);
    const backoffSeconds = Math.min(
      2 ** attempt + randomMs / 1000,
      maxBackoffSeconds,
    );
    const backoffMs = backoffSeconds * 1000;

    msg.attempt += 1;
    const updatedEncoded = this.encodeMessage(msg);

    const delayedKey = this.getKey(qname, "delayed");
    const deliverAt = Date.now() + backoffMs;

    await Promise.all([
      this.redis.hset(messagesKey, { [id]: updatedEncoded }),
      this.redis.send("ZADD", [delayedKey, deliverAt.toString(), id]),
    ]);
  }

  private async retryBatch(qname: string, batchId: string): Promise<void> {
    if (this.isClosing) return;

    const key = this.getKey(qname);
    const attrs = await this.redis.hgetall(key);

    if (!attrs || Object.keys(attrs).length === 0) {
      return;
    }

    const maxRetries = Number(attrs.maxRetries);

    const batchMetaKey = this.getKey(qname, "batch-meta");
    const metaEncoded = await this.redis.hget(batchMetaKey, batchId);

    if (!metaEncoded) {
      return;
    }

    const meta = this.decodeBatchMeta(metaEncoded);
    const { attempt } = meta;

    if (maxRetries === 0 || (maxRetries !== -1 && attempt > maxRetries)) {
      await this.deleteBatch(qname, batchId);
      return;
    }

    meta.attempt += 1;
    const updatedEncoded = this.encodeBatchMeta(meta);
    const pendingBatchesKey = this.getKey(qname, "pending-batches");

    await Promise.all([
      this.redis.hset(batchMetaKey, { [batchId]: updatedEncoded }),
      this.redis.sadd(pendingBatchesKey, batchId),
    ]);
  }

  private async processDelayedMessages(qname: string): Promise<void> {
    const delayedKey = this.getKey(qname, "delayed");
    const queueKey = this.getKey(qname, "queue");
    const now = Date.now();

    const ready = await this.redis.send("ZRANGEBYSCORE", [
      delayedKey,
      "-inf",
      now.toString(),
    ]);

    if (ready && (ready as string[]).length > 0) {
      const pipeline = [];
      for (const id of ready as string[]) {
        pipeline.push(
          this.redis.send("LPUSH", [queueKey, id]),
          this.redis.send("ZREM", [delayedKey, id]),
        );
      }
      await Promise.all(pipeline);
    }
  }

  private async processBatches(
    qname: string,
    handler: (message: BatchMessage) => Promise<{ success: boolean }>,
    silent: boolean,
  ): Promise<void> {
    if (this.isClosing) return;

    const pendingBatchesKey = this.getKey(qname, "pending-batches");
    const batchIds = (await this.redis.send("SMEMBERS", [
      pendingBatchesKey,
    ])) as string[] | null;

    if (!batchIds || batchIds.length === 0) {
      return;
    }

    const batchPromises = batchIds.map(async (batchId) => {
      if (this.isClosing) return;

      await this.redis.srem(pendingBatchesKey, batchId);

      const batchMessage = await this.fetchBatchMessage(qname, batchId);

      if (!batchMessage) {
        return;
      }

      try {
        const { success } = await handler(batchMessage);

        if (success) {
          await this.deleteBatch(qname, batchId);
        } else {
          await this.retryBatch(qname, batchId);
        }
      } catch (error) {
        if (!silent) {
          console.error(
            `Batch ${batchId} handler error:`,
            (error as Error).message,
          );
        }
        await this.retryBatch(qname, batchId);
      }
    });

    await Promise.all(batchPromises);
  }

  async startWorker<QueueName extends `${string}:batch` | (string & {})>(
    qname: QueueName,
    handler: (
      message: QueueName extends `${string}:batch`
        ? BatchMessage
        : Message,
    ) => Promise<{ success: boolean }>,
    options: { concurrency?: number; silent?: boolean } = {},
  ): Promise<void> {
    const { concurrency = 1, silent = false } = options;

    if (this.workers.has(qname)) {
      throw new Error(`Worker already running for queue: ${qname}`);
    }

    const isBatch = this.isBatchQueue(qname);

    if (isBatch) {
      const key = this.getKey(qname);
      const attrs = await this.redis.hgetall(key);

      if (!attrs || Object.keys(attrs).length === 0) {
        throw new Error("Queue not found");
      }

      const every = Number(attrs.every || 60);

      const job = new Cron(`*/${every} * * * * *`, async () => {
        if (this.isClosing) return;
        try {
          await this.processBatches(
            qname,
            handler as (
              message: BatchMessage,
            ) => Promise<{ success: boolean }>,
            silent,
          );
        } catch (error) {
          if (!silent && !this.isClosing) {
            console.error(
              "Batch processing error:",
              (error as Error).message,
            );
          }
        }
      });

      this.batchJobs.set(qname, job);
    } else {
      const controller = new AbortController();
      this.workers.set(qname, controller);

      const workers = Array.from({ length: concurrency }, (_, i) =>
        this.runWorker(
          qname,
          handler as (
            message: Message,
          ) => Promise<{ success: boolean }>,
          controller.signal,
          i,
          silent,
        ),
      );

      await Promise.all(workers);
    }
  }

  // OPTIMIZED: Process messages in parallel within worker
  private async runWorker(
    qname: string,
    handler: (message: Message) => Promise<{ success: boolean }>,
    signal: AbortSignal,
    workerId: number,
    silent: boolean,
  ): Promise<void> {
    const queueKey = this.getKey(qname, "queue");

    while (!signal.aborted) {
      try {
        await this.processDelayedMessages(qname);

        const result = await this.redis.send("BRPOP", [
          queueKey,
          "0.1",
        ]);

        if (result && Array.isArray(result) && result.length >= 2) {
          const id = result[1] as string;
          const message = await this.fetchMessage(qname, id);

          if (message) {
            // Process message without blocking the loop
            handler(message)
              .then(async ({ success }) => {
                if (success) {
                  await this.deleteMessage(qname, message.id);
                } else {
                  await this.retryMessage(qname, message.id);
                }
              })
              .catch(async (error) => {
                if (!silent) {
                  console.error(
                    `Worker ${workerId} handler error:`,
                    (error as Error).message,
                  );
                }
                await this.retryMessage(qname, message.id);
              });
          }
        }
      } catch (error) {
        if (!signal.aborted) {
          if (!silent) {
            console.error(
              `Worker ${workerId} error:`,
              (error as Error).message,
            );
          }
          await Bun.sleep(100);
        }
      }
    }
  }

  stopWorker(qname: string): void {
    const controller = this.workers.get(qname);
    if (controller) {
      controller.abort();
      this.workers.delete(qname);
    }

    const job = this.batchJobs.get(qname);
    if (job) {
      job.stop();
      this.batchJobs.delete(qname);
    }
  }

  async close(): Promise<void> {
    this.isClosing = true;

    for (const qname of this.workers.keys()) {
      const controller = this.workers.get(qname);
      if (controller) {
        controller.abort();
      }
    }
    this.workers.clear();

    for (const qname of this.batchJobs.keys()) {
      const job = this.batchJobs.get(qname);
      if (job) {
        job.stop();
      }
    }
    this.batchJobs.clear();

    await Bun.sleep(100);

    this.redis.close();
  }
}

export default RedisQueue;
```
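For orientation, a minimal usage sketch of the `RedisQueue` API surface visible in `app.ts` above. This is not taken from the package's README; the import specifier, connection details, queue names, and payloads are illustrative assumptions.

```ts
// Sketch only: assumes the package's default entry exposes app.ts's default export.
import RedisQueue from "@pingpolls/redisq";

// Connection details are placeholders; a BunRedisClient can be passed instead.
const queue = new RedisQueue({ host: "localhost", port: "6379" });

// Plain queue: one handler invocation per message.
await queue.createQueue({ qname: "emails", maxRetries: 3 });
await queue.sendMessage({
  qname: "emails",
  message: JSON.stringify({ to: "user@example.com" }),
  delay: 5000, // milliseconds before the message becomes visible
});

// startWorker only resolves when the worker stops, so don't await it inline.
void queue.startWorker(
  "emails",
  async (msg) => {
    console.log("processing", msg.id, "attempt", msg.attempt);
    return { success: true }; // false (or a throw) schedules a backoff retry
  },
  { concurrency: 2 },
);

// Batch queue: name must end in ":batch"; messages sharing a batchId are
// delivered together on the cron interval derived from `every`.
await queue.createQueue({ qname: "digests:batch", every: 60 });
await queue.sendBatchMessage({
  qname: "digests:batch",
  batchId: "user-1",
  message: "event-1",
});

// Later: stop workers and release the Redis connection.
queue.stopWorker("emails");
await queue.close();
```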