@gravito/stream 1.0.0-alpha.5 → 1.0.0-beta.1
- package/README.md +8 -3
- package/dist/index.cjs +1016 -31676
- package/dist/index.d.cts +1132 -0
- package/dist/index.d.ts +1123 -36
- package/dist/index.js +1412 -0
- package/package.json +3 -3
- package/dist/Consumer.d.ts +0 -67
- package/dist/Consumer.d.ts.map +0 -1
- package/dist/Job.d.ts +0 -76
- package/dist/Job.d.ts.map +0 -1
- package/dist/OrbitQueue.d.ts +0 -74
- package/dist/OrbitQueue.d.ts.map +0 -1
- package/dist/QueueManager.d.ts +0 -86
- package/dist/QueueManager.d.ts.map +0 -1
- package/dist/Queueable.d.ts +0 -63
- package/dist/Queueable.d.ts.map +0 -1
- package/dist/Worker.d.ts +0 -48
- package/dist/Worker.d.ts.map +0 -1
- package/dist/core/src/ConfigManager.d.ts +0 -26
- package/dist/core/src/ConfigManager.d.ts.map +0 -1
- package/dist/core/src/Container.d.ts +0 -39
- package/dist/core/src/Container.d.ts.map +0 -1
- package/dist/core/src/Event.d.ts +0 -6
- package/dist/core/src/Event.d.ts.map +0 -1
- package/dist/core/src/EventManager.d.ts +0 -124
- package/dist/core/src/EventManager.d.ts.map +0 -1
- package/dist/core/src/GlobalErrorHandlers.d.ts +0 -32
- package/dist/core/src/GlobalErrorHandlers.d.ts.map +0 -1
- package/dist/core/src/HookManager.d.ts +0 -29
- package/dist/core/src/HookManager.d.ts.map +0 -1
- package/dist/core/src/Listener.d.ts +0 -5
- package/dist/core/src/Listener.d.ts.map +0 -1
- package/dist/core/src/Logger.d.ts +0 -21
- package/dist/core/src/Logger.d.ts.map +0 -1
- package/dist/core/src/PlanetCore.d.ts +0 -116
- package/dist/core/src/PlanetCore.d.ts.map +0 -1
- package/dist/core/src/Route.d.ts +0 -13
- package/dist/core/src/Route.d.ts.map +0 -1
- package/dist/core/src/Router.d.ts +0 -168
- package/dist/core/src/Router.d.ts.map +0 -1
- package/dist/core/src/ServiceProvider.d.ts +0 -17
- package/dist/core/src/ServiceProvider.d.ts.map +0 -1
- package/dist/core/src/exceptions/AuthenticationException.d.ts +0 -5
- package/dist/core/src/exceptions/AuthenticationException.d.ts.map +0 -1
- package/dist/core/src/exceptions/AuthorizationException.d.ts +0 -5
- package/dist/core/src/exceptions/AuthorizationException.d.ts.map +0 -1
- package/dist/core/src/exceptions/GravitoException.d.ts +0 -15
- package/dist/core/src/exceptions/GravitoException.d.ts.map +0 -1
- package/dist/core/src/exceptions/ModelNotFoundException.d.ts +0 -7
- package/dist/core/src/exceptions/ModelNotFoundException.d.ts.map +0 -1
- package/dist/core/src/exceptions/ValidationException.d.ts +0 -15
- package/dist/core/src/exceptions/ValidationException.d.ts.map +0 -1
- package/dist/core/src/exceptions/index.d.ts +0 -6
- package/dist/core/src/exceptions/index.d.ts.map +0 -1
- package/dist/core/src/helpers/Arr.d.ts +0 -15
- package/dist/core/src/helpers/Arr.d.ts.map +0 -1
- package/dist/core/src/helpers/Str.d.ts +0 -19
- package/dist/core/src/helpers/Str.d.ts.map +0 -1
- package/dist/core/src/helpers/data.d.ts +0 -6
- package/dist/core/src/helpers/data.d.ts.map +0 -1
- package/dist/core/src/helpers/errors.d.ts +0 -13
- package/dist/core/src/helpers/errors.d.ts.map +0 -1
- package/dist/core/src/helpers/response.d.ts +0 -19
- package/dist/core/src/helpers/response.d.ts.map +0 -1
- package/dist/core/src/helpers.d.ts +0 -39
- package/dist/core/src/helpers.d.ts.map +0 -1
- package/dist/core/src/http/CookieJar.d.ts +0 -34
- package/dist/core/src/http/CookieJar.d.ts.map +0 -1
- package/dist/core/src/http/middleware/ThrottleRequests.d.ts +0 -13
- package/dist/core/src/http/middleware/ThrottleRequests.d.ts.map +0 -1
- package/dist/core/src/index.d.ts +0 -32
- package/dist/core/src/index.d.ts.map +0 -1
- package/dist/core/src/security/Encrypter.d.ts +0 -25
- package/dist/core/src/security/Encrypter.d.ts.map +0 -1
- package/dist/core/src/security/Hasher.d.ts +0 -30
- package/dist/core/src/security/Hasher.d.ts.map +0 -1
- package/dist/core/src/types/events.d.ts +0 -95
- package/dist/core/src/types/events.d.ts.map +0 -1
- package/dist/drivers/DatabaseDriver.d.ts +0 -60
- package/dist/drivers/DatabaseDriver.d.ts.map +0 -1
- package/dist/drivers/KafkaDriver.d.ts +0 -134
- package/dist/drivers/KafkaDriver.d.ts.map +0 -1
- package/dist/drivers/MemoryDriver.d.ts +0 -45
- package/dist/drivers/MemoryDriver.d.ts.map +0 -1
- package/dist/drivers/QueueDriver.d.ts +0 -89
- package/dist/drivers/QueueDriver.d.ts.map +0 -1
- package/dist/drivers/RedisDriver.d.ts +0 -79
- package/dist/drivers/RedisDriver.d.ts.map +0 -1
- package/dist/drivers/SQSDriver.d.ts +0 -100
- package/dist/drivers/SQSDriver.d.ts.map +0 -1
- package/dist/index.cjs.map +0 -422
- package/dist/index.d.ts.map +0 -1
- package/dist/index.mjs +0 -32096
- package/dist/index.mjs.map +0 -422
- package/dist/orbit-db/src/DBService.d.ts +0 -270
- package/dist/orbit-db/src/DBService.d.ts.map +0 -1
- package/dist/orbit-db/src/EventBus.d.ts +0 -53
- package/dist/orbit-db/src/EventBus.d.ts.map +0 -1
- package/dist/orbit-db/src/MigrationDriver.d.ts +0 -55
- package/dist/orbit-db/src/MigrationDriver.d.ts.map +0 -1
- package/dist/orbit-db/src/Model.d.ts +0 -564
- package/dist/orbit-db/src/Model.d.ts.map +0 -1
- package/dist/orbit-db/src/ModelCollection.d.ts +0 -35
- package/dist/orbit-db/src/ModelCollection.d.ts.map +0 -1
- package/dist/orbit-db/src/index.d.ts +0 -34
- package/dist/orbit-db/src/index.d.ts.map +0 -1
- package/dist/orbit-db/src/types.d.ts +0 -146
- package/dist/orbit-db/src/types.d.ts.map +0 -1
- package/dist/orbit-queue/src/Consumer.d.ts +0 -67
- package/dist/orbit-queue/src/Consumer.d.ts.map +0 -1
- package/dist/orbit-queue/src/Job.d.ts +0 -76
- package/dist/orbit-queue/src/Job.d.ts.map +0 -1
- package/dist/orbit-queue/src/OrbitQueue.d.ts +0 -74
- package/dist/orbit-queue/src/OrbitQueue.d.ts.map +0 -1
- package/dist/orbit-queue/src/QueueManager.d.ts +0 -86
- package/dist/orbit-queue/src/QueueManager.d.ts.map +0 -1
- package/dist/orbit-queue/src/Queueable.d.ts +0 -63
- package/dist/orbit-queue/src/Queueable.d.ts.map +0 -1
- package/dist/orbit-queue/src/Worker.d.ts +0 -48
- package/dist/orbit-queue/src/Worker.d.ts.map +0 -1
- package/dist/orbit-queue/src/drivers/DatabaseDriver.d.ts +0 -60
- package/dist/orbit-queue/src/drivers/DatabaseDriver.d.ts.map +0 -1
- package/dist/orbit-queue/src/drivers/KafkaDriver.d.ts +0 -134
- package/dist/orbit-queue/src/drivers/KafkaDriver.d.ts.map +0 -1
- package/dist/orbit-queue/src/drivers/MemoryDriver.d.ts +0 -45
- package/dist/orbit-queue/src/drivers/MemoryDriver.d.ts.map +0 -1
- package/dist/orbit-queue/src/drivers/QueueDriver.d.ts +0 -89
- package/dist/orbit-queue/src/drivers/QueueDriver.d.ts.map +0 -1
- package/dist/orbit-queue/src/drivers/RedisDriver.d.ts +0 -79
- package/dist/orbit-queue/src/drivers/RedisDriver.d.ts.map +0 -1
- package/dist/orbit-queue/src/drivers/SQSDriver.d.ts +0 -100
- package/dist/orbit-queue/src/drivers/SQSDriver.d.ts.map +0 -1
- package/dist/orbit-queue/src/index.d.ts +0 -45
- package/dist/orbit-queue/src/index.d.ts.map +0 -1
- package/dist/orbit-queue/src/serializers/ClassNameSerializer.d.ts +0 -46
- package/dist/orbit-queue/src/serializers/ClassNameSerializer.d.ts.map +0 -1
- package/dist/orbit-queue/src/serializers/JobSerializer.d.ts +0 -36
- package/dist/orbit-queue/src/serializers/JobSerializer.d.ts.map +0 -1
- package/dist/orbit-queue/src/serializers/JsonSerializer.d.ts +0 -32
- package/dist/orbit-queue/src/serializers/JsonSerializer.d.ts.map +0 -1
- package/dist/orbit-queue/src/types.d.ts +0 -85
- package/dist/orbit-queue/src/types.d.ts.map +0 -1
- package/dist/serializers/ClassNameSerializer.d.ts +0 -46
- package/dist/serializers/ClassNameSerializer.d.ts.map +0 -1
- package/dist/serializers/JobSerializer.d.ts +0 -36
- package/dist/serializers/JobSerializer.d.ts.map +0 -1
- package/dist/serializers/JsonSerializer.d.ts +0 -32
- package/dist/serializers/JsonSerializer.d.ts.map +0 -1
- package/dist/types.d.ts +0 -85
- package/dist/types.d.ts.map +0 -1
package/dist/index.js
ADDED
@@ -0,0 +1,1412 @@
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/drivers/DatabaseDriver.ts
var DatabaseDriver_exports = {};
__export(DatabaseDriver_exports, {
  DatabaseDriver: () => DatabaseDriver
});
var DatabaseDriver;
var init_DatabaseDriver = __esm({
  "src/drivers/DatabaseDriver.ts"() {
    "use strict";
    DatabaseDriver = class {
      tableName;
      dbService;
      constructor(config) {
        this.tableName = config.table ?? "jobs";
        this.dbService = config.dbService;
        if (!this.dbService) {
          throw new Error(
            "[DatabaseDriver] dbService is required. Please provide a database service that implements DatabaseService interface."
          );
        }
      }
      /**
       * Push a job to a queue.
       */
      async push(queue, job) {
        const availableAt = job.delaySeconds ? new Date(Date.now() + job.delaySeconds * 1e3) : /* @__PURE__ */ new Date();
        await this.dbService.execute(
          `INSERT INTO ${this.tableName} (queue, payload, attempts, available_at, created_at)
           VALUES ($1, $2, $3, $4, $5)`,
          [queue, job.data, job.attempts ?? 0, availableAt.toISOString(), (/* @__PURE__ */ new Date()).toISOString()]
        );
      }
      /**
       * Pop a job from the queue (FIFO, with delay support).
       */
      async pop(queue) {
        const result = await this.dbService.execute(
          `SELECT id, payload, attempts, created_at, available_at
           FROM ${this.tableName}
           WHERE queue = $1
             AND available_at <= NOW()
             AND (reserved_at IS NULL OR reserved_at < NOW() - INTERVAL '5 minutes')
           ORDER BY created_at ASC
           LIMIT 1
           FOR UPDATE SKIP LOCKED`,
          [queue]
        ).catch(() => {
          return this.dbService.execute(
            `SELECT id, payload, attempts, created_at, available_at
             FROM ${this.tableName}
             WHERE queue = $1
               AND available_at <= NOW()
               AND (reserved_at IS NULL OR reserved_at < NOW() - INTERVAL '5 minutes')
             ORDER BY created_at ASC
             LIMIT 1
             FOR UPDATE`,
            [queue]
          );
        });
        const rows = result;
        if (!rows || rows.length === 0) {
          return null;
        }
        const row = rows[0];
        await this.dbService.execute(
          `UPDATE ${this.tableName}
           SET reserved_at = NOW()
           WHERE id = $1`,
          [row.id]
        );
        const createdAt = new Date(row.created_at).getTime();
        const delaySeconds = row.available_at ? Math.max(0, Math.floor((new Date(row.available_at).getTime() - createdAt) / 1e3)) : void 0;
        return {
          id: row.id,
          type: "class",
          // Default; should be inferred from payload in a full implementation
          data: row.payload,
          createdAt,
          attempts: row.attempts,
          ...delaySeconds !== void 0 ? { delaySeconds } : {}
        };
      }
      /**
       * Get queue size.
       */
      async size(queue) {
        const result = await this.dbService.execute(
          `SELECT COUNT(*) as count
           FROM ${this.tableName}
           WHERE queue = $1
             AND available_at <= NOW()
             AND (reserved_at IS NULL OR reserved_at < NOW() - INTERVAL '5 minutes')`,
          [queue]
        );
        return result?.[0]?.count ?? 0;
      }
      /**
       * Clear a queue.
       */
      async clear(queue) {
        await this.dbService.execute(`DELETE FROM ${this.tableName} WHERE queue = $1`, [queue]);
      }
      /**
       * Push multiple jobs.
       */
      async pushMany(queue, jobs) {
        if (jobs.length === 0) {
          return;
        }
        await this.dbService.transaction(async (tx) => {
          for (const job of jobs) {
            const availableAt = job.delaySeconds ? new Date(Date.now() + job.delaySeconds * 1e3) : /* @__PURE__ */ new Date();
            await tx.execute(
              `INSERT INTO ${this.tableName} (queue, payload, attempts, available_at, created_at)
               VALUES ($1, $2, $3, $4, $5)`,
              [queue, job.data, job.attempts ?? 0, availableAt.toISOString(), (/* @__PURE__ */ new Date()).toISOString()]
            );
          }
        });
      }
    };
  }
});

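The queries above pin down the table contract: `queue`, `payload`, `attempts`, `available_at`, `reserved_at`, and `created_at` columns, `$1`-style placeholders, and `FOR UPDATE SKIP LOCKED` with a plain `FOR UPDATE` fallback. This diff ships no migration, so the following is only a minimal sketch of a compatible Postgres table, assuming the same injected `dbService` with an `execute(sql, params)` method:

```typescript
// Sketch only: a Postgres table shape the DatabaseDriver queries above would
// accept. Column names are taken directly from its INSERT/SELECT/UPDATE statements.
await dbService.execute(
  `CREATE TABLE IF NOT EXISTS jobs (
     id           BIGSERIAL PRIMARY KEY,
     queue        TEXT NOT NULL,
     payload      TEXT NOT NULL,
     attempts     INTEGER NOT NULL DEFAULT 0,
     available_at TIMESTAMPTZ NOT NULL,
     reserved_at  TIMESTAMPTZ,
     created_at   TIMESTAMPTZ NOT NULL
   )`,
  []
);
```
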
// src/drivers/KafkaDriver.ts
var KafkaDriver_exports = {};
__export(KafkaDriver_exports, {
  KafkaDriver: () => KafkaDriver
});
var KafkaDriver;
var init_KafkaDriver = __esm({
  "src/drivers/KafkaDriver.ts"() {
    "use strict";
    KafkaDriver = class {
      client;
      consumerGroupId;
      producer;
      admin;
      constructor(config) {
        this.client = config.client;
        this.consumerGroupId = config.consumerGroupId ?? "gravito-workers";
        if (!this.client) {
          throw new Error("[KafkaDriver] Kafka client is required. Please install kafkajs package.");
        }
      }
      /**
       * Ensure the producer is connected.
       */
      async ensureProducer() {
        if (!this.producer) {
          this.producer = this.client.producer();
          await this.producer.connect();
        }
        return this.producer;
      }
      /**
       * Ensure the admin client is connected.
       */
      async ensureAdmin() {
        if (!this.admin) {
          this.admin = this.client.admin();
          await this.admin.connect();
        }
        return this.admin;
      }
      /**
       * Push a job to a topic.
       */
      async push(queue, job) {
        const producer = await this.ensureProducer();
        const payload = JSON.stringify({
          id: job.id,
          type: job.type,
          data: job.data,
          className: job.className,
          createdAt: job.createdAt,
          delaySeconds: job.delaySeconds,
          attempts: job.attempts,
          maxAttempts: job.maxAttempts
        });
        await producer.send({
          topic: queue,
          messages: [
            {
              key: job.id,
              value: payload
            }
          ]
        });
      }
      /**
       * Pop is not supported for Kafka.
       *
       * Note: Kafka uses a push-based model, so you should use `subscribe()`.
       */
      async pop(_queue) {
        throw new Error("[KafkaDriver] Kafka uses push-based model. Use subscribe() instead of pop().");
      }
      /**
       * Kafka does not provide a direct queue size.
       *
       * Returns 0; use Kafka tooling/metrics for lag/size insights.
       */
      async size(_queue) {
        return 0;
      }
      /**
       * Clear a queue by deleting the topic.
       */
      async clear(queue) {
        const admin = await this.ensureAdmin();
        await admin.deleteTopics({ topics: [queue] });
      }
      /**
       * Push multiple jobs.
       */
      async pushMany(queue, jobs) {
        if (jobs.length === 0) {
          return;
        }
        const producer = await this.ensureProducer();
        const messages = jobs.map((job) => {
          const payload = JSON.stringify({
            id: job.id,
            type: job.type,
            data: job.data,
            className: job.className,
            createdAt: job.createdAt,
            delaySeconds: job.delaySeconds,
            attempts: job.attempts,
            maxAttempts: job.maxAttempts
          });
          return {
            key: job.id,
            value: payload
          };
        });
        await producer.send({
          topic: queue,
          messages
        });
      }
      /**
       * Create a topic.
       */
      async createTopic(topic, options) {
        const admin = await this.ensureAdmin();
        await admin.createTopics({
          topics: [
            {
              topic,
              numPartitions: options?.partitions ?? 1,
              replicationFactor: options?.replicationFactor ?? 1
            }
          ]
        });
      }
      /**
       * Delete a topic.
       */
      async deleteTopic(topic) {
        await this.clear(topic);
      }
      /**
       * Subscribe to a topic (push-based model).
       */
      async subscribe(queue, callback) {
        const consumer = this.client.consumer({ groupId: this.consumerGroupId });
        await consumer.connect();
        await consumer.subscribe({ topics: [queue] });
        await consumer.run({
          eachMessage: async ({ message }) => {
            if (!message.value) {
              return;
            }
            const payload = JSON.parse(message.value.toString());
            const job = {
              id: payload.id,
              type: payload.type,
              data: payload.data,
              className: payload.className,
              createdAt: payload.createdAt,
              delaySeconds: payload.delaySeconds,
              attempts: payload.attempts,
              maxAttempts: payload.maxAttempts
            };
            try {
              await callback(job);
            } catch (error) {
              console.error("[KafkaDriver] Error processing message:", error);
            }
          }
        });
      }
    };
  }
});

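Because `pop()` throws and `size()` always returns 0 for this driver, consumption has to go through `subscribe()`. A minimal usage sketch, assuming a kafkajs client (the error messages above point at kafkajs, but any client exposing `producer()`, `consumer()`, and `admin()` would fit):

```typescript
// Sketch only: consuming with KafkaDriver's push-based model.
import { Kafka } from "kafkajs";

const kafka = new Kafka({ clientId: "app", brokers: ["localhost:9092"] });
const driver = new KafkaDriver({ client: kafka, consumerGroupId: "gravito-workers" });

await driver.subscribe("emails", async (job) => {
  // job mirrors the payload shape written by push(): id, type, data,
  // className, createdAt, delaySeconds, attempts, maxAttempts
  console.log("got job", job.id);
});
```
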
// src/drivers/RedisDriver.ts
var RedisDriver_exports = {};
__export(RedisDriver_exports, {
  RedisDriver: () => RedisDriver
});
var RedisDriver;
var init_RedisDriver = __esm({
  "src/drivers/RedisDriver.ts"() {
    "use strict";
    RedisDriver = class {
      prefix;
      client;
      constructor(config) {
        this.client = config.client;
        this.prefix = config.prefix ?? "queue:";
        if (!this.client) {
          throw new Error(
            "[RedisDriver] Redis client is required. Please install ioredis or redis package."
          );
        }
      }
      /**
       * Get full Redis key for a queue.
       */
      getKey(queue) {
        return `${this.prefix}${queue}`;
      }
      /**
       * Push a job (LPUSH).
       */
      async push(queue, job) {
        const key = this.getKey(queue);
        const payload = JSON.stringify({
          id: job.id,
          type: job.type,
          data: job.data,
          className: job.className,
          createdAt: job.createdAt,
          delaySeconds: job.delaySeconds,
          attempts: job.attempts,
          maxAttempts: job.maxAttempts
        });
        if (job.delaySeconds && job.delaySeconds > 0) {
          const delayKey = `${key}:delayed`;
          const score = Date.now() + job.delaySeconds * 1e3;
          if (typeof this.client.zadd === "function") {
            await this.client.zadd(delayKey, score, payload);
          } else {
            await this.client.lpush(key, payload);
          }
        } else {
          await this.client.lpush(key, payload);
        }
      }
      /**
       * Pop a job (RPOP, FIFO).
       */
      async pop(queue) {
        const key = this.getKey(queue);
        const delayKey = `${key}:delayed`;
        if (typeof this.client.zrange === "function") {
          const now = Date.now();
          const delayedJobs = await this.client.zrange(delayKey, 0, 0, true);
          if (delayedJobs && delayedJobs.length >= 2) {
            const score = parseFloat(delayedJobs[1]);
            if (score <= now) {
              const payload2 = delayedJobs[0];
              await this.client.zrem(delayKey, payload2);
              return this.parsePayload(payload2);
            }
          }
        }
        const payload = await this.client.rpop(key);
        if (!payload) {
          return null;
        }
        return this.parsePayload(payload);
      }
      /**
       * Parse Redis payload.
       */
      parsePayload(payload) {
        const parsed = JSON.parse(payload);
        return {
          id: parsed.id,
          type: parsed.type,
          data: parsed.data,
          className: parsed.className,
          createdAt: parsed.createdAt,
          delaySeconds: parsed.delaySeconds,
          attempts: parsed.attempts,
          maxAttempts: parsed.maxAttempts
        };
      }
      /**
       * Get queue size.
       */
      async size(queue) {
        const key = this.getKey(queue);
        return this.client.llen(key);
      }
      /**
       * Clear a queue.
       */
      async clear(queue) {
        const key = this.getKey(queue);
        const delayKey = `${key}:delayed`;
        await this.client.del(key);
        if (typeof this.client.del === "function") {
          await this.client.del(delayKey);
        }
      }
      /**
       * Push multiple jobs.
       */
      async pushMany(queue, jobs) {
        if (jobs.length === 0) {
          return;
        }
        const key = this.getKey(queue);
        const payloads = jobs.map(
          (job) => JSON.stringify({
            id: job.id,
            type: job.type,
            data: job.data,
            className: job.className,
            createdAt: job.createdAt,
            delaySeconds: job.delaySeconds,
            attempts: job.attempts,
            maxAttempts: job.maxAttempts
          })
        );
        await this.client.lpush(key, ...payloads);
      }
      /**
       * Pop multiple jobs.
       */
      async popMany(queue, count) {
        const key = this.getKey(queue);
        const results = [];
        for (let i = 0; i < count; i++) {
          const payload = await this.client.rpop(key);
          if (payload) {
            results.push(this.parsePayload(payload));
          } else {
            break;
          }
        }
        return results;
      }
    };
  }
});

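The Redis layout is two structures per queue: a list at `queue:<name>` (jobs enter with LPUSH and leave with RPOP, so FIFO) plus, when the client supports sorted sets, a `queue:<name>:delayed` zset scored by the ready-at timestamp, which `pop()` drains first. A minimal wiring sketch, assuming ioredis; `serialized` is a stand-in for a SerializedJob produced by one of the serializers further down this bundle:

```typescript
// Sketch only: RedisDriver wiring. `serialized` is a placeholder for a
// SerializedJob from ClassNameSerializer/JsonSerializer below.
import Redis from "ioredis";

const driver = new RedisDriver({ client: new Redis(), prefix: "queue:" });

await driver.push("emails", serialized);                          // LPUSH queue:emails
await driver.push("emails", { ...serialized, delaySeconds: 60 }); // ZADD queue:emails:delayed
const next = await driver.pop("emails");                          // delayed set first, then RPOP
```
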
// src/drivers/SQSDriver.ts
var SQSDriver_exports = {};
__export(SQSDriver_exports, {
  SQSDriver: () => SQSDriver
});
var SQSDriver;
var init_SQSDriver = __esm({
  "src/drivers/SQSDriver.ts"() {
    "use strict";
    SQSDriver = class {
      client;
      queueUrlPrefix;
      visibilityTimeout;
      waitTimeSeconds;
      queueUrls = /* @__PURE__ */ new Map();
      constructor(config) {
        this.client = config.client;
        this.queueUrlPrefix = config.queueUrlPrefix ?? "";
        this.visibilityTimeout = config.visibilityTimeout ?? 30;
        this.waitTimeSeconds = config.waitTimeSeconds ?? 20;
        if (!this.client) {
          throw new Error(
            "[SQSDriver] SQS client is required. Please install @aws-sdk/client-sqs package."
          );
        }
      }
      /**
       * Resolve the full queue URL.
       */
      async getQueueUrl(queue) {
        if (this.queueUrls.has(queue)) {
          return this.queueUrls.get(queue);
        }
        if (this.queueUrlPrefix) {
          const url = `${this.queueUrlPrefix}/${queue}`;
          this.queueUrls.set(queue, url);
          return url;
        }
        this.queueUrls.set(queue, queue);
        return queue;
      }
      /**
       * Push a job to SQS.
       */
      async push(queue, job) {
        const { SendMessageCommand } = await import("@aws-sdk/client-sqs");
        const queueUrl = await this.getQueueUrl(queue);
        const payload = JSON.stringify({
          id: job.id,
          type: job.type,
          data: job.data,
          className: job.className,
          createdAt: job.createdAt,
          delaySeconds: job.delaySeconds,
          attempts: job.attempts,
          maxAttempts: job.maxAttempts
        });
        const delaySeconds = job.delaySeconds ? Math.min(job.delaySeconds, 900) : 0;
        await this.client.send(
          new SendMessageCommand({
            QueueUrl: queueUrl,
            MessageBody: payload,
            DelaySeconds: delaySeconds
          })
        );
      }
      /**
       * Pop a job (long polling).
       */
      async pop(queue) {
        const { ReceiveMessageCommand } = await import("@aws-sdk/client-sqs");
        const queueUrl = await this.getQueueUrl(queue);
        const response = await this.client.send(
          new ReceiveMessageCommand({
            QueueUrl: queueUrl,
            MaxNumberOfMessages: 1,
            WaitTimeSeconds: this.waitTimeSeconds,
            VisibilityTimeout: this.visibilityTimeout
          })
        );
        if (!response.Messages || response.Messages.length === 0) {
          return null;
        }
        const message = response.Messages[0];
        const payload = JSON.parse(message.Body ?? "{}");
        return {
          id: payload.id ?? message.MessageId,
          type: payload.type,
          data: payload.data,
          className: payload.className,
          createdAt: payload.createdAt,
          delaySeconds: payload.delaySeconds,
          attempts: payload.attempts,
          maxAttempts: payload.maxAttempts,
          // Store ReceiptHandle for acknowledgement
          ...message.ReceiptHandle && { receiptHandle: message.ReceiptHandle }
        };
      }
      /**
       * Get queue size (approximate).
       */
      async size(queue) {
        const { GetQueueAttributesCommand } = await import("@aws-sdk/client-sqs");
        const queueUrl = await this.getQueueUrl(queue);
        try {
          const response = await this.client.send(
            new GetQueueAttributesCommand({
              QueueUrl: queueUrl,
              AttributeNames: ["ApproximateNumberOfMessages"]
            })
          );
          return parseInt(response.Attributes?.ApproximateNumberOfMessages ?? "0", 10);
        } catch (error) {
          console.error("[SQSDriver] Error getting queue size:", error);
          return 0;
        }
      }
      /**
       * Clear a queue by receiving and deleting messages.
       *
       * Note: SQS does not provide a direct "purge" API via this wrapper. This method will
       * keep receiving and deleting messages until the queue is empty.
       */
      async clear(queue) {
        const { DeleteMessageCommand } = await import("@aws-sdk/client-sqs");
        const queueUrl = await this.getQueueUrl(queue);
        while (true) {
          const job = await this.pop(queue);
          if (!job) {
            break;
          }
          if (job.receiptHandle) {
            await this.client.send(
              new DeleteMessageCommand({
                QueueUrl: queueUrl,
                ReceiptHandle: job.receiptHandle
              })
            );
          }
        }
      }
      /**
       * Push multiple jobs.
       */
      async pushMany(queue, jobs) {
        if (jobs.length === 0) {
          return;
        }
        const { SendMessageBatchCommand } = await import("@aws-sdk/client-sqs");
        const queueUrl = await this.getQueueUrl(queue);
        const batchSize = 10;
        for (let i = 0; i < jobs.length; i += batchSize) {
          const batch = jobs.slice(i, i + batchSize);
          const entries = batch.map((job, index) => {
            const payload = JSON.stringify({
              id: job.id,
              type: job.type,
              data: job.data,
              className: job.className,
              createdAt: job.createdAt,
              delaySeconds: job.delaySeconds,
              attempts: job.attempts,
              maxAttempts: job.maxAttempts
            });
            return {
              Id: `${job.id}-${index}`,
              MessageBody: payload,
              DelaySeconds: job.delaySeconds ? Math.min(job.delaySeconds, 900) : 0
            };
          });
          await this.client.send(
            new SendMessageBatchCommand({
              QueueUrl: queueUrl,
              Entries: entries
            })
          );
        }
      }
      /**
       * Acknowledge is not supported via messageId.
       */
      async acknowledge(_messageId) {
        throw new Error("[SQSDriver] Use deleteMessage() with ReceiptHandle instead of acknowledge().");
      }
      /**
       * Delete a message (acknowledge processing completion).
       */
      async deleteMessage(queue, receiptHandle) {
        const { DeleteMessageCommand } = await import("@aws-sdk/client-sqs");
        const queueUrl = await this.getQueueUrl(queue);
        await this.client.send(
          new DeleteMessageCommand({
            QueueUrl: queueUrl,
            ReceiptHandle: receiptHandle
          })
        );
      }
    };
  }
});

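`acknowledge()` deliberately throws: with SQS, completion is signaled by deleting the message via the `receiptHandle` that `pop()` attaches. A minimal consume-and-ack sketch, assuming `@aws-sdk/client-sqs`; the account id and region below are placeholders:

```typescript
// Sketch only: the SQS receive -> process -> delete cycle this driver expects.
import { SQSClient } from "@aws-sdk/client-sqs";

const driver = new SQSDriver({
  client: new SQSClient({ region: "us-east-1" }),
  queueUrlPrefix: "https://sqs.us-east-1.amazonaws.com/123456789012", // placeholder
});

const job = await driver.pop("emails"); // long-polls up to waitTimeSeconds (default 20s)
if (job?.receiptHandle) {
  // ... process the job ...
  await driver.deleteMessage("emails", job.receiptHandle);
  // Without the delete, the message becomes visible again after
  // visibilityTimeout (default 30s) and will be received again.
}
```
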
// src/Worker.ts
var Worker = class {
  constructor(options = {}) {
    this.options = options;
  }
  /**
   * Process a Job.
   * @param job - Job instance
   */
  async process(job) {
    const maxAttempts = this.options.maxAttempts ?? 3;
    const timeout = this.options.timeout;
    let lastError = null;
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        job.attempts = attempt;
        job.maxAttempts = maxAttempts;
        if (timeout) {
          await Promise.race([
            job.handle(),
            new Promise(
              (_, reject) => setTimeout(
                () => reject(new Error(`Job timeout after ${timeout} seconds`)),
                timeout * 1e3
              )
            )
          ]);
        } else {
          await job.handle();
        }
        return;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));
        if (attempt === maxAttempts) {
          await this.handleFailure(job, lastError);
          throw lastError;
        }
        const delay = Math.min(1e3 * 2 ** (attempt - 1), 3e4);
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
  }
  /**
   * Handle failure.
   */
  async handleFailure(job, error) {
    try {
      await job.failed(error);
    } catch (failedError) {
      console.error("[Worker] Error in job.failed():", failedError);
    }
    if (this.options.onFailed) {
      try {
        await this.options.onFailed(job, error);
      } catch (callbackError) {
        console.error("[Worker] Error in onFailed callback:", callbackError);
      }
    }
  }
};

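The retry delay above is exponential with a cap: `min(1000 * 2^(attempt - 1), 30000)` ms, so a job waits 1s after its first failure, 2s after its second, and never more than 30s (from the sixth failure on). With the default `maxAttempts` of 3, a permanently failing job therefore spends about 3s in backoff before `failed()` and the `onFailed` callback fire. A worked sketch of that schedule and the options it derives from:

```typescript
// Sketch: the retry schedule implied by Worker.process().
const backoffMs = (attempt: number) => Math.min(1000 * 2 ** (attempt - 1), 30000);
// attempt: 1     2     3     4     5      6+
// delayMs: 1000  2000  4000  8000  16000  30000

const worker = new Worker({
  maxAttempts: 5,
  timeout: 30, // seconds; races job.handle() against a rejection timer
  onFailed: async (job, error) => console.error("retries exhausted:", error),
});
```
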
// src/Consumer.ts
var Consumer = class {
  constructor(queueManager, options) {
    this.queueManager = queueManager;
    this.options = options;
  }
  running = false;
  stopRequested = false;
  /**
   * Start the consumer loop.
   */
  async start() {
    if (this.running) {
      throw new Error("Consumer is already running");
    }
    this.running = true;
    this.stopRequested = false;
    const worker = new Worker(this.options.workerOptions);
    const pollInterval = this.options.pollInterval ?? 1e3;
    const keepAlive = this.options.keepAlive ?? true;
    console.log("[Consumer] Started", {
      queues: this.options.queues,
      connection: this.options.connection
    });
    while (this.running && !this.stopRequested) {
      let processed = false;
      for (const queue of this.options.queues) {
        try {
          const job = await this.queueManager.pop(queue, this.options.connection);
          if (job) {
            processed = true;
            await worker.process(job).catch((error) => {
              console.error(`[Consumer] Error processing job in queue "${queue}":`, error);
            });
          }
        } catch (error) {
          console.error(`[Consumer] Error polling queue "${queue}":`, error);
        }
      }
      if (!processed && !keepAlive) {
        break;
      }
      if (!this.stopRequested) {
        await new Promise((resolve) => setTimeout(resolve, pollInterval));
      }
    }
    this.running = false;
    console.log("[Consumer] Stopped");
  }
  /**
   * Stop the consumer loop (graceful shutdown).
   */
  async stop() {
    console.log("[Consumer] Stopping...");
    this.stopRequested = true;
    while (this.running) {
      await new Promise((resolve) => setTimeout(resolve, 100));
    }
  }
  /**
   * Check whether the consumer is running.
   */
  isRunning() {
    return this.running;
  }
};

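`start()` polls each queue in `queues` once per pass, sleeps `pollInterval` between passes, and exits early when a pass finds nothing and `keepAlive` is false; `stop()` raises a flag and waits for the loop to wind down. A minimal lifecycle sketch:

```typescript
// Sketch only: running a Consumer and shutting it down gracefully.
// queueManager is a QueueManager instance (defined later in this bundle).
const consumer = new Consumer(queueManager, {
  queues: ["default", "emails"],
  connection: "default",
  pollInterval: 1000, // ms between polling passes
  keepAlive: true,    // keep looping while queues are empty
  workerOptions: { maxAttempts: 3 },
});

process.on("SIGINT", () => void consumer.stop());
await consumer.start(); // resolves after stop() (or keepAlive: false) ends the loop
```
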
// src/index.ts
init_DatabaseDriver();
init_KafkaDriver();

// src/drivers/MemoryDriver.ts
var MemoryDriver = class {
  queues = /* @__PURE__ */ new Map();
  /**
   * Push a job to a queue.
   */
  async push(queue, job) {
    if (!this.queues.has(queue)) {
      this.queues.set(queue, []);
    }
    this.queues.get(queue)?.push(job);
  }
  /**
   * Pop a job from a queue (FIFO).
   */
  async pop(queue) {
    const queueJobs = this.queues.get(queue);
    if (!queueJobs || queueJobs.length === 0) {
      return null;
    }
    const now = Date.now();
    const availableIndex = queueJobs.findIndex(
      (job) => !job.delaySeconds || now >= job.createdAt + job.delaySeconds * 1e3
    );
    if (availableIndex === -1) {
      return null;
    }
    return queueJobs.splice(availableIndex, 1)[0];
  }
  /**
   * Get queue size.
   */
  async size(queue) {
    return this.queues.get(queue)?.length ?? 0;
  }
  /**
   * Clear a queue.
   */
  async clear(queue) {
    this.queues.delete(queue);
  }
  /**
   * Push multiple jobs.
   */
  async pushMany(queue, jobs) {
    if (!this.queues.has(queue)) {
      this.queues.set(queue, []);
    }
    this.queues.get(queue)?.push(...jobs);
  }
  /**
   * Pop multiple jobs.
   */
  async popMany(queue, count) {
    const results = [];
    for (let i = 0; i < count; i++) {
      const job = await this.pop(queue);
      if (job) {
        results.push(job);
      } else {
        break;
      }
    }
    return results;
  }
};

// src/index.ts
init_RedisDriver();
init_SQSDriver();

// src/Job.ts
var Job = class {
  /**
   * Queue name.
   */
  queueName;
  /**
   * Connection name.
   */
  connectionName;
  /**
   * Delay before execution (seconds).
   */
  delaySeconds;
  /**
   * Current attempt number.
   */
  attempts;
  /**
   * Maximum attempts.
   */
  maxAttempts;
  /**
   * Set target queue.
   */
  onQueue(queue) {
    this.queueName = queue;
    return this;
  }
  /**
   * Set target connection.
   */
  onConnection(connection) {
    this.connectionName = connection;
    return this;
  }
  /**
   * Set delay (seconds).
   */
  delay(delay) {
    this.delaySeconds = delay;
    return this;
  }
  /**
   * Failure handler (optional).
   *
   * Called when the job fails and reaches the maximum number of attempts.
   * Subclasses can override to implement custom failure handling.
   *
   * @param error - Error instance
   */
  async failed(_error) {
  }
};

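`Job` carries only metadata and the fluent setters; the executable part is a `handle()` method that `Worker.process()` calls on subclasses. A minimal subclass sketch (the class and its email field are hypothetical, not part of the package):

```typescript
// Sketch only: a Job subclass the Worker and serializers can round-trip,
// i.e. plain serializable fields plus an async handle().
class SendEmailJob extends Job {
  constructor(public to: string) {
    super();
  }
  async handle(): Promise<void> {
    // ... send the email ...
  }
  override async failed(error: Error): Promise<void> {
    console.error(`giving up on ${this.to}:`, error);
  }
}

const job = new SendEmailJob("user@example.com").onQueue("emails").delay(60);
```
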
// src/serializers/ClassNameSerializer.ts
var ClassNameSerializer = class {
  /**
   * Job class registry (for resolving classes by name).
   */
  jobClasses = /* @__PURE__ */ new Map();
  /**
   * Register a Job class.
   * @param jobClass - Job class
   */
  register(jobClass) {
    this.jobClasses.set(jobClass.name, jobClass);
  }
  /**
   * Register multiple Job classes.
   * @param jobClasses - Job class array
   */
  registerMany(jobClasses) {
    for (const jobClass of jobClasses) {
      this.register(jobClass);
    }
  }
  /**
   * Serialize a Job.
   */
  serialize(job) {
    const id = `${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
    const className = job.constructor.name;
    const properties = {};
    for (const key in job) {
      if (Object.hasOwn(job, key) && typeof job[key] !== "function") {
        properties[key] = job[key];
      }
    }
    return {
      id,
      type: "class",
      className,
      data: JSON.stringify({
        class: className,
        properties
      }),
      createdAt: Date.now(),
      ...job.delaySeconds !== void 0 ? { delaySeconds: job.delaySeconds } : {},
      attempts: job.attempts ?? 0,
      ...job.maxAttempts !== void 0 ? { maxAttempts: job.maxAttempts } : {}
    };
  }
  /**
   * Deserialize a Job.
   */
  deserialize(serialized) {
    if (serialized.type !== "class") {
      throw new Error('Invalid serialization type: expected "class"');
    }
    if (!serialized.className) {
      throw new Error("Missing className in serialized job");
    }
    const JobClass = this.jobClasses.get(serialized.className);
    if (!JobClass) {
      throw new Error(
        `Job class "${serialized.className}" is not registered. Please register it using serializer.register().`
      );
    }
    const parsed = JSON.parse(serialized.data);
    const job = new JobClass();
    if (parsed.properties) {
      Object.assign(job, parsed.properties);
    }
    if (serialized.delaySeconds !== void 0) {
      job.delaySeconds = serialized.delaySeconds;
    }
    if (serialized.attempts !== void 0) {
      job.attempts = serialized.attempts;
    }
    if (serialized.maxAttempts !== void 0) {
      job.maxAttempts = serialized.maxAttempts;
    }
    return job;
  }
};

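`deserialize()` refuses anything whose class was not registered, so a worker process must register the same job classes the producer enqueues; and since registration keys off `jobClass.name`, minified builds need stable class names on both sides. A round-trip sketch reusing the hypothetical `SendEmailJob` from the Job sketch above:

```typescript
// Sketch only: ClassNameSerializer round trip.
const serializer = new ClassNameSerializer();
serializer.register(SendEmailJob);

const wire = serializer.serialize(new SendEmailJob("user@example.com"));
// wire.type === "class", wire.className === "SendEmailJob",
// wire.data is JSON of { class, properties }

const revived = serializer.deserialize(wire); // a real SendEmailJob again
await revived.handle();
```
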
// src/serializers/JsonSerializer.ts
var JsonSerializer = class {
  /**
   * Serialize a job.
   */
  serialize(job) {
    const id = `${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
    return {
      id,
      type: "json",
      data: JSON.stringify({
        job: job.constructor.name,
        properties: { ...job }
      }),
      createdAt: Date.now(),
      ...job.delaySeconds !== void 0 ? { delaySeconds: job.delaySeconds } : {},
      attempts: job.attempts ?? 0,
      ...job.maxAttempts !== void 0 ? { maxAttempts: job.maxAttempts } : {}
    };
  }
  /**
   * Deserialize a job.
   *
   * Note: this implementation only restores properties and does not recreate class instances.
   * For class instances, use `ClassNameSerializer`.
   */
  deserialize(serialized) {
    if (serialized.type !== "json") {
      throw new Error('Invalid serialization type: expected "json"');
    }
    const parsed = JSON.parse(serialized.data);
    const job = /* @__PURE__ */ Object.create({});
    Object.assign(job, parsed.properties);
    return job;
  }
};

// src/QueueManager.ts
var QueueManager = class {
  drivers = /* @__PURE__ */ new Map();
  serializers = /* @__PURE__ */ new Map();
  defaultConnection;
  defaultSerializer;
  constructor(config = {}) {
    this.defaultConnection = config.default ?? "default";
    const serializerType = config.defaultSerializer ?? "class";
    if (serializerType === "class") {
      this.defaultSerializer = new ClassNameSerializer();
    } else {
      this.defaultSerializer = new JsonSerializer();
    }
    if (!this.drivers.has("default")) {
      this.drivers.set("default", new MemoryDriver());
    }
    if (config.connections) {
      for (const [name, connectionConfig] of Object.entries(config.connections)) {
        this.registerConnection(name, connectionConfig);
      }
    }
  }
  /**
   * Register a connection.
   * @param name - Connection name
   * @param config - Connection config
   */
  registerConnection(name, config) {
    const driverType = config.driver;
    switch (driverType) {
      case "memory":
        this.drivers.set(name, new MemoryDriver());
        break;
      case "database": {
        const { DatabaseDriver: DatabaseDriver2 } = (init_DatabaseDriver(), __toCommonJS(DatabaseDriver_exports));
        const dbService = config.dbService;
        if (!dbService) {
          throw new Error(
            "[QueueManager] DatabaseDriver requires dbService. Please provide a database service that implements DatabaseService interface."
          );
        }
        this.drivers.set(
          name,
          new DatabaseDriver2({
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver loading requires type assertion
            dbService,
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver config type
            table: config.table
          })
        );
        break;
      }
      case "redis": {
        const { RedisDriver: RedisDriver2 } = (init_RedisDriver(), __toCommonJS(RedisDriver_exports));
        const client = config.client;
        if (!client) {
          throw new Error(
            "[QueueManager] RedisDriver requires client. Please provide Redis client in connection config."
          );
        }
        this.drivers.set(
          name,
          new RedisDriver2({
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver loading requires type assertion
            client,
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver config type
            prefix: config.prefix
          })
        );
        break;
      }
      case "kafka": {
        const { KafkaDriver: KafkaDriver2 } = (init_KafkaDriver(), __toCommonJS(KafkaDriver_exports));
        const client = config.client;
        if (!client) {
          throw new Error(
            "[QueueManager] KafkaDriver requires client. Please provide Kafka client in connection config."
          );
        }
        this.drivers.set(
          name,
          new KafkaDriver2({
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver loading requires type assertion
            client,
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver config type
            consumerGroupId: config.consumerGroupId
          })
        );
        break;
      }
      case "sqs": {
        const { SQSDriver: SQSDriver2 } = (init_SQSDriver(), __toCommonJS(SQSDriver_exports));
        const client = config.client;
        if (!client) {
          throw new Error(
            "[QueueManager] SQSDriver requires client. Please provide SQS client in connection config."
          );
        }
        this.drivers.set(
          name,
          new SQSDriver2({
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver loading requires type assertion
            client,
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver config type
            queueUrlPrefix: config.queueUrlPrefix,
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver config type
            visibilityTimeout: config.visibilityTimeout,
            // biome-ignore lint/suspicious/noExplicitAny: Dynamic driver config type
            waitTimeSeconds: config.waitTimeSeconds
          })
        );
        break;
      }
      default:
        throw new Error(
          `Driver "${driverType}" is not supported. Supported drivers: memory, database, redis, kafka, sqs`
        );
    }
  }
  /**
   * Get a driver for a connection.
   * @param connection - Connection name
   * @returns Driver instance
   */
  getDriver(connection) {
    const driver = this.drivers.get(connection);
    if (!driver) {
      throw new Error(`Connection "${connection}" not found`);
    }
    return driver;
  }
  /**
   * Get a serializer.
   * @param type - Serializer type
   * @returns Serializer instance
   */
  getSerializer(type) {
    if (type) {
      const serializer = this.serializers.get(type);
      if (!serializer) {
        throw new Error(`Serializer "${type}" not found`);
      }
      return serializer;
    }
    return this.defaultSerializer;
  }
  /**
   * Register Job classes (used by ClassNameSerializer).
   * @param jobClasses - Job class array
   */
  registerJobClasses(jobClasses) {
    if (this.defaultSerializer instanceof ClassNameSerializer) {
      this.defaultSerializer.registerMany(jobClasses);
    }
  }
  /**
   * Push a Job to the queue.
   *
   * @template T - The type of the job.
   * @param job - Job instance to push.
   * @returns The same job instance (for fluent chaining).
   *
   * @example
   * ```typescript
   * await manager.push(new SendEmailJob('user@example.com'));
   * ```
   */
  async push(job) {
    const connection = job.connectionName ?? this.defaultConnection;
    const queue = job.queueName ?? "default";
    const driver = this.getDriver(connection);
    const serializer = this.getSerializer();
    const serialized = serializer.serialize(job);
    await driver.push(queue, serialized);
    return job;
  }
  /**
   * Push multiple jobs to the queue.
   *
   * @template T - The type of the jobs.
   * @param jobs - Array of job instances.
   *
   * @example
   * ```typescript
   * await manager.pushMany([new JobA(), new JobB()]);
   * ```
   */
  async pushMany(jobs) {
    if (jobs.length === 0) {
      return;
    }
    const groups = /* @__PURE__ */ new Map();
    const serializer = this.getSerializer();
    for (const job of jobs) {
      const connection = job.connectionName ?? this.defaultConnection;
      const queue = job.queueName ?? "default";
      const key = `${connection}:${queue}`;
      const serialized = serializer.serialize(job);
      if (!groups.has(key)) {
        groups.set(key, []);
      }
      groups.get(key)?.push(serialized);
    }
    for (const [key, serializedJobs] of groups.entries()) {
      const [connection, queue] = key.split(":");
      if (!connection || !queue) {
        continue;
      }
      const driver = this.getDriver(connection);
      if (driver.pushMany) {
        await driver.pushMany(queue, serializedJobs);
      } else {
        for (const job of serializedJobs) {
          await driver.push(queue, job);
        }
      }
    }
  }
  /**
   * Pop a job from the queue.
   *
   * @param queue - Queue name (default: 'default').
   * @param connection - Connection name (optional).
   * @returns Job instance or null if queue is empty.
   *
   * @example
   * ```typescript
   * const job = await manager.pop('emails');
   * if (job) await job.handle();
   * ```
   */
  async pop(queue = "default", connection = this.defaultConnection) {
    const driver = this.getDriver(connection);
    const serializer = this.getSerializer();
    const serialized = await driver.pop(queue);
    if (!serialized) {
      return null;
    }
    try {
      return serializer.deserialize(serialized);
    } catch (error) {
      console.error("[QueueManager] Failed to deserialize job:", error);
      return null;
    }
  }
  /**
   * Get queue size.
   *
   * @param queue - Queue name (default: 'default').
   * @param connection - Connection name (optional).
   * @returns Number of jobs in the queue.
   */
  async size(queue = "default", connection = this.defaultConnection) {
    const driver = this.getDriver(connection);
    return driver.size(queue);
  }
  /**
   * Clear all jobs from a queue.
   *
   * @param queue - Queue name (default: 'default').
   * @param connection - Connection name (optional).
   */
  async clear(queue = "default", connection = this.defaultConnection) {
    const driver = this.getDriver(connection);
    await driver.clear(queue);
  }
};

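A `QueueManager` always starts with an in-memory `"default"` connection; everything else is registered from `connections` by driver name, with client objects injected rather than constructed internally. A minimal sketch combining the pieces above (the Redis client and `SendEmailJob` come from earlier sketches, not this diff):

```typescript
// Sketch only: QueueManager with the implicit memory default plus Redis.
import Redis from "ioredis";

const manager = new QueueManager({
  default: "redis",
  defaultSerializer: "class",
  connections: {
    redis: { driver: "redis", client: new Redis(), prefix: "queue:" },
  },
});
manager.registerJobClasses([SendEmailJob]); // required by ClassNameSerializer

await manager.push(new SendEmailJob("user@example.com").onQueue("emails"));
console.log(await manager.size("emails")); // 1
```
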
// src/OrbitStream.ts
var OrbitStream = class _OrbitStream {
  constructor(options = {}) {
    this.options = options;
  }
  queueManager;
  consumer;
  /**
   * Static configuration helper.
   */
  static configure(options) {
    return new _OrbitStream(options);
  }
  /**
   * Install into PlanetCore.
   */
  install(core) {
    this.queueManager = new QueueManager(this.options);
    core.adapter.use("*", async (c, next) => {
      if (this.queueManager && this.options.connections) {
        for (const [name, config] of Object.entries(this.options.connections)) {
          if (config.driver === "database" && !config.dbService) {
            try {
              const dbService = c.get("db");
              if (dbService) {
                try {
                  this.queueManager.getDriver(name);
                } catch {
                  this.queueManager.registerConnection(name, {
                    ...config,
                    dbService
                  });
                }
              }
            } catch {
            }
          }
        }
      }
      c.set("queue", this.queueManager);
      await next();
      return void 0;
    });
    core.logger.info("[OrbitStream] Installed");
    if (this.options.autoStartWorker && process.env.NODE_ENV === "development" && this.options.workerOptions) {
      this.startWorker(this.options.workerOptions);
    }
  }
  /**
   * Start embedded worker.
   */
  startWorker(options) {
    if (!this.queueManager) {
      throw new Error("QueueManager not initialized. Call install() first.");
    }
    if (this.consumer?.isRunning()) {
      throw new Error("Worker is already running");
    }
    this.consumer = new Consumer(this.queueManager, options);
    this.consumer.start().catch((error) => {
      console.error("[OrbitStream] Worker error:", error);
    });
  }
  /**
   * Stop embedded worker.
   */
  async stopWorker() {
    if (this.consumer) {
      await this.consumer.stop();
    }
  }
  /**
   * Get QueueManager instance.
   */
  getQueueManager() {
    return this.queueManager;
  }
};
export {
  ClassNameSerializer,
  Consumer,
  DatabaseDriver,
  Job,
  JsonSerializer,
  KafkaDriver,
  MemoryDriver,
  OrbitStream,
  QueueManager,
  RedisDriver,
  SQSDriver,
  Worker
};
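
Taken together, `OrbitStream` is the framework-facing wrapper: `install()` builds the `QueueManager`, exposes it to every request as `c.get("queue")` through a catch-all middleware (lazily completing `database` connections from the context's `db` service), and in development can auto-start an embedded worker. A closing sketch; the `core` object comes from Gravito's PlanetCore, which is assumed here and not shown in this diff:

```typescript
// Sketch only: plugging OrbitStream into a PlanetCore app (`core` is assumed).
const stream = OrbitStream.configure({
  default: "default",
  autoStartWorker: true, // only honored when NODE_ENV === "development"
  workerOptions: { queues: ["default"], pollInterval: 1000 },
});

stream.install(core);
// in a route handler: const queue = c.get("queue"); await queue.push(job);

await stream.stopWorker(); // graceful shutdown of the embedded Consumer
```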