@koala42/redis-highway 0.1.10 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/batch-worker.js +17 -5
- package/dist/keys.d.ts +0 -4
- package/dist/keys.js +0 -6
- package/dist/queue.spec.js +5 -0
- package/dist/stream-message-entity.js +1 -11
- package/dist/worker.js +17 -4
- package/package.json +1 -1
package/dist/batch-worker.js
CHANGED
@@ -84,11 +84,24 @@ class BatchWorker {
             const [nextCursor, messages] = result;
             cursor = nextCursor;
             if (messages && messages.length > 0) {
-
-
-
+                const pipeline = this.redis.pipeline();
+                for (const msg of messages) {
+                    const entity = new stream_message_entity_1.StreamMessageEntity(msg);
+                    pipeline.xack(this.streamName, this.groupName, entity.streamMessageId);
+                    const statusKey = this.keys.getJobStatusKey(entity.messageUuid);
+                    const timestamp = Date.now();
+                    pipeline.eval(lua_1.LUA_FINALIZE_COMPLEX, 2, statusKey, this.streamName, this.groupName, timestamp, entity.streamMessageId);
+                    if (entity.retryCount < this.maxRetries) {
+                        pipeline.xadd(this.streamName, '*', 'id', entity.messageUuid, 'target', entity.routes.join(','), 'retryCount', entity.retryCount + 1, 'data', entity.data ? JSON.stringify(entity.data) : '');
+                        pipeline.hset(statusKey, '__target', entity.routes.length);
+                    }
+                    else {
+                        console.error(`[${this.groupName}] Job ${entity.messageUuid} run out of retries (stuck). Moving to DLQ`);
+                        pipeline.xadd(this.keys.getDlqStreamKey(), '*', 'id', entity.messageUuid, 'group', this.groupName, 'error', 'Stuck message recovered max retries', 'payload', entity.data ? JSON.stringify(entity.data) : 'MISSING', 'failedAt', Date.now());
+                        pipeline.del(statusKey);
+                    }
                 }
-
+                await pipeline.exec();
             }
             else {
                 continueClaiming = false;
@@ -187,7 +200,6 @@ class BatchWorker {
         for (const message of messages) {
             if (message.routes.includes(this.groupName)) {
                 if (message.retryCount < this.maxRetries && message.data) {
-                    console.log(`[${this.groupName}] Retrying job ${message.messageUuid} attempt ${message.retryCount + 1}/${this.maxRetries}`);
                     const payloadString = JSON.stringify(message.data);
                     pipeline.xadd(this.streamName, '*', 'id', message.messageUuid, 'target', this.groupName, 'retryCount', message.retryCount + 1, 'data', payloadString);
                 }
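
Note: the rewritten claim loop above batches all recovery work into a single Redis pipeline per claim iteration: an XACK for each claimed entry, the finalize Lua script, and then either a requeue XADD with an incremented retry counter or a dead-letter XADD once retries are exhausted. The sketch below is a minimal standalone illustration of that ack-then-requeue-or-DLQ pattern with ioredis; the stream, group, and DLQ names, the ClaimedMessage shape, and the maxRetries default are illustrative placeholders rather than the package's actual classes, and the finalize script is omitted.

    // Minimal sketch of the pipelined ack/requeue/DLQ pattern shown in the diff above.
    // Names here are placeholders; the real worker also runs LUA_FINALIZE_COMPLEX.
    import Redis from 'ioredis';

    interface ClaimedMessage {
      streamId: string;    // Redis stream entry ID, e.g. "1700000000000-0"
      uuid: string;        // job UUID carried in the 'id' field
      routes: string[];    // consumer groups the job targets
      retryCount: number;
      data: unknown;
    }

    async function drainClaimed(redis: Redis, messages: ClaimedMessage[], maxRetries = 3) {
      const stream = 'jobs';    // placeholder stream name
      const group = 'workers';  // placeholder consumer group
      const pipeline = redis.pipeline();
      for (const msg of messages) {
        // Acknowledge the claimed entry so it leaves the pending entries list.
        pipeline.xack(stream, group, msg.streamId);
        if (msg.retryCount < maxRetries) {
          // Re-publish the job with an incremented retry counter.
          pipeline.xadd(stream, '*', 'id', msg.uuid, 'target', msg.routes.join(','),
            'retryCount', msg.retryCount + 1, 'data', msg.data ? JSON.stringify(msg.data) : '');
        } else {
          // Out of retries: record the job in a dead-letter stream instead.
          pipeline.xadd(`${stream}:dlq`, '*', 'id', msg.uuid, 'group', group,
            'error', 'max retries exceeded', 'failedAt', Date.now());
        }
      }
      await pipeline.exec(); // one round trip for the whole batch
    }

Executing the whole batch with a single exec() keeps the recovery path to one round trip per claim iteration instead of several commands per message.
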
package/dist/keys.d.ts
CHANGED
@@ -8,10 +8,6 @@ export declare class KeyManager {
      * And targets add their completed timestamps there
      */
    getJobStatusKey(id: string): string;
-    /**
-     * Job data contains the job payload
-     */
-    getJobDataKey(id: string): string;
     /**
      * Dead letter queue stream name
      */
package/dist/keys.js
CHANGED
@@ -16,12 +16,6 @@ class KeyManager {
     getJobStatusKey(id) {
         return `${this.streamName}:status:${id}`;
     }
-    /**
-     * Job data contains the job payload
-     */
-    getJobDataKey(id) {
-        return `${this.streamName}:data:${id}`;
-    }
     /**
      * Dead letter queue stream name
      */
package/dist/queue.spec.js
CHANGED
@@ -229,6 +229,11 @@ class TestWorker extends worker_1.Worker {
         const pending = await redis.xpending(streamName, groupName);
         // After processing, it should be ACKed, so pending count => 0 (if deleted)
         // or if finalize runs, it deletes the message entirely.
+        // Wait for cleanup (finalize runs after process)
+        await waitFor(async () => {
+            const len = await redis.xlen(streamName);
+            return len === 0;
+        }, 2000);
         const len = await redis.xlen(streamName);
         (0, vitest_1.expect)(len).toBe(0);
     });
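
Note: the spec now polls before asserting that the stream is empty, via a waitFor(fn, timeoutMs) helper whose definition is not part of this diff. A plausible shape for such a helper is sketched below; this is an assumption about its behavior (poll until the callback resolves true or the timeout elapses), not the package's actual implementation.

    // Hypothetical poll-until-true helper matching the waitFor(fn, timeoutMs)
    // call in the test above; the package's real helper is not shown in this diff.
    async function waitFor(
      check: () => Promise<boolean>,
      timeoutMs: number,
      intervalMs = 50,
    ): Promise<void> {
      const deadline = Date.now() + timeoutMs;
      while (Date.now() < deadline) {
        if (await check()) return;                            // condition met
        await new Promise((r) => setTimeout(r, intervalMs));  // back off briefly
      }
      throw new Error(`waitFor: condition not met within ${timeoutMs}ms`);
    }

Whether the real helper throws on timeout or simply returns, the spec re-reads XLEN immediately afterwards, so the final expect still reports the failure.
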
package/dist/stream-message-entity.js
CHANGED
@@ -14,17 +14,7 @@ class StreamMessageEntity {
         this._messageUuid = this._fields['id'];
         this._routes = this._fields['target'].split(',');
         this._retryCount = parseInt(this._fields['retryCount'] || '0', 10);
-
-            this._data = JSON.parse(this._fields['data']);
-        }
-        catch (e) {
-            // Handle corrupt or missing data gracefully
-            // We can set it to null (need to update type to T | null) or a dummy.
-            // Since strict T is expected, we might have to cast or throw controlled error.
-            // For now, let's assume T can be null-ish or cast. But getter says T.
-            // Let's coerce to {} as any to avoid crash, let validation downstream handle it.
-            this._data = {};
-        }
+        this._data = JSON.parse(this._fields['data']);
     }
     get data() {
         return this._data;
package/dist/worker.js
CHANGED
@@ -75,10 +75,25 @@ class Worker {
             const [nextCursor, messages] = result;
             cursor = nextCursor;
             if (messages && messages.length > 0) {
-
+                const pipeline = this.redis.pipeline();
                 for (const msg of messages) {
-
+                    const entity = new stream_message_entity_1.StreamMessageEntity(msg);
+                    pipeline.xack(this.streamName, this.groupName, entity.streamMessageId);
+                    const statusKey = this.keys.getJobStatusKey(entity.messageUuid);
+                    const timestamp = Date.now();
+                    pipeline.eval(lua_1.LUA_FINALIZE_COMPLEX, 2, statusKey, this.streamName, this.groupName, timestamp, entity.streamMessageId);
+                    if (entity.retryCount < this.MAX_RETRIES) {
+                        pipeline.xadd(this.streamName, '*', 'id', entity.messageUuid, 'target', entity.routes.join(','), 'retryCount', entity.retryCount + 1, 'data', entity.data ? JSON.stringify(entity.data) : '');
+                        pipeline.hset(statusKey, '__target', entity.routes.length);
+                    }
+                    else {
+                        console.error(`[${this.groupName}] Job ${entity.messageUuid} run outof retries (stuck). Moving to DLQ`);
+                        // DLQ
+                        pipeline.xadd(this.keys.getDlqStreamKey(), '*', 'id', entity.messageUuid, 'group', this.groupName, 'error', 'Stuck message recovered max retries', 'payload', entity.data ? JSON.stringify(entity.data) : '', 'failedAt', Date.now());
+                        pipeline.del(statusKey);
+                    }
                 }
+                await pipeline.exec();
             }
             else {
                 continueClaiming = false;
@@ -144,7 +159,6 @@ class Worker {
         await this.redis.xack(this.streamName, this.groupName, msgId);
         const payloadString = payloadData ? JSON.stringify(payloadData) : '';
         if (currentRetries < this.MAX_RETRIES && payloadData) {
-            console.log(`[${this.groupName}] Retrying job ${uuid} (Attempt ${currentRetries + 1}/${this.MAX_RETRIES})`);
             const pipeline = this.redis.pipeline();
             pipeline.xadd(this.streamName, '*', 'id', uuid, 'target', this.groupName, 'retryCount', currentRetries + 1, 'data', payloadString);
             await pipeline.exec();
@@ -158,7 +172,6 @@ class Worker {
     async finalize(messageUuid, msgId) {
         const timestamp = Date.now();
         const statusKey = this.keys.getJobStatusKey(messageUuid);
-        const dataKey = this.keys.getJobDataKey(messageUuid);
         const throughputKey = this.keys.getThroughputKey(this.groupName, timestamp);
         const totalKey = this.keys.getTotalKey(this.groupName);
         await this.redis.eval(lua_1.LUA_MARK_DONE, 5, statusKey, this.streamName, this.groupName, throughputKey, totalKey, this.groupName, timestamp, msgId);
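
Note: both Worker and BatchWorker now move exhausted jobs onto the stream returned by this.keys.getDlqStreamKey(), writing 'id', 'group', 'error', 'payload', and 'failedAt' fields. For debugging, those entries can be read back with XRANGE; the sketch below assumes a placeholder stream name ('jobs:dlq'), since the real DLQ key format is not shown in this diff.

    // Sketch: inspect dead-lettered jobs written by the new code path.
    // 'jobs:dlq' is a placeholder; the real key comes from KeyManager.getDlqStreamKey().
    import Redis from 'ioredis';

    async function listDeadLetters(redis: Redis, dlqStream = 'jobs:dlq') {
      // XRANGE returns [entryId, [field1, value1, field2, value2, ...]] tuples.
      const entries = await redis.xrange(dlqStream, '-', '+', 'COUNT', 100);
      return entries.map(([entryId, fields]) => {
        const record: Record<string, string> = {};
        for (let i = 0; i < fields.length; i += 2) {
          record[fields[i]] = fields[i + 1];
        }
        // Fields written by the workers: id, group, error, payload, failedAt.
        return { entryId, ...record };
      });
    }
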