@koala42/redis-highway 0.2.12 → 0.2.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/dist/base-worker.js +6 -4
- package/dist/dlq-message-entity.d.ts +19 -0
- package/dist/dlq-message-entity.js +47 -0
- package/dist/dlq-worker.d.ts +35 -0
- package/dist/dlq-worker.js +83 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2 -0
- package/dist/interfaces.d.ts +5 -0
- package/dist/producer.d.ts +0 -1
- package/dist/producer.js +3 -3
- package/dist/stream-message-entity.d.ts +1 -0
- package/dist/stream-message-entity.js +7 -0
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -92,6 +92,57 @@ const batchWorker = new MyBatchWorker(
|
|
|
92
92
|
await batchWorker.start();
|
|
93
93
|
```
|
|
94
94
|
|
|
95
|
+
### DLQ Worker
|
|
96
|
+
Process messages from the Dead Letter Queue. Use this to handle jobs that have exhausted all retries.
|
|
97
|
+
|
|
98
|
+
**Important:** DLQ Worker has no built-in error handling or retry policy. If `process()` throws an error, the message is lost. This is by design - DLQ processing is meant for manual intervention, logging, or forwarding to external systems.
|
|
99
|
+
|
|
100
|
+
```typescript
|
|
101
|
+
import { Redis } from 'ioredis';
|
|
102
|
+
import { DlqWorker, DlqMessageEntity } from '@koala42/redis-highway';
|
|
103
|
+
|
|
104
|
+
class MyDlqWorker extends DlqWorker<{hello: string}> {
|
|
105
|
+
async process(message: DlqMessageEntity<{hello: string}>) {
|
|
106
|
+
console.log('Failed job data:', message.data);
|
|
107
|
+
console.log('Original error:', message.errorMessage);
|
|
108
|
+
console.log('Failed at:', new Date(message.failedAt));
|
|
109
|
+
console.log('Original consumer group:', message.group);
|
|
110
|
+
|
|
111
|
+
// Example: Log to external system, send alert, or store for manual review
|
|
112
|
+
await externalLogger.log(message);
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
const redis = new Redis();
|
|
117
|
+
const dlqWorker = new MyDlqWorker(redis, {
|
|
118
|
+
streamName: 'my-stream' // Must match your main worker's stream
|
|
119
|
+
});
|
|
120
|
+
|
|
121
|
+
await dlqWorker.start();
|
|
122
|
+
|
|
123
|
+
// To stop gracefully
|
|
124
|
+
// await dlqWorker.stop();
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
#### DLQ Worker Options
|
|
128
|
+
|
|
129
|
+
| Option | Type | Default | Description |
|
|
130
|
+
|--------|------|---------|-------------|
|
|
131
|
+
| `streamName` | string | - | **Required**. The Redis stream key (same as your main workers). |
|
|
132
|
+
| `blockTimeoutMs` | number | 5000 | Redis XREADGROUP block duration in milliseconds. |
|
|
133
|
+
| `waitTimeoutMs` | number | 5000 | Wait time between processing cycles when no messages are available. |
|
|
134
|
+
|
|
135
|
+
#### DlqMessageEntity Properties
|
|
136
|
+
|
|
137
|
+
| Property | Type | Description |
|
|
138
|
+
|----------|------|-------------|
|
|
139
|
+
| `data` | T | The original job payload. |
|
|
140
|
+
| `errorMessage` | string | The error message from the last failed attempt. |
|
|
141
|
+
| `failedAt` | number | Unix timestamp when the job was moved to DLQ. |
|
|
142
|
+
| `group` | string | The consumer group that failed to process this job. |
|
|
143
|
+
| `messageUuid` | string | The original job's unique identifier. |
|
|
144
|
+
| `streamMessageId` | string | The Redis stream message ID. |
|
|
145
|
+
|
|
95
146
|
### Metrics
|
|
96
147
|
|
|
97
148
|
```typescript
|
package/dist/base-worker.js
CHANGED
|
@@ -6,6 +6,7 @@ const keys_1 = require("./keys");
|
|
|
6
6
|
const uuid_1 = require("uuid");
|
|
7
7
|
const stream_message_entity_1 = require("./stream-message-entity");
|
|
8
8
|
const lua_1 = require("./lua");
|
|
9
|
+
const dlq_message_entity_1 = require("./dlq-message-entity");
|
|
9
10
|
class BaseWorker {
|
|
10
11
|
constructor(redis, options, controlOptions, metricsOptions) {
|
|
11
12
|
this.redis = redis;
|
|
@@ -44,8 +45,8 @@ class BaseWorker {
|
|
|
44
45
|
throw e;
|
|
45
46
|
}
|
|
46
47
|
}
|
|
47
|
-
this._fetchLoop();
|
|
48
|
-
this._autoClaimLoop();
|
|
48
|
+
this._fetchLoop().catch((e) => console.error("Fetch loop crashed", e));
|
|
49
|
+
this._autoClaimLoop().catch((e) => console.error('Auto claim loop crashed', e));
|
|
49
50
|
}
|
|
50
51
|
/**
|
|
51
52
|
* Gracefully stops the worker
|
|
@@ -60,6 +61,7 @@ class BaseWorker {
|
|
|
60
61
|
while (this._activeCount > 0) {
|
|
61
62
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
|
62
63
|
}
|
|
64
|
+
await this.redis.xgroup('DELCONSUMER', this._streamName, this._groupName, this._consumerName).catch();
|
|
63
65
|
}
|
|
64
66
|
/**
|
|
65
67
|
* Auto claim loop
|
|
@@ -116,7 +118,7 @@ class BaseWorker {
|
|
|
116
118
|
if (message.retryCount < this._maxRetries) {
|
|
117
119
|
retryCountIncr++;
|
|
118
120
|
const newJobId = (0, uuid_1.v7)();
|
|
119
|
-
pipeline.xadd(this._streamName, '*',
|
|
121
|
+
pipeline.xadd(this._streamName, '*', ...stream_message_entity_1.StreamMessageEntity.getStreamFields(newJobId, this._groupName, message.serializedData, message.retryCount + 1));
|
|
120
122
|
const newStatusKey = this._keys.getJobStatusKey(newJobId);
|
|
121
123
|
pipeline.hset(newStatusKey, '__target', 1);
|
|
122
124
|
const statusKey = this._keys.getJobStatusKey(message.messageUuid);
|
|
@@ -126,7 +128,7 @@ class BaseWorker {
|
|
|
126
128
|
console.error(`[${this._groupName}] Job ${message.messageUuid} run out of retries. Moving to DLQ`);
|
|
127
129
|
messagesToDLQ.push(message);
|
|
128
130
|
// Add message to DLQ stream
|
|
129
|
-
pipeline.xadd(this._keys.getDlqStreamKey(), '*',
|
|
131
|
+
pipeline.xadd(this._keys.getDlqStreamKey(), '*', ...dlq_message_entity_1.DlqMessageEntity.getStreamFields(message.messageUuid, this._groupName, errorMessage, message.serializedData, timestamp));
|
|
130
132
|
const statusKey = this._keys.getJobStatusKey(message.messageUuid);
|
|
131
133
|
pipeline.eval(lua_1.LUA_FINALIZE, 2, statusKey, this._streamName, this._groupName, timestamp, message.streamMessageId);
|
|
132
134
|
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { StreamMessage } from "./interfaces";
/**
 * Typed wrapper around a raw Redis stream entry read from the DLQ stream.
 * T is the shape of the original job payload stored under the 'payload' field.
 */
export declare class DlqMessageEntity<T extends Record<string, unknown>> {
    private readonly _streamMessageId;
    private readonly _rawFields;
    private readonly _fields;
    private readonly _group;
    private readonly _errorMessage;
    private readonly _failedAt;
    private readonly _messageUuid;
    private readonly _data;
    constructor(message: StreamMessage);
    /** The original job payload, JSON-parsed from the entry's 'payload' field. */
    get data(): T;
    /** Redis stream entry id (e.g. "1700000000000-0"). */
    get streamMessageId(): string;
    /** The original job's unique identifier (the 'id' field). */
    get messageUuid(): string;
    /** Consumer group that failed to process this job (the 'group' field). */
    get group(): string;
    /** Error message recorded from the last failed attempt. */
    get errorMessage(): string;
    /** Numeric timestamp when the job was moved to the DLQ ('failedAt' coerced via Number). */
    get failedAt(): number;
    /** Builds the flat key/value field list used when XADD-ing onto the DLQ stream. */
    static getStreamFields(id: string, group: string, error: string, payload: string, failedAt: number): (string | number)[];
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DlqMessageEntity = void 0;
|
|
4
|
+
class DlqMessageEntity {
|
|
5
|
+
constructor(message) {
|
|
6
|
+
this._rawFields = [];
|
|
7
|
+
this._fields = {};
|
|
8
|
+
this._streamMessageId = message[0];
|
|
9
|
+
this._rawFields = message[1];
|
|
10
|
+
for (let i = 0; i < this._rawFields.length; i += 2) {
|
|
11
|
+
this._fields[this._rawFields[i]] = this._rawFields[i + 1];
|
|
12
|
+
}
|
|
13
|
+
this._messageUuid = this._fields['id'];
|
|
14
|
+
this._group = this._fields['group'];
|
|
15
|
+
this._errorMessage = this._fields['error'];
|
|
16
|
+
this._failedAt = Number(this._fields['failedAt']);
|
|
17
|
+
this._data = JSON.parse(this._fields['payload']);
|
|
18
|
+
}
|
|
19
|
+
get data() {
|
|
20
|
+
return this._data;
|
|
21
|
+
}
|
|
22
|
+
get streamMessageId() {
|
|
23
|
+
return this._streamMessageId;
|
|
24
|
+
}
|
|
25
|
+
get messageUuid() {
|
|
26
|
+
return this._messageUuid;
|
|
27
|
+
}
|
|
28
|
+
get group() {
|
|
29
|
+
return this._group;
|
|
30
|
+
}
|
|
31
|
+
get errorMessage() {
|
|
32
|
+
return this._errorMessage;
|
|
33
|
+
}
|
|
34
|
+
get failedAt() {
|
|
35
|
+
return this._failedAt;
|
|
36
|
+
}
|
|
37
|
+
static getStreamFields(id, group, error, payload, failedAt) {
|
|
38
|
+
return [
|
|
39
|
+
'id', id,
|
|
40
|
+
'group', group,
|
|
41
|
+
'error', error,
|
|
42
|
+
'payload', payload,
|
|
43
|
+
'failedAt', failedAt
|
|
44
|
+
];
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
exports.DlqMessageEntity = DlqMessageEntity;
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import Redis from "ioredis";
|
|
2
|
+
import { KeyManager } from "./keys";
|
|
3
|
+
import { DlqWorkerOptions, XReadGroupResponse } from "./interfaces";
|
|
4
|
+
import { DlqMessageEntity } from "./dlq-message-entity";
|
|
5
|
+
export declare abstract class DlqWorker<T extends Record<string, unknown>> {
|
|
6
|
+
protected readonly _redis: Redis;
|
|
7
|
+
protected _isRunning: boolean;
|
|
8
|
+
protected readonly _keys: KeyManager;
|
|
9
|
+
protected readonly _consumerId: string;
|
|
10
|
+
protected readonly _dlqStreamName: string;
|
|
11
|
+
protected readonly _groupName = "dlq-worker";
|
|
12
|
+
protected readonly _consumerName: string;
|
|
13
|
+
protected readonly _blockTimeoutMs: number;
|
|
14
|
+
protected readonly _waitTimeoutMs: number;
|
|
15
|
+
constructor(_redis: Redis, options: DlqWorkerOptions);
|
|
16
|
+
/**
|
|
17
|
+
* Start DLQ worker
|
|
18
|
+
* @returns
|
|
19
|
+
*/
|
|
20
|
+
protected start(): Promise<void>;
|
|
21
|
+
/**
|
|
22
|
+
* Stop DLQ loop
|
|
23
|
+
*/
|
|
24
|
+
protected stop(): Promise<void>;
|
|
25
|
+
/**
|
|
26
|
+
* Background* DLQ loopw
|
|
27
|
+
*/
|
|
28
|
+
protected dlqLoop(): Promise<void>;
|
|
29
|
+
/**
|
|
30
|
+
* Reads job from DLQ
|
|
31
|
+
* @returns
|
|
32
|
+
*/
|
|
33
|
+
protected _readGroup(): Promise<XReadGroupResponse | null>;
|
|
34
|
+
abstract process(data: DlqMessageEntity<T>): Promise<void>;
|
|
35
|
+
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DlqWorker = void 0;
|
|
4
|
+
const keys_1 = require("./keys");
|
|
5
|
+
const uuid_1 = require("uuid");
|
|
6
|
+
const dlq_message_entity_1 = require("./dlq-message-entity");
|
|
7
|
+
class DlqWorker {
|
|
8
|
+
constructor(_redis, options) {
|
|
9
|
+
this._redis = _redis;
|
|
10
|
+
this._isRunning = false;
|
|
11
|
+
this._consumerId = (0, uuid_1.v7)();
|
|
12
|
+
this._groupName = 'dlq-worker';
|
|
13
|
+
this._consumerName = `${this._groupName}-${this._consumerId}`;
|
|
14
|
+
this._keys = new keys_1.KeyManager(options.streamName);
|
|
15
|
+
this._dlqStreamName = this._keys.getDlqStreamKey();
|
|
16
|
+
this._blockTimeoutMs = options.blockTimeoutMs ?? 5000;
|
|
17
|
+
this._waitTimeoutMs = options.waitTimeoutMs ?? 5000;
|
|
18
|
+
}
|
|
19
|
+
/**
|
|
20
|
+
* Start DLQ worker
|
|
21
|
+
* @returns
|
|
22
|
+
*/
|
|
23
|
+
async start() {
|
|
24
|
+
if (this._isRunning) {
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
this._isRunning = true;
|
|
28
|
+
try {
|
|
29
|
+
await this._redis.xgroup('CREATE', this._dlqStreamName, this._groupName, '0', 'MKSTREAM');
|
|
30
|
+
}
|
|
31
|
+
catch (e) {
|
|
32
|
+
if (!e.message.includes('BUSYGROUP')) {
|
|
33
|
+
throw e;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
this.dlqLoop().catch((e) => console.error('DLQ loop crashed', e));
|
|
37
|
+
}
|
|
38
|
+
/**
|
|
39
|
+
* Stop DLQ loop
|
|
40
|
+
*/
|
|
41
|
+
async stop() {
|
|
42
|
+
this._isRunning = false;
|
|
43
|
+
await this._redis.xgroup('DELCONSUMER', this._dlqStreamName, this._groupName, this._consumerName).catch();
|
|
44
|
+
}
|
|
45
|
+
/**
|
|
46
|
+
* Background* DLQ loopw
|
|
47
|
+
*/
|
|
48
|
+
async dlqLoop() {
|
|
49
|
+
while (this._isRunning) {
|
|
50
|
+
try {
|
|
51
|
+
const results = await this._readGroup();
|
|
52
|
+
if (!results) {
|
|
53
|
+
await new Promise((resolve) => setTimeout(resolve, this._waitTimeoutMs));
|
|
54
|
+
continue;
|
|
55
|
+
}
|
|
56
|
+
const message = results[0][1][0];
|
|
57
|
+
if (!message) {
|
|
58
|
+
await new Promise((resolve) => setTimeout(resolve, this._waitTimeoutMs));
|
|
59
|
+
continue;
|
|
60
|
+
}
|
|
61
|
+
const dlqMessage = new dlq_message_entity_1.DlqMessageEntity(message);
|
|
62
|
+
// Ack and delete the message. XACKDEL is supported from 8.2
|
|
63
|
+
await this._redis.multi()
|
|
64
|
+
.xack(this._dlqStreamName, this._groupName, dlqMessage.streamMessageId)
|
|
65
|
+
.xdel(this._dlqStreamName, dlqMessage.streamMessageId)
|
|
66
|
+
.exec();
|
|
67
|
+
await this.process(dlqMessage);
|
|
68
|
+
}
|
|
69
|
+
catch (e) {
|
|
70
|
+
console.error(`[${this._groupName}] Failed processing DLQ job`, e);
|
|
71
|
+
await new Promise((resolve) => setTimeout(resolve, this._waitTimeoutMs));
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
/**
|
|
76
|
+
* Reads job from DLQ
|
|
77
|
+
* @returns
|
|
78
|
+
*/
|
|
79
|
+
async _readGroup() {
|
|
80
|
+
return this._redis.xreadgroup('GROUP', this._groupName, this._consumerName, 'COUNT', 1, 'BLOCK', this._blockTimeoutMs, 'STREAMS', this._dlqStreamName, '>');
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
exports.DlqWorker = DlqWorker;
|
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
|
@@ -22,3 +22,5 @@ __exportStar(require("./interfaces"), exports);
|
|
|
22
22
|
__exportStar(require("./batch-worker"), exports);
|
|
23
23
|
__exportStar(require("./stream-message-entity"), exports);
|
|
24
24
|
__exportStar(require("./base-worker"), exports);
|
|
25
|
+
__exportStar(require("./dlq-message-entity"), exports);
|
|
26
|
+
__exportStar(require("./dlq-worker"), exports);
|
package/dist/interfaces.d.ts
CHANGED
|
@@ -5,6 +5,11 @@ export interface BaseWorkerOptions {
|
|
|
5
5
|
streamName: string;
|
|
6
6
|
concurrency: number;
|
|
7
7
|
}
|
|
8
|
+
export interface DlqWorkerOptions {
|
|
9
|
+
streamName: string;
|
|
10
|
+
blockTimeoutMs?: number;
|
|
11
|
+
waitTimeoutMs?: number;
|
|
12
|
+
}
|
|
8
13
|
export interface BaseWorkerControlOptions {
|
|
9
14
|
maxRetries: number;
|
|
10
15
|
blockTimeMs: number;
|
package/dist/producer.d.ts
CHANGED
package/dist/producer.js
CHANGED
|
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.Producer = void 0;
|
|
4
4
|
const uuid_1 = require("uuid");
|
|
5
5
|
const keys_1 = require("./keys");
|
|
6
|
+
const stream_message_entity_1 = require("./stream-message-entity");
|
|
6
7
|
class Producer {
|
|
7
8
|
constructor(redis, streamName) {
|
|
8
9
|
this.redis = redis;
|
|
@@ -19,17 +20,16 @@ class Producer {
|
|
|
19
20
|
async push(payload, targetGroups, opts) {
|
|
20
21
|
const serializedPayload = JSON.stringify(payload);
|
|
21
22
|
const id = (0, uuid_1.v7)();
|
|
22
|
-
const ttl = opts?.ttl || null; //
|
|
23
|
+
const ttl = opts?.ttl || null; // Defaults to null, no expiry
|
|
23
24
|
const pipeline = this.redis.pipeline();
|
|
24
25
|
const statusKey = this.keys.getJobStatusKey(id);
|
|
25
26
|
// Initialize job metadata - status
|
|
26
|
-
// TODO: improve target groups use groups join by "," instead of groups length
|
|
27
27
|
pipeline.hset(statusKey, '__target', targetGroups.length);
|
|
28
28
|
if (ttl) {
|
|
29
29
|
pipeline.expire(statusKey, ttl);
|
|
30
30
|
}
|
|
31
31
|
// Push message to stream
|
|
32
|
-
pipeline.xadd(this.streamName, '*',
|
|
32
|
+
pipeline.xadd(this.streamName, '*', ...stream_message_entity_1.StreamMessageEntity.getStreamFields(id, targetGroups, serializedPayload));
|
|
33
33
|
await pipeline.exec();
|
|
34
34
|
return id;
|
|
35
35
|
}
|
|
@@ -15,4 +15,5 @@ export declare class StreamMessageEntity<T extends Record<string, unknown>> {
|
|
|
15
15
|
get messageUuid(): string;
|
|
16
16
|
get routes(): string[];
|
|
17
17
|
get retryCount(): number;
|
|
18
|
+
static getStreamFields(id: string, target: string | string[], serializedPayload: string, retryCount?: number): (string | number)[];
|
|
18
19
|
}
|
|
@@ -35,5 +35,12 @@ class StreamMessageEntity {
|
|
|
35
35
|
get retryCount() {
|
|
36
36
|
return this._retryCount;
|
|
37
37
|
}
|
|
38
|
+
static getStreamFields(id, target, serializedPayload, retryCount) {
|
|
39
|
+
const fields = ['id', id, 'target', Array.isArray(target) ? target.join(',') : target, 'data', serializedPayload];
|
|
40
|
+
if (retryCount !== undefined) {
|
|
41
|
+
fields.push('retryCount', retryCount);
|
|
42
|
+
}
|
|
43
|
+
return fields;
|
|
44
|
+
}
|
|
38
45
|
}
|
|
39
46
|
exports.StreamMessageEntity = StreamMessageEntity;
|