@koala42/redis-highway 0.2.4 → 0.2.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/README.md +128 -60
  2. package/package.json +9 -3
package/README.md CHANGED
@@ -1,16 +1,14 @@
1
1
  # @koala42/redis-highway
2
2
 
3
- High performance Redis stream-based queue for Node.js. Supports Redis single instances and Valkey single instances
3
+ High performance Redis stream-based queue for Node.js. Supports Redis single instances and Valkey single instances.
4
+ Designed for high throughput and massive concurrency with low overhead.
4
5
 
5
- ## Missing features ATM
6
- - Better gracefull shutdown handling
7
- - In worker process functions expose more than just data: T like job id and current status
8
- - Option to customize/enable/disable metrics
9
- - Enable custom logger for workers and producers instead of console.logs
10
-
11
-
12
- ## Roadmap
13
- - Support redis cluster, that is probably possible only for job payloads and DLQ, since the stream is only one
6
+ ## Features
7
+ - **Lightweight**: Uses optimized Lua scripts and pipelines for maximum performance and reduced I/O.
8
+ - **Micro-Batching**: Supports batch processing for high-volume message consumption.
9
+ - **Granular Retries**: Consumer group isolation - if one group fails, only that group retries.
10
+ - **Reliability**: Auto-claiming of stuck messages (crashed consumers) and Dead Letter Queue (DLQ) support.
11
+ - **Metrics**: Built-in tracking for throughput, queue depth, DLQ size, and retries. Prometheus export ready.
14
12
 
15
13
  ## Installation
16
14
 
@@ -27,12 +25,13 @@ import { Redis } from 'ioredis';
27
25
  import { Producer } from '@koala42/redis-highway';
28
26
 
29
27
  const redis = new Redis();
30
- const producer = new Producer(redis, 'my-stream');
28
+ const producer = new Producer<{hello: string}>(redis, 'my-stream');
31
29
 
32
30
  // Send job
33
31
  await producer.push(
34
- JSON.stringify({ hello: 'world' }), // Message serialization is not done automatically
35
- ['group-A', 'group-B'] // Target specific consumer groups
32
+ { hello: 'world' }, // Type-safe payload
33
+ ['group-A', 'group-B'], // Target specific consumer groups
34
+ { ttl: 3600 } // Optional: expiration time in seconds
36
35
  );
37
36
  ```
38
37
 
@@ -42,17 +41,55 @@ await producer.push(
42
41
  import { Redis } from 'ioredis';
43
42
  import { Worker } from '@koala42/redis-highway';
44
43
 
45
- class MyWorker extends Worker<T> {
46
- async process(data: T) {
47
- console.log('Processing:', data);
48
- // throw new Error('fail'); // Automatic retry/DLQ logic
44
+ class MyWorker extends Worker<{hello: string}> {
45
+ async process(data: {hello: string}) {
46
+ console.log('Processing:', data.hello);
47
+ // throw new Error('fail'); // Triggers automatic retry logic
49
48
  }
50
49
  }
51
50
 
52
51
  const redis = new Redis();
53
- const worker = new MyWorker(redis, 'group-A', 'my-stream');
52
+ const worker = new MyWorker(
53
+ redis,
54
+ {
55
+ groupName: 'group-A',
56
+ streamName: 'my-stream',
57
+ concurrency: 10 // Number of concurrent jobs to process
58
+ }
59
+ );
54
60
 
55
61
  await worker.start();
62
+
63
+ // To stop gracefully
64
+ // await worker.stop();
65
+ ```
66
+
67
+ ### Batch Worker
68
+ Process messages in batches for higher throughput.
69
+
70
+ ```typescript
71
+ import { Redis } from 'ioredis';
72
+ import { BatchWorker } from '@koala42/redis-highway';
73
+
74
+ class MyBatchWorker extends BatchWorker<{hello: string}> {
75
+ async process(batchedData: {hello: string}[]) {
76
+ console.log(`Processing batch of ${batchedData.length} items`);
77
+ // Example: Bulk insert into database
78
+ }
79
+ }
80
+
81
+ const batchWorker = new MyBatchWorker(
82
+ redis,
83
+ {
84
+ groupName: 'group-B',
85
+ streamName: 'my-stream',
86
+ concurrency: 50, // Total items processing limit
87
+ batchSize: 10, // Items per batch
88
+ maxFetchCount: 50
89
+ }
90
+ );
91
+
92
+ await batchWorker.start();
56
93
  ```
57
94
 
58
95
  ### Metrics
@@ -62,63 +99,94 @@ import { Metrics } from '@koala42/redis-highway';
62
99
 
63
100
  const metrics = new Metrics(redis, 'my-stream');
64
101
 
65
- // Prometheus format
66
- const payload = await metrics.getPrometheusMetrics(['group-A']);
102
+ // Get raw metrics object
103
+ const stats = await metrics.getMetrics(['group-A', 'group-B']);
104
+ console.log(stats.throughput);
105
+
106
+ // Get Prometheus formatted string
107
+ const promMetrics = await metrics.getPrometheusMetrics(['group-A'], 'my_app_queue');
67
108
  ```
68
109
 
69
- ## Usage with NestJS
110
+ ## Configuration
111
+
112
+ ### Worker Options
113
+ The second argument to `Worker` and `BatchWorker` constructors is the primary configuration object.
114
+
115
+ | Option | Type | Description |
116
+ |--------|------|-------------|
117
+ | `groupName` | string | **Required**. The consumer group name (e.g., 'email-service'). |
118
+ | `streamName` | string | **Required**. The Redis stream key. |
119
+ | `concurrency` | number | **Required**. Maximum number of messages processed in parallel by this worker instance. |
120
+ | `batchSize` | number | **Required (BatchWorker only)**. Number of messages to process in a single call. |
121
+ | `maxFetchCount` | number | **Required (BatchWorker only)**. Limit for XREADGROUP count. |
122
+
123
+ ### Control Options
124
+ The third argument is for fine-tuning retry and recovery behavior.
70
125
 
71
126
  ```typescript
127
+ const worker = new MyWorker(redis, { ... }, {
128
+ maxRetries: 3, // Default: 3
129
+ blockTimeMs: 2000, // Default: 2000. XREADGROUP block time.
130
+ minIdleTimeMs: 120000, // Default: 2 minutes. Time before a message is considered stuck.
131
+ claimIntervalMs: 120000,// Default: 2 minutes. How often to check for stuck messages.
132
+ collectMetrics: true // Default: true. Enable throughput tracking.
133
+ });
134
+ ```
72
135
 
73
- // Producer
136
+ | Option | Default | Description |
137
+ |--------|---------|-------------|
138
+ | `maxRetries` | 3 | Number of times to retry a failed message before moving it to DLQ. |
139
+ | `blockTimeMs` | 2000 | Redis blocking timeout for fetching new messages (in ms). |
140
+ | `minIdleTimeMs` | 120000 | Messages pending longer than this are candidates for auto-claim (recovery). |
141
+ | `claimIntervalMs` | 120000 | Interval for checking and claiming stuck messages. |
142
+ | `collectMetrics` | true | If true, increments throughput counters in Redis. |
143
+
144
+ ## Usage with NestJS
145
+
146
+ ```typescript
147
+ // Producer Service
74
148
  @Injectable()
75
149
  export class EntryService {
76
- privater readonly producer: Producer;
77
-
78
- constructor(){
79
- this.producer = new Producer(
80
- new Redis(...), // Or reuse existing ioredis connection
81
- 'my-stream'
82
- )
150
+ private readonly producer: Producer<MyPayload>;
151
+
152
+ constructor(@InjectRedis() private readonly redis: Redis) {
153
+ this.producer = new Producer(this.redis, 'my-stream');
83
154
  }
84
-
85
- public async sth(): Promise<void>{
86
- await producer.push(
87
- JSON.stringify({ hello: 'world' }), // Message serialization is not done automatically
88
- ['group-A', 'group-B'] // Target specific consumer groups
89
- );
155
+
156
+ async addToQueue(data: MyPayload) {
157
+ await this.producer.push(data, ['group-A']);
90
158
  }
91
159
  }
92
160
 
93
-
94
- // Processor
161
+ // Worker Service
95
162
  @Injectable()
96
- export class ProcessorService extends Worker<T> implements OnModuleInit, OnModuleDestroy {
97
- constructor(){
98
- super(
99
- new Redis(...), // or reuse existing redis conn
100
- 'group-A',
101
- 'my-stream',
102
- 50 // concurrency
103
- )
163
+ export class ProcessorService extends Worker<MyPayload> implements OnModuleInit, OnModuleDestroy {
164
+ constructor(@InjectRedis() redis: Redis) {
165
+ super(redis, {
166
+ groupName: 'group-A',
167
+ streamName: 'my-stream',
168
+ concurrency: 50
169
+ });
104
170
  }
105
-
106
- async onModuleInit(): Promise<void>{
107
- await this.start()
171
+
172
+ async onModuleInit() {
173
+ await this.start();
108
174
  }
109
-
110
- onModuleDestroy(){
111
- this.stop()
175
+
176
+ async onModuleDestroy() {
177
+ await this.stop();
112
178
  }
113
-
114
- async process(data: T): Promise<void>{
115
- console.log("Processing job", JSON.stringify(data))
179
+
180
+ async process(data: MyPayload) {
181
+ // Process your job here
116
182
  }
117
183
  }
118
- ````
184
+ ```
119
185
 
120
- ## Features
121
- - **Lightweight**: Uses light Lua scripts and pipelines wherever possible, making it highly concurrents for inserts and for processing as well, because of the reduced I/O load compared to BullMQ
122
- - **Granular Retries**: If one group fails, only that group retries.
123
- - **DLQ**: Dead Letter Queue support after max retries.
124
- - **Metrics**: Throughput, Waiting, DLQ, Prometheus export.
186
+ ## Roadmap & Missing Features
187
+ Tracked in [GitHub Issues](https://github.com/Koala42/redis-highway/issues)
188
+
189
+ ## AI Usage Disclosure
190
+ - AI will not be used for the development, ever
191
+ - AI may be used to do code reviews
192
+ - AI may be used to write unit tests
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@koala42/redis-highway",
3
- "version": "0.2.4",
3
+ "version": "0.2.9",
4
4
  "description": "High performance redis queue",
5
5
  "license": "MIT",
6
6
  "author": {
@@ -8,6 +8,11 @@
8
8
  "name": "David Stranava",
9
9
  "url": "https://github.com/stranavad"
10
10
  },
11
+ "repository": {
12
+ "url": "https://github.com/Koala42/redis-highway",
13
+ "type": "git",
14
+ "directory": "packages/redis-highway"
15
+ },
11
16
  "type": "commonjs",
12
17
  "main": "dist/src/index.js",
13
18
  "types": "dist/src/index.d.ts",
@@ -16,7 +21,8 @@
16
21
  ],
17
22
  "scripts": {
18
23
  "clean": "rimraf dist",
19
- "test:all": "vitest run test/queue.spec.ts test/batch-worker.spec.ts",
24
+ "test": "vitest run test",
25
+ "test:all": "vitest run test",
20
26
  "build": "npm run clean && tsc",
21
27
  "prepublish": "npm run build"
22
28
  },
@@ -36,4 +42,4 @@
36
42
  "typescript": "^5.9.3",
37
43
  "vitest": "^4.0.16"
38
44
  }
39
- }
45
+ }