@koala42/redis-highway 0.2.3 → 0.2.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/src/base-worker.d.ts CHANGED
@@ -1,7 +1,7 @@
 import { EventEmitter } from "events";
 import { KeyManager } from "./keys";
 import Redis from "ioredis";
-import { BaseWorkerControlOptions, BaseWorkerOptions, StreamMessage, XReadGroupResponse } from "./interfaces";
+import { BaseWorkerControlOptions, BaseWorkerCustomMetricsOptions, BaseWorkerOptions, StreamMessage, XReadGroupResponse } from "./interfaces";
 import { StreamMessageEntity } from "./stream-message-entity";
 export declare abstract class BaseWorker<T extends Record<string, unknown>> {
     protected redis: Redis;
@@ -20,7 +20,8 @@ export declare abstract class BaseWorker<T extends Record<string, unknown>> {
     protected readonly _claimIntervalMs: number;
     protected readonly _minIdleTimeMs: number;
     protected readonly _collectMetrics: boolean;
-    constructor(redis: Redis, options: BaseWorkerOptions, controlOptions: BaseWorkerControlOptions);
+    protected readonly _finalIncrementMetricKey: ((item: T) => string | null) | null;
+    constructor(redis: Redis, options: BaseWorkerOptions, controlOptions: BaseWorkerControlOptions, metricsOptions: BaseWorkerCustomMetricsOptions<T>);
     /**
      * Start the worker process
      * Starts fetch loop and auto claim loop
package/dist/src/base-worker.js CHANGED
@@ -7,7 +7,7 @@ const uuid_1 = require("uuid");
 const stream_message_entity_1 = require("./stream-message-entity");
 const lua_1 = require("./lua");
 class BaseWorker {
-    constructor(redis, options, controlOptions) {
+    constructor(redis, options, controlOptions, metricsOptions) {
         this.redis = redis;
         this._isRunning = false;
         this._activeCount = 0;
@@ -22,6 +22,7 @@ class BaseWorker {
         this._claimIntervalMs = controlOptions.claimIntervalMs;
         this._minIdleTimeMs = controlOptions.minIdleTimeMs;
         this._collectMetrics = controlOptions.collectMetrics;
+        this._finalIncrementMetricKey = metricsOptions.finalIncrementMetricKey ?? null;
         this._consumerName = `${this._groupName}-${this._consumerId}`;
         this._keys = new keys_1.KeyManager(options.streamName);
         this._blockingRedis = redis.duplicate();
@@ -178,7 +179,13 @@ class BaseWorker {
         }
         for (const msg of messages) {
             const statusKey = this._keys.getJobStatusKey(msg.messageUuid);
-            pipeline.eval(lua_1.LUA_FINALIZE, 2, statusKey, this._streamName, this._groupName, timestamp, msg.streamMessageId);
+            const customMetricKey = this._finalIncrementMetricKey ? this._finalIncrementMetricKey(msg.data) : null;
+            if (customMetricKey) {
+                pipeline.eval(lua_1.LUA_FINALIZE_CUSTOM_METRIC, 3, statusKey, this._streamName, customMetricKey, this._groupName, timestamp, msg.streamMessageId);
+            }
+            else {
+                pipeline.eval(lua_1.LUA_FINALIZE, 2, statusKey, this._streamName, this._groupName, timestamp, msg.streamMessageId);
+            }
         }
         await pipeline.exec();
     }
package/dist/src/batch-worker.d.ts CHANGED
@@ -1,10 +1,10 @@
 import Redis from "ioredis";
-import { BaseWorkerControlOptions, BatchWorkerOptions } from "./interfaces";
+import { BaseWorkerControlOptions, BaseWorkerCustomMetricsOptions, BatchWorkerOptions } from "./interfaces";
 import { BaseWorker } from "./base-worker";
 export declare abstract class BatchWorker<T extends Record<string, unknown>> extends BaseWorker<T> {
     private readonly _batchSize;
     private readonly _maxFetchCount;
-    constructor(redis: Redis, options: BatchWorkerOptions, controlOptions?: BaseWorkerControlOptions);
+    constructor(redis: Redis, options: BatchWorkerOptions, controlOptions?: BaseWorkerControlOptions, metricsOptions?: BaseWorkerCustomMetricsOptions<T>);
     protected _fetchLoop(): Promise<void>;
     /**
      * Spawn worker for current processing
package/dist/src/batch-worker.js CHANGED
@@ -5,8 +5,8 @@ const interfaces_1 = require("./interfaces");
 const stream_message_entity_1 = require("./stream-message-entity");
 const base_worker_1 = require("./base-worker");
 class BatchWorker extends base_worker_1.BaseWorker {
-    constructor(redis, options, controlOptions = interfaces_1.defaultBaseWorkerControlOptions) {
-        super(redis, options, controlOptions);
+    constructor(redis, options, controlOptions = interfaces_1.defaultBaseWorkerControlOptions, metricsOptions = interfaces_1.defaultBaseWorkerCustomMetrics) {
+        super(redis, options, controlOptions, metricsOptions);
         this._batchSize = options.batchSize;
         this._maxFetchCount = options.maxFetchCount;
         if (this._batchSize === 1) {
package/dist/src/interfaces.d.ts CHANGED
@@ -12,6 +12,10 @@ export interface BaseWorkerControlOptions {
     minIdleTimeMs: number;
     collectMetrics: boolean;
 }
+export interface BaseWorkerCustomMetricsOptions<T extends Record<string, unknown>> {
+    finalIncrementMetricKey?: ((item: T) => string | null) | null;
+}
+export declare const defaultBaseWorkerCustomMetrics: BaseWorkerCustomMetricsOptions<{}>;
 export declare const defaultBaseWorkerControlOptions: BaseWorkerControlOptions;
 export interface BatchWorkerOptions extends BaseWorkerOptions {
     batchSize: number;
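Taken together with the base-worker change above, the new `BaseWorkerCustomMetricsOptions` interface lets a worker map each finalized message to a Redis counter key. A minimal sketch of supplying the option, assuming the type is re-exported from the package root (otherwise import it from the interfaces module); the `JobData` shape and the key naming scheme are illustrative only:

```typescript
import { BaseWorkerCustomMetricsOptions } from "@koala42/redis-highway";

// Hypothetical job payload type, for illustration only.
type JobData = { tenantId: string; kind: string };

const metricsOptions: BaseWorkerCustomMetricsOptions<JobData> = {
  // Called per finalized message with msg.data; return a Redis key to
  // increment by 1, or null to skip the custom metric for that message.
  finalIncrementMetricKey: (item) =>
    item.kind === "billing" ? `metrics:completed:${item.tenantId}` : null,
};
```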
package/dist/src/interfaces.js CHANGED
@@ -1,6 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.defaultBaseWorkerControlOptions = void 0;
+exports.defaultBaseWorkerControlOptions = exports.defaultBaseWorkerCustomMetrics = void 0;
+exports.defaultBaseWorkerCustomMetrics = {
+    finalIncrementMetricKey: null
+};
 exports.defaultBaseWorkerControlOptions = {
     maxRetries: 3,
     minIdleTimeMs: 120000,
package/dist/src/lua.d.ts CHANGED
@@ -1 +1,2 @@
 export declare const LUA_FINALIZE = "\n-- KEYS[1] = status key\n-- KEYS[2] = stream key\n-- ARGV[1] = group name\n-- ARGV[2] = timestamp\n-- ARGV[3] = msgId\n\n-- 1. Update status\nredis.call('HSET', KEYS[1], ARGV[1], ARGV[2])\n\n-- 2. Check completions\nlocal current_fields = redis.call('HLEN', KEYS[1])\nlocal target_str = redis.call('HGET', KEYS[1], '__target')\nlocal target = tonumber(target_str)\n\nif not target then\n return 0\nend\n\n-- 3. Cleanup if done\nif current_fields >= (target + 1) then\n redis.call('DEL', KEYS[1])\n redis.call('XDEL', KEYS[2], ARGV[3])\n return 1\nend\n\nreturn 0\n";
+export declare const LUA_FINALIZE_CUSTOM_METRIC = "\n -- KEYS[1] = status key\n -- KEYS[2] = stream key\n -- KEYS[3] = custom metric increment key\n -- ARGV[1] = group name\n -- ARGV[2] = timestamp\n -- ARGV[3] = msgId\n\n -- 1. Update status\n redis.call('HSET', KEYS[1], ARGV[1], ARGV[2])\n\n -- 2. Check completions\n local current_fields = redis.call('HLEN', KEYS[1])\n local target_str = redis.call('HGET', KEYS[1], '__target')\n local target = tonumber(target_str)\n\n if not target then\n return 0\n end\n\n -- 3. Cleanup if done\n if current_fields >= (target + 1) then\n redis.call('DEL', KEYS[1])\n redis.call('XDEL', KEYS[2], ARGV[3])\n redis.pcall('INCRBY', KEYS[3], 1)\n return 1\n end\n\n return 0\n";
package/dist/src/lua.js CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.LUA_FINALIZE = void 0;
+exports.LUA_FINALIZE_CUSTOM_METRIC = exports.LUA_FINALIZE = void 0;
 exports.LUA_FINALIZE = `
 -- KEYS[1] = status key
 -- KEYS[2] = stream key
@@ -29,3 +29,33 @@ end
 
 return 0
 `;
+exports.LUA_FINALIZE_CUSTOM_METRIC = `
+ -- KEYS[1] = status key
+ -- KEYS[2] = stream key
+ -- KEYS[3] = custom metric increment key
+ -- ARGV[1] = group name
+ -- ARGV[2] = timestamp
+ -- ARGV[3] = msgId
+
+ -- 1. Update status
+ redis.call('HSET', KEYS[1], ARGV[1], ARGV[2])
+
+ -- 2. Check completions
+ local current_fields = redis.call('HLEN', KEYS[1])
+ local target_str = redis.call('HGET', KEYS[1], '__target')
+ local target = tonumber(target_str)
+
+ if not target then
+ return 0
+ end
+
+ -- 3. Cleanup if done
+ if current_fields >= (target + 1) then
+ redis.call('DEL', KEYS[1])
+ redis.call('XDEL', KEYS[2], ARGV[3])
+ redis.pcall('INCRBY', KEYS[3], 1)
+ return 1
+ end
+
+ return 0
+`;
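The new script differs from `LUA_FINALIZE` only in the third key and the `redis.pcall('INCRBY', KEYS[3], 1)` issued once the status hash reaches its target, so a failed increment never aborts the cleanup. A sketch of invoking the script directly with ioredis to show the KEYS/ARGV layout; the deep import path and all key/ID values are illustrative assumptions, and in normal use the worker runs this for you inside its finalize pipeline:

```typescript
import Redis from "ioredis";
// Assumed deep import into the published dist output; not an officially documented entry point.
import { LUA_FINALIZE_CUSTOM_METRIC } from "@koala42/redis-highway/dist/src/lua";

const redis = new Redis();

// Run inside an async context; placeholder keys and IDs stand in for the
// values the worker derives from its KeyManager and the stream message.
const result = await redis.eval(
  LUA_FINALIZE_CUSTOM_METRIC,
  3,                             // number of KEYS
  "jobstatus:123",               // KEYS[1] status key
  "my-stream",                   // KEYS[2] stream key
  "metrics:completed:tenant-a",  // KEYS[3] custom metric increment key
  "group-A",                     // ARGV[1] group name
  Date.now(),                    // ARGV[2] timestamp
  "1700000000000-0"              // ARGV[3] msgId
);
// result === 1 when every group has finished and cleanup + INCRBY ran, 0 otherwise.
```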
package/dist/src/worker.d.ts CHANGED
@@ -1,8 +1,8 @@
 import Redis from "ioredis";
-import { BaseWorkerOptions, BaseWorkerControlOptions } from "./interfaces";
+import { BaseWorkerOptions, BaseWorkerControlOptions, BaseWorkerCustomMetricsOptions } from "./interfaces";
 import { BaseWorker } from "./base-worker";
 export declare abstract class Worker<T extends Record<string, unknown>> extends BaseWorker<T> {
-    constructor(redis: Redis, options: BaseWorkerOptions, controlOptions?: BaseWorkerControlOptions);
+    constructor(redis: Redis, options: BaseWorkerOptions, controlOptions?: BaseWorkerControlOptions, metricsOptions?: BaseWorkerCustomMetricsOptions<T>);
     /**
      * Fetch loop (the main loop)
      * Based on free slots (concurrency - active count) gets new messages
package/dist/src/worker.js CHANGED
@@ -5,8 +5,8 @@ const interfaces_1 = require("./interfaces");
 const stream_message_entity_1 = require("./stream-message-entity");
 const base_worker_1 = require("./base-worker");
 class Worker extends base_worker_1.BaseWorker {
-    constructor(redis, options, controlOptions = interfaces_1.defaultBaseWorkerControlOptions) {
-        super(redis, options, controlOptions);
+    constructor(redis, options, controlOptions = interfaces_1.defaultBaseWorkerControlOptions, metricsOptions = interfaces_1.defaultBaseWorkerCustomMetrics) {
+        super(redis, options, controlOptions, metricsOptions);
     }
     /**
      * Fetch loop (the main loop)
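With `defaultBaseWorkerCustomMetrics` threaded through `Worker` and `BatchWorker`, the feature is opt-in: when no callback is provided nothing changes, and when one is, the returned key becomes a plain Redis string counter bumped by `INCRBY`. A small sketch of reading such a counter back for a dashboard or exporter; the key name is a made-up example matching the earlier sketch:

```typescript
import Redis from "ioredis";

const redis = new Redis();

// The custom metric is an ordinary Redis counter, so GET is enough to read it.
const raw = await redis.get("metrics:completed:tenant-a");
const completed = raw ? Number.parseInt(raw, 10) : 0;
console.log(`completed jobs for tenant-a: ${completed}`);
```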
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@koala42/redis-highway",
-  "version": "0.2.3",
+  "version": "0.2.8",
   "description": "High performance redis queue",
   "license": "MIT",
   "author": {
@@ -8,6 +8,11 @@
     "name": "David Stranava",
     "url": "https://github.com/stranavad"
   },
+  "repository": {
+    "url": "https://github.com/Koala42/redis-highway",
+    "type": "github",
+    "directory": "packages/redis-highway"
+  },
   "type": "commonjs",
   "main": "dist/src/index.js",
   "types": "dist/src/index.d.ts",
@@ -16,7 +21,8 @@
   ],
   "scripts": {
     "clean": "rimraf dist",
-    "test:all": "vitest run test/queue.spec.ts test/batch-worker.spec.ts",
+    "test": "vitest run test",
+    "test:all": "vitest run test",
     "build": "npm run clean && tsc",
     "prepublish": "npm run build"
   },
@@ -36,4 +42,4 @@
     "typescript": "^5.9.3",
     "vitest": "^4.0.16"
   }
-}
+}
package/README.md DELETED
@@ -1,124 +0,0 @@
-# @koala42/redis-highway
-
-High performance Redis stream-based queue for Node.js. Supports single-instance Redis and Valkey.
-
-## Missing features ATM
-- Better graceful shutdown handling
-- Expose more than just `data: T` in worker process functions, e.g. job id and current status
-- Option to customize/enable/disable metrics
-- Allow a custom logger for workers and producers instead of console.log calls
-
-
-## Roadmap
-- Support Redis Cluster; this is probably only possible for job payloads and the DLQ, since there is only a single stream
-
-## Installation
-
-```bash
-npm install @koala42/redis-highway
-```
-
-## Usage
-
-### Producer
-
-```typescript
-import { Redis } from 'ioredis';
-import { Producer } from '@koala42/redis-highway';
-
-const redis = new Redis();
-const producer = new Producer(redis, 'my-stream');
-
-// Send job
-await producer.push(
-  JSON.stringify({ hello: 'world' }), // Message serialization is not done automatically
-  ['group-A', 'group-B'] // Target specific consumer groups
-);
-```
-
-### Worker
-
-```typescript
-import { Redis } from 'ioredis';
-import { Worker } from '@koala42/redis-highway';
-
-class MyWorker extends Worker<T> {
-  async process(data: T) {
-    console.log('Processing:', data);
-    // throw new Error('fail'); // Automatic retry/DLQ logic
-  }
-}
-
-const redis = new Redis();
-const worker = new MyWorker(redis, 'group-A', 'my-stream');
-
-await worker.start();
-```
-
-### Metrics
-
-```typescript
-import { Metrics } from '@koala42/redis-highway';
-
-const metrics = new Metrics(redis, 'my-stream');
-
-// Prometheus format
-const payload = await metrics.getPrometheusMetrics(['group-A']);
-```
-
-## Usage with NestJS
-
-```typescript
-
-// Producer
-@Injectable()
-export class EntryService {
-  private readonly producer: Producer;
-
-  constructor(){
-    this.producer = new Producer(
-      new Redis(...), // Or reuse existing ioredis connection
-      'my-stream'
-    )
-  }
-
-  public async sth(): Promise<void>{
-    await this.producer.push(
-      JSON.stringify({ hello: 'world' }), // Message serialization is not done automatically
-      ['group-A', 'group-B'] // Target specific consumer groups
-    );
-  }
-}
-
-
-// Processor
-@Injectable()
-export class ProcessorService extends Worker<T> implements OnModuleInit, OnModuleDestroy {
-  constructor(){
-    super(
-      new Redis(...), // or reuse existing redis conn
-      'group-A',
-      'my-stream',
-      50 // concurrency
-    )
-  }
-
-  async onModuleInit(): Promise<void>{
-    await this.start()
-  }
-
-  onModuleDestroy(){
-    this.stop()
-  }
-
-  async process(data: T): Promise<void>{
-    console.log("Processing job", JSON.stringify(data))
-  }
-}
-```
-
-## Features
-- **Lightweight**: Uses light Lua scripts and pipelines wherever possible, making it highly concurrent for both inserts and processing thanks to the reduced I/O load compared to BullMQ
-- **Granular Retries**: If one group fails, only that group retries.
-- **DLQ**: Dead Letter Queue support after max retries.
-- **Metrics**: Throughput, Waiting, DLQ, Prometheus export.