@hazeljs/kafka 0.2.0-beta.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +174 -0
- package/dist/decorators/kafka-consumer.decorator.d.ts +32 -0
- package/dist/decorators/kafka-consumer.decorator.d.ts.map +1 -0
- package/dist/decorators/kafka-consumer.decorator.js +64 -0
- package/dist/decorators/kafka-subscribe.decorator.d.ts +40 -0
- package/dist/decorators/kafka-subscribe.decorator.d.ts.map +1 -0
- package/dist/decorators/kafka-subscribe.decorator.js +53 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +23 -0
- package/dist/kafka-consumer.service.d.ts +21 -0
- package/dist/kafka-consumer.service.d.ts.map +1 -0
- package/dist/kafka-consumer.service.js +118 -0
- package/dist/kafka-producer.service.d.ts +35 -0
- package/dist/kafka-producer.service.d.ts.map +1 -0
- package/dist/kafka-producer.service.js +107 -0
- package/dist/kafka-stream.processor.d.ts +43 -0
- package/dist/kafka-stream.processor.d.ts.map +1 -0
- package/dist/kafka-stream.processor.js +168 -0
- package/dist/kafka.module.d.ts +33 -0
- package/dist/kafka.module.d.ts.map +1 -0
- package/dist/kafka.module.js +93 -0
- package/dist/kafka.types.d.ts +137 -0
- package/dist/kafka.types.d.ts.map +1 -0
- package/dist/kafka.types.js +5 -0
- package/package.json +52 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 HazelJS Team
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
# @hazeljs/kafka
|
|
2
|
+
|
|
3
|
+
**Kafka Module for HazelJS - Produce, Consume, and Stream Processing**
|
|
4
|
+
|
|
5
|
+
Apache Kafka integration for HazelJS with decorator-based consumers, producer service, and lightweight stream processing.
|
|
6
|
+
|
|
7
|
+
[](https://www.npmjs.com/package/@hazeljs/kafka)
|
|
8
|
+
[](https://opensource.org/licenses/MIT)
|
|
9
|
+
|
|
10
|
+
## Features
|
|
11
|
+
|
|
12
|
+
- **Produce** - Publish messages to Kafka topics via `KafkaProducerService`
|
|
13
|
+
- **Consume** - Decorator-driven consumers with `@KafkaConsumer` and `@KafkaSubscribe`
|
|
14
|
+
- **Stream Processing** - Lightweight `KafkaStreamProcessor` for consume-transform-produce pipelines
|
|
15
|
+
- **Graceful Shutdown** - Clean disconnect on application shutdown
|
|
16
|
+
- **TypeScript** - Full type safety with KafkaJS
|
|
17
|
+
|
|
18
|
+
## Installation
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
npm install @hazeljs/kafka
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## Quick Start
|
|
25
|
+
|
|
26
|
+
### 1. Configure KafkaModule
|
|
27
|
+
|
|
28
|
+
```typescript
|
|
29
|
+
// app.module.ts
|
|
30
|
+
import { HazelModule } from '@hazeljs/core';
|
|
31
|
+
import { KafkaModule } from '@hazeljs/kafka';
|
|
32
|
+
|
|
33
|
+
@HazelModule({
|
|
34
|
+
imports: [
|
|
35
|
+
KafkaModule.forRoot({
|
|
36
|
+
clientId: 'my-app',
|
|
37
|
+
brokers: ['localhost:9092'],
|
|
38
|
+
}),
|
|
39
|
+
],
|
|
40
|
+
})
|
|
41
|
+
export class AppModule {}
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
### 2. Produce Messages
|
|
45
|
+
|
|
46
|
+
```typescript
|
|
47
|
+
import { Injectable } from '@hazeljs/core';
|
|
48
|
+
import { KafkaProducerService } from '@hazeljs/kafka';
|
|
49
|
+
|
|
50
|
+
@Injectable()
|
|
51
|
+
export class OrderService {
|
|
52
|
+
constructor(private producer: KafkaProducerService) {}
|
|
53
|
+
|
|
54
|
+
async createOrder(data: CreateOrderDto) {
|
|
55
|
+
await this.producer.send('orders', [
|
|
56
|
+
{ key: data.id, value: JSON.stringify(data) },
|
|
57
|
+
]);
|
|
58
|
+
return data;
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
### 3. Consume Messages (Decorator-Based)
|
|
64
|
+
|
|
65
|
+
```typescript
|
|
66
|
+
import { Injectable } from '@hazeljs/core';
|
|
67
|
+
import { KafkaConsumer, KafkaSubscribe, KafkaMessagePayload } from '@hazeljs/kafka';
|
|
68
|
+
|
|
69
|
+
@KafkaConsumer({ groupId: 'order-processor' })
|
|
70
|
+
@Injectable()
|
|
71
|
+
export class OrderConsumer {
|
|
72
|
+
@KafkaSubscribe('orders')
|
|
73
|
+
async handleOrder({ message }: KafkaMessagePayload) {
|
|
74
|
+
const order = JSON.parse(message.value!.toString());
|
|
75
|
+
console.log('Processing order:', order);
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
### 4. Register Consumers in Bootstrap
|
|
81
|
+
|
|
82
|
+
```typescript
|
|
83
|
+
// index.ts
|
|
84
|
+
import { HazelApp, Container } from '@hazeljs/core';
|
|
85
|
+
import { KafkaModule } from '@hazeljs/kafka';
|
|
86
|
+
import { AppModule } from './app.module';
|
|
87
|
+
import { OrderConsumer } from './order.consumer';
|
|
88
|
+
|
|
89
|
+
async function bootstrap() {
|
|
90
|
+
const app = new HazelApp(AppModule);
|
|
91
|
+
|
|
92
|
+
// Register Kafka consumers
|
|
93
|
+
const container = Container.getInstance();
|
|
94
|
+
const orderConsumer = container.resolve(OrderConsumer);
|
|
95
|
+
if (orderConsumer) {
|
|
96
|
+
await KafkaModule.registerConsumersFromProvider(orderConsumer);
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
await app.listen(3000);
|
|
100
|
+
}
|
|
101
|
+
bootstrap();
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
## Stream Processing
|
|
105
|
+
|
|
106
|
+
For consume-transform-produce pipelines:
|
|
107
|
+
|
|
108
|
+
```typescript
|
|
109
|
+
import { Container } from '@hazeljs/core';
|
|
110
|
+
import { KafkaStreamProcessor } from '@hazeljs/kafka';
|
|
111
|
+
|
|
112
|
+
const container = Container.getInstance();
|
|
113
|
+
const processor = container.resolve(KafkaStreamProcessor);
|
|
114
|
+
|
|
115
|
+
processor
|
|
116
|
+
.from('raw-events')
|
|
117
|
+
.transform(async (msg) => ({
|
|
118
|
+
value: JSON.stringify({ ...JSON.parse(msg.value!.toString()), enriched: true }),
|
|
119
|
+
}))
|
|
120
|
+
.to('enriched-events')
|
|
121
|
+
.start();
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
## Async Configuration
|
|
125
|
+
|
|
126
|
+
Use `forRootAsync` with ConfigService:
|
|
127
|
+
|
|
128
|
+
```typescript
|
|
129
|
+
import { ConfigService } from '@hazeljs/config';
|
|
130
|
+
import { KafkaModule } from '@hazeljs/kafka';
|
|
131
|
+
|
|
132
|
+
KafkaModule.forRootAsync({
|
|
133
|
+
useFactory: (config: ConfigService) => ({
|
|
134
|
+
clientId: config.get('KAFKA_CLIENT_ID', 'my-app'),
|
|
135
|
+
brokers: (config.get('KAFKA_BROKERS') || 'localhost:9092').toString().split(','),
|
|
136
|
+
}),
|
|
137
|
+
inject: [ConfigService],
|
|
138
|
+
})
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
## API Reference
|
|
142
|
+
|
|
143
|
+
### KafkaProducerService
|
|
144
|
+
|
|
145
|
+
- `send(topic, messages, options?)` - Send message(s) to a topic
|
|
146
|
+
- `sendBatch(batch)` - Send to multiple topics
|
|
147
|
+
- `isProducerConnected()` - Check connection status
|
|
148
|
+
|
|
149
|
+
### Decorators
|
|
150
|
+
|
|
151
|
+
- `@KafkaConsumer(options)` - Mark class as consumer (groupId required)
|
|
152
|
+
- `@KafkaSubscribe(topic, options?)` - Mark method as topic handler
|
|
153
|
+
- `fromBeginning?: boolean` - Read from beginning of topic
|
|
154
|
+
|
|
155
|
+
### KafkaStreamProcessor
|
|
156
|
+
|
|
157
|
+
- `from(topic)` - Input topic
|
|
158
|
+
- `transform(fn)` - Transform function (message) => output
|
|
159
|
+
- `to(topic)` - Output topic
|
|
160
|
+
- `withGroupId(id)` - Consumer group ID
|
|
161
|
+
- `start()` - Start processing
|
|
162
|
+
- `stop()` - Stop processing
|
|
163
|
+
|
|
164
|
+
## Environment Variables
|
|
165
|
+
|
|
166
|
+
```
|
|
167
|
+
KAFKA_BROKERS=localhost:9092
|
|
168
|
+
KAFKA_CLIENT_ID=my-app
|
|
169
|
+
```
|
|
170
|
+
|
|
171
|
+
## Requirements
|
|
172
|
+
|
|
173
|
+
- Apache Kafka broker (>= 0.11.x)
|
|
174
|
+
- Node.js >= 14
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import 'reflect-metadata';
|
|
2
|
+
import { KafkaConsumerOptions } from '../kafka.types';
|
|
3
|
+
/**
|
|
4
|
+
* Metadata key for Kafka consumer class options
|
|
5
|
+
*/
|
|
6
|
+
export declare const KAFKA_CONSUMER_METADATA_KEY: unique symbol;
|
|
7
|
+
/**
|
|
8
|
+
* Decorator to mark a class as a Kafka consumer with consumer group options
|
|
9
|
+
*
|
|
10
|
+
* @example
|
|
11
|
+
* ```typescript
|
|
12
|
+
* @KafkaConsumer({ groupId: 'order-processor' })
|
|
13
|
+
* @Injectable()
|
|
14
|
+
* export class OrderConsumer {
|
|
15
|
+
* @KafkaSubscribe('orders')
|
|
16
|
+
* async handleOrder({ message }: KafkaMessagePayload) {
|
|
17
|
+
* const order = JSON.parse(message.value.toString());
|
|
18
|
+
* // process order
|
|
19
|
+
* }
|
|
20
|
+
* }
|
|
21
|
+
* ```
|
|
22
|
+
*/
|
|
23
|
+
export declare function KafkaConsumer(options: KafkaConsumerOptions): ClassDecorator;
|
|
24
|
+
/**
|
|
25
|
+
* Get Kafka consumer metadata from a class or instance
|
|
26
|
+
*/
|
|
27
|
+
export declare function getKafkaConsumerMetadata(target: object): KafkaConsumerOptions | undefined;
|
|
28
|
+
/**
|
|
29
|
+
* Check if a class is a Kafka consumer
|
|
30
|
+
*/
|
|
31
|
+
export declare function isKafkaConsumer(target: object): boolean;
|
|
32
|
+
//# sourceMappingURL=kafka-consumer.decorator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-consumer.decorator.d.ts","sourceRoot":"","sources":["../../src/decorators/kafka-consumer.decorator.ts"],"names":[],"mappings":"AAAA,OAAO,kBAAkB,CAAC;AAC1B,OAAO,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAGtD;;GAEG;AACH,eAAO,MAAM,2BAA2B,eAA2B,CAAC;AAEpE;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,aAAa,CAAC,OAAO,EAAE,oBAAoB,GAAG,cAAc,CAe3E;AAED;;GAEG;AACH,wBAAgB,wBAAwB,CAAC,MAAM,EAAE,MAAM,GAAG,oBAAoB,GAAG,SAAS,CAKzF;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAKvD"}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.KAFKA_CONSUMER_METADATA_KEY = void 0;
|
|
7
|
+
exports.KafkaConsumer = KafkaConsumer;
|
|
8
|
+
exports.getKafkaConsumerMetadata = getKafkaConsumerMetadata;
|
|
9
|
+
exports.isKafkaConsumer = isKafkaConsumer;
|
|
10
|
+
require("reflect-metadata");
|
|
11
|
+
const core_1 = __importDefault(require("@hazeljs/core"));
|
|
12
|
+
/**
|
|
13
|
+
* Metadata key for Kafka consumer class options
|
|
14
|
+
*/
|
|
15
|
+
exports.KAFKA_CONSUMER_METADATA_KEY = Symbol('kafka:consumer');
|
|
16
|
+
/**
|
|
17
|
+
* Decorator to mark a class as a Kafka consumer with consumer group options
|
|
18
|
+
*
|
|
19
|
+
* @example
|
|
20
|
+
* ```typescript
|
|
21
|
+
* @KafkaConsumer({ groupId: 'order-processor' })
|
|
22
|
+
* @Injectable()
|
|
23
|
+
* export class OrderConsumer {
|
|
24
|
+
* @KafkaSubscribe('orders')
|
|
25
|
+
* async handleOrder({ message }: KafkaMessagePayload) {
|
|
26
|
+
* const order = JSON.parse(message.value.toString());
|
|
27
|
+
* // process order
|
|
28
|
+
* }
|
|
29
|
+
* }
|
|
30
|
+
* ```
|
|
31
|
+
*/
|
|
32
|
+
function KafkaConsumer(options) {
|
|
33
|
+
return (target) => {
|
|
34
|
+
const defaults = {
|
|
35
|
+
groupId: options.groupId,
|
|
36
|
+
sessionTimeout: options.sessionTimeout ?? 30000,
|
|
37
|
+
rebalanceTimeout: options.rebalanceTimeout ?? 60000,
|
|
38
|
+
heartbeatInterval: options.heartbeatInterval ?? 3000,
|
|
39
|
+
maxWaitTimeInMs: options.maxWaitTimeInMs ?? 5000,
|
|
40
|
+
retry: options.retry,
|
|
41
|
+
};
|
|
42
|
+
const targetName = typeof target === 'function' ? target.name : 'unknown';
|
|
43
|
+
core_1.default.debug(`Marking ${targetName} as Kafka consumer with groupId: ${defaults.groupId}`);
|
|
44
|
+
Reflect.defineMetadata(exports.KAFKA_CONSUMER_METADATA_KEY, defaults, target);
|
|
45
|
+
};
|
|
46
|
+
}
|
|
47
|
+
/**
|
|
48
|
+
* Get Kafka consumer metadata from a class or instance
|
|
49
|
+
*/
|
|
50
|
+
function getKafkaConsumerMetadata(target) {
|
|
51
|
+
const constructor = typeof target === 'function' ? target : target.constructor;
|
|
52
|
+
if (!constructor)
|
|
53
|
+
return undefined;
|
|
54
|
+
return Reflect.getMetadata(exports.KAFKA_CONSUMER_METADATA_KEY, constructor);
|
|
55
|
+
}
|
|
56
|
+
/**
|
|
57
|
+
* Check if a class is a Kafka consumer
|
|
58
|
+
*/
|
|
59
|
+
function isKafkaConsumer(target) {
|
|
60
|
+
const constructor = typeof target === 'function' ? target : target.constructor;
|
|
61
|
+
if (!constructor)
|
|
62
|
+
return false;
|
|
63
|
+
return Reflect.hasMetadata(exports.KAFKA_CONSUMER_METADATA_KEY, constructor);
|
|
64
|
+
}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import 'reflect-metadata';
|
|
2
|
+
import { KafkaSubscribeOptions } from '../kafka.types';
|
|
3
|
+
/**
|
|
4
|
+
* Metadata key for Kafka subscribe (topic + handler)
|
|
5
|
+
*/
|
|
6
|
+
export declare const KAFKA_SUBSCRIBE_METADATA_KEY: unique symbol;
|
|
7
|
+
/**
|
|
8
|
+
* Subscribe handler metadata
|
|
9
|
+
*/
|
|
10
|
+
export interface KafkaSubscribeMetadata {
|
|
11
|
+
topic: string;
|
|
12
|
+
methodName: string;
|
|
13
|
+
options?: KafkaSubscribeOptions;
|
|
14
|
+
}
|
|
15
|
+
/**
|
|
16
|
+
* Decorator to mark a method as a handler for a Kafka topic
|
|
17
|
+
*
|
|
18
|
+
* @example
|
|
19
|
+
* ```typescript
|
|
20
|
+
* @KafkaConsumer({ groupId: 'order-processor' })
|
|
21
|
+
* @Injectable()
|
|
22
|
+
* export class OrderConsumer {
|
|
23
|
+
* @KafkaSubscribe('orders')
|
|
24
|
+
* async handleOrder({ message }: KafkaMessagePayload) {
|
|
25
|
+
* // process order
|
|
26
|
+
* }
|
|
27
|
+
*
|
|
28
|
+
* @KafkaSubscribe('events', { fromBeginning: true })
|
|
29
|
+
* async handleEvents({ message }: KafkaMessagePayload) {
|
|
30
|
+
* // process events
|
|
31
|
+
* }
|
|
32
|
+
* }
|
|
33
|
+
* ```
|
|
34
|
+
*/
|
|
35
|
+
export declare function KafkaSubscribe(topic: string, options?: KafkaSubscribeOptions): MethodDecorator;
|
|
36
|
+
/**
|
|
37
|
+
* Get Kafka subscribe metadata from a class
|
|
38
|
+
*/
|
|
39
|
+
export declare function getKafkaSubscribeMetadata(target: object): KafkaSubscribeMetadata[];
|
|
40
|
+
//# sourceMappingURL=kafka-subscribe.decorator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-subscribe.decorator.d.ts","sourceRoot":"","sources":["../../src/decorators/kafka-subscribe.decorator.ts"],"names":[],"mappings":"AAAA,OAAO,kBAAkB,CAAC;AAC1B,OAAO,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAC;AAGvD;;GAEG;AACH,eAAO,MAAM,4BAA4B,eAA4B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,qBAAqB,CAAC;CACjC;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,qBAAqB,GAAG,eAAe,CAkB9F;AAED;;GAEG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,MAAM,GAAG,sBAAsB,EAAE,CAElF"}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.KAFKA_SUBSCRIBE_METADATA_KEY = void 0;
|
|
7
|
+
exports.KafkaSubscribe = KafkaSubscribe;
|
|
8
|
+
exports.getKafkaSubscribeMetadata = getKafkaSubscribeMetadata;
|
|
9
|
+
require("reflect-metadata");
|
|
10
|
+
const core_1 = __importDefault(require("@hazeljs/core"));
|
|
11
|
+
/**
|
|
12
|
+
* Metadata key for Kafka subscribe (topic + handler)
|
|
13
|
+
*/
|
|
14
|
+
exports.KAFKA_SUBSCRIBE_METADATA_KEY = Symbol('kafka:subscribe');
|
|
15
|
+
/**
|
|
16
|
+
* Decorator to mark a method as a handler for a Kafka topic
|
|
17
|
+
*
|
|
18
|
+
* @example
|
|
19
|
+
* ```typescript
|
|
20
|
+
* @KafkaConsumer({ groupId: 'order-processor' })
|
|
21
|
+
* @Injectable()
|
|
22
|
+
* export class OrderConsumer {
|
|
23
|
+
* @KafkaSubscribe('orders')
|
|
24
|
+
* async handleOrder({ message }: KafkaMessagePayload) {
|
|
25
|
+
* // process order
|
|
26
|
+
* }
|
|
27
|
+
*
|
|
28
|
+
* @KafkaSubscribe('events', { fromBeginning: true })
|
|
29
|
+
* async handleEvents({ message }: KafkaMessagePayload) {
|
|
30
|
+
* // process events
|
|
31
|
+
* }
|
|
32
|
+
* }
|
|
33
|
+
* ```
|
|
34
|
+
*/
|
|
35
|
+
function KafkaSubscribe(topic, options) {
|
|
36
|
+
return (target, propertyKey, _descriptor) => {
|
|
37
|
+
const existingSubscriptions = Reflect.getMetadata(exports.KAFKA_SUBSCRIBE_METADATA_KEY, target.constructor) || [];
|
|
38
|
+
const subscription = {
|
|
39
|
+
topic,
|
|
40
|
+
methodName: propertyKey.toString(),
|
|
41
|
+
options: options ?? {},
|
|
42
|
+
};
|
|
43
|
+
existingSubscriptions.push(subscription);
|
|
44
|
+
Reflect.defineMetadata(exports.KAFKA_SUBSCRIBE_METADATA_KEY, existingSubscriptions, target.constructor);
|
|
45
|
+
core_1.default.debug(`KafkaSubscribe applied to ${target.constructor.name}.${String(propertyKey)} for topic: ${topic}`);
|
|
46
|
+
};
|
|
47
|
+
}
|
|
48
|
+
/**
|
|
49
|
+
* Get Kafka subscribe metadata from a class
|
|
50
|
+
*/
|
|
51
|
+
function getKafkaSubscribeMetadata(target) {
|
|
52
|
+
return Reflect.getMetadata(exports.KAFKA_SUBSCRIBE_METADATA_KEY, target.constructor) || [];
|
|
53
|
+
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @hazeljs/kafka - Kafka module for HazelJS
|
|
3
|
+
*/
|
|
4
|
+
export { KafkaModule } from './kafka.module';
|
|
5
|
+
export { KafkaProducerService } from './kafka-producer.service';
|
|
6
|
+
export { KafkaConsumerService } from './kafka-consumer.service';
|
|
7
|
+
export { KafkaStreamProcessor } from './kafka-stream.processor';
|
|
8
|
+
export { KAFKA_CLIENT_TOKEN } from './kafka-producer.service';
|
|
9
|
+
export { KafkaConsumer, getKafkaConsumerMetadata, isKafkaConsumer, } from './decorators/kafka-consumer.decorator';
|
|
10
|
+
export { KafkaSubscribe, getKafkaSubscribeMetadata, type KafkaSubscribeMetadata, } from './decorators/kafka-subscribe.decorator';
|
|
11
|
+
export type { KafkaModuleOptions, KafkaClientOptions, KafkaConsumerOptions, KafkaSubscribeOptions, KafkaProduceOptions, KafkaMessage, KafkaMessagePayload, KafkaMessageHandler, KafkaStreamTransform, KafkaSaslOptions, KafkaSslOptions, SaslMechanism, } from './kafka.types';
|
|
12
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AAC9D,OAAO,EACL,aAAa,EACb,wBAAwB,EACxB,eAAe,GAChB,MAAM,uCAAuC,CAAC;AAC/C,OAAO,EACL,cAAc,EACd,yBAAyB,EACzB,KAAK,sBAAsB,GAC5B,MAAM,wCAAwC,CAAC;AAChD,YAAY,EACV,kBAAkB,EAClB,kBAAkB,EAClB,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,EACnB,YAAY,EACZ,mBAAmB,EACnB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,EAChB,eAAe,EACf,aAAa,GACd,MAAM,eAAe,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* @hazeljs/kafka - Kafka module for HazelJS
|
|
4
|
+
*/
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getKafkaSubscribeMetadata = exports.KafkaSubscribe = exports.isKafkaConsumer = exports.getKafkaConsumerMetadata = exports.KafkaConsumer = exports.KAFKA_CLIENT_TOKEN = exports.KafkaStreamProcessor = exports.KafkaConsumerService = exports.KafkaProducerService = exports.KafkaModule = void 0;
|
|
7
|
+
var kafka_module_1 = require("./kafka.module");
|
|
8
|
+
Object.defineProperty(exports, "KafkaModule", { enumerable: true, get: function () { return kafka_module_1.KafkaModule; } });
|
|
9
|
+
var kafka_producer_service_1 = require("./kafka-producer.service");
|
|
10
|
+
Object.defineProperty(exports, "KafkaProducerService", { enumerable: true, get: function () { return kafka_producer_service_1.KafkaProducerService; } });
|
|
11
|
+
var kafka_consumer_service_1 = require("./kafka-consumer.service");
|
|
12
|
+
Object.defineProperty(exports, "KafkaConsumerService", { enumerable: true, get: function () { return kafka_consumer_service_1.KafkaConsumerService; } });
|
|
13
|
+
var kafka_stream_processor_1 = require("./kafka-stream.processor");
|
|
14
|
+
Object.defineProperty(exports, "KafkaStreamProcessor", { enumerable: true, get: function () { return kafka_stream_processor_1.KafkaStreamProcessor; } });
|
|
15
|
+
var kafka_producer_service_2 = require("./kafka-producer.service");
|
|
16
|
+
Object.defineProperty(exports, "KAFKA_CLIENT_TOKEN", { enumerable: true, get: function () { return kafka_producer_service_2.KAFKA_CLIENT_TOKEN; } });
|
|
17
|
+
var kafka_consumer_decorator_1 = require("./decorators/kafka-consumer.decorator");
|
|
18
|
+
Object.defineProperty(exports, "KafkaConsumer", { enumerable: true, get: function () { return kafka_consumer_decorator_1.KafkaConsumer; } });
|
|
19
|
+
Object.defineProperty(exports, "getKafkaConsumerMetadata", { enumerable: true, get: function () { return kafka_consumer_decorator_1.getKafkaConsumerMetadata; } });
|
|
20
|
+
Object.defineProperty(exports, "isKafkaConsumer", { enumerable: true, get: function () { return kafka_consumer_decorator_1.isKafkaConsumer; } });
|
|
21
|
+
var kafka_subscribe_decorator_1 = require("./decorators/kafka-subscribe.decorator");
|
|
22
|
+
Object.defineProperty(exports, "KafkaSubscribe", { enumerable: true, get: function () { return kafka_subscribe_decorator_1.KafkaSubscribe; } });
|
|
23
|
+
Object.defineProperty(exports, "getKafkaSubscribeMetadata", { enumerable: true, get: function () { return kafka_subscribe_decorator_1.getKafkaSubscribeMetadata; } });
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { Kafka } from 'kafkajs';
|
|
2
|
+
export declare const KAFKA_CLIENT_TOKEN = "KAFKA_CLIENT";
|
|
3
|
+
/**
|
|
4
|
+
* Kafka consumer service for consuming messages with decorator-driven handlers
|
|
5
|
+
*/
|
|
6
|
+
export declare class KafkaConsumerService {
|
|
7
|
+
private readonly kafka;
|
|
8
|
+
private runningConsumers;
|
|
9
|
+
constructor(kafka: Kafka);
|
|
10
|
+
/**
|
|
11
|
+
* Register a consumer provider and start consuming
|
|
12
|
+
* Call this for each class that has @KafkaConsumer and @KafkaSubscribe decorators
|
|
13
|
+
*/
|
|
14
|
+
registerFromProvider(provider: object): Promise<void>;
|
|
15
|
+
onModuleDestroy(): Promise<void>;
|
|
16
|
+
/**
|
|
17
|
+
* Get count of running consumers
|
|
18
|
+
*/
|
|
19
|
+
getConsumerCount(): number;
|
|
20
|
+
}
|
|
21
|
+
//# sourceMappingURL=kafka-consumer.service.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-consumer.service.d.ts","sourceRoot":"","sources":["../src/kafka-consumer.service.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAMhC,eAAO,MAAM,kBAAkB,iBAAiB,CAAC;AAQjD;;GAEG;AACH,qBACa,oBAAoB;IAK7B,OAAO,CAAC,QAAQ,CAAC,KAAK;IAJxB,OAAO,CAAC,gBAAgB,CAAyB;gBAI9B,KAAK,EAAE,KAAK;IAG/B;;;OAGG;IACG,oBAAoB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAiFrD,eAAe,IAAI,OAAO,CAAC,IAAI,CAAC;IAYtC;;OAEG;IACH,gBAAgB,IAAI,MAAM;CAG3B"}
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
var __param = (this && this.__param) || function (paramIndex, decorator) {
|
|
12
|
+
return function (target, key) { decorator(target, key, paramIndex); }
|
|
13
|
+
};
|
|
14
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
15
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
16
|
+
};
|
|
17
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
18
|
+
exports.KafkaConsumerService = exports.KAFKA_CLIENT_TOKEN = void 0;
|
|
19
|
+
const core_1 = require("@hazeljs/core");
|
|
20
|
+
const kafkajs_1 = require("kafkajs");
|
|
21
|
+
const kafka_consumer_decorator_1 = require("./decorators/kafka-consumer.decorator");
|
|
22
|
+
const kafka_subscribe_decorator_1 = require("./decorators/kafka-subscribe.decorator");
|
|
23
|
+
const core_2 = __importDefault(require("@hazeljs/core"));
|
|
24
|
+
exports.KAFKA_CLIENT_TOKEN = 'KAFKA_CLIENT';
|
|
25
|
+
/**
|
|
26
|
+
* Kafka consumer service for consuming messages with decorator-driven handlers
|
|
27
|
+
*/
|
|
28
|
+
let KafkaConsumerService = class KafkaConsumerService {
|
|
29
|
+
constructor(kafka) {
|
|
30
|
+
this.kafka = kafka;
|
|
31
|
+
this.runningConsumers = [];
|
|
32
|
+
}
|
|
33
|
+
/**
|
|
34
|
+
* Register a consumer provider and start consuming
|
|
35
|
+
* Call this for each class that has @KafkaConsumer and @KafkaSubscribe decorators
|
|
36
|
+
*/
|
|
37
|
+
async registerFromProvider(provider) {
|
|
38
|
+
const consumerOptions = (0, kafka_consumer_decorator_1.getKafkaConsumerMetadata)(provider.constructor);
|
|
39
|
+
const subscribeMetadata = (0, kafka_subscribe_decorator_1.getKafkaSubscribeMetadata)(provider.constructor);
|
|
40
|
+
if (!consumerOptions) {
|
|
41
|
+
core_2.default.warn(`Provider ${provider.constructor.name} has @KafkaSubscribe but no @KafkaConsumer decorator - skipping`);
|
|
42
|
+
return;
|
|
43
|
+
}
|
|
44
|
+
if (!subscribeMetadata || subscribeMetadata.length === 0) {
|
|
45
|
+
core_2.default.warn(`Provider ${provider.constructor.name} has @KafkaConsumer but no @KafkaSubscribe - skipping`);
|
|
46
|
+
return;
|
|
47
|
+
}
|
|
48
|
+
const consumer = this.kafka.consumer({
|
|
49
|
+
groupId: consumerOptions.groupId,
|
|
50
|
+
sessionTimeout: consumerOptions.sessionTimeout ?? 30000,
|
|
51
|
+
rebalanceTimeout: consumerOptions.rebalanceTimeout ?? 60000,
|
|
52
|
+
heartbeatInterval: consumerOptions.heartbeatInterval ?? 3000,
|
|
53
|
+
maxWaitTimeInMs: consumerOptions.maxWaitTimeInMs ?? 5000,
|
|
54
|
+
retry: consumerOptions.retry,
|
|
55
|
+
});
|
|
56
|
+
const topicHandlers = new Map();
|
|
57
|
+
await consumer.connect();
|
|
58
|
+
for (const sub of subscribeMetadata) {
|
|
59
|
+
topicHandlers.set(sub.topic, {
|
|
60
|
+
methodName: sub.methodName,
|
|
61
|
+
fromBeginning: sub.options?.fromBeginning ?? false,
|
|
62
|
+
});
|
|
63
|
+
await consumer.subscribe({
|
|
64
|
+
topics: [sub.topic],
|
|
65
|
+
fromBeginning: sub.options?.fromBeginning ?? false,
|
|
66
|
+
});
|
|
67
|
+
}
|
|
68
|
+
await consumer.run({
|
|
69
|
+
eachMessage: async (payload) => {
|
|
70
|
+
const handlerConfig = topicHandlers.get(payload.topic);
|
|
71
|
+
if (!handlerConfig)
|
|
72
|
+
return;
|
|
73
|
+
const instance = provider;
|
|
74
|
+
const method = instance[handlerConfig.methodName];
|
|
75
|
+
if (typeof method !== 'function') {
|
|
76
|
+
core_2.default.error(`Handler ${handlerConfig.methodName} not found on ${provider.constructor.name}`);
|
|
77
|
+
return;
|
|
78
|
+
}
|
|
79
|
+
try {
|
|
80
|
+
await method.call(provider, payload);
|
|
81
|
+
}
|
|
82
|
+
catch (error) {
|
|
83
|
+
core_2.default.error(`Error in Kafka handler ${provider.constructor.name}.${handlerConfig.methodName}:`, error);
|
|
84
|
+
}
|
|
85
|
+
},
|
|
86
|
+
});
|
|
87
|
+
this.runningConsumers.push({
|
|
88
|
+
consumer,
|
|
89
|
+
provider,
|
|
90
|
+
topicHandlers,
|
|
91
|
+
});
|
|
92
|
+
core_2.default.info(`Kafka consumer started for ${provider.constructor.name} (groupId: ${consumerOptions.groupId}, topics: ${Array.from(topicHandlers.keys()).join(', ')})`);
|
|
93
|
+
}
|
|
94
|
+
async onModuleDestroy() {
|
|
95
|
+
for (const { consumer } of this.runningConsumers) {
|
|
96
|
+
try {
|
|
97
|
+
await consumer.disconnect();
|
|
98
|
+
core_2.default.info('Kafka consumer disconnected');
|
|
99
|
+
}
|
|
100
|
+
catch (error) {
|
|
101
|
+
core_2.default.error('Error disconnecting Kafka consumer:', error);
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
this.runningConsumers = [];
|
|
105
|
+
}
|
|
106
|
+
/**
|
|
107
|
+
* Get count of running consumers
|
|
108
|
+
*/
|
|
109
|
+
getConsumerCount() {
|
|
110
|
+
return this.runningConsumers.length;
|
|
111
|
+
}
|
|
112
|
+
};
|
|
113
|
+
exports.KafkaConsumerService = KafkaConsumerService;
|
|
114
|
+
exports.KafkaConsumerService = KafkaConsumerService = __decorate([
|
|
115
|
+
(0, core_1.Injectable)(),
|
|
116
|
+
__param(0, (0, core_1.Inject)(exports.KAFKA_CLIENT_TOKEN)),
|
|
117
|
+
__metadata("design:paramtypes", [kafkajs_1.Kafka])
|
|
118
|
+
], KafkaConsumerService);
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { Kafka } from 'kafkajs';
|
|
2
|
+
import { KafkaMessage, KafkaProduceOptions } from './kafka.types';
|
|
3
|
+
export declare const KAFKA_CLIENT_TOKEN = "KAFKA_CLIENT";
|
|
4
|
+
/**
|
|
5
|
+
* Kafka producer service for publishing messages to topics
|
|
6
|
+
*/
|
|
7
|
+
export declare class KafkaProducerService {
|
|
8
|
+
private readonly kafka;
|
|
9
|
+
private producer;
|
|
10
|
+
private isConnected;
|
|
11
|
+
constructor(kafka: Kafka);
|
|
12
|
+
onModuleInit(): Promise<void>;
|
|
13
|
+
/**
|
|
14
|
+
* Connect producer (called automatically on first send if not already connected)
|
|
15
|
+
*/
|
|
16
|
+
private connect;
|
|
17
|
+
onModuleDestroy(): Promise<void>;
|
|
18
|
+
/**
|
|
19
|
+
* Send messages to a topic
|
|
20
|
+
*/
|
|
21
|
+
send(topic: string, messages: KafkaMessage | KafkaMessage[], options?: KafkaProduceOptions): Promise<void>;
|
|
22
|
+
/**
|
|
23
|
+
* Send a batch of messages to multiple topics
|
|
24
|
+
*/
|
|
25
|
+
sendBatch(batch: Array<{
|
|
26
|
+
topic: string;
|
|
27
|
+
messages: KafkaMessage | KafkaMessage[];
|
|
28
|
+
options?: KafkaProduceOptions;
|
|
29
|
+
}>): Promise<void>;
|
|
30
|
+
/**
|
|
31
|
+
* Check if producer is connected
|
|
32
|
+
*/
|
|
33
|
+
isProducerConnected(): boolean;
|
|
34
|
+
}
|
|
35
|
+
//# sourceMappingURL=kafka-producer.service.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-producer.service.d.ts","sourceRoot":"","sources":["../src/kafka-producer.service.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,MAAM,eAAe,CAAC;AAGlE,eAAO,MAAM,kBAAkB,iBAAiB,CAAC;AAEjD;;GAEG;AACH,qBACa,oBAAoB;IAM7B,OAAO,CAAC,QAAQ,CAAC,KAAK;IALxB,OAAO,CAAC,QAAQ,CAAgC;IAChD,OAAO,CAAC,WAAW,CAAS;gBAIT,KAAK,EAAE,KAAK;IAKzB,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC;IAInC;;OAEG;YACW,OAAO;IAYf,eAAe,IAAI,OAAO,CAAC,IAAI,CAAC;IAWtC;;OAEG;IACG,IAAI,CACR,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,YAAY,GAAG,YAAY,EAAE,EACvC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,OAAO,CAAC,IAAI,CAAC;IA0BhB;;OAEG;IACG,SAAS,CACb,KAAK,EAAE,KAAK,CAAC;QACX,KAAK,EAAE,MAAM,CAAC;QACd,QAAQ,EAAE,YAAY,GAAG,YAAY,EAAE,CAAC;QACxC,OAAO,CAAC,EAAE,mBAAmB,CAAC;KAC/B,CAAC,GACD,OAAO,CAAC,IAAI,CAAC;IAShB;;OAEG;IACH,mBAAmB,IAAI,OAAO;CAG/B"}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
var __param = (this && this.__param) || function (paramIndex, decorator) {
|
|
12
|
+
return function (target, key) { decorator(target, key, paramIndex); }
|
|
13
|
+
};
|
|
14
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
15
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
16
|
+
};
|
|
17
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
18
|
+
exports.KafkaProducerService = exports.KAFKA_CLIENT_TOKEN = void 0;
|
|
19
|
+
const core_1 = require("@hazeljs/core");
|
|
20
|
+
const kafkajs_1 = require("kafkajs");
|
|
21
|
+
const core_2 = __importDefault(require("@hazeljs/core"));
|
|
22
|
+
exports.KAFKA_CLIENT_TOKEN = 'KAFKA_CLIENT';
|
|
23
|
+
/**
|
|
24
|
+
* Kafka producer service for publishing messages to topics
|
|
25
|
+
*/
|
|
26
|
+
let KafkaProducerService = class KafkaProducerService {
|
|
27
|
+
constructor(kafka) {
|
|
28
|
+
this.kafka = kafka;
|
|
29
|
+
this.isConnected = false;
|
|
30
|
+
this.producer = this.kafka.producer();
|
|
31
|
+
}
|
|
32
|
+
async onModuleInit() {
|
|
33
|
+
await this.connect();
|
|
34
|
+
}
|
|
35
|
+
/**
|
|
36
|
+
* Connect producer (called automatically on first send if not already connected)
|
|
37
|
+
*/
|
|
38
|
+
async connect() {
|
|
39
|
+
if (this.isConnected)
|
|
40
|
+
return;
|
|
41
|
+
try {
|
|
42
|
+
await this.producer.connect();
|
|
43
|
+
this.isConnected = true;
|
|
44
|
+
core_2.default.info('Kafka producer connected');
|
|
45
|
+
}
|
|
46
|
+
catch (error) {
|
|
47
|
+
core_2.default.error('Failed to connect Kafka producer:', error);
|
|
48
|
+
throw error;
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
async onModuleDestroy() {
|
|
52
|
+
try {
|
|
53
|
+
await this.producer.disconnect();
|
|
54
|
+
this.isConnected = false;
|
|
55
|
+
core_2.default.info('Kafka producer disconnected');
|
|
56
|
+
}
|
|
57
|
+
catch (error) {
|
|
58
|
+
core_2.default.error('Error disconnecting Kafka producer:', error);
|
|
59
|
+
throw error;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
/**
|
|
63
|
+
* Send messages to a topic
|
|
64
|
+
*/
|
|
65
|
+
async send(topic, messages, options) {
|
|
66
|
+
await this.connect();
|
|
67
|
+
const messageArray = Array.isArray(messages) ? messages : [messages];
|
|
68
|
+
const formattedMessages = messageArray.map((msg) => {
|
|
69
|
+
const value = msg.value ?? null;
|
|
70
|
+
return {
|
|
71
|
+
key: msg.key ?? undefined,
|
|
72
|
+
value: value,
|
|
73
|
+
headers: msg.headers ?? undefined,
|
|
74
|
+
partition: msg.partition ?? undefined,
|
|
75
|
+
timestamp: msg.timestamp ?? undefined,
|
|
76
|
+
};
|
|
77
|
+
});
|
|
78
|
+
await this.producer.send({
|
|
79
|
+
topic,
|
|
80
|
+
messages: formattedMessages,
|
|
81
|
+
acks: options?.acks ?? -1,
|
|
82
|
+
timeout: options?.timeout ?? 30000,
|
|
83
|
+
compression: options?.compression ?? 0,
|
|
84
|
+
});
|
|
85
|
+
core_2.default.debug(`Sent ${formattedMessages.length} message(s) to topic: ${topic}`);
|
|
86
|
+
}
|
|
87
|
+
/**
|
|
88
|
+
* Send a batch of messages to multiple topics
|
|
89
|
+
*/
|
|
90
|
+
async sendBatch(batch) {
|
|
91
|
+
await this.connect();
|
|
92
|
+
await Promise.all(batch.map(({ topic, messages, options }) => this.send(topic, messages, options)));
|
|
93
|
+
core_2.default.debug(`Sent batch to ${batch.length} topic(s)`);
|
|
94
|
+
}
|
|
95
|
+
/**
|
|
96
|
+
* Check if producer is connected
|
|
97
|
+
*/
|
|
98
|
+
isProducerConnected() {
|
|
99
|
+
return this.isConnected;
|
|
100
|
+
}
|
|
101
|
+
};
|
|
102
|
+
exports.KafkaProducerService = KafkaProducerService;
|
|
103
|
+
exports.KafkaProducerService = KafkaProducerService = __decorate([
|
|
104
|
+
(0, core_1.Injectable)(),
|
|
105
|
+
__param(0, (0, core_1.Inject)(exports.KAFKA_CLIENT_TOKEN)),
|
|
106
|
+
__metadata("design:paramtypes", [kafkajs_1.Kafka])
|
|
107
|
+
], KafkaProducerService);
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import { Kafka } from 'kafkajs';
|
|
2
|
+
import { KafkaStreamTransform } from './kafka.types';
|
|
3
|
+
export declare const KAFKA_CLIENT_TOKEN = "KAFKA_CLIENT";
|
|
4
|
+
/**
|
|
5
|
+
* Lightweight Kafka stream processor: consume from topic, transform, produce to output topic
|
|
6
|
+
*/
|
|
7
|
+
export declare class KafkaStreamProcessor {
    private kafka;
    private consumer;
    private producer;
    // Builder state accumulated by from()/transform()/to()/withGroupId().
    private pipelineConfig;
    private isRunning;
    constructor(kafka: Kafka);
    /**
     * Set the input topic to consume from
     */
    from(topic: string): this;
    /**
     * Set the transform function
     */
    transform(fn: KafkaStreamTransform): this;
    /**
     * Set the output topic to produce to
     */
    to(topic: string): this;
    /**
     * Set consumer group ID for the stream processor
     */
    withGroupId(groupId: string): this;
    /**
     * Start the stream processor
     */
    start(): Promise<void>;
    /**
     * Stop the stream processor
     */
    stop(): Promise<void>;
    /**
     * Check if processor is running
     */
    isProcessorRunning(): boolean;
}
|
|
43
|
+
//# sourceMappingURL=kafka-stream.processor.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-stream.processor.d.ts","sourceRoot":"","sources":["../src/kafka-stream.processor.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,oBAAoB,EAAE,MAAM,eAAe,CAAC;AAGrD,eAAO,MAAM,kBAAkB,iBAAiB,CAAC;AASjD;;GAEG;AACH,qBACa,oBAAoB;IAC/B,OAAO,CAAC,KAAK,CAAQ;IACrB,OAAO,CAAC,QAAQ,CAA8C;IAC9D,OAAO,CAAC,QAAQ,CAA8C;IAC9D,OAAO,CAAC,cAAc,CAAqC;IAC3D,OAAO,CAAC,SAAS,CAAS;gBAIxB,KAAK,EAAE,KAAK;IAKd;;OAEG;IACH,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAezB;;OAEG;IACH,SAAS,CAAC,EAAE,EAAE,oBAAoB,GAAG,IAAI;IAQzC;;OAEG;IACH,EAAE,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAQvB;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI;IAQlC;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAsE5B;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAiB3B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAG9B"}
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
var __param = (this && this.__param) || function (paramIndex, decorator) {
|
|
12
|
+
return function (target, key) { decorator(target, key, paramIndex); }
|
|
13
|
+
};
|
|
14
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
15
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
16
|
+
};
|
|
17
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
18
|
+
exports.KafkaStreamProcessor = exports.KAFKA_CLIENT_TOKEN = void 0;
|
|
19
|
+
const core_1 = require("@hazeljs/core");
|
|
20
|
+
const kafkajs_1 = require("kafkajs");
|
|
21
|
+
const core_2 = __importDefault(require("@hazeljs/core"));
|
|
22
|
+
exports.KAFKA_CLIENT_TOKEN = 'KAFKA_CLIENT';
|
|
23
|
+
/**
|
|
24
|
+
* Lightweight Kafka stream processor: consume from topic, transform, produce to output topic
|
|
25
|
+
*/
|
|
26
|
+
let KafkaStreamProcessor = class KafkaStreamProcessor {
|
|
27
|
+
constructor(kafka) {
|
|
28
|
+
this.consumer = null;
|
|
29
|
+
this.producer = null;
|
|
30
|
+
this.pipelineConfig = null;
|
|
31
|
+
this.isRunning = false;
|
|
32
|
+
this.kafka = kafka;
|
|
33
|
+
}
|
|
34
|
+
/**
|
|
35
|
+
* Set the input topic to consume from
|
|
36
|
+
*/
|
|
37
|
+
from(topic) {
|
|
38
|
+
if (!this.pipelineConfig) {
|
|
39
|
+
this.pipelineConfig = {
|
|
40
|
+
inputTopic: topic,
|
|
41
|
+
outputTopic: '',
|
|
42
|
+
transform: async (msg) => ({
|
|
43
|
+
value: msg.value,
|
|
44
|
+
}),
|
|
45
|
+
};
|
|
46
|
+
}
|
|
47
|
+
else {
|
|
48
|
+
this.pipelineConfig.inputTopic = topic;
|
|
49
|
+
}
|
|
50
|
+
return this;
|
|
51
|
+
}
|
|
52
|
+
/**
|
|
53
|
+
* Set the transform function
|
|
54
|
+
*/
|
|
55
|
+
transform(fn) {
|
|
56
|
+
if (!this.pipelineConfig) {
|
|
57
|
+
throw new Error('Call from(topic) before transform()');
|
|
58
|
+
}
|
|
59
|
+
this.pipelineConfig.transform = fn;
|
|
60
|
+
return this;
|
|
61
|
+
}
|
|
62
|
+
/**
|
|
63
|
+
* Set the output topic to produce to
|
|
64
|
+
*/
|
|
65
|
+
to(topic) {
|
|
66
|
+
if (!this.pipelineConfig) {
|
|
67
|
+
throw new Error('Call from(topic) before to()');
|
|
68
|
+
}
|
|
69
|
+
this.pipelineConfig.outputTopic = topic;
|
|
70
|
+
return this;
|
|
71
|
+
}
|
|
72
|
+
/**
|
|
73
|
+
* Set consumer group ID for the stream processor
|
|
74
|
+
*/
|
|
75
|
+
withGroupId(groupId) {
|
|
76
|
+
if (!this.pipelineConfig) {
|
|
77
|
+
throw new Error('Call from(topic) before withGroupId()');
|
|
78
|
+
}
|
|
79
|
+
this.pipelineConfig.groupId = groupId;
|
|
80
|
+
return this;
|
|
81
|
+
}
|
|
82
|
+
/**
|
|
83
|
+
* Start the stream processor
|
|
84
|
+
*/
|
|
85
|
+
async start() {
|
|
86
|
+
if (!this.pipelineConfig) {
|
|
87
|
+
throw new Error('Pipeline not configured. Use from().transform().to()');
|
|
88
|
+
}
|
|
89
|
+
if (this.isRunning) {
|
|
90
|
+
core_2.default.warn('Kafka stream processor already running');
|
|
91
|
+
return;
|
|
92
|
+
}
|
|
93
|
+
const { inputTopic, outputTopic, transform } = this.pipelineConfig;
|
|
94
|
+
const groupId = this.pipelineConfig.groupId ?? `stream-${inputTopic}-${outputTopic}`;
|
|
95
|
+
this.consumer = this.kafka.consumer({ groupId });
|
|
96
|
+
this.producer = this.kafka.producer();
|
|
97
|
+
await this.consumer.connect();
|
|
98
|
+
await this.producer.connect();
|
|
99
|
+
await this.consumer.subscribe({ topics: [inputTopic], fromBeginning: false });
|
|
100
|
+
this.isRunning = true;
|
|
101
|
+
await this.consumer.run({
|
|
102
|
+
eachMessage: async ({ message }) => {
|
|
103
|
+
try {
|
|
104
|
+
const result = await transform({
|
|
105
|
+
key: message.key,
|
|
106
|
+
value: message.value,
|
|
107
|
+
headers: message.headers,
|
|
108
|
+
});
|
|
109
|
+
if (result === null)
|
|
110
|
+
return;
|
|
111
|
+
const outputMessage = typeof result === 'object' && result !== null && 'value' in result
|
|
112
|
+
? result
|
|
113
|
+
: { value: result };
|
|
114
|
+
const value = outputMessage.value === undefined || outputMessage.value === null
|
|
115
|
+
? message.value
|
|
116
|
+
: typeof outputMessage.value === 'string' || Buffer.isBuffer(outputMessage.value)
|
|
117
|
+
? outputMessage.value
|
|
118
|
+
: JSON.stringify(outputMessage.value);
|
|
119
|
+
await this.producer.send({
|
|
120
|
+
topic: outputTopic,
|
|
121
|
+
messages: [
|
|
122
|
+
{
|
|
123
|
+
key: outputMessage.key ?? message.key,
|
|
124
|
+
value,
|
|
125
|
+
headers: outputMessage.headers ?? message.headers,
|
|
126
|
+
},
|
|
127
|
+
],
|
|
128
|
+
});
|
|
129
|
+
}
|
|
130
|
+
catch (error) {
|
|
131
|
+
core_2.default.error('Error in stream transform:', error);
|
|
132
|
+
throw error;
|
|
133
|
+
}
|
|
134
|
+
},
|
|
135
|
+
});
|
|
136
|
+
core_2.default.info(`Kafka stream processor started: ${inputTopic} -> ${outputTopic} (groupId: ${groupId})`);
|
|
137
|
+
}
|
|
138
|
+
/**
|
|
139
|
+
* Stop the stream processor
|
|
140
|
+
*/
|
|
141
|
+
async stop() {
|
|
142
|
+
if (!this.isRunning)
|
|
143
|
+
return;
|
|
144
|
+
if (this.consumer) {
|
|
145
|
+
await this.consumer.disconnect();
|
|
146
|
+
this.consumer = null;
|
|
147
|
+
}
|
|
148
|
+
if (this.producer) {
|
|
149
|
+
await this.producer.disconnect();
|
|
150
|
+
this.producer = null;
|
|
151
|
+
}
|
|
152
|
+
this.isRunning = false;
|
|
153
|
+
this.pipelineConfig = null;
|
|
154
|
+
core_2.default.info('Kafka stream processor stopped');
|
|
155
|
+
}
|
|
156
|
+
/**
|
|
157
|
+
* Check if processor is running
|
|
158
|
+
*/
|
|
159
|
+
isProcessorRunning() {
|
|
160
|
+
return this.isRunning;
|
|
161
|
+
}
|
|
162
|
+
};
|
|
163
|
+
exports.KafkaStreamProcessor = KafkaStreamProcessor;
|
|
164
|
+
exports.KafkaStreamProcessor = KafkaStreamProcessor = __decorate([
|
|
165
|
+
(0, core_1.Injectable)(),
|
|
166
|
+
__param(0, (0, core_1.Inject)(exports.KAFKA_CLIENT_TOKEN)),
|
|
167
|
+
__metadata("design:paramtypes", [kafkajs_1.Kafka])
|
|
168
|
+
], KafkaStreamProcessor);
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { KafkaModuleOptions } from './kafka.types';
|
|
2
|
+
/**
|
|
3
|
+
* Kafka module for HazelJS
|
|
4
|
+
*/
|
|
5
|
+
export declare class KafkaModule {
    /**
     * Configure Kafka module.
     * Registers the Kafka client with the container before module initialization,
     * since HazelJS does not process dynamic module provider configs from forRoot return values.
     */
    static forRoot(options?: Partial<KafkaModuleOptions>): typeof KafkaModule;
    /**
     * Configure Kafka module asynchronously.
     * Must be awaited before creating the app so the Kafka client is registered.
     */
    static forRootAsync(options: {
        useFactory: (...args: unknown[]) => Promise<KafkaModuleOptions> | KafkaModuleOptions;
        inject?: unknown[];
    }): Promise<typeof KafkaModule>;
    /**
     * Register Kafka consumers from a provider instance.
     * Call this after the provider has been instantiated (e.g. in bootstrap).
     * Errors are logged, not rethrown (best-effort registration).
     *
     * @example
     * ```typescript
     * const container = Container.getInstance();
     * const orderConsumer = container.resolve(OrderConsumer);
     * KafkaModule.registerConsumersFromProvider(orderConsumer);
     * ```
     */
    static registerConsumersFromProvider(provider: object): Promise<void>;
}
|
|
33
|
+
//# sourceMappingURL=kafka.module.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka.module.d.ts","sourceRoot":"","sources":["../src/kafka.module.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAQnD;;GAEG;AACH,qBAIa,WAAW;IACtB;;;;OAIG;IACH,MAAM,CAAC,OAAO,CAAC,OAAO,GAAE,OAAO,CAAC,kBAAkB,CAAM,GAAG,OAAO,WAAW;IAgB7E;;;OAGG;WACU,YAAY,CAAC,OAAO,EAAE;QACjC,UAAU,EAAE,CAAC,GAAG,IAAI,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,kBAAkB,CAAC,GAAG,kBAAkB,CAAC;QACrF,MAAM,CAAC,EAAE,OAAO,EAAE,CAAC;KACpB,GAAG,OAAO,CAAC,OAAO,WAAW,CAAC;IAiB/B;;;;;;;;;;OAUG;WACU,6BAA6B,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAgB5E"}
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
9
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
10
|
+
};
|
|
11
|
+
var KafkaModule_1;
|
|
12
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
13
|
+
exports.KafkaModule = void 0;
|
|
14
|
+
const core_1 = require("@hazeljs/core");
|
|
15
|
+
const kafkajs_1 = require("kafkajs");
|
|
16
|
+
const kafka_producer_service_1 = require("./kafka-producer.service");
|
|
17
|
+
const kafka_consumer_service_1 = require("./kafka-consumer.service");
|
|
18
|
+
const kafka_stream_processor_1 = require("./kafka-stream.processor");
|
|
19
|
+
const kafka_producer_service_2 = require("./kafka-producer.service");
|
|
20
|
+
const core_2 = require("@hazeljs/core");
|
|
21
|
+
const core_3 = __importDefault(require("@hazeljs/core"));
|
|
22
|
+
/**
|
|
23
|
+
* Kafka module for HazelJS
|
|
24
|
+
*/
|
|
25
|
+
let KafkaModule = KafkaModule_1 = class KafkaModule {
|
|
26
|
+
/**
|
|
27
|
+
* Configure Kafka module.
|
|
28
|
+
* Registers the Kafka client with the container before module initialization,
|
|
29
|
+
* since HazelJS does not process dynamic module provider configs from forRoot return values.
|
|
30
|
+
*/
|
|
31
|
+
static forRoot(options = {}) {
|
|
32
|
+
const { clientId = 'hazeljs-app', brokers = ['localhost:9092'], ...kafkaOptions } = options;
|
|
33
|
+
core_3.default.info('Configuring Kafka module...');
|
|
34
|
+
const kafkaClient = new kafkajs_1.Kafka({
|
|
35
|
+
clientId,
|
|
36
|
+
brokers,
|
|
37
|
+
...kafkaOptions,
|
|
38
|
+
});
|
|
39
|
+
core_2.Container.getInstance().register(kafka_producer_service_2.KAFKA_CLIENT_TOKEN, kafkaClient);
|
|
40
|
+
return KafkaModule_1;
|
|
41
|
+
}
|
|
42
|
+
/**
|
|
43
|
+
* Configure Kafka module asynchronously.
|
|
44
|
+
* Must be awaited before creating the app so the Kafka client is registered.
|
|
45
|
+
*/
|
|
46
|
+
static async forRootAsync(options) {
|
|
47
|
+
const container = core_2.Container.getInstance();
|
|
48
|
+
const injectTokens = options.inject ?? [];
|
|
49
|
+
const deps = injectTokens.map((token) => container.resolve(token));
|
|
50
|
+
const kafkaOptions = await Promise.resolve(options.useFactory(...deps));
|
|
51
|
+
const { clientId = 'hazeljs-app', brokers = ['localhost:9092'], ...rest } = kafkaOptions;
|
|
52
|
+
const kafkaClient = new kafkajs_1.Kafka({
|
|
53
|
+
clientId,
|
|
54
|
+
brokers,
|
|
55
|
+
...rest,
|
|
56
|
+
});
|
|
57
|
+
container.register(kafka_producer_service_2.KAFKA_CLIENT_TOKEN, kafkaClient);
|
|
58
|
+
return KafkaModule_1;
|
|
59
|
+
}
|
|
60
|
+
/**
|
|
61
|
+
* Register Kafka consumers from a provider instance.
|
|
62
|
+
* Call this after the provider has been instantiated (e.g. in bootstrap).
|
|
63
|
+
*
|
|
64
|
+
* @example
|
|
65
|
+
* ```typescript
|
|
66
|
+
* const container = Container.getInstance();
|
|
67
|
+
* const orderConsumer = container.resolve(OrderConsumer);
|
|
68
|
+
* KafkaModule.registerConsumersFromProvider(orderConsumer);
|
|
69
|
+
* ```
|
|
70
|
+
*/
|
|
71
|
+
static async registerConsumersFromProvider(provider) {
|
|
72
|
+
try {
|
|
73
|
+
const container = core_2.Container.getInstance();
|
|
74
|
+
const consumerService = container.resolve(kafka_consumer_service_1.KafkaConsumerService);
|
|
75
|
+
if (!consumerService) {
|
|
76
|
+
core_3.default.warn('KafkaConsumerService not found in DI container');
|
|
77
|
+
return;
|
|
78
|
+
}
|
|
79
|
+
await consumerService.registerFromProvider(provider);
|
|
80
|
+
core_3.default.info(`Registered Kafka consumer from provider: ${provider.constructor.name}`);
|
|
81
|
+
}
|
|
82
|
+
catch (error) {
|
|
83
|
+
core_3.default.error('Error registering Kafka consumers from provider:', error);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
};
|
|
87
|
+
exports.KafkaModule = KafkaModule;
|
|
88
|
+
exports.KafkaModule = KafkaModule = KafkaModule_1 = __decorate([
|
|
89
|
+
(0, core_1.HazelModule)({
|
|
90
|
+
providers: [kafka_producer_service_1.KafkaProducerService, kafka_consumer_service_1.KafkaConsumerService, kafka_stream_processor_1.KafkaStreamProcessor],
|
|
91
|
+
exports: [kafka_producer_service_1.KafkaProducerService, kafka_consumer_service_1.KafkaConsumerService, kafka_stream_processor_1.KafkaStreamProcessor],
|
|
92
|
+
})
|
|
93
|
+
], KafkaModule);
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
/**
 * Kafka module types and interfaces
 */
/**
 * SASL authentication mechanism
 */
export type SaslMechanism = 'plain' | 'scram-sha-256' | 'scram-sha-512';
/**
 * SASL configuration for Kafka authentication
 */
export interface KafkaSaslOptions {
    mechanism: SaslMechanism;
    username: string;
    password: string;
}
/**
 * SSL configuration for Kafka
 */
export interface KafkaSslOptions {
    rejectUnauthorized?: boolean;
    ca?: string[];
    cert?: string;
    key?: string;
}
/**
 * Base Kafka client options (KafkaJS compatible)
 */
export interface KafkaClientOptions {
    clientId: string;
    brokers: string[];
    connectionTimeout?: number;
    requestTimeout?: number;
    retry?: {
        retries?: number;
        initialRetryTime?: number;
        maxRetryTime?: number;
    };
    ssl?: boolean | KafkaSslOptions;
    sasl?: KafkaSaslOptions;
}
/**
 * Kafka module options for forRoot()
 */
export interface KafkaModuleOptions extends KafkaClientOptions {
    /**
     * Whether this is a global module
     * @default true
     */
    isGlobal?: boolean;
    /**
     * Enable Kafka Stream Processor
     * @default true
     */
    enableStreamProcessor?: boolean;
}
/**
 * Consumer group options (KafkaJS consumer config)
 */
export interface KafkaConsumerOptions {
    groupId: string;
    sessionTimeout?: number;
    rebalanceTimeout?: number;
    heartbeatInterval?: number;
    maxWaitTimeInMs?: number;
    retry?: {
        retries?: number;
        initialRetryTime?: number;
        maxRetryTime?: number;
    };
}
/**
 * Topic subscription options
 */
export interface KafkaSubscribeOptions {
    /**
     * Read from beginning of topic
     * @default false
     */
    fromBeginning?: boolean;
}
/**
 * Producer send options
 */
export interface KafkaProduceOptions {
    // kafkajs semantics: -1 = all in-sync replicas, 0 = no acknowledgment, 1 = leader only.
    acks?: -1 | 0 | 1;
    timeout?: number;
    // kafkajs CompressionTypes numeric values: 0 = None, 1 = GZIP, 2 = Snappy, 3 = LZ4.
    compression?: 0 | 1 | 2 | 3;
}
/**
 * Kafka message for producing
 */
export interface KafkaMessage {
    key?: string | Buffer;
    value: string | Buffer | null;
    headers?: Record<string, string>;
    partition?: number;
    timestamp?: string;
}
/**
 * Payload passed to eachMessage handler (matches KafkaJS EachMessagePayload)
 */
export interface KafkaMessagePayload {
    topic: string;
    partition: number;
    message: {
        key: Buffer | null;
        value: Buffer | null;
        headers: Record<string, string>;
        offset: string;
        timestamp: string;
        attributes?: number;
    };
    heartbeat(): Promise<void>;
    pause(): void;
    commitOffsets?(offsets: Array<{
        topic: string;
        partition: number;
        offset: string;
    }>): Promise<void>;
}
/**
 * Handler type for Kafka message processing
 */
export type KafkaMessageHandler = (payload: KafkaMessagePayload) => Promise<void>;
/**
 * Transform function for stream processor
 */
export type KafkaStreamTransform<T = unknown, R = unknown> = (message: {
    key: Buffer | null;
    value: Buffer | null;
    headers: Record<string, string>;
}) => Promise<{
    key?: string | Buffer;
    value: T | string | Buffer | null;
    headers?: Record<string, string>;
} | R | null>;
|
|
137
|
+
//# sourceMappingURL=kafka.types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka.types.d.ts","sourceRoot":"","sources":["../src/kafka.types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,eAAe,GAAG,eAAe,CAAC;AAExE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,SAAS,EAAE,aAAa,CAAC;IACzB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,KAAK,CAAC,EAAE;QACN,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB,CAAC;IACF,GAAG,CAAC,EAAE,OAAO,GAAG,eAAe,CAAC;IAChC,IAAI,CAAC,EAAE,gBAAgB,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAmB,SAAQ,kBAAkB;IAC5D;;;OAGG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IAEnB;;;OAGG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,OAAO,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,KAAK,CAAC,EAAE;QACN,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,IAAI,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;CAC7B;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IACtB,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC9B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE;QACP,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;QACnB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;QACrB,OAAO,EAAE,MAAM,CAAC,MA
AM,EAAE,MAAM,CAAC,CAAC;QAChC,MAAM,EAAE,MAAM,CAAC;QACf,SAAS,EAAE,MAAM,CAAC;QAClB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB,CAAC;IACF,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,KAAK,IAAI,IAAI,CAAC;IACd,aAAa,CAAC,CACZ,OAAO,EAAE,KAAK,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAA;KAAE,CAAC,GACnE,OAAO,CAAC,IAAI,CAAC,CAAC;CAClB;AAED;;GAEG;AACH,MAAM,MAAM,mBAAmB,GAAG,CAAC,OAAO,EAAE,mBAAmB,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;AAElF;;GAEG;AACH,MAAM,MAAM,oBAAoB,CAAC,CAAC,GAAG,OAAO,EAAE,CAAC,GAAG,OAAO,IAAI,CAAC,OAAO,EAAE;IACrE,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;IACnB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACjC,KAAK,OAAO,CACT;IAAE,GAAG,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IAAC,KAAK,EAAE,CAAC,GAAG,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAC9F,CAAC,GACD,IAAI,CACP,CAAC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@hazeljs/kafka",
|
|
3
|
+
"version": "0.2.0-beta.18",
|
|
4
|
+
"description": "Kafka module for HazelJS framework - produce, consume, and stream processing",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"types": "dist/index.d.ts",
|
|
7
|
+
"files": [
|
|
8
|
+
"dist"
|
|
9
|
+
],
|
|
10
|
+
"scripts": {
|
|
11
|
+
"build": "tsc",
|
|
12
|
+
"test": "jest --coverage",
|
|
13
|
+
"lint": "eslint \"src/**/*.ts\"",
|
|
14
|
+
"lint:fix": "eslint \"src/**/*.ts\" --fix",
|
|
15
|
+
"clean": "rm -rf dist"
|
|
16
|
+
},
|
|
17
|
+
"dependencies": {
|
|
18
|
+
"@hazeljs/core": "^0.2.0-beta.18",
|
|
19
|
+
"kafkajs": "^2.2.4"
|
|
20
|
+
},
|
|
21
|
+
"devDependencies": {
|
|
22
|
+
"@types/node": "^20.17.50",
|
|
23
|
+
"@typescript-eslint/eslint-plugin": "^8.18.2",
|
|
24
|
+
"@typescript-eslint/parser": "^8.18.2",
|
|
25
|
+
"eslint": "^8.56.0",
|
|
26
|
+
"jest": "^29.7.0",
|
|
27
|
+
"ts-jest": "^29.1.2",
|
|
28
|
+
"typescript": "^5.3.3"
|
|
29
|
+
},
|
|
30
|
+
"publishConfig": {
|
|
31
|
+
"access": "public"
|
|
32
|
+
},
|
|
33
|
+
"repository": {
|
|
34
|
+
"type": "git",
|
|
35
|
+
"url": "git+https://github.com/hazel-js/hazeljs.git",
|
|
36
|
+
"directory": "packages/kafka"
|
|
37
|
+
},
|
|
38
|
+
"keywords": [
|
|
39
|
+
"hazeljs",
|
|
40
|
+
"kafka",
|
|
41
|
+
"message-queue",
|
|
42
|
+
"stream-processing",
|
|
43
|
+
"kafkajs"
|
|
44
|
+
],
|
|
45
|
+
"author": "Muhammad Arslan <marslan@hazeljs.com>",
|
|
46
|
+
"license": "MIT",
|
|
47
|
+
"bugs": {
|
|
48
|
+
"url": "https://github.com/hazel-js/hazeljs/issues"
|
|
49
|
+
},
|
|
50
|
+
"homepage": "https://hazeljs.com",
|
|
51
|
+
"gitHead": "22082c1277661421cd32b3c4371c2c9d0bdf0501"
|
|
52
|
+
}
|