@hazeljs/kafka 0.2.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +192 -0
- package/README.md +175 -0
- package/dist/__tests__/kafka-consumer.decorator.test.d.ts +2 -0
- package/dist/__tests__/kafka-consumer.decorator.test.d.ts.map +1 -0
- package/dist/__tests__/kafka-consumer.decorator.test.js +100 -0
- package/dist/__tests__/kafka-consumer.service.test.d.ts +2 -0
- package/dist/__tests__/kafka-consumer.service.test.d.ts.map +1 -0
- package/dist/__tests__/kafka-consumer.service.test.js +244 -0
- package/dist/__tests__/kafka-producer.service.test.d.ts +2 -0
- package/dist/__tests__/kafka-producer.service.test.d.ts.map +1 -0
- package/dist/__tests__/kafka-producer.service.test.js +73 -0
- package/dist/__tests__/kafka-stream.processor.test.d.ts +2 -0
- package/dist/__tests__/kafka-stream.processor.test.d.ts.map +1 -0
- package/dist/__tests__/kafka-stream.processor.test.js +243 -0
- package/dist/__tests__/kafka.module.test.d.ts +2 -0
- package/dist/__tests__/kafka.module.test.d.ts.map +1 -0
- package/dist/__tests__/kafka.module.test.js +41 -0
- package/dist/decorators/kafka-consumer.decorator.d.ts +32 -0
- package/dist/decorators/kafka-consumer.decorator.d.ts.map +1 -0
- package/dist/decorators/kafka-consumer.decorator.js +64 -0
- package/dist/decorators/kafka-subscribe.decorator.d.ts +40 -0
- package/dist/decorators/kafka-subscribe.decorator.d.ts.map +1 -0
- package/dist/decorators/kafka-subscribe.decorator.js +53 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +23 -0
- package/dist/kafka-consumer.service.d.ts +21 -0
- package/dist/kafka-consumer.service.d.ts.map +1 -0
- package/dist/kafka-consumer.service.js +118 -0
- package/dist/kafka-producer.service.d.ts +35 -0
- package/dist/kafka-producer.service.d.ts.map +1 -0
- package/dist/kafka-producer.service.js +107 -0
- package/dist/kafka-stream.processor.d.ts +43 -0
- package/dist/kafka-stream.processor.d.ts.map +1 -0
- package/dist/kafka-stream.processor.js +168 -0
- package/dist/kafka.module.d.ts +33 -0
- package/dist/kafka.module.d.ts.map +1 -0
- package/dist/kafka.module.js +93 -0
- package/dist/kafka.types.d.ts +137 -0
- package/dist/kafka.types.d.ts.map +1 -0
- package/dist/kafka.types.js +5 -0
- package/package.json +54 -0
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
var __param = (this && this.__param) || function (paramIndex, decorator) {
|
|
12
|
+
return function (target, key) { decorator(target, key, paramIndex); }
|
|
13
|
+
};
|
|
14
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
15
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
16
|
+
};
|
|
17
|
+
Object.defineProperty(exports, "__esModule", { value: true });
exports.KafkaStreamProcessor = exports.KAFKA_CLIENT_TOKEN = void 0;
const core_1 = require("@hazeljs/core");
const kafkajs_1 = require("kafkajs");
// Default export of @hazeljs/core is used below as the framework logger
// (core_2.default.info/warn/error).
const core_2 = __importDefault(require("@hazeljs/core"));
// DI token under which the shared kafkajs Kafka client is registered.
exports.KAFKA_CLIENT_TOKEN = 'KAFKA_CLIENT';
|
|
23
|
+
/**
 * Lightweight Kafka stream processor: consume from topic, transform, produce to output topic
 *
 * Fluent usage: processor.from('in').transform(fn).to('out'); await processor.start();
 */
let KafkaStreamProcessor = class KafkaStreamProcessor {
    /**
     * @param kafka Shared kafkajs client instance, injected via KAFKA_CLIENT_TOKEN.
     */
    constructor(kafka) {
        this.consumer = null; // kafkajs Consumer, created lazily in start()
        this.producer = null; // kafkajs Producer, created lazily in start()
        this.pipelineConfig = null; // { inputTopic, outputTopic, transform, groupId? }
        this.isRunning = false;
        this.kafka = kafka;
    }
    /**
     * Set the input topic to consume from.
     * First call seeds the pipeline with an identity transform, so that
     * from().to() alone forwards messages unchanged.
     */
    from(topic) {
        if (!this.pipelineConfig) {
            this.pipelineConfig = {
                inputTopic: topic,
                outputTopic: '',
                transform: async (msg) => ({
                    value: msg.value,
                }),
            };
        }
        else {
            this.pipelineConfig.inputTopic = topic;
        }
        return this;
    }
    /**
     * Set the transform function.
     * @throws {Error} if from(topic) has not been called yet
     */
    transform(fn) {
        if (!this.pipelineConfig) {
            throw new Error('Call from(topic) before transform()');
        }
        this.pipelineConfig.transform = fn;
        return this;
    }
    /**
     * Set the output topic to produce to.
     * @throws {Error} if from(topic) has not been called yet
     */
    to(topic) {
        if (!this.pipelineConfig) {
            throw new Error('Call from(topic) before to()');
        }
        this.pipelineConfig.outputTopic = topic;
        return this;
    }
    /**
     * Set consumer group ID for the stream processor.
     * Defaults to `stream-<inputTopic>-<outputTopic>` when not set.
     * @throws {Error} if from(topic) has not been called yet
     */
    withGroupId(groupId) {
        if (!this.pipelineConfig) {
            throw new Error('Call from(topic) before withGroupId()');
        }
        this.pipelineConfig.groupId = groupId;
        return this;
    }
    /**
     * Start the stream processor: connect consumer and producer, subscribe to
     * the input topic, and pump each message through the transform into the
     * output topic.
     * @throws {Error} if the pipeline is not configured (missing from()/to())
     */
    async start() {
        if (!this.pipelineConfig) {
            throw new Error('Pipeline not configured. Use from().transform().to()');
        }
        if (this.isRunning) {
            core_2.default.warn('Kafka stream processor already running');
            return;
        }
        const { inputTopic, outputTopic, transform } = this.pipelineConfig;
        // Fix: from() initializes outputTopic to '' — fail fast here rather than
        // silently producing every message to an empty topic name.
        if (!outputTopic) {
            throw new Error('Output topic not configured. Use to(topic) before start()');
        }
        const groupId = this.pipelineConfig.groupId ?? `stream-${inputTopic}-${outputTopic}`;
        this.consumer = this.kafka.consumer({ groupId });
        this.producer = this.kafka.producer();
        await this.consumer.connect();
        await this.producer.connect();
        await this.consumer.subscribe({ topics: [inputTopic], fromBeginning: false });
        this.isRunning = true;
        await this.consumer.run({
            eachMessage: async ({ message }) => {
                try {
                    const result = await transform({
                        key: message.key,
                        value: message.value,
                        headers: message.headers,
                    });
                    // A null result filters the message out: nothing is produced.
                    if (result === null)
                        return;
                    // Accept either a { key?, value, headers? } envelope or a bare
                    // value (null was already excluded above).
                    const outputMessage = typeof result === 'object' && 'value' in result
                        ? result
                        : { value: result };
                    // Nullish output values fall back to the original payload;
                    // non-string, non-Buffer values are JSON-serialized.
                    const value = outputMessage.value === undefined || outputMessage.value === null
                        ? message.value
                        : typeof outputMessage.value === 'string' || Buffer.isBuffer(outputMessage.value)
                            ? outputMessage.value
                            : JSON.stringify(outputMessage.value);
                    await this.producer.send({
                        topic: outputTopic,
                        messages: [
                            {
                                key: outputMessage.key ?? message.key,
                                value,
                                headers: outputMessage.headers ?? message.headers,
                            },
                        ],
                    });
                }
                catch (error) {
                    // Log, then rethrow so kafkajs applies its retry semantics.
                    core_2.default.error('Error in stream transform:', error);
                    throw error;
                }
            },
        });
        core_2.default.info(`Kafka stream processor started: ${inputTopic} -> ${outputTopic} (groupId: ${groupId})`);
    }
    /**
     * Stop the stream processor: disconnect consumer and producer and clear
     * the pipeline configuration. No-op when not running.
     */
    async stop() {
        if (!this.isRunning)
            return;
        if (this.consumer) {
            await this.consumer.disconnect();
            this.consumer = null;
        }
        if (this.producer) {
            await this.producer.disconnect();
            this.producer = null;
        }
        this.isRunning = false;
        this.pipelineConfig = null;
        core_2.default.info('Kafka stream processor stopped');
    }
    /**
     * Check if processor is running
     */
    isProcessorRunning() {
        return this.isRunning;
    }
};
|
|
163
|
+
// Export the class, then rebind the export to the decorated version so that
// the @Service() registration and @Inject(KAFKA_CLIENT) constructor metadata
// are attached to what consumers actually import.
exports.KafkaStreamProcessor = KafkaStreamProcessor;
exports.KafkaStreamProcessor = KafkaStreamProcessor = __decorate([
    (0, core_1.Service)(),
    __param(0, (0, core_1.Inject)(exports.KAFKA_CLIENT_TOKEN)),
    __metadata("design:paramtypes", [kafkajs_1.Kafka])
], KafkaStreamProcessor);
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { KafkaModuleOptions } from './kafka.types';
/**
 * Kafka module for HazelJS
 */
export declare class KafkaModule {
    /**
     * Configure Kafka module.
     * Registers the Kafka client with the container before module initialization,
     * since HazelJS does not process dynamic module provider configs from forRoot return values.
     * @param options Partial client options; the implementation defaults
     *                clientId to 'hazeljs-app' and brokers to ['localhost:9092'].
     * @returns the KafkaModule class, for listing in the app's module imports
     */
    static forRoot(options?: Partial<KafkaModuleOptions>): typeof KafkaModule;
    /**
     * Configure Kafka module asynchronously.
     * Must be awaited before creating the app so the Kafka client is registered.
     * @param options.useFactory factory producing KafkaModuleOptions (sync or async)
     * @param options.inject DI tokens resolved from the container and passed to the factory
     */
    static forRootAsync(options: {
        useFactory: (...args: unknown[]) => Promise<KafkaModuleOptions> | KafkaModuleOptions;
        inject?: unknown[];
    }): Promise<typeof KafkaModule>;
    /**
     * Register Kafka consumers from a provider instance.
     * Call this after the provider has been instantiated (e.g. in bootstrap).
     * Errors are logged, not thrown (best-effort registration).
     *
     * @example
     * ```typescript
     * const container = Container.getInstance();
     * const orderConsumer = container.resolve(OrderConsumer);
     * KafkaModule.registerConsumersFromProvider(orderConsumer);
     * ```
     */
    static registerConsumersFromProvider(provider: object): Promise<void>;
}
//# sourceMappingURL=kafka.module.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka.module.d.ts","sourceRoot":"","sources":["../src/kafka.module.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAQnD;;GAEG;AACH,qBAIa,WAAW;IACtB;;;;OAIG;IACH,MAAM,CAAC,OAAO,CAAC,OAAO,GAAE,OAAO,CAAC,kBAAkB,CAAM,GAAG,OAAO,WAAW;IAgB7E;;;OAGG;WACU,YAAY,CAAC,OAAO,EAAE;QACjC,UAAU,EAAE,CAAC,GAAG,IAAI,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,kBAAkB,CAAC,GAAG,kBAAkB,CAAC;QACrF,MAAM,CAAC,EAAE,OAAO,EAAE,CAAC;KACpB,GAAG,OAAO,CAAC,OAAO,WAAW,CAAC;IAiB/B;;;;;;;;;;OAUG;WACU,6BAA6B,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAgB5E"}
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
9
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
10
|
+
};
|
|
11
|
+
var KafkaModule_1;
|
|
12
|
+
Object.defineProperty(exports, "__esModule", { value: true });
exports.KafkaModule = void 0;
const core_1 = require("@hazeljs/core");
const kafkajs_1 = require("kafkajs");
const kafka_producer_service_1 = require("./kafka-producer.service");
const kafka_consumer_service_1 = require("./kafka-consumer.service");
const kafka_stream_processor_1 = require("./kafka-stream.processor");
// Same module as kafka_producer_service_1; tsc emitted a second require for
// the separate KAFKA_CLIENT_TOKEN import. Harmless (require() caches).
const kafka_producer_service_2 = require("./kafka-producer.service");
const core_2 = require("@hazeljs/core");
// Default export of @hazeljs/core, used below as the framework logger.
const core_3 = __importDefault(require("@hazeljs/core"));
|
|
22
|
+
/**
 * Kafka module for HazelJS
 */
let KafkaModule = KafkaModule_1 = class KafkaModule {
    /**
     * Configure Kafka module.
     * Registers the Kafka client with the container before module initialization,
     * since HazelJS does not process dynamic module provider configs from forRoot return values.
     * @param options Kafka client options; clientId defaults to 'hazeljs-app',
     *                brokers defaults to ['localhost:9092'].
     * @returns the KafkaModule class, for listing in the app's module imports
     */
    static forRoot(options = {}) {
        // Fix: strip module-only flags (isGlobal, enableStreamProcessor —
        // declared in KafkaModuleOptions) so they are not forwarded into the
        // kafkajs Kafka constructor, which only understands KafkaConfig keys.
        const { clientId = 'hazeljs-app', brokers = ['localhost:9092'], isGlobal: _isGlobal, enableStreamProcessor: _enableStreamProcessor, ...kafkaOptions } = options;
        core_3.default.info('Configuring Kafka module...');
        const kafkaClient = new kafkajs_1.Kafka({
            clientId,
            brokers,
            ...kafkaOptions,
        });
        core_2.Container.getInstance().register(kafka_producer_service_2.KAFKA_CLIENT_TOKEN, kafkaClient);
        return KafkaModule_1;
    }
    /**
     * Configure Kafka module asynchronously.
     * Must be awaited before creating the app so the Kafka client is registered.
     * @param options.useFactory factory producing KafkaModuleOptions (sync or async)
     * @param options.inject DI tokens resolved from the container and passed to the factory
     */
    static async forRootAsync(options) {
        const container = core_2.Container.getInstance();
        const injectTokens = options.inject ?? [];
        const deps = injectTokens.map((token) => container.resolve(token));
        const kafkaOptions = await Promise.resolve(options.useFactory(...deps));
        // Fix: strip module-only flags here as well (see forRoot).
        const { clientId = 'hazeljs-app', brokers = ['localhost:9092'], isGlobal: _isGlobal, enableStreamProcessor: _enableStreamProcessor, ...rest } = kafkaOptions;
        const kafkaClient = new kafkajs_1.Kafka({
            clientId,
            brokers,
            ...rest,
        });
        container.register(kafka_producer_service_2.KAFKA_CLIENT_TOKEN, kafkaClient);
        return KafkaModule_1;
    }
    /**
     * Register Kafka consumers from a provider instance.
     * Call this after the provider has been instantiated (e.g. in bootstrap).
     *
     * @example
     * ```typescript
     * const container = Container.getInstance();
     * const orderConsumer = container.resolve(OrderConsumer);
     * KafkaModule.registerConsumersFromProvider(orderConsumer);
     * ```
     */
    static async registerConsumersFromProvider(provider) {
        try {
            const container = core_2.Container.getInstance();
            const consumerService = container.resolve(kafka_consumer_service_1.KafkaConsumerService);
            if (!consumerService) {
                core_3.default.warn('KafkaConsumerService not found in DI container');
                return;
            }
            await consumerService.registerFromProvider(provider);
            core_3.default.info(`Registered Kafka consumer from provider: ${provider.constructor.name}`);
        }
        catch (error) {
            // Best-effort by design: log and continue so a registration failure
            // does not crash application bootstrap.
            core_3.default.error('Error registering Kafka consumers from provider:', error);
        }
    }
};
|
|
87
|
+
// Export the class, then rebind the export (and KafkaModule_1) to the
// decorated version so the @HazelModule metadata — which declares the
// producer, consumer, and stream-processor services as providers/exports —
// travels with what consumers import.
exports.KafkaModule = KafkaModule;
exports.KafkaModule = KafkaModule = KafkaModule_1 = __decorate([
    (0, core_1.HazelModule)({
        providers: [kafka_producer_service_1.KafkaProducerService, kafka_consumer_service_1.KafkaConsumerService, kafka_stream_processor_1.KafkaStreamProcessor],
        exports: [kafka_producer_service_1.KafkaProducerService, kafka_consumer_service_1.KafkaConsumerService, kafka_stream_processor_1.KafkaStreamProcessor],
    })
], KafkaModule);
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
/**
 * Kafka module types and interfaces
 */
/**
 * SASL authentication mechanism
 */
export type SaslMechanism = 'plain' | 'scram-sha-256' | 'scram-sha-512';
/**
 * SASL configuration for Kafka authentication
 */
export interface KafkaSaslOptions {
    mechanism: SaslMechanism;
    username: string;
    password: string;
}
/**
 * SSL configuration for Kafka
 */
export interface KafkaSslOptions {
    rejectUnauthorized?: boolean;
    ca?: string[];
    cert?: string;
    key?: string;
}
/**
 * Base Kafka client options (KafkaJS compatible)
 */
export interface KafkaClientOptions {
    clientId: string;
    brokers: string[];
    /** Presumably milliseconds (kafkajs convention) — confirm against kafkajs docs */
    connectionTimeout?: number;
    requestTimeout?: number;
    retry?: {
        retries?: number;
        initialRetryTime?: number;
        maxRetryTime?: number;
    };
    ssl?: boolean | KafkaSslOptions;
    sasl?: KafkaSaslOptions;
}
/**
 * Kafka module options for forRoot()
 */
export interface KafkaModuleOptions extends KafkaClientOptions {
    /**
     * Whether this is a global module
     * @default true
     */
    isGlobal?: boolean;
    /**
     * Enable Kafka Stream Processor
     * @default true
     */
    enableStreamProcessor?: boolean;
}
/**
 * Consumer group options (KafkaJS consumer config)
 */
export interface KafkaConsumerOptions {
    groupId: string;
    sessionTimeout?: number;
    rebalanceTimeout?: number;
    heartbeatInterval?: number;
    maxWaitTimeInMs?: number;
    retry?: {
        retries?: number;
        initialRetryTime?: number;
        maxRetryTime?: number;
    };
}
/**
 * Topic subscription options
 */
export interface KafkaSubscribeOptions {
    /**
     * Read from beginning of topic
     * @default false
     */
    fromBeginning?: boolean;
}
/**
 * Producer send options
 */
export interface KafkaProduceOptions {
    /** NOTE(review): kafkajs convention is -1 = all in-sync replicas, 0 = no ack, 1 = leader only — confirm */
    acks?: -1 | 0 | 1;
    timeout?: number;
    /** NOTE(review): looks like kafkajs CompressionTypes (0 None, 1 GZIP, 2 Snappy, 3 LZ4) — confirm */
    compression?: 0 | 1 | 2 | 3;
}
/**
 * Kafka message for producing
 */
export interface KafkaMessage {
    key?: string | Buffer;
    value: string | Buffer | null;
    headers?: Record<string, string>;
    partition?: number;
    timestamp?: string;
}
/**
 * Payload passed to eachMessage handler (matches KafkaJS EachMessagePayload)
 */
export interface KafkaMessagePayload {
    topic: string;
    partition: number;
    message: {
        key: Buffer | null;
        value: Buffer | null;
        headers: Record<string, string>;
        offset: string;
        timestamp: string;
        attributes?: number;
    };
    heartbeat(): Promise<void>;
    pause(): void;
    commitOffsets?(offsets: Array<{
        topic: string;
        partition: number;
        offset: string;
    }>): Promise<void>;
}
/**
 * Handler type for Kafka message processing
 */
export type KafkaMessageHandler = (payload: KafkaMessagePayload) => Promise<void>;
/**
 * Transform function for stream processor.
 * Returning null filters the message out (the stream processor produces
 * nothing for it); a bare value is wrapped as { value } by the processor.
 */
export type KafkaStreamTransform<T = unknown, R = unknown> = (message: {
    key: Buffer | null;
    value: Buffer | null;
    headers: Record<string, string>;
}) => Promise<{
    key?: string | Buffer;
    value: T | string | Buffer | null;
    headers?: Record<string, string>;
} | R | null>;
//# sourceMappingURL=kafka.types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka.types.d.ts","sourceRoot":"","sources":["../src/kafka.types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,eAAe,GAAG,eAAe,CAAC;AAExE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,SAAS,EAAE,aAAa,CAAC;IACzB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,KAAK,CAAC,EAAE;QACN,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB,CAAC;IACF,GAAG,CAAC,EAAE,OAAO,GAAG,eAAe,CAAC;IAChC,IAAI,CAAC,EAAE,gBAAgB,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAmB,SAAQ,kBAAkB;IAC5D;;;OAGG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IAEnB;;;OAGG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,OAAO,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,KAAK,CAAC,EAAE;QACN,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,IAAI,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;CAC7B;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IACtB,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC9B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE;QACP,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;QACnB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;QACrB,OAAO,EAAE,MAAM,CAAC,MA
AM,EAAE,MAAM,CAAC,CAAC;QAChC,MAAM,EAAE,MAAM,CAAC;QACf,SAAS,EAAE,MAAM,CAAC;QAClB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB,CAAC;IACF,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,KAAK,IAAI,IAAI,CAAC;IACd,aAAa,CAAC,CACZ,OAAO,EAAE,KAAK,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAA;KAAE,CAAC,GACnE,OAAO,CAAC,IAAI,CAAC,CAAC;CAClB;AAED;;GAEG;AACH,MAAM,MAAM,mBAAmB,GAAG,CAAC,OAAO,EAAE,mBAAmB,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;AAElF;;GAEG;AACH,MAAM,MAAM,oBAAoB,CAAC,CAAC,GAAG,OAAO,EAAE,CAAC,GAAG,OAAO,IAAI,CAAC,OAAO,EAAE;IACrE,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;IACnB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACjC,KAAK,OAAO,CACT;IAAE,GAAG,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IAAC,KAAK,EAAE,CAAC,GAAG,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAC9F,CAAC,GACD,IAAI,CACP,CAAC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@hazeljs/kafka",
|
|
3
|
+
"version": "0.2.0-alpha.1",
|
|
4
|
+
"description": "Kafka module for HazelJS framework - produce, consume, and stream processing",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"types": "dist/index.d.ts",
|
|
7
|
+
"files": [
|
|
8
|
+
"dist"
|
|
9
|
+
],
|
|
10
|
+
"scripts": {
|
|
11
|
+
"build": "tsc",
|
|
12
|
+
"test": "jest --coverage",
|
|
13
|
+
"lint": "eslint \"src/**/*.ts\"",
|
|
14
|
+
"lint:fix": "eslint \"src/**/*.ts\" --fix",
|
|
15
|
+
"clean": "rm -rf dist"
|
|
16
|
+
},
|
|
17
|
+
"dependencies": {
|
|
18
|
+
"kafkajs": "^2.2.4"
|
|
19
|
+
},
|
|
20
|
+
"devDependencies": {
|
|
21
|
+
"@types/node": "^20.17.50",
|
|
22
|
+
"@typescript-eslint/eslint-plugin": "^8.18.2",
|
|
23
|
+
"@typescript-eslint/parser": "^8.18.2",
|
|
24
|
+
"eslint": "^8.56.0",
|
|
25
|
+
"jest": "^29.7.0",
|
|
26
|
+
"ts-jest": "^29.1.2",
|
|
27
|
+
"typescript": "^5.3.3"
|
|
28
|
+
},
|
|
29
|
+
"publishConfig": {
|
|
30
|
+
"access": "public"
|
|
31
|
+
},
|
|
32
|
+
"repository": {
|
|
33
|
+
"type": "git",
|
|
34
|
+
"url": "git+https://github.com/hazel-js/hazeljs.git",
|
|
35
|
+
"directory": "packages/kafka"
|
|
36
|
+
},
|
|
37
|
+
"keywords": [
|
|
38
|
+
"hazeljs",
|
|
39
|
+
"kafka",
|
|
40
|
+
"message-queue",
|
|
41
|
+
"stream-processing",
|
|
42
|
+
"kafkajs"
|
|
43
|
+
],
|
|
44
|
+
"author": "Muhammad Arslan <muhammad.arslan@hazeljs.com>",
|
|
45
|
+
"license": "Apache-2.0",
|
|
46
|
+
"bugs": {
|
|
47
|
+
"url": "https://github.com/hazeljs/hazel-js/issues"
|
|
48
|
+
},
|
|
49
|
+
"homepage": "https://hazeljs.com",
|
|
50
|
+
"peerDependencies": {
|
|
51
|
+
"@hazeljs/core": ">=0.2.0-beta.0"
|
|
52
|
+
},
|
|
53
|
+
"gitHead": "cbc5ee2c12ced28fd0576faf13c5f078c1e8421e"
|
|
54
|
+
}
|