@hazeljs/kafka 0.2.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/LICENSE +192 -0
  2. package/README.md +175 -0
  3. package/dist/__tests__/kafka-consumer.decorator.test.d.ts +2 -0
  4. package/dist/__tests__/kafka-consumer.decorator.test.d.ts.map +1 -0
  5. package/dist/__tests__/kafka-consumer.decorator.test.js +100 -0
  6. package/dist/__tests__/kafka-consumer.service.test.d.ts +2 -0
  7. package/dist/__tests__/kafka-consumer.service.test.d.ts.map +1 -0
  8. package/dist/__tests__/kafka-consumer.service.test.js +244 -0
  9. package/dist/__tests__/kafka-producer.service.test.d.ts +2 -0
  10. package/dist/__tests__/kafka-producer.service.test.d.ts.map +1 -0
  11. package/dist/__tests__/kafka-producer.service.test.js +73 -0
  12. package/dist/__tests__/kafka-stream.processor.test.d.ts +2 -0
  13. package/dist/__tests__/kafka-stream.processor.test.d.ts.map +1 -0
  14. package/dist/__tests__/kafka-stream.processor.test.js +243 -0
  15. package/dist/__tests__/kafka.module.test.d.ts +2 -0
  16. package/dist/__tests__/kafka.module.test.d.ts.map +1 -0
  17. package/dist/__tests__/kafka.module.test.js +41 -0
  18. package/dist/decorators/kafka-consumer.decorator.d.ts +32 -0
  19. package/dist/decorators/kafka-consumer.decorator.d.ts.map +1 -0
  20. package/dist/decorators/kafka-consumer.decorator.js +64 -0
  21. package/dist/decorators/kafka-subscribe.decorator.d.ts +40 -0
  22. package/dist/decorators/kafka-subscribe.decorator.d.ts.map +1 -0
  23. package/dist/decorators/kafka-subscribe.decorator.js +53 -0
  24. package/dist/index.d.ts +12 -0
  25. package/dist/index.d.ts.map +1 -0
  26. package/dist/index.js +23 -0
  27. package/dist/kafka-consumer.service.d.ts +21 -0
  28. package/dist/kafka-consumer.service.d.ts.map +1 -0
  29. package/dist/kafka-consumer.service.js +118 -0
  30. package/dist/kafka-producer.service.d.ts +35 -0
  31. package/dist/kafka-producer.service.d.ts.map +1 -0
  32. package/dist/kafka-producer.service.js +107 -0
  33. package/dist/kafka-stream.processor.d.ts +43 -0
  34. package/dist/kafka-stream.processor.d.ts.map +1 -0
  35. package/dist/kafka-stream.processor.js +168 -0
  36. package/dist/kafka.module.d.ts +33 -0
  37. package/dist/kafka.module.d.ts.map +1 -0
  38. package/dist/kafka.module.js +93 -0
  39. package/dist/kafka.types.d.ts +137 -0
  40. package/dist/kafka.types.d.ts.map +1 -0
  41. package/dist/kafka.types.js +5 -0
  42. package/package.json +54 -0
"use strict";
// TypeScript-emitted interop shim for default-importing a CommonJS module.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.KAFKA_CONSUMER_METADATA_KEY = void 0;
exports.KafkaConsumer = KafkaConsumer;
exports.getKafkaConsumerMetadata = getKafkaConsumerMetadata;
exports.isKafkaConsumer = isKafkaConsumer;
require("reflect-metadata");
const core_1 = __importDefault(require("@hazeljs/core"));
/**
 * Metadata key under which @KafkaConsumer options are stored on the class.
 */
exports.KAFKA_CONSUMER_METADATA_KEY = Symbol('kafka:consumer');
/**
 * Class decorator marking a class as a Kafka consumer. Records the resolved
 * consumer-group options as reflect-metadata on the constructor so the
 * consumer service can discover them later.
 *
 * @example
 * ```typescript
 * @KafkaConsumer({ groupId: 'order-processor' })
 * @Injectable()
 * export class OrderConsumer {
 *   @KafkaSubscribe('orders')
 *   async handleOrder({ message }: KafkaMessagePayload) {
 *     const order = JSON.parse(message.value.toString());
 *     // process order
 *   }
 * }
 * ```
 */
function KafkaConsumer(options) {
    return (target) => {
        // Resolve optional fields to their defaults once, at decoration time.
        const resolved = {
            groupId: options.groupId,
            sessionTimeout: options.sessionTimeout ?? 30000,
            rebalanceTimeout: options.rebalanceTimeout ?? 60000,
            heartbeatInterval: options.heartbeatInterval ?? 3000,
            maxWaitTimeInMs: options.maxWaitTimeInMs ?? 5000,
            retry: options.retry,
        };
        const targetName = typeof target === 'function' ? target.name : 'unknown';
        core_1.default.debug(`Marking ${targetName} as Kafka consumer with groupId: ${resolved.groupId}`);
        Reflect.defineMetadata(exports.KAFKA_CONSUMER_METADATA_KEY, resolved, target);
    };
}
/**
 * Read @KafkaConsumer options from a class or from an instance of one.
 * Returns undefined when the class was never decorated.
 */
function getKafkaConsumerMetadata(target) {
    const ctor = typeof target === 'function' ? target : target.constructor;
    return ctor
        ? Reflect.getMetadata(exports.KAFKA_CONSUMER_METADATA_KEY, ctor)
        : undefined;
}
/**
 * True when the class (or the class of the given instance) carries
 * @KafkaConsumer metadata.
 */
function isKafkaConsumer(target) {
    const ctor = typeof target === 'function' ? target : target.constructor;
    return ctor
        ? Reflect.hasMetadata(exports.KAFKA_CONSUMER_METADATA_KEY, ctor)
        : false;
}
@@ -0,0 +1,40 @@
1
+ import 'reflect-metadata';
2
+ import { KafkaSubscribeOptions } from '../kafka.types';
3
+ /**
4
+ * Metadata key for Kafka subscribe (topic + handler)
5
+ */
6
+ export declare const KAFKA_SUBSCRIBE_METADATA_KEY: unique symbol;
7
+ /**
8
+ * Subscribe handler metadata
9
+ */
10
+ export interface KafkaSubscribeMetadata {
11
+ topic: string;
12
+ methodName: string;
13
+ options?: KafkaSubscribeOptions;
14
+ }
15
+ /**
16
+ * Decorator to mark a method as a handler for a Kafka topic
17
+ *
18
+ * @example
19
+ * ```typescript
20
+ * @KafkaConsumer({ groupId: 'order-processor' })
21
+ * @Injectable()
22
+ * export class OrderConsumer {
23
+ * @KafkaSubscribe('orders')
24
+ * async handleOrder({ message }: KafkaMessagePayload) {
25
+ * // process order
26
+ * }
27
+ *
28
+ * @KafkaSubscribe('events', { fromBeginning: true })
29
+ * async handleEvents({ message }: KafkaMessagePayload) {
30
+ * // process events
31
+ * }
32
+ * }
33
+ * ```
34
+ */
35
+ export declare function KafkaSubscribe(topic: string, options?: KafkaSubscribeOptions): MethodDecorator;
36
+ /**
37
+ * Get Kafka subscribe metadata from a class
38
+ */
39
+ export declare function getKafkaSubscribeMetadata(target: object): KafkaSubscribeMetadata[];
40
+ //# sourceMappingURL=kafka-subscribe.decorator.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kafka-subscribe.decorator.d.ts","sourceRoot":"","sources":["../../src/decorators/kafka-subscribe.decorator.ts"],"names":[],"mappings":"AAAA,OAAO,kBAAkB,CAAC;AAC1B,OAAO,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAC;AAGvD;;GAEG;AACH,eAAO,MAAM,4BAA4B,eAA4B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,qBAAqB,CAAC;CACjC;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,qBAAqB,GAAG,eAAe,CAkB9F;AAED;;GAEG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,MAAM,GAAG,sBAAsB,EAAE,CAElF"}
"use strict";
// TypeScript-emitted interop shim for default-importing a CommonJS module.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.KAFKA_SUBSCRIBE_METADATA_KEY = void 0;
exports.KafkaSubscribe = KafkaSubscribe;
exports.getKafkaSubscribeMetadata = getKafkaSubscribeMetadata;
require("reflect-metadata");
const core_1 = __importDefault(require("@hazeljs/core"));
/**
 * Metadata key for Kafka subscribe (topic + handler)
 */
exports.KAFKA_SUBSCRIBE_METADATA_KEY = Symbol('kafka:subscribe');
/**
 * Decorator to mark a method as a handler for a Kafka topic.
 * Appends a { topic, methodName, options } record to the metadata array
 * stored on the declaring class's constructor.
 *
 * @example
 * ```typescript
 * @KafkaConsumer({ groupId: 'order-processor' })
 * @Injectable()
 * export class OrderConsumer {
 *   @KafkaSubscribe('orders')
 *   async handleOrder({ message }: KafkaMessagePayload) {
 *     // process order
 *   }
 *
 *   @KafkaSubscribe('events', { fromBeginning: true })
 *   async handleEvents({ message }: KafkaMessagePayload) {
 *     // process events
 *   }
 * }
 * ```
 */
function KafkaSubscribe(topic, options) {
    return (target, propertyKey, _descriptor) => {
        // Reflect.getMetadata walks the prototype chain, so the array it returns
        // may belong to a parent class. Copy it before appending so a subclass
        // decorator never mutates (pollutes) its parent's subscription list.
        const existingSubscriptions = [
            ...(Reflect.getMetadata(exports.KAFKA_SUBSCRIBE_METADATA_KEY, target.constructor) || []),
        ];
        const subscription = {
            topic,
            methodName: propertyKey.toString(),
            options: options ?? {},
        };
        existingSubscriptions.push(subscription);
        // defineMetadata writes own-metadata on this constructor, leaving any
        // inherited array untouched.
        Reflect.defineMetadata(exports.KAFKA_SUBSCRIBE_METADATA_KEY, existingSubscriptions, target.constructor);
        core_1.default.debug(`KafkaSubscribe applied to ${target.constructor.name}.${String(propertyKey)} for topic: ${topic}`);
    };
}
/**
 * Get Kafka subscribe metadata from a class (empty array when none recorded).
 */
function getKafkaSubscribeMetadata(target) {
    return Reflect.getMetadata(exports.KAFKA_SUBSCRIBE_METADATA_KEY, target.constructor) || [];
}
@@ -0,0 +1,12 @@
1
+ /**
2
+ * @hazeljs/kafka - Kafka module for HazelJS
3
+ */
4
+ export { KafkaModule } from './kafka.module';
5
+ export { KafkaProducerService } from './kafka-producer.service';
6
+ export { KafkaConsumerService } from './kafka-consumer.service';
7
+ export { KafkaStreamProcessor } from './kafka-stream.processor';
8
+ export { KAFKA_CLIENT_TOKEN } from './kafka-producer.service';
9
+ export { KafkaConsumer, getKafkaConsumerMetadata, isKafkaConsumer, } from './decorators/kafka-consumer.decorator';
10
+ export { KafkaSubscribe, getKafkaSubscribeMetadata, type KafkaSubscribeMetadata, } from './decorators/kafka-subscribe.decorator';
11
+ export type { KafkaModuleOptions, KafkaClientOptions, KafkaConsumerOptions, KafkaSubscribeOptions, KafkaProduceOptions, KafkaMessage, KafkaMessagePayload, KafkaMessageHandler, KafkaStreamTransform, KafkaSaslOptions, KafkaSslOptions, SaslMechanism, } from './kafka.types';
12
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AAC9D,OAAO,EACL,aAAa,EACb,wBAAwB,EACxB,eAAe,GAChB,MAAM,uCAAuC,CAAC;AAC/C,OAAO,EACL,cAAc,EACd,yBAAyB,EACzB,KAAK,sBAAsB,GAC5B,MAAM,wCAAwC,CAAC;AAChD,YAAY,EACV,kBAAkB,EAClB,kBAAkB,EAClB,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,EACnB,YAAY,EACZ,mBAAmB,EACnB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,EAChB,eAAe,EACf,aAAa,GACd,MAAM,eAAe,CAAC"}
"use strict";
/**
 * @hazeljs/kafka - Kafka module for HazelJS
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.getKafkaSubscribeMetadata = exports.KafkaSubscribe = exports.isKafkaConsumer = exports.getKafkaConsumerMetadata = exports.KafkaConsumer = exports.KAFKA_CLIENT_TOKEN = exports.KafkaStreamProcessor = exports.KafkaConsumerService = exports.KafkaProducerService = exports.KafkaModule = void 0;
// Define a live re-export: the getter keeps the binding current even when the
// source module finishes initializing after this one (circular requires).
const reExport = (name, mod) => {
    Object.defineProperty(exports, name, { enumerable: true, get: function () { return mod[name]; } });
};
const kafka_module_1 = require("./kafka.module");
reExport("KafkaModule", kafka_module_1);
const kafka_producer_service_1 = require("./kafka-producer.service");
reExport("KafkaProducerService", kafka_producer_service_1);
reExport("KAFKA_CLIENT_TOKEN", kafka_producer_service_1);
const kafka_consumer_service_1 = require("./kafka-consumer.service");
reExport("KafkaConsumerService", kafka_consumer_service_1);
const kafka_stream_processor_1 = require("./kafka-stream.processor");
reExport("KafkaStreamProcessor", kafka_stream_processor_1);
const kafka_consumer_decorator_1 = require("./decorators/kafka-consumer.decorator");
reExport("KafkaConsumer", kafka_consumer_decorator_1);
reExport("getKafkaConsumerMetadata", kafka_consumer_decorator_1);
reExport("isKafkaConsumer", kafka_consumer_decorator_1);
const kafka_subscribe_decorator_1 = require("./decorators/kafka-subscribe.decorator");
reExport("KafkaSubscribe", kafka_subscribe_decorator_1);
reExport("getKafkaSubscribeMetadata", kafka_subscribe_decorator_1);
@@ -0,0 +1,21 @@
1
+ import { Kafka } from 'kafkajs';
2
+ export declare const KAFKA_CLIENT_TOKEN = "KAFKA_CLIENT";
3
+ /**
4
+ * Kafka consumer service for consuming messages with decorator-driven handlers
5
+ */
6
+ export declare class KafkaConsumerService {
7
+ private readonly kafka;
8
+ private runningConsumers;
9
+ constructor(kafka: Kafka);
10
+ /**
11
+ * Register a consumer provider and start consuming
12
+ * Call this for each class that has @KafkaConsumer and @KafkaSubscribe decorators
13
+ */
14
+ registerFromProvider(provider: object): Promise<void>;
15
+ onModuleDestroy(): Promise<void>;
16
+ /**
17
+ * Get count of running consumers
18
+ */
19
+ getConsumerCount(): number;
20
+ }
21
+ //# sourceMappingURL=kafka-consumer.service.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kafka-consumer.service.d.ts","sourceRoot":"","sources":["../src/kafka-consumer.service.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAMhC,eAAO,MAAM,kBAAkB,iBAAiB,CAAC;AAQjD;;GAEG;AACH,qBACa,oBAAoB;IAK7B,OAAO,CAAC,QAAQ,CAAC,KAAK;IAJxB,OAAO,CAAC,gBAAgB,CAAyB;gBAI9B,KAAK,EAAE,KAAK;IAG/B;;;OAGG;IACG,oBAAoB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAiFrD,eAAe,IAAI,OAAO,CAAC,IAAI,CAAC;IAYtC;;OAEG;IACH,gBAAgB,IAAI,MAAM;CAG3B"}
@@ -0,0 +1,118 @@
1
+ "use strict";
2
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ };
8
+ var __metadata = (this && this.__metadata) || function (k, v) {
9
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
10
+ };
11
+ var __param = (this && this.__param) || function (paramIndex, decorator) {
12
+ return function (target, key) { decorator(target, key, paramIndex); }
13
+ };
14
+ var __importDefault = (this && this.__importDefault) || function (mod) {
15
+ return (mod && mod.__esModule) ? mod : { "default": mod };
16
+ };
17
+ Object.defineProperty(exports, "__esModule", { value: true });
18
+ exports.KafkaConsumerService = exports.KAFKA_CLIENT_TOKEN = void 0;
19
+ const core_1 = require("@hazeljs/core");
20
+ const kafkajs_1 = require("kafkajs");
21
+ const kafka_consumer_decorator_1 = require("./decorators/kafka-consumer.decorator");
22
+ const kafka_subscribe_decorator_1 = require("./decorators/kafka-subscribe.decorator");
23
+ const core_2 = __importDefault(require("@hazeljs/core"));
24
+ exports.KAFKA_CLIENT_TOKEN = 'KAFKA_CLIENT';
25
+ /**
26
+ * Kafka consumer service for consuming messages with decorator-driven handlers
27
+ */
28
+ let KafkaConsumerService = class KafkaConsumerService {
29
+ constructor(kafka) {
30
+ this.kafka = kafka;
31
+ this.runningConsumers = [];
32
+ }
33
+ /**
34
+ * Register a consumer provider and start consuming
35
+ * Call this for each class that has @KafkaConsumer and @KafkaSubscribe decorators
36
+ */
37
+ async registerFromProvider(provider) {
38
+ const consumerOptions = (0, kafka_consumer_decorator_1.getKafkaConsumerMetadata)(provider.constructor);
39
+ const subscribeMetadata = (0, kafka_subscribe_decorator_1.getKafkaSubscribeMetadata)(provider.constructor);
40
+ if (!consumerOptions) {
41
+ core_2.default.warn(`Provider ${provider.constructor.name} has @KafkaSubscribe but no @KafkaConsumer decorator - skipping`);
42
+ return;
43
+ }
44
+ if (!subscribeMetadata || subscribeMetadata.length === 0) {
45
+ core_2.default.warn(`Provider ${provider.constructor.name} has @KafkaConsumer but no @KafkaSubscribe - skipping`);
46
+ return;
47
+ }
48
+ const consumer = this.kafka.consumer({
49
+ groupId: consumerOptions.groupId,
50
+ sessionTimeout: consumerOptions.sessionTimeout ?? 30000,
51
+ rebalanceTimeout: consumerOptions.rebalanceTimeout ?? 60000,
52
+ heartbeatInterval: consumerOptions.heartbeatInterval ?? 3000,
53
+ maxWaitTimeInMs: consumerOptions.maxWaitTimeInMs ?? 5000,
54
+ retry: consumerOptions.retry,
55
+ });
56
+ const topicHandlers = new Map();
57
+ await consumer.connect();
58
+ for (const sub of subscribeMetadata) {
59
+ topicHandlers.set(sub.topic, {
60
+ methodName: sub.methodName,
61
+ fromBeginning: sub.options?.fromBeginning ?? false,
62
+ });
63
+ await consumer.subscribe({
64
+ topics: [sub.topic],
65
+ fromBeginning: sub.options?.fromBeginning ?? false,
66
+ });
67
+ }
68
+ await consumer.run({
69
+ eachMessage: async (payload) => {
70
+ const handlerConfig = topicHandlers.get(payload.topic);
71
+ if (!handlerConfig)
72
+ return;
73
+ const instance = provider;
74
+ const method = instance[handlerConfig.methodName];
75
+ if (typeof method !== 'function') {
76
+ core_2.default.error(`Handler ${handlerConfig.methodName} not found on ${provider.constructor.name}`);
77
+ return;
78
+ }
79
+ try {
80
+ await method.call(provider, payload);
81
+ }
82
+ catch (error) {
83
+ core_2.default.error(`Error in Kafka handler ${provider.constructor.name}.${handlerConfig.methodName}:`, error);
84
+ }
85
+ },
86
+ });
87
+ this.runningConsumers.push({
88
+ consumer,
89
+ provider,
90
+ topicHandlers,
91
+ });
92
+ core_2.default.info(`Kafka consumer started for ${provider.constructor.name} (groupId: ${consumerOptions.groupId}, topics: ${Array.from(topicHandlers.keys()).join(', ')})`);
93
+ }
94
+ async onModuleDestroy() {
95
+ for (const { consumer } of this.runningConsumers) {
96
+ try {
97
+ await consumer.disconnect();
98
+ core_2.default.info('Kafka consumer disconnected');
99
+ }
100
+ catch (error) {
101
+ core_2.default.error('Error disconnecting Kafka consumer:', error);
102
+ }
103
+ }
104
+ this.runningConsumers = [];
105
+ }
106
+ /**
107
+ * Get count of running consumers
108
+ */
109
+ getConsumerCount() {
110
+ return this.runningConsumers.length;
111
+ }
112
+ };
113
+ exports.KafkaConsumerService = KafkaConsumerService;
114
+ exports.KafkaConsumerService = KafkaConsumerService = __decorate([
115
+ (0, core_1.Service)(),
116
+ __param(0, (0, core_1.Inject)(exports.KAFKA_CLIENT_TOKEN)),
117
+ __metadata("design:paramtypes", [kafkajs_1.Kafka])
118
+ ], KafkaConsumerService);
@@ -0,0 +1,35 @@
1
+ import { Kafka } from 'kafkajs';
2
+ import { KafkaMessage, KafkaProduceOptions } from './kafka.types';
3
+ export declare const KAFKA_CLIENT_TOKEN = "KAFKA_CLIENT";
4
+ /**
5
+ * Kafka producer service for publishing messages to topics
6
+ */
7
+ export declare class KafkaProducerService {
8
+ private readonly kafka;
9
+ private producer;
10
+ private isConnected;
11
+ constructor(kafka: Kafka);
12
+ onModuleInit(): Promise<void>;
13
+ /**
14
+ * Connect producer (called automatically on first send if not already connected)
15
+ */
16
+ private connect;
17
+ onModuleDestroy(): Promise<void>;
18
+ /**
19
+ * Send messages to a topic
20
+ */
21
+ send(topic: string, messages: KafkaMessage | KafkaMessage[], options?: KafkaProduceOptions): Promise<void>;
22
+ /**
23
+ * Send a batch of messages to multiple topics
24
+ */
25
+ sendBatch(batch: Array<{
26
+ topic: string;
27
+ messages: KafkaMessage | KafkaMessage[];
28
+ options?: KafkaProduceOptions;
29
+ }>): Promise<void>;
30
+ /**
31
+ * Check if producer is connected
32
+ */
33
+ isProducerConnected(): boolean;
34
+ }
35
+ //# sourceMappingURL=kafka-producer.service.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kafka-producer.service.d.ts","sourceRoot":"","sources":["../src/kafka-producer.service.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,MAAM,eAAe,CAAC;AAGlE,eAAO,MAAM,kBAAkB,iBAAiB,CAAC;AAEjD;;GAEG;AACH,qBACa,oBAAoB;IAM7B,OAAO,CAAC,QAAQ,CAAC,KAAK;IALxB,OAAO,CAAC,QAAQ,CAAgC;IAChD,OAAO,CAAC,WAAW,CAAS;gBAIT,KAAK,EAAE,KAAK;IAKzB,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC;IAInC;;OAEG;YACW,OAAO;IAYf,eAAe,IAAI,OAAO,CAAC,IAAI,CAAC;IAWtC;;OAEG;IACG,IAAI,CACR,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,YAAY,GAAG,YAAY,EAAE,EACvC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,OAAO,CAAC,IAAI,CAAC;IA0BhB;;OAEG;IACG,SAAS,CACb,KAAK,EAAE,KAAK,CAAC;QACX,KAAK,EAAE,MAAM,CAAC;QACd,QAAQ,EAAE,YAAY,GAAG,YAAY,EAAE,CAAC;QACxC,OAAO,CAAC,EAAE,mBAAmB,CAAC;KAC/B,CAAC,GACD,OAAO,CAAC,IAAI,CAAC;IAShB;;OAEG;IACH,mBAAmB,IAAI,OAAO;CAG/B"}
@@ -0,0 +1,107 @@
1
+ "use strict";
2
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ };
8
+ var __metadata = (this && this.__metadata) || function (k, v) {
9
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
10
+ };
11
+ var __param = (this && this.__param) || function (paramIndex, decorator) {
12
+ return function (target, key) { decorator(target, key, paramIndex); }
13
+ };
14
+ var __importDefault = (this && this.__importDefault) || function (mod) {
15
+ return (mod && mod.__esModule) ? mod : { "default": mod };
16
+ };
17
+ Object.defineProperty(exports, "__esModule", { value: true });
18
+ exports.KafkaProducerService = exports.KAFKA_CLIENT_TOKEN = void 0;
19
+ const core_1 = require("@hazeljs/core");
20
+ const kafkajs_1 = require("kafkajs");
21
+ const core_2 = __importDefault(require("@hazeljs/core"));
22
+ exports.KAFKA_CLIENT_TOKEN = 'KAFKA_CLIENT';
23
+ /**
24
+ * Kafka producer service for publishing messages to topics
25
+ */
26
+ let KafkaProducerService = class KafkaProducerService {
27
+ constructor(kafka) {
28
+ this.kafka = kafka;
29
+ this.isConnected = false;
30
+ this.producer = this.kafka.producer();
31
+ }
32
+ async onModuleInit() {
33
+ await this.connect();
34
+ }
35
+ /**
36
+ * Connect producer (called automatically on first send if not already connected)
37
+ */
38
+ async connect() {
39
+ if (this.isConnected)
40
+ return;
41
+ try {
42
+ await this.producer.connect();
43
+ this.isConnected = true;
44
+ core_2.default.info('Kafka producer connected');
45
+ }
46
+ catch (error) {
47
+ core_2.default.error('Failed to connect Kafka producer:', error);
48
+ throw error;
49
+ }
50
+ }
51
+ async onModuleDestroy() {
52
+ try {
53
+ await this.producer.disconnect();
54
+ this.isConnected = false;
55
+ core_2.default.info('Kafka producer disconnected');
56
+ }
57
+ catch (error) {
58
+ core_2.default.error('Error disconnecting Kafka producer:', error);
59
+ throw error;
60
+ }
61
+ }
62
+ /**
63
+ * Send messages to a topic
64
+ */
65
+ async send(topic, messages, options) {
66
+ await this.connect();
67
+ const messageArray = Array.isArray(messages) ? messages : [messages];
68
+ const formattedMessages = messageArray.map((msg) => {
69
+ const value = msg.value ?? null;
70
+ return {
71
+ key: msg.key ?? undefined,
72
+ value: value,
73
+ headers: msg.headers ?? undefined,
74
+ partition: msg.partition ?? undefined,
75
+ timestamp: msg.timestamp ?? undefined,
76
+ };
77
+ });
78
+ await this.producer.send({
79
+ topic,
80
+ messages: formattedMessages,
81
+ acks: options?.acks ?? -1,
82
+ timeout: options?.timeout ?? 30000,
83
+ compression: options?.compression ?? 0,
84
+ });
85
+ core_2.default.debug(`Sent ${formattedMessages.length} message(s) to topic: ${topic}`);
86
+ }
87
+ /**
88
+ * Send a batch of messages to multiple topics
89
+ */
90
+ async sendBatch(batch) {
91
+ await this.connect();
92
+ await Promise.all(batch.map(({ topic, messages, options }) => this.send(topic, messages, options)));
93
+ core_2.default.debug(`Sent batch to ${batch.length} topic(s)`);
94
+ }
95
+ /**
96
+ * Check if producer is connected
97
+ */
98
+ isProducerConnected() {
99
+ return this.isConnected;
100
+ }
101
+ };
102
+ exports.KafkaProducerService = KafkaProducerService;
103
+ exports.KafkaProducerService = KafkaProducerService = __decorate([
104
+ (0, core_1.Service)(),
105
+ __param(0, (0, core_1.Inject)(exports.KAFKA_CLIENT_TOKEN)),
106
+ __metadata("design:paramtypes", [kafkajs_1.Kafka])
107
+ ], KafkaProducerService);
@@ -0,0 +1,43 @@
1
+ import { Kafka } from 'kafkajs';
2
+ import { KafkaStreamTransform } from './kafka.types';
3
+ export declare const KAFKA_CLIENT_TOKEN = "KAFKA_CLIENT";
4
+ /**
5
+ * Lightweight Kafka stream processor: consume from topic, transform, produce to output topic
6
+ */
7
+ export declare class KafkaStreamProcessor {
8
+ private kafka;
9
+ private consumer;
10
+ private producer;
11
+ private pipelineConfig;
12
+ private isRunning;
13
+ constructor(kafka: Kafka);
14
+ /**
15
+ * Set the input topic to consume from
16
+ */
17
+ from(topic: string): this;
18
+ /**
19
+ * Set the transform function
20
+ */
21
+ transform(fn: KafkaStreamTransform): this;
22
+ /**
23
+ * Set the output topic to produce to
24
+ */
25
+ to(topic: string): this;
26
+ /**
27
+ * Set consumer group ID for the stream processor
28
+ */
29
+ withGroupId(groupId: string): this;
30
+ /**
31
+ * Start the stream processor
32
+ */
33
+ start(): Promise<void>;
34
+ /**
35
+ * Stop the stream processor
36
+ */
37
+ stop(): Promise<void>;
38
+ /**
39
+ * Check if processor is running
40
+ */
41
+ isProcessorRunning(): boolean;
42
+ }
43
+ //# sourceMappingURL=kafka-stream.processor.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kafka-stream.processor.d.ts","sourceRoot":"","sources":["../src/kafka-stream.processor.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,oBAAoB,EAAE,MAAM,eAAe,CAAC;AAGrD,eAAO,MAAM,kBAAkB,iBAAiB,CAAC;AASjD;;GAEG;AACH,qBACa,oBAAoB;IAC/B,OAAO,CAAC,KAAK,CAAQ;IACrB,OAAO,CAAC,QAAQ,CAA8C;IAC9D,OAAO,CAAC,QAAQ,CAA8C;IAC9D,OAAO,CAAC,cAAc,CAAqC;IAC3D,OAAO,CAAC,SAAS,CAAS;gBAIxB,KAAK,EAAE,KAAK;IAKd;;OAEG;IACH,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAezB;;OAEG;IACH,SAAS,CAAC,EAAE,EAAE,oBAAoB,GAAG,IAAI;IAQzC;;OAEG;IACH,EAAE,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAQvB;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI;IAQlC;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAsE5B;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAiB3B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAG9B"}