@xnestjs/kafka 1.8.0 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -60,7 +60,7 @@ export class MyModule {}
60
60
  The library supports configuration through environment variables. The environment variables below are accepted.
61
61
  All environment variables start with the prefix (KAFKA\_). This can be configured while registering the module.
62
62
 
63
- <--- BEGIN env --->
63
+ <!--- BEGIN env --->
64
64
 
65
65
  | Environment Variable | Type | Default | Description |
66
66
  | ------------------------------- | --------- | --------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
@@ -102,4 +102,4 @@ The environment variables are available when KAFKA_SASL is `aws`
102
102
  | AWS_SECRET_ACCESS_KEY | String! | | |
103
103
  | AWS_SESSION_TOKEN | String | | |
104
104
 
105
- <--- END env --->
105
+ <!--- END env --->
package/cjs/index.js CHANGED
@@ -1,10 +1,10 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ClientKafka = void 0;
3
+ exports.Kafka = void 0;
4
4
  const tslib_1 = require("tslib");
5
5
  tslib_1.__exportStar(require("./constants.js"), exports);
6
6
  tslib_1.__exportStar(require("./get-kafka-config.js"), exports);
7
7
  tslib_1.__exportStar(require("./kafka.module.js"), exports);
8
8
  tslib_1.__exportStar(require("./types.js"), exports);
9
- var microservices_1 = require("@nestjs/microservices");
10
- Object.defineProperty(exports, "ClientKafka", { enumerable: true, get: function () { return microservices_1.ClientKafka; } });
9
+ var kafkajs_1 = require("kafkajs");
10
+ Object.defineProperty(exports, "Kafka", { enumerable: true, get: function () { return kafkajs_1.Kafka; } });
@@ -6,8 +6,8 @@ const tslib_1 = require("tslib");
6
6
  const node_assert_1 = tslib_1.__importDefault(require("node:assert"));
7
7
  const crypto = tslib_1.__importStar(require("node:crypto"));
8
8
  const common_1 = require("@nestjs/common");
9
- const microservices_1 = require("@nestjs/microservices");
10
9
  const ansi_colors_1 = tslib_1.__importDefault(require("ansi-colors"));
10
+ const kafkajs_1 = require("kafkajs");
11
11
  const constants_js_1 = require("./constants.js");
12
12
  const create_log_creator_js_1 = require("./create-log-creator.js");
13
13
  const get_kafka_config_js_1 = require("./get-kafka-config.js");
@@ -48,8 +48,7 @@ let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
48
48
  });
49
49
  }
50
50
  static _createDynamicModule(opts, metadata) {
51
- const token = opts.token ?? microservices_1.ClientKafka;
52
- const name = typeof token === 'string' ? token : 'Kafka';
51
+ const token = opts.token ?? kafkajs_1.Kafka;
53
52
  const logger = typeof opts.logger === 'string' ? new common_1.Logger(opts.logger) : opts.logger;
54
53
  const exports = [constants_js_1.KAFKA_CONNECTION_OPTIONS, ...(metadata.exports ?? [])];
55
54
  const providers = [
@@ -66,41 +65,20 @@ let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
66
65
  provide: constants_js_1.KAFKA_MODULE_ID,
67
66
  useValue: crypto.randomUUID(),
68
67
  },
69
- ];
70
- if (name !== token) {
71
- exports.push(token);
72
- providers.push({
68
+ {
73
69
  provide: token,
74
- useExisting: name,
75
- });
76
- }
70
+ inject: [constants_js_1.KAFKA_CONNECTION_OPTIONS],
71
+ useFactory: (connectionOptions) => {
72
+ return new kafkajs_1.Kafka({
73
+ ...connectionOptions,
74
+ logCreator: () => (0, create_log_creator_js_1.createLogCreator)(logger),
75
+ });
76
+ },
77
+ },
78
+ ];
77
79
  return {
78
80
  module: KafkaCoreModule_1,
79
81
  providers,
80
- imports: [
81
- /** Import ClientsModule */
82
- microservices_1.ClientsModule.registerAsync({
83
- clients: [
84
- {
85
- name,
86
- extraProviders: metadata.providers,
87
- inject: [constants_js_1.KAFKA_CONNECTION_OPTIONS],
88
- useFactory: (connectionOptions) => {
89
- return {
90
- transport: microservices_1.Transport.KAFKA,
91
- options: {
92
- client: {
93
- ...connectionOptions,
94
- logCreator: () => (0, create_log_creator_js_1.createLogCreator)(logger),
95
- },
96
- consumer: connectionOptions.consumer,
97
- },
98
- };
99
- },
100
- },
101
- ],
102
- }),
103
- ],
104
82
  exports,
105
83
  };
106
84
  }
@@ -112,28 +90,59 @@ let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
112
90
  this.client = client;
113
91
  this.connectionOptions = connectionOptions;
114
92
  this.logger = logger;
93
+ this._admins = new Set();
94
+ this._consumers = new Set();
95
+ this._producers = new Set();
115
96
  }
116
97
  async onApplicationBootstrap() {
117
98
  const options = this.connectionOptions;
118
- if (options.lazyConnect)
99
+ const _this = this;
100
+ const oldConsumerFn = this.client.consumer;
101
+ this.client.consumer = function (...args) {
102
+ const instance = oldConsumerFn.apply(this, args);
103
+ _this._consumers.add(instance);
104
+ instance.on('consumer.disconnect', () => _this._consumers.delete(instance));
105
+ return instance;
106
+ };
107
+ const oldProducerFn = this.client.producer;
108
+ this.client.producer = function (...args) {
109
+ const instance = oldProducerFn.apply(this, args);
110
+ _this._producers.add(instance);
111
+ instance.on('producer.disconnect', () => _this._producers.delete(instance));
112
+ return instance;
113
+ };
114
+ if (options.lazyConnect || !options.brokers)
119
115
  return;
120
- this.logger?.log('Connecting to Kafka brokers' +
116
+ this.logger?.log('Testing to Kafka brokers' +
121
117
  (Array.isArray(options.brokers)
122
118
  ? ansi_colors_1.default.blue(options.brokers.join(','))
123
119
  : ''));
124
120
  common_1.Logger.flush();
125
- await this.client.connect().catch(e => {
126
- this.logger?.error('Kafka connection failed: ' + e.message);
127
- throw e;
128
- });
121
+ const admin = this.client.admin();
122
+ try {
123
+ await admin.connect();
124
+ await admin.fetchTopicMetadata(); // this will fail if Kafka is not reachable
125
+ this.logger?.log('Kafka connection is healthy');
126
+ }
127
+ catch (error) {
128
+ this.logger?.error('Kafka connection failed: ' + error.message);
129
+ throw error;
130
+ }
131
+ finally {
132
+ await admin.disconnect();
133
+ }
129
134
  }
130
135
  onApplicationShutdown() {
131
- return this.client.close();
136
+ return Promise.allSettled([
137
+ ...Array.from(this._admins).map(x => x.disconnect()),
138
+ ...Array.from(this._consumers).map(x => x.disconnect()),
139
+ ...Array.from(this._producers).map(x => x.disconnect()),
140
+ ]);
132
141
  }
133
142
  };
134
143
  exports.KafkaCoreModule = KafkaCoreModule;
135
144
  exports.KafkaCoreModule = KafkaCoreModule = KafkaCoreModule_1 = tslib_1.__decorate([
136
145
  tslib_1.__param(0, (0, common_1.Inject)(CLIENT_TOKEN)),
137
146
  tslib_1.__param(1, (0, common_1.Inject)(constants_js_1.KAFKA_CONNECTION_OPTIONS)),
138
- tslib_1.__metadata("design:paramtypes", [microservices_1.ClientKafka, Object, common_1.Logger])
147
+ tslib_1.__metadata("design:paramtypes", [kafkajs_1.Kafka, Object, common_1.Logger])
139
148
  ], KafkaCoreModule);
package/esm/index.js CHANGED
@@ -2,4 +2,4 @@ export * from './constants.js';
2
2
  export * from './get-kafka-config.js';
3
3
  export * from './kafka.module.js';
4
4
  export * from './types.js';
5
- export { ClientKafka } from '@nestjs/microservices';
5
+ export { Kafka } from 'kafkajs';
@@ -3,8 +3,8 @@ import { __decorate, __metadata, __param } from "tslib";
3
3
  import assert from 'node:assert';
4
4
  import * as crypto from 'node:crypto';
5
5
  import { Inject, Logger, } from '@nestjs/common';
6
- import { ClientKafka, ClientsModule, Transport, } from '@nestjs/microservices';
7
6
  import colors from 'ansi-colors';
7
+ import { Kafka } from 'kafkajs';
8
8
  import { KAFKA_CONNECTION_OPTIONS, KAFKA_MODULE_ID } from './constants.js';
9
9
  import { createLogCreator } from './create-log-creator.js';
10
10
  import { getKafkaConfig } from './get-kafka-config.js';
@@ -45,8 +45,7 @@ let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
45
45
  });
46
46
  }
47
47
  static _createDynamicModule(opts, metadata) {
48
- const token = opts.token ?? ClientKafka;
49
- const name = typeof token === 'string' ? token : 'Kafka';
48
+ const token = opts.token ?? Kafka;
50
49
  const logger = typeof opts.logger === 'string' ? new Logger(opts.logger) : opts.logger;
51
50
  const exports = [KAFKA_CONNECTION_OPTIONS, ...(metadata.exports ?? [])];
52
51
  const providers = [
@@ -63,41 +62,20 @@ let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
63
62
  provide: KAFKA_MODULE_ID,
64
63
  useValue: crypto.randomUUID(),
65
64
  },
66
- ];
67
- if (name !== token) {
68
- exports.push(token);
69
- providers.push({
65
+ {
70
66
  provide: token,
71
- useExisting: name,
72
- });
73
- }
67
+ inject: [KAFKA_CONNECTION_OPTIONS],
68
+ useFactory: (connectionOptions) => {
69
+ return new Kafka({
70
+ ...connectionOptions,
71
+ logCreator: () => createLogCreator(logger),
72
+ });
73
+ },
74
+ },
75
+ ];
74
76
  return {
75
77
  module: KafkaCoreModule_1,
76
78
  providers,
77
- imports: [
78
- /** Import ClientsModule */
79
- ClientsModule.registerAsync({
80
- clients: [
81
- {
82
- name,
83
- extraProviders: metadata.providers,
84
- inject: [KAFKA_CONNECTION_OPTIONS],
85
- useFactory: (connectionOptions) => {
86
- return {
87
- transport: Transport.KAFKA,
88
- options: {
89
- client: {
90
- ...connectionOptions,
91
- logCreator: () => createLogCreator(logger),
92
- },
93
- consumer: connectionOptions.consumer,
94
- },
95
- };
96
- },
97
- },
98
- ],
99
- }),
100
- ],
101
79
  exports,
102
80
  };
103
81
  }
@@ -109,28 +87,59 @@ let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
109
87
  this.client = client;
110
88
  this.connectionOptions = connectionOptions;
111
89
  this.logger = logger;
90
+ this._admins = new Set();
91
+ this._consumers = new Set();
92
+ this._producers = new Set();
112
93
  }
113
94
  async onApplicationBootstrap() {
114
95
  const options = this.connectionOptions;
115
- if (options.lazyConnect)
96
+ const _this = this;
97
+ const oldConsumerFn = this.client.consumer;
98
+ this.client.consumer = function (...args) {
99
+ const instance = oldConsumerFn.apply(this, args);
100
+ _this._consumers.add(instance);
101
+ instance.on('consumer.disconnect', () => _this._consumers.delete(instance));
102
+ return instance;
103
+ };
104
+ const oldProducerFn = this.client.producer;
105
+ this.client.producer = function (...args) {
106
+ const instance = oldProducerFn.apply(this, args);
107
+ _this._producers.add(instance);
108
+ instance.on('producer.disconnect', () => _this._producers.delete(instance));
109
+ return instance;
110
+ };
111
+ if (options.lazyConnect || !options.brokers)
116
112
  return;
117
- this.logger?.log('Connecting to Kafka brokers' +
113
+ this.logger?.log('Testing to Kafka brokers' +
118
114
  (Array.isArray(options.brokers)
119
115
  ? colors.blue(options.brokers.join(','))
120
116
  : ''));
121
117
  Logger.flush();
122
- await this.client.connect().catch(e => {
123
- this.logger?.error('Kafka connection failed: ' + e.message);
124
- throw e;
125
- });
118
+ const admin = this.client.admin();
119
+ try {
120
+ await admin.connect();
121
+ await admin.fetchTopicMetadata(); // this will fail if Kafka is not reachable
122
+ this.logger?.log('Kafka connection is healthy');
123
+ }
124
+ catch (error) {
125
+ this.logger?.error('Kafka connection failed: ' + error.message);
126
+ throw error;
127
+ }
128
+ finally {
129
+ await admin.disconnect();
130
+ }
126
131
  }
127
132
  onApplicationShutdown() {
128
- return this.client.close();
133
+ return Promise.allSettled([
134
+ ...Array.from(this._admins).map(x => x.disconnect()),
135
+ ...Array.from(this._consumers).map(x => x.disconnect()),
136
+ ...Array.from(this._producers).map(x => x.disconnect()),
137
+ ]);
129
138
  }
130
139
  };
131
140
  KafkaCoreModule = KafkaCoreModule_1 = __decorate([
132
141
  __param(0, Inject(CLIENT_TOKEN)),
133
142
  __param(1, Inject(KAFKA_CONNECTION_OPTIONS)),
134
- __metadata("design:paramtypes", [ClientKafka, Object, Logger])
143
+ __metadata("design:paramtypes", [Kafka, Object, Logger])
135
144
  ], KafkaCoreModule);
136
145
  export { KafkaCoreModule };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@xnestjs/kafka",
3
- "version": "1.8.0",
3
+ "version": "1.9.0",
4
4
  "description": "NestJS extension library for Kafka",
5
5
  "author": "Panates",
6
6
  "license": "MIT",
@@ -13,7 +13,6 @@
13
13
  "peerDependencies": {
14
14
  "@nestjs/common": "^10.0.0 || ^11.0.0",
15
15
  "@nestjs/core": "^10.0.0 || ^11.0.0",
16
- "@nestjs/microservices": "^10.0.0 || ^11.0.0",
17
16
  "kafkajs": "^2.2.4"
18
17
  },
19
18
  "type": "module",
package/types/index.d.cts CHANGED
@@ -2,4 +2,4 @@ export * from './constants.js';
2
2
  export * from './get-kafka-config.js';
3
3
  export * from './kafka.module.js';
4
4
  export * from './types.js';
5
- export { ClientKafka } from '@nestjs/microservices';
5
+ export { Kafka } from 'kafkajs';
package/types/index.d.ts CHANGED
@@ -2,4 +2,4 @@ export * from './constants.js';
2
2
  export * from './get-kafka-config.js';
3
3
  export * from './kafka.module.js';
4
4
  export * from './types.js';
5
- export { ClientKafka } from '@nestjs/microservices';
5
+ export { Kafka } from 'kafkajs';
@@ -1,8 +1,8 @@
1
1
  import { DynamicModule, Logger, OnApplicationBootstrap, OnApplicationShutdown } from '@nestjs/common';
2
- import { ClientKafka } from '@nestjs/microservices';
2
+ import { Kafka } from 'kafkajs';
3
3
  import type { KafkaConnectionOptions, KafkaModuleAsyncOptions, KafkaModuleOptions } from './types';
4
4
  export declare class KafkaCoreModule implements OnApplicationShutdown, OnApplicationBootstrap {
5
- protected client: ClientKafka;
5
+ protected client: Kafka;
6
6
  private connectionOptions;
7
7
  private logger?;
8
8
  /**
@@ -14,11 +14,14 @@ export declare class KafkaCoreModule implements OnApplicationShutdown, OnApplica
14
14
  */
15
15
  static forRootAsync(asyncOptions: KafkaModuleAsyncOptions): DynamicModule;
16
16
  private static _createDynamicModule;
17
+ private _admins;
18
+ private _consumers;
19
+ private _producers;
17
20
  /**
18
21
  *
19
22
  * @constructor
20
23
  */
21
- constructor(client: ClientKafka, connectionOptions: KafkaConnectionOptions, logger?: Logger | undefined);
24
+ constructor(client: Kafka, connectionOptions: KafkaConnectionOptions, logger?: Logger | undefined);
22
25
  onApplicationBootstrap(): Promise<void>;
23
- onApplicationShutdown(): Promise<void>;
26
+ onApplicationShutdown(): Promise<PromiseSettledResult<void>[]>;
24
27
  }