@xnestjs/kafka 1.2.6 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,3 +1,102 @@
1
1
  # @xnestjs/kafka
2
2
 
3
- NestJS extension library for kafka
3
+ NestJS extension library for Apache Kafka
4
+
5
+ ## Install
6
+
7
+ ```sh
8
+ npm install @xnestjs/kafka
9
+ # or using yarn
10
+ yarn add @xnestjs/kafka
11
+ ```
12
+
13
+ ## Usage
14
+
15
+ ### Register sync
16
+
17
+ An example of nestjs module that import the @xnestjs/kafka
18
+
19
+ ```ts
20
+ // module.ts
21
+ import { Module } from '@nestjs/common';
22
+ import { KafkaModule } from '@xnestjs/kafka';
23
+
24
+ @Module({
25
+ imports: [
26
+ KafkaModule.forRoot({
27
+ useValue: {
28
+ brokers: ['localhost'],
29
+ },
30
+ }),
31
+ ],
32
+ })
33
+ export class MyModule {
34
+ }
35
+ ```
36
+
37
+ ### Register async
38
+
39
+ An example of nestjs module that import the @xnestjs/kafka async
40
+
41
+ ```ts
42
+ // module.ts
43
+ import { Module } from '@nestjs/common';
44
+ import { KafkaModule } from '@xnestjs/kafka';
45
+
46
+ @Module({
47
+ imports: [
48
+ KafkaModule.forRootAsync({
49
+ inject: [ConfigModule],
50
+ useFactory: (config: ConfigService) => ({
51
+ brokers: config.get('KAFKA_BROKERS'),
52
+ }),
53
+ }),
54
+ ]
55
+ })
56
+ export class MyModule {
57
+ }
58
+ ```
59
+
60
+ ## Environment Variables
61
+
62
+ The library supports configuration through environment variables. The environment variables below are accepted.
63
+ All environment variables start with the prefix (KAFKA_). This can be configured while registering the module.
64
+
65
+ | Environment Variable | Type | Default | Description |
66
+ |-------------------------------|-----------|---------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
67
+ | KAFKA_URL | String[]! | | |
68
+ | KAFKA_CONSUMER_GROUP_ID | String | kafka_default_group | |
69
+ | KAFKA_CLIENT_ID | String | | |
70
+ | KAFKA_SASL | Enum | | Defines the SASL Mechanism. Accepted values are (`plain`, `scram-sha-256`, `scram-sha-512`, `aws`) |
71
+ | KAFKA_SSL | Boolean | False | Enables the SSL connection |
72
+ | KAFKA_SSL_CA_CERT | String | | Optionally override the trusted CA certificates. Default is to trust the well-known CAs curated by Mozilla. Mozilla's CAs are completely replaced when CAs are explicitly specified using this option. |
73
+ | KAFKA_SSL_CERT_FILE | String | | The File that contains Cert chains in PEM format. |
74
+ | KAFKA_SSL_KEY_FILE | String | | The File that contains private keys in PEM format. |
75
+ | KAFKA_SSL_KEY_PASSPHRASE | String | | Shared passphrase used to decrypt the private key (or PFX). |
76
+ | KAFKA_SSL_REJECT_UNAUTHORIZED | Boolean | | If true the server will reject any connection which is not authorized with the list of supplied CAs. This option only has an effect if requestCert is true. |
77
+ | KAFKA_CONNECT_TIMEOUT | Number | | |
78
+ | KAFKA_AUTH_TIMEOUT | Number | | |
79
+ | KAFKA_REAUTH_THRESHOLD | Number | | |
80
+ | KAFKA_REQUEST_TIMEOUT | Number | | |
81
+ | KAFKA_ENFORCE_REQUEST_TIMEOUT | Number | | |
82
+ | KAFKA_RETRIES | Number | | |
83
+ | KAFKA_RETRY_MAX_TIME | Number | | |
84
+ | KAFKA_RETRY_INITIAL_TIME | Number | | |
85
+
86
+ ### SASL Environment Variables
87
+
88
+ The following environment variables are available when KAFKA_SASL is one of `plain`, `scram-sha-256` or `scram-sha-512`
89
+
90
+ | Environment Variable | Type | Default | Description |
91
+ |----------------------|---------|---------|-------------|
92
+ | KAFKA_SASL_USERNAME | String! | | Username |
93
+ | KAFKA_SASL_PASSWORD | String! | | Password |
94
+
95
+ The following environment variables are available when KAFKA_SASL is `aws`
96
+
97
+ | Environment Variable | Type | Default | Description |
98
+ |-----------------------|---------|---------|-------------|
99
+ | AWS_AUTH_IDENTITY | String! | | |
100
+ | AWS_ACCESS_KEY_ID | String! | | |
101
+ | AWS_SECRET_ACCESS_KEY | String! | | |
102
+ | AWS_SESSION_TOKEN | String | | |
package/cjs/constants.js CHANGED
@@ -1,5 +1,5 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.KAFKA_MODULE_ID = exports.KAFKA_MODULE_OPTIONS = void 0;
4
- exports.KAFKA_MODULE_OPTIONS = Symbol('KAFKA_MODULE_OPTIONS');
3
+ exports.KAFKA_MODULE_ID = exports.KAFKA_CONNECTION_OPTIONS = void 0;
4
+ exports.KAFKA_CONNECTION_OPTIONS = Symbol('KAFKA_CONNECTION_OPTIONS');
5
5
  exports.KAFKA_MODULE_ID = Symbol('KAFKA_MODULE_ID');
@@ -4,6 +4,8 @@ exports.createLogCreator = createLogCreator;
4
4
  const kafkajs_1 = require("kafkajs");
5
5
  function createLogCreator(logger) {
6
6
  return ({ namespace, level, log }) => {
7
+ if (!logger)
8
+ return;
7
9
  const { message, ...extra } = log;
8
10
  switch (level) {
9
11
  case kafkajs_1.logLevel.ERROR:
package/cjs/index.js CHANGED
@@ -1,7 +1,9 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ClientKafka = void 0;
3
4
  const tslib_1 = require("tslib");
4
5
  tslib_1.__exportStar(require("./constants.js"), exports);
5
- tslib_1.__exportStar(require("./interfaces/module-options.interface.js"), exports);
6
6
  tslib_1.__exportStar(require("./kafka.module.js"), exports);
7
- tslib_1.__exportStar(require("kafkajs"), exports);
7
+ tslib_1.__exportStar(require("./types.js"), exports);
8
+ var microservices_1 = require("@nestjs/microservices");
9
+ Object.defineProperty(exports, "ClientKafka", { enumerable: true, get: function () { return microservices_1.ClientKafka; } });
@@ -3,107 +3,192 @@ var KafkaCoreModule_1;
3
3
  Object.defineProperty(exports, "__esModule", { value: true });
4
4
  exports.KafkaCoreModule = void 0;
5
5
  const tslib_1 = require("tslib");
6
+ const node_assert_1 = tslib_1.__importDefault(require("node:assert"));
7
+ const crypto = tslib_1.__importStar(require("node:crypto"));
8
+ const node_process_1 = tslib_1.__importDefault(require("node:process"));
9
+ const objects_1 = require("@jsopen/objects");
6
10
  const common_1 = require("@nestjs/common");
7
- const crypto = tslib_1.__importStar(require("crypto"));
8
- const kafkajs_1 = require("kafkajs");
11
+ const microservices_1 = require("@nestjs/microservices");
12
+ const colors = tslib_1.__importStar(require("ansi-colors"));
13
+ const putil_varhelpers_1 = require("putil-varhelpers");
9
14
  const constants_js_1 = require("./constants.js");
10
- const create_log_creator_js_1 = require("./utils/create-log-creator.js");
15
+ const create_log_creator_js_1 = require("./create-log-creator.js");
16
+ const CLIENT_TOKEN = Symbol('CLIENT_TOKEN');
11
17
  let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
12
- static forRoot(options) {
13
- const logger = options.logger || new common_1.Logger();
14
- const loggerProvider = {
15
- provide: common_1.Logger,
16
- useValue: logger,
17
- };
18
- const optionsProvider = {
19
- provide: constants_js_1.KAFKA_MODULE_OPTIONS,
20
- useValue: options,
21
- };
22
- const kafkaProvider = {
23
- provide: options.id || kafkajs_1.Kafka,
24
- useFactory: () => this.createKafka({ ...options, logger }),
25
- };
26
- return {
27
- module: KafkaCoreModule_1,
28
- providers: [loggerProvider, kafkaProvider, optionsProvider],
29
- exports: [kafkaProvider],
30
- };
31
- }
32
- static forRootAsync(asyncOptions) {
33
- const loggerProvider = {
34
- provide: common_1.Logger,
35
- inject: [constants_js_1.KAFKA_MODULE_OPTIONS],
36
- useFactory: (options) => options.logger || new common_1.Logger(),
37
- };
38
- const kafkaProvider = {
39
- provide: asyncOptions.id || kafkajs_1.Kafka,
40
- inject: [constants_js_1.KAFKA_MODULE_OPTIONS, common_1.Logger],
41
- useFactory: async (options, logger) => this.createKafka({ ...options, logger }),
42
- };
43
- const asyncProviders = this.createAsyncProviders(asyncOptions);
44
- return {
45
- module: KafkaCoreModule_1,
46
- imports: asyncOptions.imports,
18
+ /**
19
+ *
20
+ */
21
+ static forRoot(moduleOptions) {
22
+ const connectionOptions = this._readConnectionOptions(moduleOptions.useValue || {}, moduleOptions.envPrefix);
23
+ return this._createDynamicModule(moduleOptions, {
24
+ global: moduleOptions.global,
47
25
  providers: [
48
- ...asyncProviders,
49
- loggerProvider,
50
- kafkaProvider,
51
26
  {
52
- provide: constants_js_1.KAFKA_MODULE_ID,
53
- useValue: crypto.randomUUID(),
27
+ provide: constants_js_1.KAFKA_CONNECTION_OPTIONS,
28
+ useValue: connectionOptions,
54
29
  },
55
30
  ],
56
- exports: [kafkaProvider],
57
- };
31
+ });
58
32
  }
59
- static createAsyncProviders(asyncOptions) {
60
- if (asyncOptions.useExisting || asyncOptions.useFactory)
61
- return [this.createAsyncOptionsProvider(asyncOptions)];
62
- if (asyncOptions.useClass) {
63
- return [
64
- this.createAsyncOptionsProvider(asyncOptions),
33
+ /**
34
+ *
35
+ */
36
+ static forRootAsync(asyncOptions) {
37
+ node_assert_1.default.ok(asyncOptions.useFactory, 'useFactory is required');
38
+ return this._createDynamicModule(asyncOptions, {
39
+ global: asyncOptions.global,
40
+ providers: [
65
41
  {
66
- provide: asyncOptions.useClass,
67
- useClass: asyncOptions.useClass,
42
+ provide: constants_js_1.KAFKA_CONNECTION_OPTIONS,
43
+ inject: asyncOptions.inject,
44
+ useFactory: async (...args) => {
45
+ const opts = await asyncOptions.useFactory(...args);
46
+ return this._readConnectionOptions(opts, asyncOptions.envPrefix);
47
+ },
68
48
  },
69
- ];
49
+ ],
50
+ });
51
+ }
52
+ static _createDynamicModule(opts, metadata) {
53
+ const token = opts.token ?? microservices_1.ClientKafka;
54
+ const name = typeof token === 'string' ? token : 'Kafka';
55
+ const logger = typeof opts.logger === 'string' ? new common_1.Logger(opts.logger) : opts.logger;
56
+ const exports = [constants_js_1.KAFKA_CONNECTION_OPTIONS, ...(metadata.exports ?? [])];
57
+ const providers = [
58
+ ...(metadata.providers ?? []),
59
+ {
60
+ provide: common_1.Logger,
61
+ useValue: logger,
62
+ },
63
+ {
64
+ provide: CLIENT_TOKEN,
65
+ useExisting: token,
66
+ },
67
+ {
68
+ provide: constants_js_1.KAFKA_MODULE_ID,
69
+ useValue: crypto.randomUUID(),
70
+ },
71
+ ];
72
+ if (name !== token) {
73
+ exports.push(token);
74
+ providers.push({
75
+ provide: token,
76
+ useExisting: name,
77
+ });
70
78
  }
71
- throw new Error('Invalid configuration. Must provide useFactory, useClass or useExisting');
79
+ return {
80
+ module: KafkaCoreModule_1,
81
+ providers,
82
+ imports: [
83
+ /** Import ClientsModule */
84
+ microservices_1.ClientsModule.registerAsync({
85
+ clients: [
86
+ {
87
+ name,
88
+ extraProviders: metadata.providers,
89
+ inject: [constants_js_1.KAFKA_CONNECTION_OPTIONS],
90
+ useFactory: (connectionOptions) => {
91
+ return {
92
+ transport: microservices_1.Transport.KAFKA,
93
+ options: {
94
+ client: {
95
+ ...connectionOptions,
96
+ logCreator: () => (0, create_log_creator_js_1.createLogCreator)(logger),
97
+ },
98
+ consumer: connectionOptions.consumer,
99
+ },
100
+ };
101
+ },
102
+ },
103
+ ],
104
+ }),
105
+ ],
106
+ exports,
107
+ };
72
108
  }
73
- static createAsyncOptionsProvider(asyncOptions) {
74
- if (asyncOptions.useFactory) {
75
- return {
76
- provide: constants_js_1.KAFKA_MODULE_OPTIONS,
77
- useFactory: asyncOptions.useFactory,
78
- inject: asyncOptions.inject || [],
109
+ static _readConnectionOptions(moduleOptions, prefix = 'KAFKA_') {
110
+ const options = (0, objects_1.clone)(moduleOptions);
111
+ const env = node_process_1.default.env;
112
+ options.brokers = options.brokers || (env[prefix + 'URL'] ?? 'localhost').split(/\s*,\s*/);
113
+ if (options.ssl == null && (0, putil_varhelpers_1.toBoolean)(env[prefix + 'SSL'])) {
114
+ options.ssl = {
115
+ ca: [env[prefix + 'SSL_CA_CERT'] || ''],
116
+ cert: env[prefix + 'SSL_CERT_FILE'],
117
+ key: env[prefix + 'SSL_KEY_FILE'],
118
+ passphrase: env[prefix + 'SSL_KEY_PASSPHRASE'],
119
+ rejectUnauthorized: (0, putil_varhelpers_1.toBoolean)(env[prefix + 'SSL_REJECT_UNAUTHORIZED']),
120
+ checkServerIdentity: (host, cert) => {
121
+ if (cert.subject.CN !== host) {
122
+ return new Error(`Certificate CN (${cert.subject.CN}) does not match host (${host})`);
123
+ }
124
+ },
79
125
  };
80
126
  }
81
- const useClass = asyncOptions.useClass || asyncOptions.useExisting;
82
- if (useClass) {
83
- return {
84
- provide: constants_js_1.KAFKA_MODULE_OPTIONS,
85
- useFactory: (optionsFactory) => optionsFactory.createOptions(),
86
- inject: [useClass],
127
+ const sasl = env[prefix + 'SASL'];
128
+ if (options.sasl == null && sasl) {
129
+ if (sasl === 'plain' || sasl === 'scram-sha-256' || sasl === 'scram-sha-512') {
130
+ options.sasl = {
131
+ mechanism: sasl,
132
+ username: env[prefix + 'SASL_USERNAME'] || '',
133
+ password: env[prefix + 'SASL_PASSWORD'] || '',
134
+ };
135
+ }
136
+ else if (sasl === 'aws') {
137
+ options.sasl = {
138
+ mechanism: sasl,
139
+ authorizationIdentity: env[prefix + 'AWS_AUTH_IDENTITY'] || '',
140
+ accessKeyId: env[prefix + 'AWS_ACCESS_KEY_ID'] || '',
141
+ secretAccessKey: env[prefix + 'AWS_SECRET_ACCESS_KEY'] || '',
142
+ sessionToken: env[prefix + 'AWS_SESSION_TOKEN'],
143
+ };
144
+ }
145
+ }
146
+ options.clientId = options.clientId ?? env[prefix + 'CLIENT_ID'];
147
+ options.connectionTimeout = options.connectionTimeout ?? (0, putil_varhelpers_1.toInt)(env[prefix + 'CONNECT_TIMEOUT']);
148
+ options.authenticationTimeout = options.authenticationTimeout ?? (0, putil_varhelpers_1.toInt)(env[prefix + 'AUTH_TIMEOUT']);
149
+ options.reauthenticationThreshold = options.reauthenticationThreshold ?? (0, putil_varhelpers_1.toInt)(env[prefix + 'REAUTH_THRESHOLD']);
150
+ options.requestTimeout = options.requestTimeout ?? (0, putil_varhelpers_1.toInt)(env[prefix + 'REQUEST_TIMEOUT']);
151
+ options.enforceRequestTimeout = options.enforceRequestTimeout ?? (0, putil_varhelpers_1.toBoolean)(env[prefix + 'ENFORCE_REQUEST_TIMEOUT']);
152
+ const retries = (0, putil_varhelpers_1.toInt)(env[prefix + 'RETRIES']);
153
+ if (options.retry == null && retries) {
154
+ options.retry = {
155
+ maxRetryTime: (0, putil_varhelpers_1.toInt)(env[prefix + 'RETRY_MAX_TIME']),
156
+ initialRetryTime: (0, putil_varhelpers_1.toInt)(env[prefix + 'RETRY_INITIAL_TIME']),
157
+ retries,
87
158
  };
88
159
  }
89
- throw new Error('Invalid configuration. Must provide useFactory, useClass or useExisting');
160
+ options.consumer = options.consumer || {};
161
+ options.consumer.groupId =
162
+ options.consumer.groupId ?? (env[prefix + 'CONSUMER_GROUP_ID'] || 'kafka_default_group');
163
+ return options;
90
164
  }
91
- static async createKafka(options) {
92
- const logger = typeof options.logger === 'string'
93
- ? new common_1.Logger(options.logger)
94
- : typeof options.logger === 'object'
95
- ? options.logger
96
- : new common_1.Logger();
97
- return new kafkajs_1.Kafka({
98
- ...options,
99
- // @ts-ignore
100
- id: undefined,
101
- logCreator: () => (0, create_log_creator_js_1.createLogCreator)(logger),
102
- });
165
+ /**
166
+ *
167
+ * @constructor
168
+ */
169
+ constructor(client, connectionOptions, logger) {
170
+ this.client = client;
171
+ this.connectionOptions = connectionOptions;
172
+ this.logger = logger;
173
+ }
174
+ async onApplicationBootstrap() {
175
+ const options = this.connectionOptions;
176
+ if (!options.lazyConnect) {
177
+ this.logger?.log('Connecting to Kafka brokers' + (Array.isArray(options.brokers) ? colors.blue(options.brokers.join(',')) : ''));
178
+ common_1.Logger.flush();
179
+ await this.client.connect().catch(e => {
180
+ this.logger?.error('Kafka connection failed: ' + e.message);
181
+ throw e;
182
+ });
183
+ }
184
+ }
185
+ onApplicationShutdown() {
186
+ return this.client.close();
103
187
  }
104
188
  };
105
189
  exports.KafkaCoreModule = KafkaCoreModule;
106
190
  exports.KafkaCoreModule = KafkaCoreModule = KafkaCoreModule_1 = tslib_1.__decorate([
107
- (0, common_1.Global)(),
108
- (0, common_1.Module)({})
191
+ tslib_1.__param(0, (0, common_1.Inject)(CLIENT_TOKEN)),
192
+ tslib_1.__param(1, (0, common_1.Inject)(constants_js_1.KAFKA_CONNECTION_OPTIONS)),
193
+ tslib_1.__metadata("design:paramtypes", [microservices_1.ClientKafka, Object, common_1.Logger])
109
194
  ], KafkaCoreModule);
package/esm/constants.js CHANGED
@@ -1,2 +1,2 @@
1
- export const KAFKA_MODULE_OPTIONS = Symbol('KAFKA_MODULE_OPTIONS');
1
+ export const KAFKA_CONNECTION_OPTIONS = Symbol('KAFKA_CONNECTION_OPTIONS');
2
2
  export const KAFKA_MODULE_ID = Symbol('KAFKA_MODULE_ID');
@@ -1,6 +1,8 @@
1
1
  import { logLevel } from 'kafkajs';
2
2
  export function createLogCreator(logger) {
3
3
  return ({ namespace, level, log }) => {
4
+ if (!logger)
5
+ return;
4
6
  const { message, ...extra } = log;
5
7
  switch (level) {
6
8
  case logLevel.ERROR:
package/esm/index.js CHANGED
@@ -1,4 +1,4 @@
1
1
  export * from './constants.js';
2
- export * from './interfaces/module-options.interface.js';
3
2
  export * from './kafka.module.js';
4
- export * from 'kafkajs';
3
+ export * from './types.js';
4
+ export { ClientKafka } from '@nestjs/microservices';
@@ -1,106 +1,191 @@
1
1
  var KafkaCoreModule_1;
2
- import { __decorate } from "tslib";
3
- import { Global, Logger, Module } from '@nestjs/common';
4
- import * as crypto from 'crypto';
5
- import { Kafka } from 'kafkajs';
6
- import { KAFKA_MODULE_ID, KAFKA_MODULE_OPTIONS } from './constants.js';
7
- import { createLogCreator } from './utils/create-log-creator.js';
2
+ import { __decorate, __metadata, __param } from "tslib";
3
+ import assert from 'node:assert';
4
+ import * as crypto from 'node:crypto';
5
+ import process from 'node:process';
6
+ import { clone } from '@jsopen/objects';
7
+ import { Inject, Logger } from '@nestjs/common';
8
+ import { ClientKafka, ClientsModule, Transport } from '@nestjs/microservices';
9
+ import * as colors from 'ansi-colors';
10
+ import { toBoolean, toInt } from 'putil-varhelpers';
11
+ import { KAFKA_CONNECTION_OPTIONS, KAFKA_MODULE_ID } from './constants.js';
12
+ import { createLogCreator } from './create-log-creator.js';
13
+ const CLIENT_TOKEN = Symbol('CLIENT_TOKEN');
8
14
  let KafkaCoreModule = KafkaCoreModule_1 = class KafkaCoreModule {
9
- static forRoot(options) {
10
- const logger = options.logger || new Logger();
11
- const loggerProvider = {
12
- provide: Logger,
13
- useValue: logger,
14
- };
15
- const optionsProvider = {
16
- provide: KAFKA_MODULE_OPTIONS,
17
- useValue: options,
18
- };
19
- const kafkaProvider = {
20
- provide: options.id || Kafka,
21
- useFactory: () => this.createKafka({ ...options, logger }),
22
- };
23
- return {
24
- module: KafkaCoreModule_1,
25
- providers: [loggerProvider, kafkaProvider, optionsProvider],
26
- exports: [kafkaProvider],
27
- };
28
- }
29
- static forRootAsync(asyncOptions) {
30
- const loggerProvider = {
31
- provide: Logger,
32
- inject: [KAFKA_MODULE_OPTIONS],
33
- useFactory: (options) => options.logger || new Logger(),
34
- };
35
- const kafkaProvider = {
36
- provide: asyncOptions.id || Kafka,
37
- inject: [KAFKA_MODULE_OPTIONS, Logger],
38
- useFactory: async (options, logger) => this.createKafka({ ...options, logger }),
39
- };
40
- const asyncProviders = this.createAsyncProviders(asyncOptions);
41
- return {
42
- module: KafkaCoreModule_1,
43
- imports: asyncOptions.imports,
15
+ /**
16
+ *
17
+ */
18
+ static forRoot(moduleOptions) {
19
+ const connectionOptions = this._readConnectionOptions(moduleOptions.useValue || {}, moduleOptions.envPrefix);
20
+ return this._createDynamicModule(moduleOptions, {
21
+ global: moduleOptions.global,
44
22
  providers: [
45
- ...asyncProviders,
46
- loggerProvider,
47
- kafkaProvider,
48
23
  {
49
- provide: KAFKA_MODULE_ID,
50
- useValue: crypto.randomUUID(),
24
+ provide: KAFKA_CONNECTION_OPTIONS,
25
+ useValue: connectionOptions,
51
26
  },
52
27
  ],
53
- exports: [kafkaProvider],
54
- };
28
+ });
55
29
  }
56
- static createAsyncProviders(asyncOptions) {
57
- if (asyncOptions.useExisting || asyncOptions.useFactory)
58
- return [this.createAsyncOptionsProvider(asyncOptions)];
59
- if (asyncOptions.useClass) {
60
- return [
61
- this.createAsyncOptionsProvider(asyncOptions),
30
+ /**
31
+ *
32
+ */
33
+ static forRootAsync(asyncOptions) {
34
+ assert.ok(asyncOptions.useFactory, 'useFactory is required');
35
+ return this._createDynamicModule(asyncOptions, {
36
+ global: asyncOptions.global,
37
+ providers: [
62
38
  {
63
- provide: asyncOptions.useClass,
64
- useClass: asyncOptions.useClass,
39
+ provide: KAFKA_CONNECTION_OPTIONS,
40
+ inject: asyncOptions.inject,
41
+ useFactory: async (...args) => {
42
+ const opts = await asyncOptions.useFactory(...args);
43
+ return this._readConnectionOptions(opts, asyncOptions.envPrefix);
44
+ },
65
45
  },
66
- ];
46
+ ],
47
+ });
48
+ }
49
+ static _createDynamicModule(opts, metadata) {
50
+ const token = opts.token ?? ClientKafka;
51
+ const name = typeof token === 'string' ? token : 'Kafka';
52
+ const logger = typeof opts.logger === 'string' ? new Logger(opts.logger) : opts.logger;
53
+ const exports = [KAFKA_CONNECTION_OPTIONS, ...(metadata.exports ?? [])];
54
+ const providers = [
55
+ ...(metadata.providers ?? []),
56
+ {
57
+ provide: Logger,
58
+ useValue: logger,
59
+ },
60
+ {
61
+ provide: CLIENT_TOKEN,
62
+ useExisting: token,
63
+ },
64
+ {
65
+ provide: KAFKA_MODULE_ID,
66
+ useValue: crypto.randomUUID(),
67
+ },
68
+ ];
69
+ if (name !== token) {
70
+ exports.push(token);
71
+ providers.push({
72
+ provide: token,
73
+ useExisting: name,
74
+ });
67
75
  }
68
- throw new Error('Invalid configuration. Must provide useFactory, useClass or useExisting');
76
+ return {
77
+ module: KafkaCoreModule_1,
78
+ providers,
79
+ imports: [
80
+ /** Import ClientsModule */
81
+ ClientsModule.registerAsync({
82
+ clients: [
83
+ {
84
+ name,
85
+ extraProviders: metadata.providers,
86
+ inject: [KAFKA_CONNECTION_OPTIONS],
87
+ useFactory: (connectionOptions) => {
88
+ return {
89
+ transport: Transport.KAFKA,
90
+ options: {
91
+ client: {
92
+ ...connectionOptions,
93
+ logCreator: () => createLogCreator(logger),
94
+ },
95
+ consumer: connectionOptions.consumer,
96
+ },
97
+ };
98
+ },
99
+ },
100
+ ],
101
+ }),
102
+ ],
103
+ exports,
104
+ };
69
105
  }
70
- static createAsyncOptionsProvider(asyncOptions) {
71
- if (asyncOptions.useFactory) {
72
- return {
73
- provide: KAFKA_MODULE_OPTIONS,
74
- useFactory: asyncOptions.useFactory,
75
- inject: asyncOptions.inject || [],
106
+ static _readConnectionOptions(moduleOptions, prefix = 'KAFKA_') {
107
+ const options = clone(moduleOptions);
108
+ const env = process.env;
109
+ options.brokers = options.brokers || (env[prefix + 'URL'] ?? 'localhost').split(/\s*,\s*/);
110
+ if (options.ssl == null && toBoolean(env[prefix + 'SSL'])) {
111
+ options.ssl = {
112
+ ca: [env[prefix + 'SSL_CA_CERT'] || ''],
113
+ cert: env[prefix + 'SSL_CERT_FILE'],
114
+ key: env[prefix + 'SSL_KEY_FILE'],
115
+ passphrase: env[prefix + 'SSL_KEY_PASSPHRASE'],
116
+ rejectUnauthorized: toBoolean(env[prefix + 'SSL_REJECT_UNAUTHORIZED']),
117
+ checkServerIdentity: (host, cert) => {
118
+ if (cert.subject.CN !== host) {
119
+ return new Error(`Certificate CN (${cert.subject.CN}) does not match host (${host})`);
120
+ }
121
+ },
76
122
  };
77
123
  }
78
- const useClass = asyncOptions.useClass || asyncOptions.useExisting;
79
- if (useClass) {
80
- return {
81
- provide: KAFKA_MODULE_OPTIONS,
82
- useFactory: (optionsFactory) => optionsFactory.createOptions(),
83
- inject: [useClass],
124
+ const sasl = env[prefix + 'SASL'];
125
+ if (options.sasl == null && sasl) {
126
+ if (sasl === 'plain' || sasl === 'scram-sha-256' || sasl === 'scram-sha-512') {
127
+ options.sasl = {
128
+ mechanism: sasl,
129
+ username: env[prefix + 'SASL_USERNAME'] || '',
130
+ password: env[prefix + 'SASL_PASSWORD'] || '',
131
+ };
132
+ }
133
+ else if (sasl === 'aws') {
134
+ options.sasl = {
135
+ mechanism: sasl,
136
+ authorizationIdentity: env[prefix + 'AWS_AUTH_IDENTITY'] || '',
137
+ accessKeyId: env[prefix + 'AWS_ACCESS_KEY_ID'] || '',
138
+ secretAccessKey: env[prefix + 'AWS_SECRET_ACCESS_KEY'] || '',
139
+ sessionToken: env[prefix + 'AWS_SESSION_TOKEN'],
140
+ };
141
+ }
142
+ }
143
+ options.clientId = options.clientId ?? env[prefix + 'CLIENT_ID'];
144
+ options.connectionTimeout = options.connectionTimeout ?? toInt(env[prefix + 'CONNECT_TIMEOUT']);
145
+ options.authenticationTimeout = options.authenticationTimeout ?? toInt(env[prefix + 'AUTH_TIMEOUT']);
146
+ options.reauthenticationThreshold = options.reauthenticationThreshold ?? toInt(env[prefix + 'REAUTH_THRESHOLD']);
147
+ options.requestTimeout = options.requestTimeout ?? toInt(env[prefix + 'REQUEST_TIMEOUT']);
148
+ options.enforceRequestTimeout = options.enforceRequestTimeout ?? toBoolean(env[prefix + 'ENFORCE_REQUEST_TIMEOUT']);
149
+ const retries = toInt(env[prefix + 'RETRIES']);
150
+ if (options.retry == null && retries) {
151
+ options.retry = {
152
+ maxRetryTime: toInt(env[prefix + 'RETRY_MAX_TIME']),
153
+ initialRetryTime: toInt(env[prefix + 'RETRY_INITIAL_TIME']),
154
+ retries,
84
155
  };
85
156
  }
86
- throw new Error('Invalid configuration. Must provide useFactory, useClass or useExisting');
157
+ options.consumer = options.consumer || {};
158
+ options.consumer.groupId =
159
+ options.consumer.groupId ?? (env[prefix + 'CONSUMER_GROUP_ID'] || 'kafka_default_group');
160
+ return options;
87
161
  }
88
- static async createKafka(options) {
89
- const logger = typeof options.logger === 'string'
90
- ? new Logger(options.logger)
91
- : typeof options.logger === 'object'
92
- ? options.logger
93
- : new Logger();
94
- return new Kafka({
95
- ...options,
96
- // @ts-ignore
97
- id: undefined,
98
- logCreator: () => createLogCreator(logger),
99
- });
162
+ /**
163
+ *
164
+ * @constructor
165
+ */
166
+ constructor(client, connectionOptions, logger) {
167
+ this.client = client;
168
+ this.connectionOptions = connectionOptions;
169
+ this.logger = logger;
170
+ }
171
+ async onApplicationBootstrap() {
172
+ const options = this.connectionOptions;
173
+ if (!options.lazyConnect) {
174
+ this.logger?.log('Connecting to Kafka brokers' + (Array.isArray(options.brokers) ? colors.blue(options.brokers.join(',')) : ''));
175
+ Logger.flush();
176
+ await this.client.connect().catch(e => {
177
+ this.logger?.error('Kafka connection failed: ' + e.message);
178
+ throw e;
179
+ });
180
+ }
181
+ }
182
+ onApplicationShutdown() {
183
+ return this.client.close();
100
184
  }
101
185
  };
102
186
  KafkaCoreModule = KafkaCoreModule_1 = __decorate([
103
- Global(),
104
- Module({})
187
+ __param(0, Inject(CLIENT_TOKEN)),
188
+ __param(1, Inject(KAFKA_CONNECTION_OPTIONS)),
189
+ __metadata("design:paramtypes", [ClientKafka, Object, Logger])
105
190
  ], KafkaCoreModule);
106
191
  export { KafkaCoreModule };
package/package.json CHANGED
@@ -1,15 +1,19 @@
1
1
  {
2
2
  "name": "@xnestjs/kafka",
3
- "version": "1.2.6",
3
+ "version": "1.5.0",
4
4
  "description": "NestJS extension library for Kafka",
5
5
  "author": "Panates",
6
6
  "license": "MIT",
7
7
  "dependencies": {
8
+ "@jsopen/objects": "^1.5.2",
9
+ "ansi-colors": "^4.1.3",
10
+ "putil-varhelpers": "^1.6.5",
8
11
  "tslib": "^2.8.1"
9
12
  },
10
13
  "peerDependencies": {
11
14
  "@nestjs/common": "^10.0.0 || ^11.0.0",
12
15
  "@nestjs/core": "^10.0.0 || ^11.0.0",
16
+ "@nestjs/microservices": "^10.0.0 || ^11.0.0",
13
17
  "kafkajs": "^2.0.0"
14
18
  },
15
19
  "type": "module",
@@ -1,2 +1,2 @@
1
- export declare const KAFKA_MODULE_OPTIONS: unique symbol;
1
+ export declare const KAFKA_CONNECTION_OPTIONS: unique symbol;
2
2
  export declare const KAFKA_MODULE_ID: unique symbol;
@@ -1,5 +1,5 @@
1
1
  import { Logger } from '@nestjs/common';
2
- export declare function createLogCreator(logger: Logger): ({ namespace, level, log }: {
2
+ export declare function createLogCreator(logger?: Logger): ({ namespace, level, log }: {
3
3
  namespace: any;
4
4
  level: any;
5
5
  log: any;
package/types/index.d.cts CHANGED
@@ -1,4 +1,4 @@
1
1
  export * from './constants.js';
2
- export * from './interfaces/module-options.interface.js';
3
2
  export * from './kafka.module.js';
4
- export * from 'kafkajs';
3
+ export * from './types.js';
4
+ export { ClientKafka } from '@nestjs/microservices';
package/types/index.d.ts CHANGED
@@ -1,4 +1,4 @@
1
1
  export * from './constants.js';
2
- export * from './interfaces/module-options.interface.js';
3
2
  export * from './kafka.module.js';
4
- export * from 'kafkajs';
3
+ export * from './types.js';
4
+ export { ClientKafka } from '@nestjs/microservices';
@@ -1,9 +1,25 @@
1
- import { DynamicModule } from '@nestjs/common';
2
- import { KafkaModuleAsyncOptions, KafkaModuleOptions } from './interfaces/module-options.interface.js';
3
- export declare class KafkaCoreModule {
4
- static forRoot(options: KafkaModuleOptions): DynamicModule;
1
+ import { DynamicModule, Logger, OnApplicationBootstrap, OnApplicationShutdown } from '@nestjs/common';
2
+ import { ClientKafka } from '@nestjs/microservices';
3
+ import type { KafkaConnectionOptions, KafkaModuleAsyncOptions, KafkaModuleOptions } from './types';
4
+ export declare class KafkaCoreModule implements OnApplicationShutdown, OnApplicationBootstrap {
5
+ protected client: ClientKafka;
6
+ private connectionOptions;
7
+ private logger?;
8
+ /**
9
+ *
10
+ */
11
+ static forRoot(moduleOptions: KafkaModuleOptions): DynamicModule;
12
+ /**
13
+ *
14
+ */
5
15
  static forRootAsync(asyncOptions: KafkaModuleAsyncOptions): DynamicModule;
6
- private static createAsyncProviders;
7
- private static createAsyncOptionsProvider;
8
- private static createKafka;
16
+ private static _createDynamicModule;
17
+ private static _readConnectionOptions;
18
+ /**
19
+ *
20
+ * @constructor
21
+ */
22
+ constructor(client: ClientKafka, connectionOptions: KafkaConnectionOptions, logger?: Logger | undefined);
23
+ onApplicationBootstrap(): Promise<void>;
24
+ onApplicationShutdown(): Promise<void>;
9
25
  }
@@ -1,5 +1,5 @@
1
1
  import { DynamicModule } from '@nestjs/common';
2
- import type { KafkaModuleAsyncOptions, KafkaModuleOptions } from './interfaces/module-options.interface.js';
2
+ import type { KafkaModuleAsyncOptions, KafkaModuleOptions } from './types';
3
3
  export declare class KafkaModule {
4
4
  static forRoot(options: KafkaModuleOptions): DynamicModule;
5
5
  static forRootAsync(options: KafkaModuleAsyncOptions): DynamicModule;
@@ -0,0 +1,24 @@
1
+ import type { Logger } from '@nestjs/common';
2
+ import type { ModuleMetadata } from '@nestjs/common/interfaces';
3
+ import type { InjectionToken } from '@nestjs/common/interfaces/modules/injection-token.interface';
4
+ import type { ConsumerConfig } from '@nestjs/microservices/external/kafka.interface';
5
+ import type { KafkaConfig } from 'kafkajs';
6
+ import type { StrictOmit } from 'ts-gems';
7
+ export interface KafkaConnectionOptions extends StrictOmit<KafkaConfig, 'logLevel' | 'logCreator'> {
8
+ consumer?: ConsumerConfig;
9
+ lazyConnect?: boolean;
10
+ }
11
+ interface BaseModuleOptions {
12
+ token?: InjectionToken;
13
+ envPrefix?: string;
14
+ logger?: Logger | string;
15
+ global?: boolean;
16
+ }
17
+ export interface KafkaModuleOptions extends BaseModuleOptions {
18
+ useValue?: Partial<KafkaConnectionOptions>;
19
+ }
20
+ export interface KafkaModuleAsyncOptions extends BaseModuleOptions, Pick<ModuleMetadata, 'imports'> {
21
+ inject?: any[];
22
+ useFactory: (...args: any[]) => Promise<Partial<KafkaConnectionOptions>> | Partial<KafkaConnectionOptions>;
23
+ }
24
+ export {};
@@ -1,19 +0,0 @@
1
- import { Logger, Type } from '@nestjs/common';
2
- import type { ModuleMetadata } from '@nestjs/common/interfaces';
3
- import type { InjectionToken } from '@nestjs/common/interfaces/modules/injection-token.interface';
4
- import { KafkaConfig } from 'kafkajs';
5
- import { StrictOmit } from 'ts-gems';
6
- export interface KafkaModuleOptions extends StrictOmit<KafkaConfig, 'logLevel' | 'logCreator'> {
7
- id?: InjectionToken;
8
- logger?: Logger | string;
9
- }
10
- export interface KafkaModuleOptionsFactory {
11
- createOptions(): Promise<KafkaModuleOptions> | KafkaModuleOptions;
12
- }
13
- export interface KafkaModuleAsyncOptions extends Pick<ModuleMetadata, 'imports'> {
14
- id?: InjectionToken;
15
- useExisting?: Type<KafkaModuleOptionsFactory>;
16
- useClass?: Type<KafkaModuleOptionsFactory>;
17
- useFactory?: (...args: any[]) => Promise<StrictOmit<KafkaModuleOptions, 'id'>> | StrictOmit<KafkaModuleOptions, 'id'>;
18
- inject?: any[];
19
- }