@twt-494/nestjs-kafka 6.0.2 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/abstract-consumer.d.ts +6 -0
- package/dist/abstract-consumer.js +15 -0
- package/dist/enums/index.d.ts +5 -0
- package/dist/enums/index.js +9 -0
- package/dist/kafka-client.d.ts +25 -0
- package/dist/kafka-client.js +141 -0
- package/dist/kafka-decorator.d.ts +5 -0
- package/dist/kafka-decorator.js +18 -0
- package/dist/kafka.constants.d.ts +1 -0
- package/dist/kafka.constants.js +4 -0
- package/dist/kafka.module.d.ts +3 -14
- package/dist/kafka.module.js +11 -34
- package/dist/kafka.service.d.ts +7 -20
- package/dist/kafka.service.js +29 -53
- package/dist/types/index.d.ts +18 -0
- package/dist/types/index.js +2 -0
- package/dist/utlis/create-logger.d.ts +12 -0
- package/dist/utlis/create-logger.js +37 -0
- package/package.json +3 -2
package/dist/abstract-consumer.js
ADDED
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AbstractConsumer = void 0;
+const kafka_decorator_1 = require("./kafka-decorator");
+class AbstractConsumer {
+    onModuleInit() {
+        this.registerTopics();
+    }
+    addTopic(topic) {
+        kafka_decorator_1.SUB_CONTEXT_MAP.set(topic, {
+            context: this,
+        });
+    }
+}
+exports.AbstractConsumer = AbstractConsumer;
package/dist/enums/index.js
ADDED
@@ -0,0 +1,9 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ConsumerEventEnum = void 0;
+var ConsumerEventEnum;
+(function (ConsumerEventEnum) {
+    ConsumerEventEnum["CONNECTED"] = "CONSUMER_CONNECTED";
+    ConsumerEventEnum["DISCONNECTED"] = "CONSUMER_DISCONNECTED";
+    ConsumerEventEnum["FINISHED"] = "CONSUMER_FINISHED";
+})(ConsumerEventEnum || (exports.ConsumerEventEnum = ConsumerEventEnum = {}));
package/dist/kafka-client.d.ts
ADDED
@@ -0,0 +1,25 @@
+import { Kafka, Producer, RecordMetadata } from 'kafkajs';
+import { CreateKafkaConsumerOptions, KafkaClientOptions, KafkaConsumerHandler } from './types';
+declare class KafkaClient extends Kafka {
+    mainProducer: Producer;
+    private isConnected;
+    private groupedConsumers;
+    private startingCloseProcess;
+    private consumersIsHealthy;
+    constructor({ logger, ...options }: KafkaClientOptions);
+    init(isDev?: boolean): Promise<void>;
+    produceMessage(topic: string, payload: any, key?: string): Promise<RecordMetadata[]>;
+    produceBatchMessages(topic: string, events: {
+        payload: string;
+        key?: string;
+    }[]): Promise<RecordMetadata[]>;
+    private eventConsumerConnect;
+    private eventConsumerDisconnect;
+    connected(): boolean;
+    getHealth(): Map<string, boolean>;
+    disconnect(): Promise<void>;
+    private processMessage;
+    private processMessagesSingle;
+    createConsumer<T>({ topics, fromStart, sessionTimeout, ...options }: CreateKafkaConsumerOptions, handler: KafkaConsumerHandler<T>): Promise<void>;
+}
+export default KafkaClient;
package/dist/kafka-client.js
ADDED
@@ -0,0 +1,141 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const kafkajs_1 = require("kafkajs");
+const create_logger_1 = require("./utlis/create-logger");
+const enums_1 = require("./enums");
+class KafkaClient extends kafkajs_1.Kafka {
+    constructor({ logger = console, ...options }) {
+        super({ ...options, logCreator: (0, create_logger_1.createLogger)(logger) });
+        this.isConnected = false;
+        this.groupedConsumers = new Map();
+        this.startingCloseProcess = false;
+        this.consumersIsHealthy = new Map();
+    }
+    async init(isDev = false) {
+        this.mainProducer = this.producer({ createPartitioner: kafkajs_1.Partitioners.LegacyPartitioner });
+        this.mainProducer.on(this.mainProducer.events.CONNECT, () => this.isConnected = true);
+        this.mainProducer.on(this.mainProducer.events.DISCONNECT, () => this.isConnected = false);
+        await this.mainProducer.connect();
+        ['SIGTERM', 'SIGINT'].map((signal) => {
+            process.once(signal, async () => {
+                try {
+                    await this.disconnect();
+                }
+                finally {
+                    process.kill(process.pid, signal);
+                }
+            });
+        });
+    }
+    async produceMessage(topic, payload, key) {
+        const data = typeof payload === 'string' ? payload : JSON.stringify(payload);
+        return this.produceBatchMessages(topic, [{ payload: data, key }]);
+    }
+    async produceBatchMessages(topic, events) {
+        const messages = await Promise.all(events.map(({ payload, key }) => {
+            const message = {
+                value: payload,
+            };
+            if (key) {
+                message.key = key;
+            }
+            return message;
+        }));
+        if (!this.isConnected) {
+            await this.mainProducer.connect();
+        }
+        return this.mainProducer.send({ topic, messages });
+    }
+    eventConsumerConnect(event, topics) {
+        const connected = event === enums_1.ConsumerEventEnum.CONNECTED;
+        for (const topic of topics) {
+            this.consumersIsHealthy.set(topic, connected);
+        }
+    }
+    eventConsumerDisconnect(topics, event) {
+        var _a;
+        const error = (_a = event === null || event === void 0 ? void 0 : event.payload) === null || _a === void 0 ? void 0 : _a.error;
+        const consumerEvent = this.startingCloseProcess ? enums_1.ConsumerEventEnum.FINISHED : enums_1.ConsumerEventEnum.DISCONNECTED;
+        if (error) {
+            this.logger().error(`Kafka client consumer disconnect error: ${error.message}`, {
+                topics,
+                eventType: event.type,
+                stack: error.stack,
+            });
+        }
+        this.eventConsumerConnect(consumerEvent, topics);
+    }
+    connected() {
+        return this.isConnected;
+    }
+    getHealth() {
+        return this.consumersIsHealthy;
+    }
+    async disconnect() {
+        try {
+            this.startingCloseProcess = true;
+            await Promise.all([
+                ...[...this.groupedConsumers.values()].map(consumer => consumer.disconnect()),
+            ]);
+            this.startingCloseProcess = false;
+        }
+        catch (err) {
+            // @ts-ignore
+            this.logger().error(`Kafka client disconnect error: ${err.message}`, { error: err });
+        }
+    }
+    async processMessage(value, topic, handler) {
+        await handler(topic);
+    }
+    async processMessagesSingle({ batch: { topic, messages }, isRunning, isStale, resolveOffset, heartbeat }, handler) {
+        for (let { value, key, offset } of messages) {
+            try {
+                if (!isRunning() || isStale()) {
+                    break;
+                }
+                await this.processMessage(value, topic, handler);
+            }
+            catch (err) {
+                this.logger().error(`Kafka client process message error: ${err.message}`, {
+                    stack: err.stack,
+                    extra: {
+                        topic,
+                        value,
+                    }
+                });
+            }
+            finally {
+                await resolveOffset(offset);
+                await heartbeat();
+            }
+        }
+    }
+    async createConsumer({ topics, fromStart = true, sessionTimeout = 30000, ...options }, handler) {
+        const consumer = this.consumer({ ...options, sessionTimeout });
+        consumer.on(consumer.events.GROUP_JOIN, () => {
+            this.groupedConsumers.set(options.groupId, consumer);
+        });
+        const handleDisconnect = this.eventConsumerDisconnect.bind(this, topics);
+        consumer.on(consumer.events.CONNECT, () => this.eventConsumerConnect(enums_1.ConsumerEventEnum.CONNECTED, topics));
+        consumer.on(consumer.events.DISCONNECT, handleDisconnect);
+        consumer.on(consumer.events.CRASH, handleDisconnect);
+        consumer.on(consumer.events.REQUEST_TIMEOUT, handleDisconnect);
+        try {
+            await consumer.connect();
+            await Promise.all(topics.map(topic => consumer.subscribe({ topic, fromBeginning: fromStart })));
+            await consumer.run({
+                eachBatchAutoResolve: false,
+                eachBatch: async (payload) => {
+                    const { batch: { topic }, heartbeat } = payload;
+                    const timer = setTimeout(() => heartbeat(), Math.ceil(sessionTimeout / 2));
+                    await this.processMessagesSingle(payload, handler);
+                    clearTimeout(timer);
+                }
+            });
+        }
+        catch (err) {
+            this.logger().error(`Kafka client consumer error: ${err.message}`, { stack: err.stack });
+        }
+    }
+}
+exports.default = KafkaClient;
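For orientation, a minimal producer sketch against the new KafkaClient surface above (not part of the diff). The deep dist import path, broker address, topic name, and keys are placeholder assumptions; the package entry point may re-export the client under a different path.

import KafkaClient from '@twt-494/nestjs-kafka/dist/kafka-client';

// Placeholder connection settings; clientId and brokers come from KafkaClientOptions.
const client = new KafkaClient({ clientId: 'example-service', brokers: ['localhost:9092'] });

async function main() {
  // init() connects the main producer and registers SIGTERM/SIGINT shutdown hooks.
  await client.init();
  // Non-string payloads are JSON.stringify'd before being sent as the message value.
  await client.produceMessage('example-topic', { id: 1 }, 'key-1');
  await client.produceBatchMessages('example-topic', [
    { payload: JSON.stringify({ id: 2 }), key: 'key-2' },
  ]);
  // disconnect() only tears down registered consumers; the producer disconnects with the process.
  await client.disconnect();
}

main().catch(console.error);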
package/dist/kafka-decorator.d.ts
ADDED
@@ -0,0 +1,5 @@
+import { CreateKafkaConsumerOptions } from "./types";
+export declare const SUB_CONTEXT_MAP: Map<any, any>;
+export declare const SUB_HANDLER_MAP: Map<any, any>;
+export declare const SUB_CONFIG_MAP: Map<any, any>;
+export declare function listenTo(topic: string, config?: Omit<CreateKafkaConsumerOptions, 'topic' | 'topics' | 'groupId'>): (target: any, propertyKey: any, descriptor: any) => any;
package/dist/kafka-decorator.js
ADDED
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SUB_CONFIG_MAP = exports.SUB_HANDLER_MAP = exports.SUB_CONTEXT_MAP = void 0;
+exports.listenTo = listenTo;
+exports.SUB_CONTEXT_MAP = new Map();
+exports.SUB_HANDLER_MAP = new Map();
+exports.SUB_CONFIG_MAP = new Map();
+function listenTo(topic, config) {
+    // @ts-ignore
+    return function (target, propertyKey, descriptor) {
+        const method = target[propertyKey];
+        exports.SUB_HANDLER_MAP.set(topic, method);
+        if (config) {
+            exports.SUB_HANDLER_MAP.set(topic, method);
+        }
+        return descriptor;
+    };
+}
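For context, a hedged sketch of how AbstractConsumer and listenTo appear intended to work together. Class, topic, and method names are illustrative, and registerTopics is assumed to be implemented by the subclass, since AbstractConsumer.onModuleInit calls it but does not define it.

import { Injectable } from '@nestjs/common';
import { AbstractConsumer } from '@twt-494/nestjs-kafka/dist/abstract-consumer';
import { listenTo } from '@twt-494/nestjs-kafka/dist/kafka-decorator';

@Injectable()
export class OrderConsumer extends AbstractConsumer {
  // Invoked by AbstractConsumer.onModuleInit(); addTopic() records `this` in SUB_CONTEXT_MAP
  // so KafkaService can bind the decorated handler to the right instance.
  registerTopics() {
    this.addTopic('ORDER_CREATED');
  }

  // listenTo() stores this method in SUB_HANDLER_MAP keyed by topic; KafkaService creates the
  // consumer on application bootstrap and prefixes the topic with options.topicPrefix.
  @listenTo('ORDER_CREATED')
  async handleOrderCreated(topic: string) {
    console.log(`consumed a message from ${topic}`);
  }
}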
package/dist/kafka.constants.d.ts
ADDED
@@ -0,0 +1 @@
+export declare const KAFKA_MODULE_OPTIONS = "KAFKA_MODULE_OPTIONS";
package/dist/kafka.module.d.ts
CHANGED
@@ -1,16 +1,5 @@
-import {
-
-    brokers: string[];
-    serviceName: string;
-    user: string;
-}
-export interface KafkaModuleAsyncOptions {
-    imports?: any[];
-    useFactory?: (...args: any[]) => Promise<KafkaModuleOptions> | KafkaModuleOptions;
-    inject?: any[];
-    isGlobal?: boolean;
-}
+import { KafkaModuleOptions } from "./types";
+import { DynamicModule } from "@nestjs/common";
 export declare class KafkaModule {
-    static
-    static registerAsync(options: KafkaModuleAsyncOptions): DynamicModule;
+    static registerAsync(options: KafkaModuleOptions): DynamicModule;
 }
package/dist/kafka.module.js
CHANGED
@@ -1,47 +1,24 @@
 "use strict";
-var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
-    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
-    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
-    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
-    return c > 3 && r && Object.defineProperty(target, key, r), r;
-};
-var KafkaModule_1;
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.KafkaModule = void 0;
-
-const common_1 = require("@nestjs/common");
+const kafka_constants_1 = require("./kafka.constants");
 const kafka_service_1 = require("./kafka.service");
-
-    static
+class KafkaModule {
+    static registerAsync(options) {
         return {
-
+            global: true,
+            module: KafkaModule,
             providers: [
                 kafka_service_1.KafkaService,
-                {
+                {
+                    provide: kafka_constants_1.KAFKA_MODULE_OPTIONS,
+                    useFactory: options.useFactory,
+                    inject: options.inject || [],
+                }
             ],
-            exports: [kafka_service_1.KafkaService],
-        };
-    }
-    static registerAsync(options) {
-        var _a;
-        if (!options.useFactory) {
-            throw new Error('KafkaModule.registerAsync requires useFactory');
-        }
-        const kafkaOptionsProvider = {
-            provide: 'KAFKA_MODULE_OPTIONS',
-            useFactory: options.useFactory,
-            inject: options.inject || [],
-        };
-        return {
-            module: KafkaModule_1,
             imports: options.imports || [],
-            providers: [kafka_service_1.KafkaService, kafkaOptionsProvider],
             exports: [kafka_service_1.KafkaService],
-            global: (_a = options.isGlobal) !== null && _a !== void 0 ? _a : false,
         };
     }
-}
+}
 exports.KafkaModule = KafkaModule;
-exports.KafkaModule = KafkaModule = KafkaModule_1 = __decorate([
-    (0, common_1.Module)({})
-], KafkaModule);
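A hedged registration sketch for the reworked registerAsync. Module, env, and value names are assumptions, and KafkaModule is assumed to be re-exported from the package entry point. Note that topicPrefix and user are read by KafkaService at runtime (getOptions and onApplicationBootstrap) even though the declared KafkaClientOptions type does not list them, hence the cast.

import { Module } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { KafkaModule } from '@twt-494/nestjs-kafka';

@Module({
  imports: [
    // registerAsync now always marks the module as global and wires the factory result
    // into the KAFKA_MODULE_OPTIONS provider consumed by KafkaService.
    KafkaModule.registerAsync({
      imports: [ConfigModule],
      inject: [ConfigService],
      useFactory: (config: ConfigService) => ({
        clientId: 'example-service',                                        // placeholder values
        brokers: config.get<string>('KAFKA_BROKERS', 'localhost:9092').split(','),
        isDev: config.get('NODE_ENV') !== 'production',
        topicPrefix: 'example',            // used when building consumer topic names
        user: 'example-user',              // used in the generated consumer groupId
      } as any),
    }),
  ],
})
export class AppModule {}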
package/dist/kafka.service.d.ts
CHANGED
@@ -1,23 +1,10 @@
-import {
-import
-
-export declare class KafkaService implements OnModuleInit {
+import { OnApplicationBootstrap } from '@nestjs/common';
+import KafkaClient from './kafka-client';
+export declare class KafkaService implements OnApplicationBootstrap {
     private readonly options;
     private kafka;
-
-
-
-
-    createConsumer(topic: string, options?: {
-        sessionTimeout?: number;
-    }): Promise<Consumer>;
-    sendBatch(topic: string, data: {
-        key?: string;
-        value: string;
-    }[]): Promise<void>;
-    send(topic: string, data: any, key?: string): Promise<void>;
-    produceBatch(topic: string, messages: {
-        value: string;
-        key?: string;
-    }[]): Promise<void>;
+    constructor(options: any);
+    private getOptions;
+    onApplicationBootstrap(): Promise<void>;
+    getClient(): KafkaClient;
 }
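Since the 6.x send/sendBatch/produceBatch/createConsumer methods are gone from KafkaService, producing now goes through the underlying client. A hedged migration sketch, assuming KafkaService is re-exported from the package entry point; class and topic names are illustrative.

import { Injectable } from '@nestjs/common';
import { KafkaService } from '@twt-494/nestjs-kafka';

@Injectable()
export class OrderPublisher {
  constructor(private readonly kafkaService: KafkaService) {}

  async publish() {
    const client = this.kafkaService.getClient();

    // 6.x: this.kafkaService.send(topic, data, key)
    await client.produceMessage('orders', { id: 1 }, 'order-1');

    // 6.x: this.kafkaService.sendBatch(topic, data) / produceBatch(topic, messages)
    await client.produceBatchMessages('orders', [
      { payload: JSON.stringify({ id: 2 }), key: 'order-2' },
    ]);
  }
}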
package/dist/kafka.service.js
CHANGED
@@ -11,73 +11,49 @@ var __metadata = (this && this.__metadata) || function (k, v) {
 var __param = (this && this.__param) || function (paramIndex, decorator) {
     return function (target, key) { decorator(target, key, paramIndex); }
 };
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.KafkaService = void 0;
-// kafka.service.ts
 const common_1 = require("@nestjs/common");
-const
+const kafka_client_1 = __importDefault(require("./kafka-client"));
+const kafka_constants_1 = require("./kafka.constants");
+const kafka_decorator_1 = require("./kafka-decorator");
 let KafkaService = class KafkaService {
+    // @ts-ignore
     constructor(options) {
        this.options = options;
+        const { isDev, connectionTimeout = 5000, ...clientOptions } = this.options;
+        this.kafka = new kafka_client_1.default({ connectionTimeout, ...clientOptions });
+        this.kafka.init(isDev);
     }
-
-
-
-
-
-            clientId: `${this.options.serviceName}Client`,
-            brokers: this.options.brokers,
-        });
-        this.producer = this.kafka.producer({
-            createPartitioner: kafkajs_1.Partitioners.LegacyPartitioner,
-        });
-        await this.producer.connect();
-        process.on('SIGTERM', this.shutdown);
-        process.on('SIGINT', this.shutdown);
-    }
-    async shutdown() {
-        await this.producer.disconnect();
+    getOptions(topic) {
+        return {
+            topics: [`${this.options.topicPrefix}_${topic}`],
+            groupId: `${this.options.clientId} ${topic} ${this.options.user} Consumer`,
+        };
     }
-    async
-
-
-
-
-
-
-
-
-
-
-            topic,
-            messages: data,
-        });
-    }
-    async send(topic, data, key) {
-        await this.producer.send({
-            topic,
-            messages: [
-                {
-                    key,
-                    value: JSON.stringify(data),
-                },
-            ],
+    async onApplicationBootstrap() {
+        if (!this.options.user && this.options.isDev) {
+            return;
+        }
+        kafka_decorator_1.SUB_HANDLER_MAP.forEach((handler, topic) => {
+            const { context } = kafka_decorator_1.SUB_CONTEXT_MAP.get(topic);
+            const consumerOptions = this.getOptions(topic);
+            if (kafka_decorator_1.SUB_CONFIG_MAP.has(topic)) {
+                Object.assign(consumerOptions, kafka_decorator_1.SUB_CONFIG_MAP.get(topic));
+            }
+            return this.kafka.createConsumer(consumerOptions, handler.bind(context));
         });
     }
-
-
-            topicMessages: [
-                {
-                    topic,
-                    messages,
-                },
-            ],
-        });
+    getClient() {
+        return this.kafka;
     }
 };
 exports.KafkaService = KafkaService;
 exports.KafkaService = KafkaService = __decorate([
     (0, common_1.Injectable)(),
-    __param(0, (0, common_1.Inject)(
+    __param(0, (0, common_1.Inject)(kafka_constants_1.KAFKA_MODULE_OPTIONS)),
     __metadata("design:paramtypes", [Object])
 ], KafkaService);
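Consumer health is now tracked per topic in the client (see eventConsumerConnect above), so a readiness probe could be wired roughly as follows; the controller name and route are assumptions.

import { Controller, Get } from '@nestjs/common';
import { KafkaService } from '@twt-494/nestjs-kafka';

@Controller('health')
export class KafkaHealthController {
  constructor(private readonly kafkaService: KafkaService) {}

  @Get('kafka')
  check() {
    const client = this.kafkaService.getClient();
    return {
      // true once the main producer has emitted its CONNECT event
      producerConnected: client.connected(),
      // Map of topic -> boolean, toggled by consumer CONNECT/DISCONNECT/CRASH/REQUEST_TIMEOUT events
      consumers: Object.fromEntries(client.getHealth()),
    };
  }
}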
package/dist/types/index.d.ts
ADDED
@@ -0,0 +1,18 @@
+import { ConsumerConfig } from 'kafkajs';
+import { FactoryProvider, ModuleMetadata } from "@nestjs/common";
+import { LoggerInterface } from "../utlis/create-logger";
+export type KafkaConsumerHandler<T> = (topic: string) => void | Promise<void>;
+export type CreateKafkaConsumerOptions = {
+    topics: string[];
+    batchSize?: number;
+    fromStart?: boolean;
+} & ConsumerConfig;
+export type KafkaClientOptions = {
+    clientId: string;
+    brokers: string[];
+    logger?: LoggerInterface;
+    connectionTimeout?: number;
+};
+export type KafkaModuleOptions = Omit<FactoryProvider<KafkaClientOptions & {
+    isDev: boolean;
+}> & Pick<ModuleMetadata, 'imports'>, 'provide'>;
package/dist/utlis/create-logger.d.ts
ADDED
@@ -0,0 +1,12 @@
+export interface LoggerInterface {
+    debug: (log: any) => void;
+    info: (log: any) => void;
+    warn: (log: any) => void;
+    error: (log: any) => void;
+}
+export declare const createLogger: (logger: LoggerInterface) => () => ({ level, log }: {
+    namespace: string;
+    level: number;
+    label: string;
+    log: Record<string, unknown>;
+}) => void;
package/dist/utlis/create-logger.js
ADDED
@@ -0,0 +1,37 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createLogger = void 0;
+const kafkajs_1 = require("kafkajs");
+const createLogger = (logger) => () => {
+    return ({ level, log }) => {
+        const { message, error, logger: loggerName, ...extra } = log;
+        if (level === kafkajs_1.logLevel.ERROR && error === 'string' && error.includes('rebalancing')) {
+            level = kafkajs_1.logLevel.WARN;
+        }
+        const logMessage = {
+            logger: loggerName,
+            message,
+            ...extra,
+        };
+        if (message instanceof Error) {
+            logMessage.stack = message.stack;
+        }
+        switch (level) {
+            case kafkajs_1.logLevel.ERROR:
+                logger.error(logMessage);
+                return;
+            case kafkajs_1.logLevel.WARN:
+                logger.warn(logMessage);
+                return;
+            case kafkajs_1.logLevel.DEBUG:
+                logger.debug(logMessage);
+                return;
+            case kafkajs_1.logLevel.INFO:
+                logger.info(logMessage);
+                return;
+            default:
+                logger.info(logMessage);
+        }
+    };
+};
+exports.createLogger = createLogger;
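createLogger adapts any object matching LoggerInterface into a kafkajs logCreator, and KafkaClient falls back to console when no logger is passed. A hedged adapter sketch for the Nest Logger follows; the deep import path mirrors the published dist layout, including the 'utlis' spelling.

import { Logger } from '@nestjs/common';
import type { LoggerInterface } from '@twt-494/nestjs-kafka/dist/utlis/create-logger';

const nestLogger = new Logger('Kafka');

// Passed as the `logger` field of KafkaClientOptions; each kafkajs log entry arrives as an
// object containing the message, the kafkajs logger name, and any extra fields.
export const kafkaLogger: LoggerInterface = {
  debug: (log) => nestLogger.debug(log),
  info: (log) => nestLogger.log(log),
  warn: (log) => nestLogger.warn(log),
  error: (log) => nestLogger.error(log),
};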
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@twt-494/nestjs-kafka",
-  "version": "6.0.2",
+  "version": "7.0.0",
   "description": "",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -8,7 +8,8 @@
   "author": "Oganes Mgerabyan",
   "license": "MIT",
   "dependencies": {
-    "kafkajs": "^2.2.4"
+    "kafkajs": "^2.2.4",
+    "lodash.camelcase": "^4.3.0"
   },
   "peerDependencies": {
     "@nestjs/common": "11.1.6"