@nsshunt/stskafka 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 nsshunt
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,2 @@
+ # stskafka
+ STS Kafka
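The README itself gives no usage guidance, so here is a minimal, hedged usage sketch based only on the API visible in the bundled source that follows (`KafkaManager`, `KafkaProducer`, `KafkaConsumer`). Broker addresses, client id, topic, message contents, and the `console`-based logger are illustrative placeholders, not values shipped with the package.

```ts
// Illustrative sketch only. Assumptions: broker list, client id, topic and the
// console stand-in for the ISTSLogger (from @nsshunt/stsutils) are placeholders.
import { KafkaManager } from "@nsshunt/stskafka";

async function main() {
  const manager = new KafkaManager({
    clientId: "example-client",        // placeholder
    brokers: ["localhost:9092"],       // placeholder broker list
    adminTimeout: 5000,
    connectionTimeout: 1000,
    requestTimeout: 30000,
    logLevel: "ERROR",                 // NOTHING | ERROR | WARN | INFO | DEBUG
    useSSL: false,
    logger: console as any,            // stand-in for an ISTSLogger
  });

  // Producer side: each method reports failures through an error callback
  // rather than throwing.
  const producer = manager.CreateProducer();
  await producer.Connect((err) => console.error(err));
  await producer.SendMessage(
    "example-topic",                   // placeholder topic
    { key: "k1", value: "hello" },
    (err) => console.error(err)
  );
  await producer.Disconnect((err) => console.error(err));
}

main().catch(console.error);
```

The error-callback style is consistent across the bundled classes: `Connect`, `Disconnect`, `SendMessage`, `Subscribe`, and the other operations all accept an `errorCb` and log via the supplied logger instead of rejecting.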
@@ -0,0 +1,329 @@
+ var __defProp = Object.defineProperty;
+ var __typeError = (msg) => {
+ throw TypeError(msg);
+ };
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
+ var _id, _groupId, _consumer, _kafka, _connected, _options, _KafkaConsumer_instances, LogErrorMessage_fn, _RaiseError, _options2, _id2, _producer, _kafka2, _connected2, _KafkaProducer_instances, LogErrorMessage_fn2, _RaiseError2, _kafka3, _KafkaManager_instances, LogErrorMessage_fn3, _RaiseError3;
+ import { STSOptionsBase } from "@nsshunt/stsutils";
+ import { logLevel, Kafka } from "kafkajs";
+ import { v4 } from "uuid";
+ import chalk from "chalk";
+ const net = {};
+ class KafkaConsumer {
+ constructor(options) {
+ __privateAdd(this, _KafkaConsumer_instances);
+ __privateAdd(this, _id);
+ __privateAdd(this, _groupId);
+ __privateAdd(this, _consumer);
+ __privateAdd(this, _kafka);
+ __privateAdd(this, _connected, false);
+ __privateAdd(this, _options);
+ __privateAdd(this, _RaiseError, (msg, errorCb) => {
+ const errorMessage = `${process.pid}:KafkaConsumer:${msg}`;
+ __privateMethod(this, _KafkaConsumer_instances, LogErrorMessage_fn).call(this, chalk.red(errorMessage));
+ errorCb(errorMessage);
+ });
+ __publicField(this, "Subscribe", async (topics, fromBeginning, errorCb) => {
+ if (__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).subscribe({ topics, fromBeginning });
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Subscribe(): Error: [${error}]`, errorCb);
+ }
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError).call(this, `Subscribe(): Could not consumer.connect, Error: [${error}]`, errorCb);
+ });
+ }
+ });
+ __publicField(this, "Stop", async (errorCb) => {
+ if (__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).stop();
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Stop(): Error: [${error}]`, errorCb);
+ }
+ }
+ });
+ __publicField(this, "StartConsumingMessages", async (autoCommit, cb, errorCb) => {
+ if (__privateGet(this, _connected)) {
+ await __privateGet(this, _consumer).run({
+ autoCommit,
+ eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
+ try {
+ cb(topic, partition, message, heartbeat, pause);
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `StartConsumingMessages:eachMessage(): Error: [${error}]`, errorCb);
+ }
+ }
+ });
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError).call(this, `StartConsumingMessages(): Could not consumer.connect, Error: [${error}]`, errorCb);
+ });
+ }
+ });
+ __privateSet(this, _options, options);
+ __privateSet(this, _id, __privateGet(this, _options).id);
+ __privateSet(this, _groupId, __privateGet(this, _options).groupId);
+ __privateSet(this, _kafka, __privateGet(this, _options).kafka);
+ __privateSet(this, _consumer, __privateGet(this, _kafka).consumer({
+ groupId: __privateGet(this, _groupId),
+ retry: {
+ restartOnFailure: async (error) => {
+ __privateMethod(this, _KafkaConsumer_instances, LogErrorMessage_fn).call(this, chalk.magenta(`${process.pid}:KafkaConsumer:constructor:restartOnFailure(): Error: [${error}]`));
+ return true;
+ }
+ }
+ }));
+ }
+ get consumer() {
+ return __privateGet(this, _consumer);
+ }
+ get id() {
+ return __privateGet(this, _id);
+ }
+ async Connect(errorCb) {
+ if (!__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).connect();
+ __privateSet(this, _connected, true);
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Connect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ async Disconnect(errorCb) {
+ if (__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).disconnect();
+ __privateSet(this, _connected, false);
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Disconnect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ }
+ _id = new WeakMap();
+ _groupId = new WeakMap();
+ _consumer = new WeakMap();
+ _kafka = new WeakMap();
+ _connected = new WeakMap();
+ _options = new WeakMap();
+ _KafkaConsumer_instances = new WeakSet();
+ LogErrorMessage_fn = function(message) {
+ __privateGet(this, _options).logger.error(message);
+ };
+ _RaiseError = new WeakMap();
+ class KafkaProducer {
+ constructor(options) {
+ __privateAdd(this, _KafkaProducer_instances);
+ __privateAdd(this, _options2);
+ __privateAdd(this, _id2);
+ __privateAdd(this, _producer);
+ __privateAdd(this, _kafka2);
+ __privateAdd(this, _connected2, false);
+ __privateAdd(this, _RaiseError2, (msg, errorCb) => {
+ __privateMethod(this, _KafkaProducer_instances, LogErrorMessage_fn2).call(this, chalk.red(msg));
+ errorCb(msg);
+ });
+ __publicField(this, "SendMessage", async (topic, message, errorCb) => {
+ if (__privateGet(this, _connected2)) {
+ try {
+ return __privateGet(this, _producer).send({
+ topic,
+ messages: [message]
+ });
+ } catch (error) {
+ __privateMethod(this, _KafkaProducer_instances, LogErrorMessage_fn2).call(this, chalk.red(`${process.pid}:KafkaProducer:SendMessage(): Error: [${error}]`));
+ return [];
+ }
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:SendMessage(): Could not producer.connect, Error: [${error}]`, errorCb);
+ });
+ return [];
+ }
+ });
+ __publicField(this, "SendMessages", async (topic, messages, errorCb) => {
+ if (__privateGet(this, _connected2)) {
+ try {
+ return __privateGet(this, _producer).send({
+ topic,
+ messages
+ });
+ } catch (error) {
+ __privateMethod(this, _KafkaProducer_instances, LogErrorMessage_fn2).call(this, chalk.red(`${process.pid}:KafkaProducer:SendMessages(): Error: [${error}]`));
+ return [];
+ }
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:SendMessages(): Could not producer.connect, Error: [${error}]`, errorCb);
+ });
+ return [];
+ }
+ });
+ __privateSet(this, _options2, options);
+ __privateSet(this, _id2, __privateGet(this, _options2).id);
+ __privateSet(this, _kafka2, __privateGet(this, _options2).kafka);
+ __privateSet(this, _producer, __privateGet(this, _kafka2).producer());
+ }
+ get producer() {
+ return __privateGet(this, _producer);
+ }
+ get id() {
+ return __privateGet(this, _id2);
+ }
+ async Connect(errorCb) {
+ if (!__privateGet(this, _connected2)) {
+ try {
+ await __privateGet(this, _producer).connect();
+ __privateSet(this, _connected2, true);
+ } catch (error) {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:Connect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ async Disconnect(errorCb) {
+ if (__privateGet(this, _connected2)) {
+ try {
+ await __privateGet(this, _producer).disconnect();
+ __privateSet(this, _connected2, false);
+ } catch (error) {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:Disconnect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ }
+ _options2 = new WeakMap();
+ _id2 = new WeakMap();
+ _producer = new WeakMap();
+ _kafka2 = new WeakMap();
+ _connected2 = new WeakMap();
+ _KafkaProducer_instances = new WeakSet();
+ LogErrorMessage_fn2 = function(message) {
+ __privateGet(this, _options2).logger.error(message);
+ };
+ _RaiseError2 = new WeakMap();
+ class KafkaManager extends STSOptionsBase {
+ constructor(options) {
+ super(options);
+ __privateAdd(this, _KafkaManager_instances);
+ __privateAdd(this, _kafka3);
+ __privateAdd(this, _RaiseError3, (msg, errorCb) => {
+ __privateMethod(this, _KafkaManager_instances, LogErrorMessage_fn3).call(this, chalk.red(msg));
+ errorCb(msg);
+ });
+ __publicField(this, "CreateTopic", async (topic, partitions, errorCb) => {
+ var _a;
+ try {
+ const admin = __privateGet(this, _kafka3).admin();
+ await admin.connect();
+ const result = await admin.createTopics({
+ validateOnly: false,
+ waitForLeaders: true,
+ timeout: (_a = this.options) == null ? void 0 : _a.timeout,
+ topics: [
+ {
+ topic,
+ numPartitions: partitions
+ // default: -1 (uses broker `num.partitions` configuration)
+ //replicationFactor: <Number>, // default: -1 (uses broker `default.replication.factor` configuration)
+ //replicaAssignment: <Array>, // Example: [{ partition: 0, replicas: [0,1,2] }] - default: []
+ //configEntries: <Array> // Example: [{ name: 'cleanup.policy', value: 'compact' }] - default: []
+ }
+ ]
+ });
+ await admin.disconnect();
+ return result;
+ } catch (error) {
+ __privateGet(this, _RaiseError3).call(this, `KafkaManager:CreateTopic(): Error: [${error}]`, errorCb);
+ return false;
+ }
+ });
+ const kc = {
+ clientId: options.clientId,
+ brokers: options.brokers,
+ //brokers: ['localhost:9092', 'kafka2:9092'],
+ connectionTimeout: options.connectionTimeout,
+ requestTimeout: options.requestTimeout
+ };
+ switch (options.logLevel) {
+ case "NOTHING":
+ kc.logLevel = logLevel.NOTHING;
+ break;
+ case "ERROR":
+ kc.logLevel = logLevel.ERROR;
+ break;
+ case "WARN":
+ kc.logLevel = logLevel.WARN;
+ break;
+ case "INFO":
+ kc.logLevel = logLevel.INFO;
+ break;
+ case "DEBUG":
+ kc.logLevel = logLevel.DEBUG;
+ break;
+ default:
+ kc.logLevel = logLevel.NOTHING;
+ }
+ if (options.useSSL && options.ssl) {
+ kc.ssl = {
+ ca: [net.readFileSync(options.ssl.cafile, { encoding: "utf8" })],
+ key: net.readFileSync(options.ssl.keyfile, { encoding: "utf8" }),
+ cert: net.readFileSync(options.ssl.certfileFile, { encoding: "utf8" })
+ };
+ }
+ if (options.keepAlive) {
+ const myCustomSocketFactory = (config) => {
+ const socket = config.ssl ? net.connect(
+ Object.assign({ host: config.host, port: config.port }, !net.isIP(config.host) ? { servername: config.host } : {}, config.ssl),
+ config.onConnect
+ ) : net.connect({ host: config.host, port: config.port }, config.onConnect);
+ socket.setKeepAlive(true, options.keepAlive);
+ return socket;
+ };
+ kc.socketFactory = myCustomSocketFactory;
+ }
+ __privateSet(this, _kafka3, new Kafka(kc));
+ }
+ get kafka() {
+ return __privateGet(this, _kafka3);
+ }
+ CreateProducer() {
+ var _a;
+ return new KafkaProducer({
+ kafka: __privateGet(this, _kafka3),
+ id: v4(),
+ logger: (_a = this.options) == null ? void 0 : _a.logger
+ });
+ }
+ CreateConsumer(groupId) {
+ var _a;
+ return new KafkaConsumer({
+ kafka: __privateGet(this, _kafka3),
+ id: v4(),
+ groupId,
+ logger: (_a = this.options) == null ? void 0 : _a.logger
+ });
+ }
+ }
+ _kafka3 = new WeakMap();
+ _KafkaManager_instances = new WeakSet();
+ LogErrorMessage_fn3 = function(message) {
+ var _a;
+ (_a = this.options) == null ? void 0 : _a.logger.error(message);
+ };
+ _RaiseError3 = new WeakMap();
+ export {
+ KafkaConsumer,
+ KafkaManager,
+ KafkaProducer
+ };
+ //# sourceMappingURL=stskafka.mjs.map
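The ES module bundle above (stskafka.mjs, per its sourceMappingURL comment) also exposes consumer and admin helpers. Below is a hedged sketch of the consuming and topic-creation side; the topic, group id, and partition count are illustrative, and `manager` stands for a `KafkaManager` constructed as in the earlier sketch.

```ts
// Illustrative sketch; `manager` is assumed to be a KafkaManager built as in
// the earlier example, and all names/values here are placeholders.
import { KafkaManager } from "@nsshunt/stskafka";

declare const manager: KafkaManager; // constructed elsewhere (see earlier sketch)

async function consume() {
  // CreateTopic wraps kafkajs admin.createTopics and resolves to false on error.
  const created = await manager.CreateTopic("example-topic", 3, (err) =>
    console.error("CreateTopic failed:", err)
  );
  console.log("topic created:", created);

  const consumer = manager.CreateConsumer("example-group"); // placeholder group id
  await consumer.Connect((err) => console.error(err));
  await consumer.Subscribe(["example-topic"], true, (err) => console.error(err));
  await consumer.StartConsumingMessages(
    true, // autoCommit
    (topic, partition, message) => {
      // message is a kafkajs KafkaMessage; value may be null.
      console.log(topic, partition, message.value?.toString());
    },
    (err) => console.error(err)
  );
}

consume().catch(console.error);
```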
@@ -0,0 +1 @@
+ {"version":3,"file":"stskafka.mjs","sources":["../__vite-browser-external","../src/kafka/kafkaconsumer.ts","../src/kafka/kafkaproducer.ts","../src/kafka/kafkamanager.ts"],"sourcesContent":["export default {}","/* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF\nimport { Kafka, Consumer, IHeaders, KafkaMessage } from 'kafkajs'\nimport { IKafkaConsumer, ConsumeMessageCB, ConsumeMessageErrorCB } from './../commonTypes'\n\nimport chalk from 'chalk';\nimport { ISTSLogger } from '@nsshunt/stsutils';\n\nexport interface IKafkaConsumerOptions {\n kafka: Kafka\n id: string\n groupId: string\n logger: ISTSLogger\n}\n\nexport class KafkaConsumer implements IKafkaConsumer {\n #id: string\n #groupId: string\n #consumer: Consumer;\n #kafka: Kafka;\n #connected: boolean = false;\n #options: IKafkaConsumerOptions;\n\n constructor(options: IKafkaConsumerOptions) {\n this.#options = options;\n this.#id = this.#options.id;\n this.#groupId = this.#options.groupId;\n this.#kafka = this.#options.kafka;\n this.#consumer = this.#kafka.consumer({ \n groupId: this.#groupId,\n retry: {\n restartOnFailure: async (error: Error): Promise<boolean> => {\n this.#LogErrorMessage(chalk.magenta(`${process.pid}:KafkaConsumer:constructor:restartOnFailure(): Error: [${error}]`))\n return true;\n }\n }\n })\n }\n\n #LogErrorMessage(message: any) {\n this.#options.logger.error(message);\n }\n\n get consumer() {\n return this.#consumer;\n }\n \n get id(): string {\n return this.#id;\n }\n\n #RaiseError = (msg: string, errorCb: (error: any) => void) => {\n const errorMessage = `${process.pid}:KafkaConsumer:${msg}`;\n this.#LogErrorMessage(chalk.red(errorMessage));\n errorCb(errorMessage);\n }\n\n async Connect(errorCb: (error: any) => void): Promise<void> {\n if (!this.#connected) {\n try {\n await this.#consumer.connect()\n this.#connected = true;\n } catch (error) {\n this.#RaiseError(`Connect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n async Disconnect(errorCb: (error: any) => void): Promise<void> {\n if (this.#connected) {\n try {\n await this.#consumer.disconnect()\n this.#connected = false;\n } catch (error) {\n this.#RaiseError(`Disconnect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n Subscribe = async(topics: string[], fromBeginning: boolean, errorCb: (error: any) => void): Promise<void> => {\n if (this.#connected) {\n try {\n await this.#consumer.subscribe({ topics, fromBeginning })\n } catch (error) {\n this.#RaiseError(`Subscribe(): Error: [${error}]`, errorCb);\n }\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`Subscribe(): Could not consumer.connect, Error: [${error}]`, errorCb);\n });\n }\n }\n\n Stop = async(errorCb: (error: any) => void): Promise<void> => {\n if (this.#connected) {\n try {\n await this.#consumer.stop();\n } catch (error) {\n this.#RaiseError(`Stop(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n StartConsumingMessages = async (autoCommit: boolean, cb: ConsumeMessageCB, errorCb: ConsumeMessageErrorCB): Promise<void> => {\n if (this.#connected) {\n await this.#consumer.run({\n autoCommit, \n eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {\n try {\n cb(topic, partition, message, heartbeat, pause);\n /*\n if (message.key) {\n if (message.value) {\n cb(topic.toString(), message.key.toString(), partition, message.value.toString(), message.headers)\n } else {\n cb(topic.toString(), message.key.toString(), partition, \"\", message.headers)\n }\n } else {\n if (message.value) {\n cb(topic.toString(), \"\", 
partition, message.value?.toString(), message.headers)\n } else {\n cb(topic.toString(), \"\", partition, \"\", message.headers)\n }\n }\n */\n } catch (error) {\n this.#RaiseError(`StartConsumingMessages:eachMessage(): Error: [${error}]`, errorCb);\n }\n }\n })\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`StartConsumingMessages(): Could not consumer.connect, Error: [${error}]`, errorCb);\n });\n }\n }\n}\n","/* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF\nimport { Kafka, Producer, RecordMetadata } from 'kafkajs'\n\nimport chalk from 'chalk';\nimport { ISTSLogger } from '@nsshunt/stsutils';\n\nexport interface IKafkaProducerOptions {\n kafka: Kafka\n id: string\n logger: ISTSLogger\n}\n\nexport class KafkaProducer {\n #options: IKafkaProducerOptions;\n #id: string\n #producer: Producer;\n #kafka: Kafka;\n #connected: boolean = false;\n\n constructor(options: IKafkaProducerOptions) {\n this.#options = options;\n this.#id = this.#options.id;\n this.#kafka = this.#options.kafka;\n this.#producer = this.#kafka.producer()\n }\n\n #LogErrorMessage(message: any) {\n this.#options.logger.error(message);\n }\n\n #RaiseError = (msg: string, errorCb: (error: any) => void) => {\n this.#LogErrorMessage(chalk.red(msg));\n errorCb(msg);\n }\n\n get producer() {\n return this.#producer;\n }\n \n get id(): string {\n return this.#id;\n }\n\n async Connect(errorCb: (error: any) => void): Promise<void> {\n if (!this.#connected) {\n try {\n await this.#producer.connect();\n this.#connected = true;\n } catch (error) {\n this.#RaiseError(`${process.pid}:KafkaProducer:Connect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n async Disconnect(errorCb: (error: any) => void): Promise<void> {\n if (this.#connected) {\n try {\n await this.#producer.disconnect()\n this.#connected = false;\n } catch (error) {\n this.#RaiseError(`${process.pid}:KafkaProducer:Disconnect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n SendMessage = async(topic: string, message: { key: string, value: string}, errorCb: (error: any) => void): Promise<RecordMetadata[]> => {\n if (this.#connected) {\n try {\n return this.#producer.send({\n topic,\n messages: [ message ]\n })\n } catch (error) {\n this.#LogErrorMessage(chalk.red(`${process.pid}:KafkaProducer:SendMessage(): Error: [${error}]`));\n return [ ];\n }\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`${process.pid}:KafkaProducer:SendMessage(): Could not producer.connect, Error: [${error}]`, errorCb);\n });\n return [ ];\n }\n }\n\n SendMessages = async(topic: string, messages: { key: string, value: string}[], errorCb: (error: any) => void): Promise<RecordMetadata[]> => {\n if (this.#connected) {\n try {\n return this.#producer.send({\n topic,\n messages\n }) \n } catch (error) {\n this.#LogErrorMessage(chalk.red(`${process.pid}:KafkaProducer:SendMessages(): Error: [${error}]`));\n return [ ];\n }\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`${process.pid}:KafkaProducer:SendMessages(): Could not producer.connect, Error: [${error}]`, errorCb);\n });\n return [ ];\n }\n }\n}\n","/* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF\n/*\n\nkafka example server #01 - Docker Compose File\n----------------------------------------------\nNote: In this example, the log retention is set to 24 hours (rather than default to 1 week)\nhttps://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/\n\nversion: '2'\nservices:\n zookeeper:\n 
image: wurstmeister/zookeeper\n ports:\n - \"2181:2181\"\n restart: unless-stopped\n\n kafka:\n image: wurstmeister/kafka\n ports:\n - \"9092:9092\"\n environment:\n DOCKER_API_VERSION: 1.22\n KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92\n KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181\n KAFKA_CREATE_TOPICS: \"topic-name2:3:1\"\n KAFKA_LOG_RETENTION_MS: 86400000\n KAFKA_LOG_RETENTION_BYTES: -1\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n restart: unless-stopped\n\n\nkafka example server #02 - Docker Compose File\n----------------------------------------------\nversion: \"3.9\" # optional since v1.27.0\n\nnetworks:\n app-tier:\n driver: bridge\n\nservices:\n kafka:\n image: 'bitnami/kafka:latest'\n ports:\n - '9092:9092'\n networks:\n - app-tier \n environment:\n - ALLOW_PLAINTEXT_LISTENER=yes\n - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true\n - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092\n\n*/\nimport { ISTSLogger, STSOptionsBase } from '@nsshunt/stsutils'\n\nimport { Kafka, KafkaConfig, logLevel } from 'kafkajs'\n\nimport { v4 as uuidv4 } from 'uuid';\nimport fs from 'node:fs'\n\nimport { KafkaConsumer } from './kafkaconsumer'\nimport { KafkaProducer } from './kafkaproducer'\n\nimport net from 'net'\nimport tls from 'tls'\n\nimport chalk from 'chalk';\n\nconst KEEP_ALIVE_DELAY = 60000 //@@ in ms\n\n// https://kafka.js.org/docs/configuration\nexport interface IKafkaManagerConfig {\n clientId: string // A logical identifier of an application. Can be used by brokers to apply quotas or trace requests to a specific application. Example: booking-events-processor.\n brokers: string[] // List of Kafka brokers\n adminTimeout: number // Time in milliseconds to wait for a successful admin operation. The default value is: 5000.\n connectionTimeout: number // Time in milliseconds to wait for a successful connection. The default value is: 1000.\n requestTimeout: number // Time in milliseconds to wait for a successful request. The default value is: 30000.\n logLevel: string // There are 5 log levels available: NOTHING, ERROR, WARN, INFO, and DEBUG. INFO is configured by default.\n keepAlive?: number // When specified, the number of ms for socket keep alive processing.\n useSSL: boolean // Use SSL\n ssl?: { // Must be specified if useSSL is true\n rejectUnauthorized: boolean\n cafile: string\n keyfile: string\n certfileFile: string\n }\n logger: ISTSLogger\n}\n\n// https://kafka.js.org/docs/configuration\n// https://github.com/tulios/kafkajs/blob/master/src/network/socketFactory.js\ndeclare interface ICustomSocketFactory {\n host: any,\n port: any,\n ssl: any,\n onConnect: any\n}\n\nexport class KafkaManager extends STSOptionsBase {\n #kafka: Kafka\n\n constructor(options: IKafkaManagerConfig) {\n super(options);\n\n const kc: KafkaConfig = {\n clientId: options.clientId,\n brokers: options.brokers, //brokers: ['localhost:9092', 'kafka2:9092'],\n connectionTimeout: options.connectionTimeout,\n requestTimeout: options.requestTimeout\n }\n // NOTHING, ERROR, WARN, INFO, and DEBUG. 
INFO is configured by default.\n switch (options.logLevel) {\n case 'NOTHING' :\n kc.logLevel = logLevel.NOTHING;\n break;\n case 'ERROR' :\n kc.logLevel = logLevel.ERROR;\n break;\n case 'WARN' :\n kc.logLevel = logLevel.WARN;\n break;\n case 'INFO' :\n kc.logLevel = logLevel.INFO;\n break;\n case 'DEBUG' :\n kc.logLevel = logLevel.DEBUG;\n break;\n default :\n kc.logLevel = logLevel.NOTHING;\n }\n if (options.useSSL && options.ssl) {\n kc.ssl = {\n ca: [fs.readFileSync(options.ssl.cafile as string, { encoding: 'utf8'})],\n key: fs.readFileSync(options.ssl.keyfile, { encoding: 'utf8'}),\n cert: fs.readFileSync(options.ssl.certfileFile, { encoding: 'utf8'}),\n }\n }\n if (options.keepAlive) {\n //const myCustomSocketFactory = ({ host, port, ssl, onConnect }) => {\n const myCustomSocketFactory = (config: ICustomSocketFactory) => {\n const socket = config.ssl\n ? tls.connect(\n Object.assign({ host: config.host, port: config.port }, !net.isIP(config.host) ? { servername: config.host } : {}, config.ssl),\n config.onConnect\n )\n : net.connect({ host: config.host, port: config.port }, config.onConnect)\n \n socket.setKeepAlive(true, options.keepAlive)\n return socket\n }\n kc.socketFactory = myCustomSocketFactory;\n }\n\n this.#kafka = new Kafka(kc);\n }\n\n #LogErrorMessage(message: any) {\n this.options?.logger.error(message);\n }\n\n #RaiseError = (msg: string, errorCb: (error: any) => void) => {\n this.#LogErrorMessage(chalk.red(msg));\n errorCb(msg);\n }\n\n get kafka() {\n return this.#kafka;\n }\n\n CreateProducer(): KafkaProducer {\n return new KafkaProducer({\n kafka: this.#kafka,\n id: uuidv4(),\n logger: this.options?.logger});\n }\n\n CreateConsumer(groupId: string) {\n return new KafkaConsumer({\n kafka: this.#kafka, \n id: uuidv4(), \n groupId,\n logger: this.options?.logger});\n }\n\n CreateTopic = async (topic: string, partitions: number, errorCb: (error: any) => void): Promise<boolean> => {\n try {\n const admin = this.#kafka.admin()\n await admin.connect()\n const result = await admin.createTopics({\n validateOnly: false,\n waitForLeaders: true,\n timeout: this.options?.timeout,\n topics: [\n {\n topic: topic,\n numPartitions: partitions, // default: -1 (uses broker `num.partitions` configuration)\n //replicationFactor: <Number>, // default: -1 (uses broker `default.replication.factor` configuration)\n //replicaAssignment: <Array>, // Example: [{ partition: 0, replicas: [0,1,2] }] - default: []\n //configEntries: <Array> // Example: [{ name: 'cleanup.policy', value: 'compact' }] - default: []\n } \n ]\n })\n await admin.disconnect()\n return result;\n } catch (error) {\n this.#RaiseError(`KafkaManager:CreateTopic(): Error: [${error}]`, errorCb);\n return false;\n }\n 
}\n}\n"],"names":["_options","_id","_kafka","_connected","_RaiseError","LogErrorMessage_fn","fs","tls","uuidv4"],"mappings":";;;;;;;;;;;;;;;;AAAA,MAAe,MAAA,CAAA;ACcR,MAAM,cAAwC;AAAA,EAQjD,YAAY,SAAgC;AARzC;AACH;AACA;AACA;AACA;AACA,mCAAsB;AACtB;AA8BA,oCAAc,CAAC,KAAa,YAAkC;AAC1D,YAAM,eAAe,GAAG,QAAQ,GAAG,kBAAkB,GAAG;AACxD,4BAAK,8CAAL,WAAsB,MAAM,IAAI,YAAY;AAC5C,cAAQ,YAAY;AAAA,IAAA;AAyBxB,qCAAY,OAAM,QAAkB,eAAwB,YAAiD;AACzG,UAAI,mBAAK,aAAY;AACb,YAAA;AACA,gBAAM,mBAAK,WAAU,UAAU,EAAE,QAAQ,eAAe;AAAA,iBACnD,OAAO;AACZ,6BAAK,aAAL,WAAiB,wBAAwB,KAAK,KAAK;AAAA,QACvD;AAAA,MAAA,OACG;AACG,cAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,6BAAK,aAAL,WAAiB,oDAAoD,KAAK,KAAK;AAAA,QAAO,CACzF;AAAA,MACL;AAAA,IAAA;AAGJ,gCAAO,OAAM,YAAiD;AAC1D,UAAI,mBAAK,aAAY;AACb,YAAA;AACM,gBAAA,mBAAK,WAAU;iBAChB,OAAO;AACZ,6BAAK,aAAL,WAAiB,mBAAmB,KAAK,KAAK;AAAA,QAClD;AAAA,MACJ;AAAA,IAAA;AAGJ,kDAAyB,OAAO,YAAqB,IAAsB,YAAkD;AACzH,UAAI,mBAAK,aAAY;AACX,cAAA,mBAAK,WAAU,IAAI;AAAA,UACrB;AAAA,UACA,aAAa,OAAO,EAAE,OAAO,WAAW,SAAS,WAAW,YAAY;AAChE,gBAAA;AACA,iBAAG,OAAO,WAAW,SAAS,WAAW,KAAK;AAAA,qBAgBzC,OAAO;AACZ,iCAAK,aAAL,WAAiB,iDAAiD,KAAK,KAAK;AAAA,YAChF;AAAA,UACJ;AAAA,QAAA,CACH;AAAA,MAAA,OACE;AACG,cAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,6BAAK,aAAL,WAAiB,iEAAiE,KAAK,KAAK;AAAA,QAAO,CACtG;AAAA,MACL;AAAA,IAAA;AA9GA,uBAAK,UAAW;AACX,uBAAA,KAAM,mBAAK,UAAS;AACpB,uBAAA,UAAW,mBAAK,UAAS;AACzB,uBAAA,QAAS,mBAAK,UAAS;AACvB,uBAAA,WAAY,mBAAK,QAAO,SAAS;AAAA,MAClC,SAAS,mBAAK;AAAA,MACd,OAAO;AAAA,QACH,kBAAkB,OAAO,UAAmC;AACnD,gCAAA,8CAAA,WAAiB,MAAM,QAAQ,GAAG,QAAQ,GAAG,0DAA0D,KAAK,GAAG;AAC7G,iBAAA;AAAA,QACX;AAAA,MACJ;AAAA,IAAA,CACH;AAAA,EACL;AAAA,EAMA,IAAI,WAAW;AACX,WAAO,mBAAK;AAAA,EAChB;AAAA,EAEA,IAAI,KAAa;AACb,WAAO,mBAAK;AAAA,EAChB;AAAA,EAQA,MAAM,QAAQ,SAA8C;AACpD,QAAA,CAAC,mBAAK,aAAY;AACd,UAAA;AACM,cAAA,mBAAK,WAAU;AACrB,2BAAK,YAAa;AAAA,eACb,OAAO;AACZ,2BAAK,aAAL,WAAiB,sBAAsB,KAAK,KAAK;AAAA,MACrD;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,WAAW,SAA8C;AAC3D,QAAI,mBAAK,aAAY;AACb,UAAA;AACM,cAAA,mBAAK,WAAU;AACrB,2BAAK,YAAa;AAAA,eACb,OAAO;AACZ,2BAAK,aAAL,WAAiB,yBAAyB,KAAK,KAAK;AAAA,MACxD;AAAA,IACJ;AAAA,EACJ;AA2DJ;AAxHI;AACA;AACA;AACA;AACA;AACA;AANG;AAwBH,8BAAiB,SAAc;AACtB,qBAAA,UAAS,OAAO,MAAM,OAAO;AACtC;AAUA;ACtCG,MAAM,cAAc;AAAA,EAOvB,YAAY,SAAgC;AAPzC;AACH,uBAAAA;AACA,uBAAAC;AACA;AACA,uBAAAC;AACA,uBAAAC,aAAsB;AAatB,uBAAAC,cAAc,CAAC,KAAa,YAAkC;AAC1D,4BAAK,0BAAAC,qBAAL,WAAsB,MAAM,IAAI,GAAG;AACnC,cAAQ,GAAG;AAAA,IAAA;AAiCf,uCAAc,OAAM,OAAe,SAAwC,YAA6D;AACpI,UAAI,mBAAKF,cAAY;AACb,YAAA;AACO,iBAAA,mBAAK,WAAU,KAAK;AAAA,YACvB;AAAA,YACA,UAAU,CAAE,OAAQ;AAAA,UAAA,CACvB;AAAA,iBACI,OAAO;AACP,gCAAA,0BAAAE,qBAAA,WAAiB,MAAM,IAAI,GAAG,QAAQ,GAAG,yCAAyC,KAAK,GAAG;AAC/F,iBAAO;QACX;AAAA,MAAA,OACG;AACG,cAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,6BAAKD,cAAL,WAAiB,GAAG,QAAQ,GAAG,qEAAqE,KAAK,KAAK;AAAA,QAAO,CACxH;AACD,eAAO;MACX;AAAA,IAAA;AAGJ,wCAAe,OAAM,OAAe,UAA2C,YAA6D;AACxI,UAAI,mBAAKD,cAAY;AACb,YAAA;AACO,iBAAA,mBAAK,WAAU,KAAK;AAAA,YACvB;AAAA,YACA;AAAA,UAAA,CACH;AAAA,iBACI,OAAO;AACP,gCAAA,0BAAAE,qBAAA,WAAiB,MAAM,IAAI,GAAG,QAAQ,GAAG,0CAA0C,KAAK,GAAG;AAChG,iBAAO;QACX;AAAA,MAAA,OACG;AACG,cAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,6BAAKD,cAAL,WAAiB,GAAG,QAAQ,GAAG,sEAAsE,KAAK,KAAK;AAAA,QAAO,CACzH;AACD,eAAO;MACX;AAAA,IAAA;AAhFA,uBAAKJ,WAAW;AACX,uBAAAC,MAAM,mBAAKD,WAAS;AACpB,uBAAAE,SAAS,mBAAKF,WAAS;AACvB,uBAAA,WAAY,mBAAKE,SAAO,SAAS;AAAA,EAC1C;AAAA,EAWA,IAAI,WAAW;AACX,WAAO,mBAAK;AAAA,EAChB;AAAA,EAEA,IAAI,KAAa;AACb,WAAO,mBAAKD;AAAA,EAChB;AAAA,EAEA,MAAM,QAAQ,SAA8C;AACpD,QAAA,CAAC,mBAAKE,cAAY;AACd,UAAA;AACM,cAAA,mBAAK,WAAU;AACrB,2BAAKA,aAAa;AAAA,eACb,OAAO;AACZ,2BAAKC,cAAL,WAAiB,GAAG,QAAQ,GAAG,qCAAqC,KAAK,KAAK;AAAA,MAClF;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,WAAW,SAA8C;AAC3
D,QAAI,mBAAKD,cAAY;AACb,UAAA;AACM,cAAA,mBAAK,WAAU;AACrB,2BAAKA,aAAa;AAAA,eACb,OAAO;AACZ,2BAAKC,cAAL,WAAiB,GAAG,QAAQ,GAAG,wCAAwC,KAAK,KAAK;AAAA,MACrF;AAAA,IACJ;AAAA,EACJ;AAuCJ;AAzFIJ,YAAA;AACAC,OAAA;AACA;AACAC,UAAA;AACAC,cAAA;AALG;AAcHE,+BAAiB,SAAc;AACtB,qBAAAL,WAAS,OAAO,MAAM,OAAO;AACtC;AAEAI,eAAA;ACoEG,MAAM,qBAAqB,eAAe;AAAA,EAG7C,YAAY,SAA8B;AACtC,UAAM,OAAO;AAJd;AACH,uBAAAF;AA6DA,uBAAAE,cAAc,CAAC,KAAa,YAAkC;AAC1D,4BAAK,yBAAAC,qBAAL,WAAsB,MAAM,IAAI,GAAG;AACnC,cAAQ,GAAG;AAAA,IAAA;AAsBf,uCAAc,OAAO,OAAe,YAAoB,YAAoD;;AACpG,UAAA;AACM,cAAA,QAAQ,mBAAKH,SAAO,MAAM;AAChC,cAAM,MAAM;AACN,cAAA,SAAS,MAAM,MAAM,aAAa;AAAA,UACpC,cAAc;AAAA,UACd,gBAAgB;AAAA,UAChB,UAAS,UAAK,YAAL,mBAAc;AAAA,UACvB,QAAQ;AAAA,YACJ;AAAA,cACI;AAAA,cACA,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA,YAInB;AAAA,UACJ;AAAA,QAAA,CACH;AACD,cAAM,MAAM;AACL,eAAA;AAAA,eACF,OAAO;AACZ,2BAAKE,cAAL,WAAiB,uCAAuC,KAAK,KAAK;AAC3D,eAAA;AAAA,MACX;AAAA,IAAA;AAvGA,UAAM,KAAkB;AAAA,MACpB,UAAU,QAAQ;AAAA,MAClB,SAAS,QAAQ;AAAA;AAAA,MACjB,mBAAmB,QAAQ;AAAA,MAC3B,gBAAgB,QAAQ;AAAA,IAAA;AAG5B,YAAQ,QAAQ,UAAU;AAAA,MAC1B,KAAK;AACD,WAAG,WAAW,SAAS;AACvB;AAAA,MACJ,KAAK;AACD,WAAG,WAAW,SAAS;AACvB;AAAA,MACJ,KAAK;AACD,WAAG,WAAW,SAAS;AACvB;AAAA,MACJ,KAAK;AACD,WAAG,WAAW,SAAS;AACvB;AAAA,MACJ,KAAK;AACD,WAAG,WAAW,SAAS;AACvB;AAAA,MACJ;AACI,WAAG,WAAW,SAAS;AAAA,IAC3B;AACI,QAAA,QAAQ,UAAU,QAAQ,KAAK;AAC/B,SAAG,MAAM;AAAA,QACL,IAAI,CAACE,IAAG,aAAa,QAAQ,IAAI,QAAkB,EAAE,UAAU,OAAM,CAAC,CAAC;AAAA,QACvE,KAAKA,IAAG,aAAa,QAAQ,IAAI,SAAS,EAAE,UAAU,QAAO;AAAA,QAC7D,MAAMA,IAAG,aAAa,QAAQ,IAAI,cAAc,EAAE,UAAU,QAAO;AAAA,MAAA;AAAA,IAE3E;AACA,QAAI,QAAQ,WAAW;AAEb,YAAA,wBAAwB,CAAC,WAAiC;AACtD,cAAA,SAAS,OAAO,MAChBC,IAAI;AAAA,UACF,OAAO,OAAO,EAAE,MAAM,OAAO,MAAM,MAAM,OAAO,KAAK,GAAG,CAAC,IAAI,KAAK,OAAO,IAAI,IAAI,EAAE,YAAY,OAAO,SAAS,CAAA,GAAI,OAAO,GAAG;AAAA,UAC7H,OAAO;AAAA,QAET,IAAA,IAAI,QAAQ,EAAE,MAAM,OAAO,MAAM,MAAM,OAAO,KAAA,GAAQ,OAAO,SAAS;AAErE,eAAA,aAAa,MAAM,QAAQ,SAAS;AACpC,eAAA;AAAA,MAAA;AAEX,SAAG,gBAAgB;AAAA,IACvB;AAEK,uBAAAL,SAAS,IAAI,MAAM,EAAE;AAAA,EAC9B;AAAA,EAWA,IAAI,QAAQ;AACR,WAAO,mBAAKA;AAAA,EAChB;AAAA,EAEA,iBAAgC;;AAC5B,WAAO,IAAI,cAAc;AAAA,MACrB,OAAO,mBAAKA;AAAA,MACZ,IAAIM,GAAO;AAAA,MACX,SAAQ,UAAK,YAAL,mBAAc;AAAA,IAAA,CAAO;AAAA,EACrC;AAAA,EAEA,eAAe,SAAiB;;AAC5B,WAAO,IAAI,cAAc;AAAA,MACrB,OAAO,mBAAKN;AAAA,MACZ,IAAIM,GAAO;AAAA,MACX;AAAA,MACA,SAAQ,UAAK,YAAL,mBAAc;AAAA,IAAA,CAAO;AAAA,EACrC;AA2BJ;AA9GIN,UAAA;AADG;AA0DHG,+BAAiB,SAAc;;AACtB,aAAA,YAAA,mBAAS,OAAO,MAAM;AAC/B;AAEAD,eAAA;"}
@@ -0,0 +1,329 @@
+ (function(global, factory) {
+ typeof exports === "object" && typeof module !== "undefined" ? factory(exports, require("@nsshunt/stsutils"), require("kafkajs"), require("uuid"), require("chalk")) : typeof define === "function" && define.amd ? define(["exports", "@nsshunt/stsutils", "kafkajs", "uuid", "chalk"], factory) : (global = typeof globalThis !== "undefined" ? globalThis : global || self, factory(global["@nsshunt/stskafka"] = {}, global.stsutils, global.kafkajs, global.uuid, global.chalk));
+ })(this, function(exports2, stsutils, kafkajs, uuid, chalk) {
+ "use strict";var __defProp = Object.defineProperty;
+ var __typeError = (msg) => {
+ throw TypeError(msg);
+ };
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
+
+ var _id, _groupId, _consumer, _kafka, _connected, _options, _KafkaConsumer_instances, LogErrorMessage_fn, _RaiseError, _options2, _id2, _producer, _kafka2, _connected2, _KafkaProducer_instances, LogErrorMessage_fn2, _RaiseError2, _kafka3, _KafkaManager_instances, LogErrorMessage_fn3, _RaiseError3;
+ const net = {};
+ class KafkaConsumer {
+ constructor(options) {
+ __privateAdd(this, _KafkaConsumer_instances);
+ __privateAdd(this, _id);
+ __privateAdd(this, _groupId);
+ __privateAdd(this, _consumer);
+ __privateAdd(this, _kafka);
+ __privateAdd(this, _connected, false);
+ __privateAdd(this, _options);
+ __privateAdd(this, _RaiseError, (msg, errorCb) => {
+ const errorMessage = `${process.pid}:KafkaConsumer:${msg}`;
+ __privateMethod(this, _KafkaConsumer_instances, LogErrorMessage_fn).call(this, chalk.red(errorMessage));
+ errorCb(errorMessage);
+ });
+ __publicField(this, "Subscribe", async (topics, fromBeginning, errorCb) => {
+ if (__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).subscribe({ topics, fromBeginning });
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Subscribe(): Error: [${error}]`, errorCb);
+ }
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError).call(this, `Subscribe(): Could not consumer.connect, Error: [${error}]`, errorCb);
+ });
+ }
+ });
+ __publicField(this, "Stop", async (errorCb) => {
+ if (__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).stop();
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Stop(): Error: [${error}]`, errorCb);
+ }
+ }
+ });
+ __publicField(this, "StartConsumingMessages", async (autoCommit, cb, errorCb) => {
+ if (__privateGet(this, _connected)) {
+ await __privateGet(this, _consumer).run({
+ autoCommit,
+ eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
+ try {
+ cb(topic, partition, message, heartbeat, pause);
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `StartConsumingMessages:eachMessage(): Error: [${error}]`, errorCb);
+ }
+ }
+ });
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError).call(this, `StartConsumingMessages(): Could not consumer.connect, Error: [${error}]`, errorCb);
+ });
+ }
+ });
+ __privateSet(this, _options, options);
+ __privateSet(this, _id, __privateGet(this, _options).id);
+ __privateSet(this, _groupId, __privateGet(this, _options).groupId);
+ __privateSet(this, _kafka, __privateGet(this, _options).kafka);
+ __privateSet(this, _consumer, __privateGet(this, _kafka).consumer({
+ groupId: __privateGet(this, _groupId),
+ retry: {
+ restartOnFailure: async (error) => {
+ __privateMethod(this, _KafkaConsumer_instances, LogErrorMessage_fn).call(this, chalk.magenta(`${process.pid}:KafkaConsumer:constructor:restartOnFailure(): Error: [${error}]`));
+ return true;
+ }
+ }
+ }));
+ }
+ get consumer() {
+ return __privateGet(this, _consumer);
+ }
+ get id() {
+ return __privateGet(this, _id);
+ }
+ async Connect(errorCb) {
+ if (!__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).connect();
+ __privateSet(this, _connected, true);
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Connect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ async Disconnect(errorCb) {
+ if (__privateGet(this, _connected)) {
+ try {
+ await __privateGet(this, _consumer).disconnect();
+ __privateSet(this, _connected, false);
+ } catch (error) {
+ __privateGet(this, _RaiseError).call(this, `Disconnect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ }
+ _id = new WeakMap();
+ _groupId = new WeakMap();
+ _consumer = new WeakMap();
+ _kafka = new WeakMap();
+ _connected = new WeakMap();
+ _options = new WeakMap();
+ _KafkaConsumer_instances = new WeakSet();
+ LogErrorMessage_fn = function(message) {
+ __privateGet(this, _options).logger.error(message);
+ };
+ _RaiseError = new WeakMap();
+ class KafkaProducer {
+ constructor(options) {
+ __privateAdd(this, _KafkaProducer_instances);
+ __privateAdd(this, _options2);
+ __privateAdd(this, _id2);
+ __privateAdd(this, _producer);
+ __privateAdd(this, _kafka2);
+ __privateAdd(this, _connected2, false);
+ __privateAdd(this, _RaiseError2, (msg, errorCb) => {
+ __privateMethod(this, _KafkaProducer_instances, LogErrorMessage_fn2).call(this, chalk.red(msg));
+ errorCb(msg);
+ });
+ __publicField(this, "SendMessage", async (topic, message, errorCb) => {
+ if (__privateGet(this, _connected2)) {
+ try {
+ return __privateGet(this, _producer).send({
+ topic,
+ messages: [message]
+ });
+ } catch (error) {
+ __privateMethod(this, _KafkaProducer_instances, LogErrorMessage_fn2).call(this, chalk.red(`${process.pid}:KafkaProducer:SendMessage(): Error: [${error}]`));
+ return [];
+ }
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:SendMessage(): Could not producer.connect, Error: [${error}]`, errorCb);
+ });
+ return [];
+ }
+ });
+ __publicField(this, "SendMessages", async (topic, messages, errorCb) => {
+ if (__privateGet(this, _connected2)) {
+ try {
+ return __privateGet(this, _producer).send({
+ topic,
+ messages
+ });
+ } catch (error) {
+ __privateMethod(this, _KafkaProducer_instances, LogErrorMessage_fn2).call(this, chalk.red(`${process.pid}:KafkaProducer:SendMessages(): Error: [${error}]`));
+ return [];
+ }
+ } else {
+ await this.Connect((error) => {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:SendMessages(): Could not producer.connect, Error: [${error}]`, errorCb);
+ });
+ return [];
+ }
+ });
+ __privateSet(this, _options2, options);
+ __privateSet(this, _id2, __privateGet(this, _options2).id);
+ __privateSet(this, _kafka2, __privateGet(this, _options2).kafka);
+ __privateSet(this, _producer, __privateGet(this, _kafka2).producer());
+ }
+ get producer() {
+ return __privateGet(this, _producer);
+ }
+ get id() {
+ return __privateGet(this, _id2);
+ }
+ async Connect(errorCb) {
+ if (!__privateGet(this, _connected2)) {
+ try {
+ await __privateGet(this, _producer).connect();
+ __privateSet(this, _connected2, true);
+ } catch (error) {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:Connect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ async Disconnect(errorCb) {
+ if (__privateGet(this, _connected2)) {
+ try {
+ await __privateGet(this, _producer).disconnect();
+ __privateSet(this, _connected2, false);
+ } catch (error) {
+ __privateGet(this, _RaiseError2).call(this, `${process.pid}:KafkaProducer:Disconnect(): Error: [${error}]`, errorCb);
+ }
+ }
+ }
+ }
+ _options2 = new WeakMap();
+ _id2 = new WeakMap();
+ _producer = new WeakMap();
+ _kafka2 = new WeakMap();
+ _connected2 = new WeakMap();
+ _KafkaProducer_instances = new WeakSet();
+ LogErrorMessage_fn2 = function(message) {
+ __privateGet(this, _options2).logger.error(message);
+ };
+ _RaiseError2 = new WeakMap();
+ class KafkaManager extends stsutils.STSOptionsBase {
+ constructor(options) {
+ super(options);
+ __privateAdd(this, _KafkaManager_instances);
+ __privateAdd(this, _kafka3);
+ __privateAdd(this, _RaiseError3, (msg, errorCb) => {
+ __privateMethod(this, _KafkaManager_instances, LogErrorMessage_fn3).call(this, chalk.red(msg));
+ errorCb(msg);
+ });
+ __publicField(this, "CreateTopic", async (topic, partitions, errorCb) => {
+ var _a;
+ try {
+ const admin = __privateGet(this, _kafka3).admin();
+ await admin.connect();
+ const result = await admin.createTopics({
+ validateOnly: false,
+ waitForLeaders: true,
+ timeout: (_a = this.options) == null ? void 0 : _a.timeout,
+ topics: [
+ {
+ topic,
+ numPartitions: partitions
+ // default: -1 (uses broker `num.partitions` configuration)
+ //replicationFactor: <Number>, // default: -1 (uses broker `default.replication.factor` configuration)
+ //replicaAssignment: <Array>, // Example: [{ partition: 0, replicas: [0,1,2] }] - default: []
+ //configEntries: <Array> // Example: [{ name: 'cleanup.policy', value: 'compact' }] - default: []
+ }
+ ]
+ });
+ await admin.disconnect();
+ return result;
+ } catch (error) {
+ __privateGet(this, _RaiseError3).call(this, `KafkaManager:CreateTopic(): Error: [${error}]`, errorCb);
+ return false;
+ }
+ });
+ const kc = {
+ clientId: options.clientId,
+ brokers: options.brokers,
+ //brokers: ['localhost:9092', 'kafka2:9092'],
+ connectionTimeout: options.connectionTimeout,
+ requestTimeout: options.requestTimeout
+ };
+ switch (options.logLevel) {
+ case "NOTHING":
+ kc.logLevel = kafkajs.logLevel.NOTHING;
+ break;
+ case "ERROR":
+ kc.logLevel = kafkajs.logLevel.ERROR;
+ break;
+ case "WARN":
+ kc.logLevel = kafkajs.logLevel.WARN;
+ break;
+ case "INFO":
+ kc.logLevel = kafkajs.logLevel.INFO;
+ break;
+ case "DEBUG":
+ kc.logLevel = kafkajs.logLevel.DEBUG;
+ break;
+ default:
+ kc.logLevel = kafkajs.logLevel.NOTHING;
+ }
+ if (options.useSSL && options.ssl) {
+ kc.ssl = {
+ ca: [net.readFileSync(options.ssl.cafile, { encoding: "utf8" })],
+ key: net.readFileSync(options.ssl.keyfile, { encoding: "utf8" }),
+ cert: net.readFileSync(options.ssl.certfileFile, { encoding: "utf8" })
+ };
+ }
+ if (options.keepAlive) {
+ const myCustomSocketFactory = (config) => {
+ const socket = config.ssl ? net.connect(
+ Object.assign({ host: config.host, port: config.port }, !net.isIP(config.host) ? { servername: config.host } : {}, config.ssl),
+ config.onConnect
+ ) : net.connect({ host: config.host, port: config.port }, config.onConnect);
+ socket.setKeepAlive(true, options.keepAlive);
+ return socket;
+ };
+ kc.socketFactory = myCustomSocketFactory;
+ }
+ __privateSet(this, _kafka3, new kafkajs.Kafka(kc));
+ }
+ get kafka() {
+ return __privateGet(this, _kafka3);
+ }
+ CreateProducer() {
+ var _a;
+ return new KafkaProducer({
+ kafka: __privateGet(this, _kafka3),
+ id: uuid.v4(),
+ logger: (_a = this.options) == null ? void 0 : _a.logger
+ });
+ }
+ CreateConsumer(groupId) {
+ var _a;
+ return new KafkaConsumer({
+ kafka: __privateGet(this, _kafka3),
+ id: uuid.v4(),
+ groupId,
+ logger: (_a = this.options) == null ? void 0 : _a.logger
+ });
+ }
+ }
+ _kafka3 = new WeakMap();
+ _KafkaManager_instances = new WeakSet();
+ LogErrorMessage_fn3 = function(message) {
+ var _a;
+ (_a = this.options) == null ? void 0 : _a.logger.error(message);
+ };
+ _RaiseError3 = new WeakMap();
+ exports2.KafkaConsumer = KafkaConsumer;
+ exports2.KafkaManager = KafkaManager;
+ exports2.KafkaProducer = KafkaProducer;
+ Object.defineProperty(exports2, Symbol.toStringTag, { value: "Module" });
+ });
+ //# sourceMappingURL=stskafka.umd.js.map
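Both bundles read TLS material from disk when `useSSL` is set and install a keep-alive socket factory when `keepAlive` (in ms) is given. Below is a hedged sketch of the expected option shape only; all paths, hosts, and timeout values are placeholders. Note that in the two bundles shown in this diff the Node `fs`/`net`/`tls` imports are replaced by an empty stub (`const net = {}`), so these code paths would fail at runtime as bundled; the sketch just illustrates the configuration surface described by the source's IKafkaManagerConfig interface.

```ts
// Option-shape sketch for the SSL / keep-alive branches (all values are
// placeholders; "certfileFile" is the option name used by the source).
import { KafkaManager } from "@nsshunt/stskafka";

const secureManager = new KafkaManager({
  clientId: "example-client",            // placeholder
  brokers: ["broker-1.internal:9093"],   // placeholder
  adminTimeout: 5000,
  connectionTimeout: 1000,
  requestTimeout: 30000,
  logLevel: "WARN",
  keepAlive: 60000,                      // keep-alive delay in ms (socket.setKeepAlive)
  useSSL: true,
  ssl: {
    rejectUnauthorized: true,
    cafile: "/path/to/ca.pem",           // placeholder path
    keyfile: "/path/to/client.key",      // placeholder path
    certfileFile: "/path/to/client.crt"  // placeholder path
  },
  logger: console as any,                // stand-in for an ISTSLogger
});
```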
@@ -0,0 +1 @@
+ {"version":3,"file":"stskafka.umd.js","sources":["../__vite-browser-external","../src/kafka/kafkaconsumer.ts","../src/kafka/kafkaproducer.ts","../src/kafka/kafkamanager.ts"],"sourcesContent":["export default {}","/* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF\nimport { Kafka, Consumer, IHeaders, KafkaMessage } from 'kafkajs'\nimport { IKafkaConsumer, ConsumeMessageCB, ConsumeMessageErrorCB } from './../commonTypes'\n\nimport chalk from 'chalk';\nimport { ISTSLogger } from '@nsshunt/stsutils';\n\nexport interface IKafkaConsumerOptions {\n kafka: Kafka\n id: string\n groupId: string\n logger: ISTSLogger\n}\n\nexport class KafkaConsumer implements IKafkaConsumer {\n #id: string\n #groupId: string\n #consumer: Consumer;\n #kafka: Kafka;\n #connected: boolean = false;\n #options: IKafkaConsumerOptions;\n\n constructor(options: IKafkaConsumerOptions) {\n this.#options = options;\n this.#id = this.#options.id;\n this.#groupId = this.#options.groupId;\n this.#kafka = this.#options.kafka;\n this.#consumer = this.#kafka.consumer({ \n groupId: this.#groupId,\n retry: {\n restartOnFailure: async (error: Error): Promise<boolean> => {\n this.#LogErrorMessage(chalk.magenta(`${process.pid}:KafkaConsumer:constructor:restartOnFailure(): Error: [${error}]`))\n return true;\n }\n }\n })\n }\n\n #LogErrorMessage(message: any) {\n this.#options.logger.error(message);\n }\n\n get consumer() {\n return this.#consumer;\n }\n \n get id(): string {\n return this.#id;\n }\n\n #RaiseError = (msg: string, errorCb: (error: any) => void) => {\n const errorMessage = `${process.pid}:KafkaConsumer:${msg}`;\n this.#LogErrorMessage(chalk.red(errorMessage));\n errorCb(errorMessage);\n }\n\n async Connect(errorCb: (error: any) => void): Promise<void> {\n if (!this.#connected) {\n try {\n await this.#consumer.connect()\n this.#connected = true;\n } catch (error) {\n this.#RaiseError(`Connect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n async Disconnect(errorCb: (error: any) => void): Promise<void> {\n if (this.#connected) {\n try {\n await this.#consumer.disconnect()\n this.#connected = false;\n } catch (error) {\n this.#RaiseError(`Disconnect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n Subscribe = async(topics: string[], fromBeginning: boolean, errorCb: (error: any) => void): Promise<void> => {\n if (this.#connected) {\n try {\n await this.#consumer.subscribe({ topics, fromBeginning })\n } catch (error) {\n this.#RaiseError(`Subscribe(): Error: [${error}]`, errorCb);\n }\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`Subscribe(): Could not consumer.connect, Error: [${error}]`, errorCb);\n });\n }\n }\n\n Stop = async(errorCb: (error: any) => void): Promise<void> => {\n if (this.#connected) {\n try {\n await this.#consumer.stop();\n } catch (error) {\n this.#RaiseError(`Stop(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n StartConsumingMessages = async (autoCommit: boolean, cb: ConsumeMessageCB, errorCb: ConsumeMessageErrorCB): Promise<void> => {\n if (this.#connected) {\n await this.#consumer.run({\n autoCommit, \n eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {\n try {\n cb(topic, partition, message, heartbeat, pause);\n /*\n if (message.key) {\n if (message.value) {\n cb(topic.toString(), message.key.toString(), partition, message.value.toString(), message.headers)\n } else {\n cb(topic.toString(), message.key.toString(), partition, \"\", message.headers)\n }\n } else {\n if (message.value) {\n cb(topic.toString(), 
\"\", partition, message.value?.toString(), message.headers)\n } else {\n cb(topic.toString(), \"\", partition, \"\", message.headers)\n }\n }\n */\n } catch (error) {\n this.#RaiseError(`StartConsumingMessages:eachMessage(): Error: [${error}]`, errorCb);\n }\n }\n })\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`StartConsumingMessages(): Could not consumer.connect, Error: [${error}]`, errorCb);\n });\n }\n }\n}\n","/* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF\nimport { Kafka, Producer, RecordMetadata } from 'kafkajs'\n\nimport chalk from 'chalk';\nimport { ISTSLogger } from '@nsshunt/stsutils';\n\nexport interface IKafkaProducerOptions {\n kafka: Kafka\n id: string\n logger: ISTSLogger\n}\n\nexport class KafkaProducer {\n #options: IKafkaProducerOptions;\n #id: string\n #producer: Producer;\n #kafka: Kafka;\n #connected: boolean = false;\n\n constructor(options: IKafkaProducerOptions) {\n this.#options = options;\n this.#id = this.#options.id;\n this.#kafka = this.#options.kafka;\n this.#producer = this.#kafka.producer()\n }\n\n #LogErrorMessage(message: any) {\n this.#options.logger.error(message);\n }\n\n #RaiseError = (msg: string, errorCb: (error: any) => void) => {\n this.#LogErrorMessage(chalk.red(msg));\n errorCb(msg);\n }\n\n get producer() {\n return this.#producer;\n }\n \n get id(): string {\n return this.#id;\n }\n\n async Connect(errorCb: (error: any) => void): Promise<void> {\n if (!this.#connected) {\n try {\n await this.#producer.connect();\n this.#connected = true;\n } catch (error) {\n this.#RaiseError(`${process.pid}:KafkaProducer:Connect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n async Disconnect(errorCb: (error: any) => void): Promise<void> {\n if (this.#connected) {\n try {\n await this.#producer.disconnect()\n this.#connected = false;\n } catch (error) {\n this.#RaiseError(`${process.pid}:KafkaProducer:Disconnect(): Error: [${error}]`, errorCb);\n }\n }\n }\n\n SendMessage = async(topic: string, message: { key: string, value: string}, errorCb: (error: any) => void): Promise<RecordMetadata[]> => {\n if (this.#connected) {\n try {\n return this.#producer.send({\n topic,\n messages: [ message ]\n })\n } catch (error) {\n this.#LogErrorMessage(chalk.red(`${process.pid}:KafkaProducer:SendMessage(): Error: [${error}]`));\n return [ ];\n }\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`${process.pid}:KafkaProducer:SendMessage(): Could not producer.connect, Error: [${error}]`, errorCb);\n });\n return [ ];\n }\n }\n\n SendMessages = async(topic: string, messages: { key: string, value: string}[], errorCb: (error: any) => void): Promise<RecordMetadata[]> => {\n if (this.#connected) {\n try {\n return this.#producer.send({\n topic,\n messages\n }) \n } catch (error) {\n this.#LogErrorMessage(chalk.red(`${process.pid}:KafkaProducer:SendMessages(): Error: [${error}]`));\n return [ ];\n }\n } else {\n await this.Connect((error) => {\n this.#RaiseError(`${process.pid}:KafkaProducer:SendMessages(): Could not producer.connect, Error: [${error}]`, errorCb);\n });\n return [ ];\n }\n }\n}\n","/* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF\n/*\n\nkafka example server #01 - Docker Compose File\n----------------------------------------------\nNote: In this example, the log retention is set to 24 hours (rather than default to 1 week)\nhttps://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/\n\nversion: '2'\nservices:\n 
zookeeper:\n image: wurstmeister/zookeeper\n ports:\n - \"2181:2181\"\n restart: unless-stopped\n\n kafka:\n image: wurstmeister/kafka\n ports:\n - \"9092:9092\"\n environment:\n DOCKER_API_VERSION: 1.22\n KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92\n KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181\n KAFKA_CREATE_TOPICS: \"topic-name2:3:1\"\n KAFKA_LOG_RETENTION_MS: 86400000\n KAFKA_LOG_RETENTION_BYTES: -1\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n restart: unless-stopped\n\n\nkafka example server #02 - Docker Compose File\n----------------------------------------------\nversion: \"3.9\" # optional since v1.27.0\n\nnetworks:\n app-tier:\n driver: bridge\n\nservices:\n kafka:\n image: 'bitnami/kafka:latest'\n ports:\n - '9092:9092'\n networks:\n - app-tier \n environment:\n - ALLOW_PLAINTEXT_LISTENER=yes\n - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true\n - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092\n\n*/\nimport { ISTSLogger, STSOptionsBase } from '@nsshunt/stsutils'\n\nimport { Kafka, KafkaConfig, logLevel } from 'kafkajs'\n\nimport { v4 as uuidv4 } from 'uuid';\nimport fs from 'node:fs'\n\nimport { KafkaConsumer } from './kafkaconsumer'\nimport { KafkaProducer } from './kafkaproducer'\n\nimport net from 'net'\nimport tls from 'tls'\n\nimport chalk from 'chalk';\n\nconst KEEP_ALIVE_DELAY = 60000 //@@ in ms\n\n// https://kafka.js.org/docs/configuration\nexport interface IKafkaManagerConfig {\n clientId: string // A logical identifier of an application. Can be used by brokers to apply quotas or trace requests to a specific application. Example: booking-events-processor.\n brokers: string[] // List of Kafka brokers\n adminTimeout: number // Time in milliseconds to wait for a successful admin operation. The default value is: 5000.\n connectionTimeout: number // Time in milliseconds to wait for a successful connection. The default value is: 1000.\n requestTimeout: number // Time in milliseconds to wait for a successful request. The default value is: 30000.\n logLevel: string // There are 5 log levels available: NOTHING, ERROR, WARN, INFO, and DEBUG. INFO is configured by default.\n keepAlive?: number // When specified, the number of ms for socket keep alive processing.\n useSSL: boolean // Use SSL\n ssl?: { // Must be specified if useSSL is true\n rejectUnauthorized: boolean\n cafile: string\n keyfile: string\n certfileFile: string\n }\n logger: ISTSLogger\n}\n\n// https://kafka.js.org/docs/configuration\n// https://github.com/tulios/kafkajs/blob/master/src/network/socketFactory.js\ndeclare interface ICustomSocketFactory {\n host: any,\n port: any,\n ssl: any,\n onConnect: any\n}\n\nexport class KafkaManager extends STSOptionsBase {\n #kafka: Kafka\n\n constructor(options: IKafkaManagerConfig) {\n super(options);\n\n const kc: KafkaConfig = {\n clientId: options.clientId,\n brokers: options.brokers, //brokers: ['localhost:9092', 'kafka2:9092'],\n connectionTimeout: options.connectionTimeout,\n requestTimeout: options.requestTimeout\n }\n // NOTHING, ERROR, WARN, INFO, and DEBUG. 
INFO is configured by default.\n switch (options.logLevel) {\n case 'NOTHING' :\n kc.logLevel = logLevel.NOTHING;\n break;\n case 'ERROR' :\n kc.logLevel = logLevel.ERROR;\n break;\n case 'WARN' :\n kc.logLevel = logLevel.WARN;\n break;\n case 'INFO' :\n kc.logLevel = logLevel.INFO;\n break;\n case 'DEBUG' :\n kc.logLevel = logLevel.DEBUG;\n break;\n default :\n kc.logLevel = logLevel.NOTHING;\n }\n if (options.useSSL && options.ssl) {\n kc.ssl = {\n ca: [fs.readFileSync(options.ssl.cafile as string, { encoding: 'utf8'})],\n key: fs.readFileSync(options.ssl.keyfile, { encoding: 'utf8'}),\n cert: fs.readFileSync(options.ssl.certfileFile, { encoding: 'utf8'}),\n }\n }\n if (options.keepAlive) {\n //const myCustomSocketFactory = ({ host, port, ssl, onConnect }) => {\n const myCustomSocketFactory = (config: ICustomSocketFactory) => {\n const socket = config.ssl\n ? tls.connect(\n Object.assign({ host: config.host, port: config.port }, !net.isIP(config.host) ? { servername: config.host } : {}, config.ssl),\n config.onConnect\n )\n : net.connect({ host: config.host, port: config.port }, config.onConnect)\n \n socket.setKeepAlive(true, options.keepAlive)\n return socket\n }\n kc.socketFactory = myCustomSocketFactory;\n }\n\n this.#kafka = new Kafka(kc);\n }\n\n #LogErrorMessage(message: any) {\n this.options?.logger.error(message);\n }\n\n #RaiseError = (msg: string, errorCb: (error: any) => void) => {\n this.#LogErrorMessage(chalk.red(msg));\n errorCb(msg);\n }\n\n get kafka() {\n return this.#kafka;\n }\n\n CreateProducer(): KafkaProducer {\n return new KafkaProducer({\n kafka: this.#kafka,\n id: uuidv4(),\n logger: this.options?.logger});\n }\n\n CreateConsumer(groupId: string) {\n return new KafkaConsumer({\n kafka: this.#kafka, \n id: uuidv4(), \n groupId,\n logger: this.options?.logger});\n }\n\n CreateTopic = async (topic: string, partitions: number, errorCb: (error: any) => void): Promise<boolean> => {\n try {\n const admin = this.#kafka.admin()\n await admin.connect()\n const result = await admin.createTopics({\n validateOnly: false,\n waitForLeaders: true,\n timeout: this.options?.timeout,\n topics: [\n {\n topic: topic,\n numPartitions: partitions, // default: -1 (uses broker `num.partitions` configuration)\n //replicationFactor: <Number>, // default: -1 (uses broker `default.replication.factor` configuration)\n //replicaAssignment: <Array>, // Example: [{ partition: 0, replicas: [0,1,2] }] - default: []\n //configEntries: <Array> // Example: [{ name: 'cleanup.policy', value: 'compact' }] - default: []\n } \n ]\n })\n await admin.disconnect()\n return result;\n } catch (error) {\n this.#RaiseError(`KafkaManager:CreateTopic(): Error: [${error}]`, errorCb);\n return false;\n }\n 
}\n}\n"],"names":["_options","_id","_kafka","_connected","_RaiseError","LogErrorMessage_fn","STSOptionsBase","logLevel","fs","tls","Kafka","uuidv4"],"mappings":";;;;;;;;;;;;;;;;AAAe,QAAA,MAAA,CAAA;AAAA,ECcR,MAAM,cAAwC;AAAA,IAQjD,YAAY,SAAgC;AARzC;AACH;AACA;AACA;AACA;AACA,qCAAsB;AACtB;AA8BA,sCAAc,CAAC,KAAa,YAAkC;AAC1D,cAAM,eAAe,GAAG,QAAQ,GAAG,kBAAkB,GAAG;AACxD,8BAAK,8CAAL,WAAsB,MAAM,IAAI,YAAY;AAC5C,gBAAQ,YAAY;AAAA,MAAA;AAyBxB,uCAAY,OAAM,QAAkB,eAAwB,YAAiD;AACzG,YAAI,mBAAK,aAAY;AACb,cAAA;AACA,kBAAM,mBAAK,WAAU,UAAU,EAAE,QAAQ,eAAe;AAAA,mBACnD,OAAO;AACZ,+BAAK,aAAL,WAAiB,wBAAwB,KAAK,KAAK;AAAA,UACvD;AAAA,QAAA,OACG;AACG,gBAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,+BAAK,aAAL,WAAiB,oDAAoD,KAAK,KAAK;AAAA,UAAO,CACzF;AAAA,QACL;AAAA,MAAA;AAGJ,kCAAO,OAAM,YAAiD;AAC1D,YAAI,mBAAK,aAAY;AACb,cAAA;AACM,kBAAA,mBAAK,WAAU;mBAChB,OAAO;AACZ,+BAAK,aAAL,WAAiB,mBAAmB,KAAK,KAAK;AAAA,UAClD;AAAA,QACJ;AAAA,MAAA;AAGJ,oDAAyB,OAAO,YAAqB,IAAsB,YAAkD;AACzH,YAAI,mBAAK,aAAY;AACX,gBAAA,mBAAK,WAAU,IAAI;AAAA,YACrB;AAAA,YACA,aAAa,OAAO,EAAE,OAAO,WAAW,SAAS,WAAW,YAAY;AAChE,kBAAA;AACA,mBAAG,OAAO,WAAW,SAAS,WAAW,KAAK;AAAA,uBAgBzC,OAAO;AACZ,mCAAK,aAAL,WAAiB,iDAAiD,KAAK,KAAK;AAAA,cAChF;AAAA,YACJ;AAAA,UAAA,CACH;AAAA,QAAA,OACE;AACG,gBAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,+BAAK,aAAL,WAAiB,iEAAiE,KAAK,KAAK;AAAA,UAAO,CACtG;AAAA,QACL;AAAA,MAAA;AA9GA,yBAAK,UAAW;AACX,yBAAA,KAAM,mBAAK,UAAS;AACpB,yBAAA,UAAW,mBAAK,UAAS;AACzB,yBAAA,QAAS,mBAAK,UAAS;AACvB,yBAAA,WAAY,mBAAK,QAAO,SAAS;AAAA,QAClC,SAAS,mBAAK;AAAA,QACd,OAAO;AAAA,UACH,kBAAkB,OAAO,UAAmC;AACnD,kCAAA,8CAAA,WAAiB,MAAM,QAAQ,GAAG,QAAQ,GAAG,0DAA0D,KAAK,GAAG;AAC7G,mBAAA;AAAA,UACX;AAAA,QACJ;AAAA,MAAA,CACH;AAAA,IACL;AAAA,IAMA,IAAI,WAAW;AACX,aAAO,mBAAK;AAAA,IAChB;AAAA,IAEA,IAAI,KAAa;AACb,aAAO,mBAAK;AAAA,IAChB;AAAA,IAQA,MAAM,QAAQ,SAA8C;AACpD,UAAA,CAAC,mBAAK,aAAY;AACd,YAAA;AACM,gBAAA,mBAAK,WAAU;AACrB,6BAAK,YAAa;AAAA,iBACb,OAAO;AACZ,6BAAK,aAAL,WAAiB,sBAAsB,KAAK,KAAK;AAAA,QACrD;AAAA,MACJ;AAAA,IACJ;AAAA,IAEA,MAAM,WAAW,SAA8C;AAC3D,UAAI,mBAAK,aAAY;AACb,YAAA;AACM,gBAAA,mBAAK,WAAU;AACrB,6BAAK,YAAa;AAAA,iBACb,OAAO;AACZ,6BAAK,aAAL,WAAiB,yBAAyB,KAAK,KAAK;AAAA,QACxD;AAAA,MACJ;AAAA,IACJ;AAAA,EA2DJ;AAxHI;AACA;AACA;AACA;AACA;AACA;AANG;AAwBH,gCAAiB,SAAc;AACtB,uBAAA,UAAS,OAAO,MAAM,OAAO;AAAA,EACtC;AAUA;AAAA,ECtCG,MAAM,cAAc;AAAA,IAOvB,YAAY,SAAgC;AAPzC;AACH,yBAAAA;AACA,yBAAAC;AACA;AACA,yBAAAC;AACA,yBAAAC,aAAsB;AAatB,yBAAAC,cAAc,CAAC,KAAa,YAAkC;AAC1D,8BAAK,0BAAAC,qBAAL,WAAsB,MAAM,IAAI,GAAG;AACnC,gBAAQ,GAAG;AAAA,MAAA;AAiCf,yCAAc,OAAM,OAAe,SAAwC,YAA6D;AACpI,YAAI,mBAAKF,cAAY;AACb,cAAA;AACO,mBAAA,mBAAK,WAAU,KAAK;AAAA,cACvB;AAAA,cACA,UAAU,CAAE,OAAQ;AAAA,YAAA,CACvB;AAAA,mBACI,OAAO;AACP,kCAAA,0BAAAE,qBAAA,WAAiB,MAAM,IAAI,GAAG,QAAQ,GAAG,yCAAyC,KAAK,GAAG;AAC/F,mBAAO;UACX;AAAA,QAAA,OACG;AACG,gBAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,+BAAKD,cAAL,WAAiB,GAAG,QAAQ,GAAG,qEAAqE,KAAK,KAAK;AAAA,UAAO,CACxH;AACD,iBAAO;QACX;AAAA,MAAA;AAGJ,0CAAe,OAAM,OAAe,UAA2C,YAA6D;AACxI,YAAI,mBAAKD,cAAY;AACb,cAAA;AACO,mBAAA,mBAAK,WAAU,KAAK;AAAA,cACvB;AAAA,cACA;AAAA,YAAA,CACH;AAAA,mBACI,OAAO;AACP,kCAAA,0BAAAE,qBAAA,WAAiB,MAAM,IAAI,GAAG,QAAQ,GAAG,0CAA0C,KAAK,GAAG;AAChG,mBAAO;UACX;AAAA,QAAA,OACG;AACG,gBAAA,KAAK,QAAQ,CAAC,UAAU;AAC1B,+BAAKD,cAAL,WAAiB,GAAG,QAAQ,GAAG,sEAAsE,KAAK,KAAK;AAAA,UAAO,CACzH;AACD,iBAAO;QACX;AAAA,MAAA;AAhFA,yBAAKJ,WAAW;AACX,yBAAAC,MAAM,mBAAKD,WAAS;AACpB,yBAAAE,SAAS,mBAAKF,WAAS;AACvB,yBAAA,WAAY,mBAAKE,SAAO,SAAS;AAAA,IAC1C;AAAA,IAWA,IAAI,WAAW;AACX,aAAO,mBAAK;AAAA,IAChB;AAAA,IAEA,IAAI,KAAa;AACb,aAAO,mBAAKD;AAAA,IAChB;AAAA,IAEA,MAAM,QAAQ,SAA8C;AACpD,UAAA,CAAC,mBAAKE,cAAY;AACd,YAAA;AACM,gBAAA,mBAAK,WAAU;AACrB,6BAAKA,aAAa;AAAA,iBACb,OAAO;AACZ,6BAAKC,cAAL,WAAiB,GAAG,QAAQ,GAAG,qCAAqC,
KAAK,KAAK;AAAA,QAClF;AAAA,MACJ;AAAA,IACJ;AAAA,IAEA,MAAM,WAAW,SAA8C;AAC3D,UAAI,mBAAKD,cAAY;AACb,YAAA;AACM,gBAAA,mBAAK,WAAU;AACrB,6BAAKA,aAAa;AAAA,iBACb,OAAO;AACZ,6BAAKC,cAAL,WAAiB,GAAG,QAAQ,GAAG,wCAAwC,KAAK,KAAK;AAAA,QACrF;AAAA,MACJ;AAAA,IACJ;AAAA,EAuCJ;AAzFI,EAAAJ,YAAA;AACA,EAAAC,OAAA;AACA;AACA,EAAAC,UAAA;AACA,EAAAC,cAAA;AALG;AAcH,EAAAE,+BAAiB,SAAc;AACtB,uBAAAL,WAAS,OAAO,MAAM,OAAO;AAAA,EACtC;AAEA,EAAAI,eAAA;AAAA,ECoEG,MAAM,qBAAqBE,SAAAA,eAAe;AAAA,IAG7C,YAAY,SAA8B;AACtC,YAAM,OAAO;AAJd;AACH,yBAAAJ;AA6DA,yBAAAE,cAAc,CAAC,KAAa,YAAkC;AAC1D,8BAAK,yBAAAC,qBAAL,WAAsB,MAAM,IAAI,GAAG;AACnC,gBAAQ,GAAG;AAAA,MAAA;AAsBf,yCAAc,OAAO,OAAe,YAAoB,YAAoD;;AACpG,YAAA;AACM,gBAAA,QAAQ,mBAAKH,SAAO,MAAM;AAChC,gBAAM,MAAM;AACN,gBAAA,SAAS,MAAM,MAAM,aAAa;AAAA,YACpC,cAAc;AAAA,YACd,gBAAgB;AAAA,YAChB,UAAS,UAAK,YAAL,mBAAc;AAAA,YACvB,QAAQ;AAAA,cACJ;AAAA,gBACI;AAAA,gBACA,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA,cAInB;AAAA,YACJ;AAAA,UAAA,CACH;AACD,gBAAM,MAAM;AACL,iBAAA;AAAA,iBACF,OAAO;AACZ,6BAAKE,cAAL,WAAiB,uCAAuC,KAAK,KAAK;AAC3D,iBAAA;AAAA,QACX;AAAA,MAAA;AAvGA,YAAM,KAAkB;AAAA,QACpB,UAAU,QAAQ;AAAA,QAClB,SAAS,QAAQ;AAAA;AAAA,QACjB,mBAAmB,QAAQ;AAAA,QAC3B,gBAAgB,QAAQ;AAAA,MAAA;AAG5B,cAAQ,QAAQ,UAAU;AAAA,QAC1B,KAAK;AACD,aAAG,WAAWG,QAAS,SAAA;AACvB;AAAA,QACJ,KAAK;AACD,aAAG,WAAWA,QAAS,SAAA;AACvB;AAAA,QACJ,KAAK;AACD,aAAG,WAAWA,QAAS,SAAA;AACvB;AAAA,QACJ,KAAK;AACD,aAAG,WAAWA,QAAS,SAAA;AACvB;AAAA,QACJ,KAAK;AACD,aAAG,WAAWA,QAAS,SAAA;AACvB;AAAA,QACJ;AACI,aAAG,WAAWA,QAAS,SAAA;AAAA,MAC3B;AACI,UAAA,QAAQ,UAAU,QAAQ,KAAK;AAC/B,WAAG,MAAM;AAAA,UACL,IAAI,CAACC,IAAG,aAAa,QAAQ,IAAI,QAAkB,EAAE,UAAU,OAAM,CAAC,CAAC;AAAA,UACvE,KAAKA,IAAG,aAAa,QAAQ,IAAI,SAAS,EAAE,UAAU,QAAO;AAAA,UAC7D,MAAMA,IAAG,aAAa,QAAQ,IAAI,cAAc,EAAE,UAAU,QAAO;AAAA,QAAA;AAAA,MAE3E;AACA,UAAI,QAAQ,WAAW;AAEb,cAAA,wBAAwB,CAAC,WAAiC;AACtD,gBAAA,SAAS,OAAO,MAChBC,IAAI;AAAA,YACF,OAAO,OAAO,EAAE,MAAM,OAAO,MAAM,MAAM,OAAO,KAAK,GAAG,CAAC,IAAI,KAAK,OAAO,IAAI,IAAI,EAAE,YAAY,OAAO,SAAS,CAAA,GAAI,OAAO,GAAG;AAAA,YAC7H,OAAO;AAAA,UAET,IAAA,IAAI,QAAQ,EAAE,MAAM,OAAO,MAAM,MAAM,OAAO,KAAA,GAAQ,OAAO,SAAS;AAErE,iBAAA,aAAa,MAAM,QAAQ,SAAS;AACpC,iBAAA;AAAA,QAAA;AAEX,WAAG,gBAAgB;AAAA,MACvB;AAEK,yBAAAP,SAAS,IAAIQ,QAAA,MAAM,EAAE;AAAA,IAC9B;AAAA,IAWA,IAAI,QAAQ;AACR,aAAO,mBAAKR;AAAA,IAChB;AAAA,IAEA,iBAAgC;;AAC5B,aAAO,IAAI,cAAc;AAAA,QACrB,OAAO,mBAAKA;AAAA,QACZ,IAAIS,KAAAA,GAAO;AAAA,QACX,SAAQ,UAAK,YAAL,mBAAc;AAAA,MAAA,CAAO;AAAA,IACrC;AAAA,IAEA,eAAe,SAAiB;;AAC5B,aAAO,IAAI,cAAc;AAAA,QACrB,OAAO,mBAAKT;AAAA,QACZ,IAAIS,KAAAA,GAAO;AAAA,QACX;AAAA,QACA,SAAQ,UAAK,YAAL,mBAAc;AAAA,MAAA,CAAO;AAAA,IACrC;AAAA,EA2BJ;AA9GI,EAAAT,UAAA;AADG;AA0DH,EAAAG,+BAAiB,SAAc;;AACtB,eAAA,YAAA,mBAAS,OAAO,MAAM;AAAA,EAC/B;AAEA,EAAAD,eAAA;;;;;;"}
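The `kafkamanager.ts` source embedded in the map above maps the string `logLevel` option onto the kafkajs `logLevel` enum, reads the CA/key/cert PEM files when `useSSL` is set, and installs a custom `socketFactory` whenever `keepAlive` is provided. As a minimal sketch of that last piece in isolation (not the package's own export): `KEEP_ALIVE_MS` and the broker address are illustrative values, and the inline parameter type stands in for whatever socket-factory type your kafkajs version exposes.

```ts
import net from 'net'
import tls from 'tls'
import { Kafka } from 'kafkajs'

const KEEP_ALIVE_MS = 60000 // illustrative; the bundle defines KEEP_ALIVE_DELAY = 60000 ms

// kafkajs calls the socket factory with { host, port, ssl, onConnect } and expects a
// connected net/tls socket back; setting keep-alive here mirrors what KafkaManager
// does when options.keepAlive is supplied.
const socketFactory = ({ host, port, ssl, onConnect }: { host: string; port: number; ssl: any; onConnect: () => void }) => {
  const socket = ssl
    ? tls.connect({ host, port, ...(!net.isIP(host) ? { servername: host } : {}), ...ssl }, onConnect)
    : net.connect({ host, port }, onConnect)
  socket.setKeepAlive(true, KEEP_ALIVE_MS)
  return socket
}

const kafka = new Kafka({
  clientId: 'my-app',
  brokers: ['localhost:9092'],
  socketFactory,
})
```

The TLS branch follows the embedded source, which sets `servername` only when the host is not an IP literal so that SNI still works against hostname-based certificates.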
package/package.json ADDED
@@ -0,0 +1,65 @@
1
+ {
2
+ "name": "@nsshunt/stskafka",
3
+ "version": "1.0.3",
4
+ "description": "STS Kafka",
5
+ "main": "./dist/stskafka.umd.js",
6
+ "module": "./dist/stskafka.mjs",
7
+ "type": "commonjs",
8
+ "types": "./types/index.d.ts",
9
+ "exports": {
10
+ ".": {
11
+ "import": "./dist/stskafka.mjs",
12
+ "require": "./dist/stskafka.umd.js"
13
+ }
14
+ },
15
+ "files": [
16
+ "dist",
17
+ "types",
18
+ "LICENSE",
19
+ "README.md"
20
+ ],
21
+ "scripts": {
22
+ "lint": "eslint .",
23
+ "lintex": "eslint . --fix",
24
+ "test": "DEBUG=testcontainers:containers vitest run --reporter verbose --pool forks",
25
+ "test2": "jest --detectOpenHandles --no-cache",
26
+ "testwatch": "jest --watchAll --detectOpenHandles --no-cache",
27
+ "build": "tsc && vite build",
28
+ "build2": "tsc"
29
+ },
30
+ "repository": {
31
+ "type": "git",
32
+ "url": "git+https://github.com/nsshunt/stskafka.git"
33
+ },
34
+ "author": "STS",
35
+ "license": "MIT",
36
+ "bugs": {
37
+ "url": "https://github.com/nsshunt/stskafka/issues"
38
+ },
39
+ "homepage": "https://github.com/nsshunt/stskafka#readme",
40
+ "devDependencies": {
41
+ "@babel/preset-env": "^7.26.0",
42
+ "@babel/preset-typescript": "^7.26.0",
43
+ "@eslint/eslintrc": "^3.1.0",
44
+ "@eslint/js": "^9.13.0",
45
+ "@testcontainers/kafka": "^10.13.2",
46
+ "@tsconfig/node20": "^20.1.4",
47
+ "@types/uuid": "^10.0.0",
48
+ "@typescript-eslint/eslint-plugin": "^8.12.2",
49
+ "@typescript-eslint/parser": "^8.12.2",
50
+ "eslint": "^9.13.0",
51
+ "globals": "^15.10.0",
52
+ "jest": "^29.7.0",
53
+ "testcontainers": "^10.13.2",
54
+ "typescript": "^5.6.3",
55
+ "vite": "^5.4.10",
56
+ "vitest": "^2.1.4"
57
+ },
58
+ "dependencies": {
59
+ "@nsshunt/stsutils": "^1.16.91",
60
+ "chalk": "^4.1.2",
61
+ "kafkajs": "^2.2.4",
62
+ "uuid": "^10.0.0",
63
+ "winston": "^3.15.0"
64
+ }
65
+ }
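The `exports` map above publishes a dual build: the `import` condition resolves to `./dist/stskafka.mjs`, the `require` condition to `./dist/stskafka.umd.js`, and type information comes from `./types/index.d.ts`. A minimal sketch of consuming the package from either module system (assuming it is installed from the registry):

```ts
// In an ES module, the "import" condition resolves to ./dist/stskafka.mjs:
import { KafkaManager } from '@nsshunt/stskafka'

// In a CommonJS module, the "require" condition resolves to ./dist/stskafka.umd.js:
// const { KafkaManager } = require('@nsshunt/stskafka')
```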
@@ -0,0 +1,13 @@
1
+ import { Consumer, KafkaMessage } from 'kafkajs';
2
+ export type ConsumeMessageCB = (topic: string, partition: number, message: KafkaMessage, heartbeat: () => Promise<void>, pause: () => () => void) => void;
3
+ export type ConsumeMessageErrorCB = (error: any) => void;
4
+ export interface IKafkaConsumer {
5
+ get consumer(): Consumer;
6
+ get id(): string;
7
+ Connect(errorCb: (error: any) => void): Promise<void>;
8
+ Disconnect(errorCb: (error: any) => void): Promise<void>;
9
+ Subscribe: (topics: string[], fromBeginning: boolean, errorCb: (error: any) => void) => Promise<void>;
10
+ Stop: (errorCb: (error: any) => void) => Promise<void>;
11
+ StartConsumingMessages: (autoCommit: boolean, cb: ConsumeMessageCB, errorCb: ConsumeMessageErrorCB) => Promise<void>;
12
+ }
13
+ //# sourceMappingURL=commonTypes.d.ts.map
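`ConsumeMessageCB` above receives the topic, partition, and raw kafkajs `KafkaMessage`, plus the `heartbeat` and `pause` helpers that kafkajs passes to `eachMessage`. A minimal handler matching that declared signature might look as follows; the logging is illustrative only.

```ts
import { ConsumeMessageCB } from '@nsshunt/stskafka'

// Matches the declared shape: (topic, partition, message, heartbeat, pause) => void
const onMessage: ConsumeMessageCB = (topic, partition, message, heartbeat, _pause) => {
  // key and value arrive as kafkajs Buffers (or null)
  console.log(`${topic}[${partition}] ${message.key?.toString()} => ${message.value?.toString()}`)

  // heartbeat() can be invoked during long processing to keep the group session alive;
  // the callback itself returns void, so the promise is deliberately not awaited here.
  void heartbeat()
}
```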
@@ -0,0 +1 @@
1
+ {"version":3,"file":"commonTypes.d.ts","sourceRoot":"","sources":["../src/commonTypes.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAEhD,MAAM,MAAM,gBAAgB,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,SAAS,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,MAAM,MAAM,IAAI,KAAK,IAAI,CAAC;AAC1J,MAAM,MAAM,qBAAqB,GAAG,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,CAAC;AAEzD,MAAM,WAAW,cAAc;IAC3B,IAAI,QAAQ,IAAI,QAAQ,CAAA;IACxB,IAAI,EAAE,IAAI,MAAM,CAAA;IAChB,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACrD,UAAU,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACxD,SAAS,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,EAAE,aAAa,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IACrG,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IACtD,sBAAsB,EAAE,CAAC,UAAU,EAAE,OAAO,EAAE,EAAE,EAAE,gBAAgB,EAAE,OAAO,EAAE,qBAAqB,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;CACvH"}
@@ -0,0 +1,5 @@
1
+ export * from './commonTypes';
2
+ export * from './kafka/kafkamanager';
3
+ export * from './kafka/kafkaproducer';
4
+ export * from './kafka/kafkaconsumer';
5
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAA;AAC7B,cAAc,sBAAsB,CAAA;AACpC,cAAc,uBAAuB,CAAA;AACrC,cAAc,uBAAuB,CAAA"}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=index.test.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.test.d.ts","sourceRoot":"","sources":["../src/index.test.ts"],"names":[],"mappings":""}
@@ -0,0 +1,21 @@
1
+ import { Kafka, Consumer } from 'kafkajs';
2
+ import { IKafkaConsumer, ConsumeMessageCB, ConsumeMessageErrorCB } from './../commonTypes';
3
+ import { ISTSLogger } from '@nsshunt/stsutils';
4
+ export interface IKafkaConsumerOptions {
5
+ kafka: Kafka;
6
+ id: string;
7
+ groupId: string;
8
+ logger: ISTSLogger;
9
+ }
10
+ export declare class KafkaConsumer implements IKafkaConsumer {
11
+ #private;
12
+ constructor(options: IKafkaConsumerOptions);
13
+ get consumer(): Consumer;
14
+ get id(): string;
15
+ Connect(errorCb: (error: any) => void): Promise<void>;
16
+ Disconnect(errorCb: (error: any) => void): Promise<void>;
17
+ Subscribe: (topics: string[], fromBeginning: boolean, errorCb: (error: any) => void) => Promise<void>;
18
+ Stop: (errorCb: (error: any) => void) => Promise<void>;
19
+ StartConsumingMessages: (autoCommit: boolean, cb: ConsumeMessageCB, errorCb: ConsumeMessageErrorCB) => Promise<void>;
20
+ }
21
+ //# sourceMappingURL=kafkaconsumer.d.ts.map
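The consumer declared above wraps the kafkajs `Consumer` in a callback style: each method takes an `errorCb` that is invoked (and logged) instead of an exception being thrown. A lifecycle sketch, assuming a `manager` built from `KafkaManager` (declared further below), the `onMessage` handler from the previous sketch, and the test topic/group names from `config.d.ts`:

```ts
const consumer = manager.CreateConsumer('my-group')

await consumer.Connect((err) => console.error('consumer connect failed:', err))
await consumer.Subscribe(['appframework-test-logs'], /* fromBeginning */ true, (err) => console.error(err))

// The boolean is the declaration's autoCommit flag, presumably forwarded to kafkajs consumer.run()
await consumer.StartConsumingMessages(true, onMessage, (err) => console.error(err))

// ...later, on shutdown:
await consumer.Stop((err) => console.error(err))
await consumer.Disconnect((err) => console.error(err))
```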
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kafkaconsumer.d.ts","sourceRoot":"","sources":["../../src/kafka/kafkaconsumer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,QAAQ,EAA0B,MAAM,SAAS,CAAA;AACjE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAA;AAG1F,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAE/C,MAAM,WAAW,qBAAqB;IAClC,KAAK,EAAE,KAAK,CAAA;IACZ,EAAE,EAAE,MAAM,CAAA;IACV,OAAO,EAAE,MAAM,CAAA;IACf,MAAM,EAAE,UAAU,CAAA;CACrB;AAED,qBAAa,aAAc,YAAW,cAAc;;gBAQpC,OAAO,EAAE,qBAAqB;IAoB1C,IAAI,QAAQ,aAEX;IAED,IAAI,EAAE,IAAI,MAAM,CAEf;IAQK,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAWrD,UAAU,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAW9D,SAAS,WAAiB,MAAM,EAAE,iBAAiB,OAAO,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC,CAYxG;IAED,IAAI,YAAkB,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC,CAQzD;IAED,sBAAsB,eAAsB,OAAO,MAAM,gBAAgB,WAAW,qBAAqB,KAAG,OAAO,CAAC,IAAI,CAAC,CAgCxH;CACJ"}
@@ -0,0 +1,30 @@
1
+ import { ISTSLogger, STSOptionsBase } from '@nsshunt/stsutils';
2
+ import { Kafka } from 'kafkajs';
3
+ import { KafkaConsumer } from './kafkaconsumer';
4
+ import { KafkaProducer } from './kafkaproducer';
5
+ export interface IKafkaManagerConfig {
6
+ clientId: string;
7
+ brokers: string[];
8
+ adminTimeout: number;
9
+ connectionTimeout: number;
10
+ requestTimeout: number;
11
+ logLevel: string;
12
+ keepAlive?: number;
13
+ useSSL: boolean;
14
+ ssl?: {
15
+ rejectUnauthorized: boolean;
16
+ cafile: string;
17
+ keyfile: string;
18
+ certfileFile: string;
19
+ };
20
+ logger: ISTSLogger;
21
+ }
22
+ export declare class KafkaManager extends STSOptionsBase {
23
+ #private;
24
+ constructor(options: IKafkaManagerConfig);
25
+ get kafka(): Kafka;
26
+ CreateProducer(): KafkaProducer;
27
+ CreateConsumer(groupId: string): KafkaConsumer;
28
+ CreateTopic: (topic: string, partitions: number, errorCb: (error: any) => void) => Promise<boolean>;
29
+ }
30
+ //# sourceMappingURL=kafkamanager.d.ts.map
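`IKafkaManagerConfig` above mirrors the kafkajs client options plus the package-specific `adminTimeout`, `useSSL`/`ssl` file paths, and optional `keepAlive`. A minimal construction sketch; the broker address, timeouts, and logger are placeholders, and `logger` must satisfy `ISTSLogger` from `@nsshunt/stsutils`.

```ts
import { ISTSLogger } from '@nsshunt/stsutils'
import { KafkaManager } from '@nsshunt/stskafka'

declare const logger: ISTSLogger // supplied by the host application

const manager = new KafkaManager({
  clientId: 'my-app',
  brokers: ['localhost:9092'],
  adminTimeout: 5000,
  connectionTimeout: 1000,
  requestTimeout: 30000,
  logLevel: 'ERROR', // one of NOTHING | ERROR | WARN | INFO | DEBUG
  useSSL: false,     // when true, ssl.cafile / keyfile / certfileFile must point at PEM files
  logger,
})

// CreateTopic connects an admin client, creates the topic with the given
// partition count, and resolves false if the operation fails.
const created = await manager.CreateTopic('appframework-test-logs', 3, (err) => console.error(err))

const producer = manager.CreateProducer()
const consumer = manager.CreateConsumer('my-group')
```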
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kafkamanager.d.ts","sourceRoot":"","sources":["../../src/kafka/kafkamanager.ts"],"names":[],"mappings":"AAqDA,OAAO,EAAE,UAAU,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAA;AAE9D,OAAO,EAAE,KAAK,EAAyB,MAAM,SAAS,CAAA;AAKtD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC/C,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAU/C,MAAM,WAAW,mBAAmB;IAChC,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,YAAY,EAAE,MAAM,CAAA;IACpB,iBAAiB,EAAE,MAAM,CAAA;IACzB,cAAc,EAAE,MAAM,CAAA;IACtB,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,CAAC,EAAE;QACJ,kBAAkB,EAAE,OAAO,CAAA;QAC3B,MAAM,EAAE,MAAM,CAAA;QACd,OAAO,EAAE,MAAM,CAAA;QACf,YAAY,EAAE,MAAM,CAAA;KACrB,CAAA;IACD,MAAM,EAAE,UAAU,CAAA;CACrB;AAWD,qBAAa,YAAa,SAAQ,cAAc;;gBAGhC,OAAO,EAAE,mBAAmB;IAgExC,IAAI,KAAK,UAER;IAED,cAAc,IAAI,aAAa;IAO/B,cAAc,CAAC,OAAO,EAAE,MAAM;IAQ9B,WAAW,UAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,OAAO,CAAC,CAwBvG;CACJ"}
@@ -0,0 +1,24 @@
1
+ import { Kafka, Producer, RecordMetadata } from 'kafkajs';
2
+ import { ISTSLogger } from '@nsshunt/stsutils';
3
+ export interface IKafkaProducerOptions {
4
+ kafka: Kafka;
5
+ id: string;
6
+ logger: ISTSLogger;
7
+ }
8
+ export declare class KafkaProducer {
9
+ #private;
10
+ constructor(options: IKafkaProducerOptions);
11
+ get producer(): Producer;
12
+ get id(): string;
13
+ Connect(errorCb: (error: any) => void): Promise<void>;
14
+ Disconnect(errorCb: (error: any) => void): Promise<void>;
15
+ SendMessage: (topic: string, message: {
16
+ key: string;
17
+ value: string;
18
+ }, errorCb: (error: any) => void) => Promise<RecordMetadata[]>;
19
+ SendMessages: (topic: string, messages: {
20
+ key: string;
21
+ value: string;
22
+ }[], errorCb: (error: any) => void) => Promise<RecordMetadata[]>;
23
+ }
24
+ //# sourceMappingURL=kafkaproducer.d.ts.map
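The producer follows the same callback convention: `SendMessage` and `SendMessages` resolve to kafkajs `RecordMetadata[]` on success and, per the bundled source, to an empty array when the producer is not yet connected or the send fails. A short usage sketch, reusing the `producer` created in the `KafkaManager` sketch above; topic name and payloads are illustrative.

```ts
await producer.Connect((err) => console.error('producer connect failed:', err))

const meta = await producer.SendMessage(
  'appframework-test-logs',
  { key: 'order-42', value: JSON.stringify({ status: 'created' }) },
  (err) => console.error(err),
)

await producer.SendMessages(
  'appframework-test-logs',
  [
    { key: 'order-42', value: 'created' },
    { key: 'order-43', value: 'created' },
  ],
  (err) => console.error(err),
)

await producer.Disconnect((err) => console.error(err))
```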
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kafkaproducer.d.ts","sourceRoot":"","sources":["../../src/kafka/kafkaproducer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAGzD,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAE/C,MAAM,WAAW,qBAAqB;IAClC,KAAK,EAAE,KAAK,CAAA;IACZ,EAAE,EAAE,MAAM,CAAA;IACV,MAAM,EAAE,UAAU,CAAA;CACrB;AAED,qBAAa,aAAa;;gBAOV,OAAO,EAAE,qBAAqB;IAgB1C,IAAI,QAAQ,aAEX;IAED,IAAI,EAAE,IAAI,MAAM,CAEf;IAEK,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAWrD,UAAU,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAW9D,WAAW,UAAgB,MAAM,WAAW;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAC,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,cAAc,EAAE,CAAC,CAiBnI;IAED,YAAY,UAAgB,MAAM,YAAY;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAC,EAAE,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,cAAc,EAAE,CAAC,CAiBvI;CACJ"}
@@ -0,0 +1,7 @@
1
+ export declare const TOPIC = "appframework-test-logs";
2
+ export declare const PARTITIONS = 3;
3
+ export declare const TIMEOUT = 5000;
4
+ export declare const GROUP_ID = "my-group";
5
+ export declare const CLIENT_ID = "my-app";
6
+ export declare const BROKERS: string[];
7
+ //# sourceMappingURL=config.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/config.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,KAAK,2BAA2B,CAAC;AAC9C,eAAO,MAAM,UAAU,IAAI,CAAC;AAC5B,eAAO,MAAM,OAAO,OAAO,CAAC;AAE5B,eAAO,MAAM,QAAQ,aAAa,CAAC;AAEnC,eAAO,MAAM,SAAS,WAAW,CAAC;AAElC,eAAO,MAAM,OAAO,UAAyB,CAAC"}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=consume.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"consume.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/consume.ts"],"names":[],"mappings":""}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=produce.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"produce.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/produce.ts"],"names":[],"mappings":""}