@nsshunt/stsappframework 3.1.230 → 3.1.231

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/dist/commonTypes.js +31 -0
  2. package/dist/commonTypes.js.map +1 -1
  3. package/dist/index.js +0 -3
  4. package/dist/index.js.map +1 -1
  5. package/package.json +1 -2
  6. package/src/commonTypes.ts +2 -1
  7. package/src/index.ts +0 -3
  8. package/types/commonTypes.d.ts +0 -12
  9. package/types/commonTypes.d.ts.map +1 -1
  10. package/types/index.d.ts +0 -3
  11. package/types/index.d.ts.map +1 -1
  12. package/dist/kafka/kafkaconsumer.js +0 -128
  13. package/dist/kafka/kafkaconsumer.js.map +0 -1
  14. package/dist/kafka/kafkamanager.js +0 -173
  15. package/dist/kafka/kafkamanager.js.map +0 -1
  16. package/dist/kafka/kafkaproducer.js +0 -97
  17. package/dist/kafka/kafkaproducer.js.map +0 -1
  18. package/dist/kafkatesting/config.js +0 -10
  19. package/dist/kafkatesting/config.js.map +0 -1
  20. package/dist/kafkatesting/consume.js +0 -154
  21. package/dist/kafkatesting/consume.js.map +0 -1
  22. package/dist/kafkatesting/produce.js +0 -164
  23. package/dist/kafkatesting/produce.js.map +0 -1
  24. package/src/kafka/kafkaconsumer.ts +0 -136
  25. package/src/kafka/kafkamanager.ts +0 -210
  26. package/src/kafka/kafkaproducer.ts +0 -103
  27. package/src/kafkatesting/config.ts +0 -10
  28. package/src/kafkatesting/consume.ts +0 -185
  29. package/src/kafkatesting/produce.ts +0 -187
  30. package/types/kafka/kafkaconsumer.d.ts +0 -21
  31. package/types/kafka/kafkaconsumer.d.ts.map +0 -1
  32. package/types/kafka/kafkamanager.d.ts +0 -30
  33. package/types/kafka/kafkamanager.d.ts.map +0 -1
  34. package/types/kafka/kafkaproducer.d.ts +0 -24
  35. package/types/kafka/kafkaproducer.d.ts.map +0 -1
  36. package/types/kafkatesting/config.d.ts +0 -7
  37. package/types/kafkatesting/config.d.ts.map +0 -1
  38. package/types/kafkatesting/consume.d.ts +0 -2
  39. package/types/kafkatesting/consume.d.ts.map +0 -1
  40. package/types/kafkatesting/produce.d.ts +0 -2
  41. package/types/kafkatesting/produce.d.ts.map +0 -1
@@ -1,103 +0,0 @@
1
- /* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF
2
- import { Kafka, Producer, RecordMetadata } from 'kafkajs'
3
-
4
- import chalk from 'chalk';
5
- import { ISTSLogger } from '@nsshunt/stsutils';
6
-
7
- export interface IKafkaProducerOptions {
8
- kafka: Kafka
9
- id: string
10
- logger: ISTSLogger
11
- }
12
-
13
- export class KafkaProducer {
14
- #options: IKafkaProducerOptions;
15
- #id: string
16
- #producer: Producer;
17
- #kafka: Kafka;
18
- #connected: boolean = false;
19
-
20
- constructor(options: IKafkaProducerOptions) {
21
- this.#options = options;
22
- this.#id = this.#options.id;
23
- this.#kafka = this.#options.kafka;
24
- this.#producer = this.#kafka.producer()
25
- }
26
-
27
- #LogErrorMessage(message: any) {
28
- this.#options.logger.error(message);
29
- }
30
-
31
- #RaiseError = (msg: string, errorCb: (error: any) => void) => {
32
- this.#LogErrorMessage(chalk.red(msg));
33
- errorCb(msg);
34
- }
35
-
36
- get producer() {
37
- return this.#producer;
38
- }
39
-
40
- get id(): string {
41
- return this.#id;
42
- }
43
-
44
- async Connect(errorCb: (error: any) => void): Promise<void> {
45
- if (!this.#connected) {
46
- try {
47
- await this.#producer.connect();
48
- this.#connected = true;
49
- } catch (error) {
50
- this.#RaiseError(`${process.pid}:KafkaProducer:Connect(): Error: [${error}]`, errorCb);
51
- }
52
- }
53
- }
54
-
55
- async Disconnect(errorCb: (error: any) => void): Promise<void> {
56
- if (this.#connected) {
57
- try {
58
- await this.#producer.disconnect()
59
- this.#connected = false;
60
- } catch (error) {
61
- this.#RaiseError(`${process.pid}:KafkaProducer:Disconnect(): Error: [${error}]`, errorCb);
62
- }
63
- }
64
- }
65
-
66
- SendMessage = async(topic: string, message: { key: string, value: string}, errorCb: (error: any) => void): Promise<RecordMetadata[]> => {
67
- if (this.#connected) {
68
- try {
69
- return this.#producer.send({
70
- topic,
71
- messages: [ message ]
72
- })
73
- } catch (error) {
74
- this.#LogErrorMessage(chalk.red(`${process.pid}:KafkaProducer:SendMessage(): Error: [${error}]`));
75
- return [ ];
76
- }
77
- } else {
78
- await this.Connect((error) => {
79
- this.#RaiseError(`${process.pid}:KafkaProducer:SendMessage(): Could not producer.connect, Error: [${error}]`, errorCb);
80
- });
81
- return [ ];
82
- }
83
- }
84
-
85
- SendMessages = async(topic: string, messages: { key: string, value: string}[], errorCb: (error: any) => void): Promise<RecordMetadata[]> => {
86
- if (this.#connected) {
87
- try {
88
- return this.#producer.send({
89
- topic,
90
- messages
91
- })
92
- } catch (error) {
93
- this.#LogErrorMessage(chalk.red(`${process.pid}:KafkaProducer:SendMessages(): Error: [${error}]`));
94
- return [ ];
95
- }
96
- } else {
97
- await this.Connect((error) => {
98
- this.#RaiseError(`${process.pid}:KafkaProducer:SendMessages(): Could not producer.connect, Error: [${error}]`, errorCb);
99
- });
100
- return [ ];
101
- }
102
- }
103
- }
@@ -1,10 +0,0 @@
1
- export const TOPIC = 'appframework-test-logs';
2
- export const PARTITIONS = 3;
3
- export const TIMEOUT = 5000;
4
-
5
- export const GROUP_ID = 'my-group';
6
-
7
- export const CLIENT_ID = 'my-app';
8
-
9
- export const BROKERS = ['192.168.14.92:9092'];
10
-
@@ -1,185 +0,0 @@
1
- /* eslint @typescript-eslint/no-unused-vars: 0 */ // --> OFF
2
- import { TOPIC, GROUP_ID, CLIENT_ID, BROKERS, TIMEOUT } from './config'
3
-
4
- import { KafkaManager } from './../kafka/kafkamanager'
5
- import { JSONObject } from '@nsshunt/stsutils';
6
-
7
- import { KafkaMessage } from 'kafkajs'
8
-
9
- import winston from 'winston'
10
-
11
- async function Sleep(milliseconds = 1000) {
12
- return new Promise(resolve => setTimeout(resolve, milliseconds))
13
- }
14
-
15
-
16
- winston.format.combine(
17
- winston.format.colorize(),
18
- winston.format.simple()
19
- );
20
-
21
- const logger = winston.createLogger({
22
- level: 'silly',
23
- format: winston.format.combine(
24
- winston.format.colorize(),
25
- winston.format.simple()
26
- ),
27
- transports: [
28
- new winston.transports.Console()
29
- ]});
30
-
31
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
32
- const LogErrorMessage = (message: any) => {
33
- logger.error(message);
34
- }
35
-
36
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
37
- const LogInfoMessage = (message: any) => {
38
- logger.info(message);
39
- }
40
-
41
- const km = new KafkaManager({
42
- clientId: CLIENT_ID + process.env.CLIENT_ID,
43
- brokers: BROKERS,
44
- adminTimeout: TIMEOUT,
45
- connectionTimeout: TIMEOUT,
46
- requestTimeout: TIMEOUT,
47
- logLevel: 'NOTHING',
48
- useSSL: false,
49
- logger
50
- });
51
-
52
- const runme = async () => {
53
-
54
- const fromBeginning = false;
55
-
56
- const consumer = km.CreateConsumer(GROUP_ID + process.env.GROUP_ID);
57
-
58
-
59
-
60
- await consumer.Connect((error) => {
61
- LogErrorMessage(`Connect(): Error: [${error}]`);
62
- });
63
-
64
- await consumer.Subscribe([TOPIC], fromBeginning, (error) => {
65
- LogErrorMessage(`Subscribe(): Error: [${error}]`);
66
- });
67
-
68
- await consumer.StartConsumingMessages(true, (topic: string, partition: number, message: KafkaMessage, heartbeat: () => Promise<void>, pause: () => () => void) => {
69
- const { key, value, headers } = message;
70
- LogInfoMessage({
71
- key,
72
- partition,
73
- value,
74
- headers
75
- });
76
- }, (error) => {
77
- LogErrorMessage(`StartConsumingMessages(): Error: [${error}]`);
78
- });
79
-
80
- /*
81
- await consumer.StartConsumingMessages(true, (topic: string, key: string, partition: number, value: string, headers: JSONObject | undefined) => {
82
- LogInfoMessage({
83
- key,
84
- partition,
85
- value,
86
- headers
87
- });
88
- }, (error) => {
89
- LogErrorMessage(`StartConsumingMessages(): Error: [${error}]`);
90
- });
91
- */
92
-
93
- await consumer.Stop((error) => {
94
- LogErrorMessage(`Stop(): Error: [${error}]`);
95
- });
96
-
97
- await consumer.Subscribe(['zzz'], fromBeginning, (error) => {
98
- LogErrorMessage(`Subscribe(): Error: [${error}]`);
99
- });
100
-
101
- await consumer.StartConsumingMessages(true, (topic: string, partition: number, message: KafkaMessage, heartbeat: () => Promise<void>, pause: () => () => void) => {
102
- const { key, value, headers } = message;
103
- LogInfoMessage({
104
- key,
105
- partition,
106
- value,
107
- headers
108
- });
109
- }, (error) => {
110
- LogErrorMessage(`StartConsumingMessages(): Error: [${error}]`);
111
- });
112
-
113
- await consumer.Stop((error) => {
114
- LogErrorMessage(`Stop(): Error: [${error}]`);
115
- });
116
-
117
- await consumer.Subscribe(['yyy'], fromBeginning, (error) => {
118
- LogErrorMessage(`Subscribe(): Error: [${error}]`);
119
- });
120
-
121
- await consumer.StartConsumingMessages(true, (topic: string, partition: number, message: KafkaMessage, heartbeat: () => Promise<void>, pause: () => () => void) => {
122
- const { key, value, headers } = message;
123
- /*
124
- this.#LogInfoMessage({
125
- key,
126
- partition,
127
- value,
128
- headers
129
- });
130
- */
131
- LogInfoMessage(`key: [${key}] value: [${value}] partition: [${partition}] headers: [${headers}]`);
132
- }, (error) => {
133
- LogErrorMessage(`StartConsumingMessages(): Error: [${error}]`);
134
- });
135
-
136
- //await km.Subscribe(['zzz'], cb);
137
-
138
- process.on("SIGINT", async () => {
139
- LogInfoMessage('=========SIGTERM START =======================')
140
- await consumer.Disconnect((error) => {
141
- LogErrorMessage(`Disconnect(): Error: [${error}]`);
142
- });
143
- LogInfoMessage('=========SIGTERM END =======================')
144
- process.exit();
145
- });
146
-
147
- let iteration = 0;
148
- for (;;) {
149
- LogInfoMessage('sleep: ' + iteration++);
150
- await Sleep(1000);
151
- }
152
- }
153
-
154
- /*
155
- const errorTypes = ['unhandledRejection', 'uncaughtException']
156
- const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']
157
-
158
- errorTypes.forEach(type => {
159
- process.on(type, async () => {
160
- try {
161
- this.#LogInfoMessage(`process.on ${type}`)
162
- this.#LogInfoMessage('=========consumer.disconnect() START =======================')
163
- await consumer.disconnect()
164
- this.#LogInfoMessage('=========consumer.disconnect() END =======================')
165
- process.exit(0)
166
- } catch (_) {
167
- process.exit(1)
168
- }
169
- })
170
- })
171
-
172
- signalTraps.forEach(type => {
173
- process.once(type, async () => {
174
- try {
175
- this.#LogInfoMessage('=========consumer.disconnect() START [2] =======================')
176
- await consumer.disconnect()
177
- this.#LogInfoMessage('=========consumer.disconnect() END [2] =======================')
178
- } finally {
179
- process.kill(process.pid, type)
180
- }
181
- })
182
- })
183
- */
184
-
185
- runme().catch(e => LogErrorMessage(`[example/producer] ${e.message}`))
@@ -1,187 +0,0 @@
1
- /*
2
-
3
- kafka example server #01 - Docker Compose File
4
- ----------------------------------------------
5
- Note: In this example, the log retention is set to 24 hours (rather than default to 1 week)
6
- https://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/
7
-
8
- version: '2'
9
- services:
10
- zookeeper:
11
- image: wurstmeister/zookeeper
12
- ports:
13
- - "2181:2181"
14
- restart: unless-stopped
15
-
16
- kafka:
17
- image: wurstmeister/kafka
18
- ports:
19
- - "9092:9092"
20
- environment:
21
- DOCKER_API_VERSION: 1.22
22
- KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92
23
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
24
- KAFKA_CREATE_TOPICS: "topic-name2:3:1"
25
- KAFKA_LOG_RETENTION_MS: 86400000
26
- KAFKA_LOG_RETENTION_BYTES: -1
27
- volumes:
28
- - /var/run/docker.sock:/var/run/docker.sock
29
- restart: unless-stopped
30
-
31
-
32
- kafka example server #02 - Docker Compose File
33
- ----------------------------------------------
34
- version: "3.9" # optional since v1.27.0
35
-
36
- networks:
37
- app-tier:
38
- driver: bridge
39
-
40
- services:
41
- kafka:
42
- image: 'bitnami/kafka:latest'
43
- ports:
44
- - '9092:9092'
45
- networks:
46
- - app-tier
47
- environment:
48
- - ALLOW_PLAINTEXT_LISTENER=yes
49
- - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
50
- - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092
51
-
52
- */
53
- import { TOPIC, CLIENT_ID, BROKERS, PARTITIONS, TIMEOUT } from './config'
54
-
55
- import { KafkaManager } from './../kafka/kafkamanager'
56
-
57
- import chalk from 'chalk';
58
-
59
- import winston from 'winston'
60
-
61
- winston.format.combine(
62
- winston.format.colorize(),
63
- winston.format.simple()
64
- );
65
-
66
- const logger = winston.createLogger({
67
- level: 'silly',
68
- format: winston.format.combine(
69
- winston.format.colorize(),
70
- winston.format.simple()
71
- ),
72
- transports: [
73
- new winston.transports.Console()
74
- ]});
75
-
76
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
77
- const LogErrorMessage = (message: any) => {
78
- logger.error(message);
79
- }
80
-
81
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
82
- const LogInfoMessage = (message: any) => {
83
- logger.info(message);
84
- }
85
-
86
- async function Sleep(milliseconds = 1000) {
87
- return new Promise(resolve => setTimeout(resolve, milliseconds))
88
- }
89
-
90
- const km = new KafkaManager({
91
- clientId: CLIENT_ID + process.env.CLIENT_ID,
92
- brokers: BROKERS,
93
- adminTimeout: TIMEOUT,
94
- connectionTimeout: TIMEOUT,
95
- requestTimeout: TIMEOUT,
96
- logLevel: 'NOTHING',
97
- useSSL: false,
98
- logger
99
- });
100
-
101
- const runme = async () => {
102
- await km.CreateTopic(TOPIC, PARTITIONS, (error) => LogInfoMessage(`CreateTopic: Error: [${error}]`));
103
- await km.CreateTopic('zzz', 1, (error) => LogInfoMessage(`CreateTopic: Error: [${error}]`));
104
- await km.CreateTopic('yyy', 1, (error) => LogInfoMessage(`CreateTopic: Error: [${error}]`));
105
-
106
- const producer = km.CreateProducer();
107
-
108
- await producer.Connect((error) => LogInfoMessage(`Connect: Error: [${error}]`));
109
-
110
- const count = 100000;
111
- const sleepTime = 1000;
112
-
113
- process.on("SIGINT", async () => {
114
- LogInfoMessage('=========SIGTERM START =======================')
115
- await producer.Disconnect((error) => LogInfoMessage(`Disconnect: Error: [${error}]`));
116
- LogInfoMessage('=========SIGTERM END =======================')
117
- process.exit();
118
- });
119
-
120
- for (let i=0; i < count; i++) {
121
- if (i % 100 === 0) LogInfoMessage(i);
122
- const retVal = await producer.SendMessages(TOPIC, [
123
- { key: 'key1', value: chalk.green(`hello world - ${i}`) },
124
- { key: 'key2', value: 'hey hey! -2' },
125
- { key: 'key3', value: 'hey hey! -3' },
126
- { key: 'key4', value: 'hey hey! -4' },
127
- { key: 'key5', value: 'hey hey! -5' }
128
- ], (error) => {
129
- LogErrorMessage(error);
130
- }
131
- );
132
-
133
- await producer.SendMessages('zzz', [
134
- { key: 'key-zzz', value: chalk.yellow(`hello world - ${i}`) }
135
- ], (error) => {
136
- LogErrorMessage(error);
137
- }
138
- );
139
-
140
- await producer.SendMessages('yyy', [
141
- { key: 'key-yyy', value: chalk.cyan(`hello world - ${i}`) }
142
- ], (error) => {
143
- LogErrorMessage(error);
144
- }
145
- );
146
-
147
- if (i % 100 === 0) {
148
- LogInfoMessage(retVal);
149
- LogInfoMessage(` ------------=================> ${i}`);
150
- }
151
- if (sleepTime >= 0) {
152
- await Sleep(sleepTime);
153
- }
154
- }
155
-
156
- await producer.Disconnect((error) => LogInfoMessage(`Disconnect: Error: [${error}]`));
157
- }
158
-
159
- /*
160
- const errorTypes = ['unhandledRejection', 'uncaughtException']
161
- const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']
162
-
163
- errorTypes.forEach(type => {
164
- process.on(type, async () => {
165
- try {
166
- LogInfoMessage(`process.on ${type}`)
167
- await producer.disconnect()
168
- process.exit(0)
169
- } catch (_) {
170
- process.exit(1)
171
- }
172
- })
173
- })
174
-
175
- signalTraps.forEach(type => {
176
- process.once(type, async () => {
177
- try {
178
- await producer.disconnect()
179
- } finally {
180
- process.kill(process.pid, type)
181
- }
182
- })
183
- })
184
- */
185
-
186
- runme().catch(e => LogErrorMessage(`[example/producer] ${e.message}`))
187
-
@@ -1,21 +0,0 @@
1
- import { Kafka, Consumer } from 'kafkajs';
2
- import { IKafkaConsumer, ConsumeMessageCB, ConsumeMessageErrorCB } from './../commonTypes';
3
- import { ISTSLogger } from '@nsshunt/stsutils';
4
- export interface IKafkaConsumerOptions {
5
- kafka: Kafka;
6
- id: string;
7
- groupId: string;
8
- logger: ISTSLogger;
9
- }
10
- export declare class KafkaConsumer implements IKafkaConsumer {
11
- #private;
12
- constructor(options: IKafkaConsumerOptions);
13
- get consumer(): Consumer;
14
- get id(): string;
15
- Connect(errorCb: (error: any) => void): Promise<void>;
16
- Disconnect(errorCb: (error: any) => void): Promise<void>;
17
- Subscribe: (topics: string[], fromBeginning: boolean, errorCb: (error: any) => void) => Promise<void>;
18
- Stop: (errorCb: (error: any) => void) => Promise<void>;
19
- StartConsumingMessages: (autoCommit: boolean, cb: ConsumeMessageCB, errorCb: ConsumeMessageErrorCB) => Promise<void>;
20
- }
21
- //# sourceMappingURL=kafkaconsumer.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"kafkaconsumer.d.ts","sourceRoot":"","sources":["../../src/kafka/kafkaconsumer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,QAAQ,EAA0B,MAAM,SAAS,CAAA;AACjE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAA;AAG1F,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAE/C,MAAM,WAAW,qBAAqB;IAClC,KAAK,EAAE,KAAK,CAAA;IACZ,EAAE,EAAE,MAAM,CAAA;IACV,OAAO,EAAE,MAAM,CAAA;IACf,MAAM,EAAE,UAAU,CAAA;CACrB;AAED,qBAAa,aAAc,YAAW,cAAc;;gBAQpC,OAAO,EAAE,qBAAqB;IAoB1C,IAAI,QAAQ,aAEX;IAED,IAAI,EAAE,IAAI,MAAM,CAEf;IAQK,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAWrD,UAAU,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAW9D,SAAS,WAAiB,MAAM,EAAE,iBAAiB,OAAO,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC,CAYxG;IAED,IAAI,YAAkB,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC,CAQzD;IAED,sBAAsB,eAAsB,OAAO,MAAM,gBAAgB,WAAW,qBAAqB,KAAG,OAAO,CAAC,IAAI,CAAC,CAgCxH;CACJ"}
@@ -1,30 +0,0 @@
1
- import { ISTSLogger, STSOptionsBase } from '@nsshunt/stsutils';
2
- import { Kafka } from 'kafkajs';
3
- import { KafkaConsumer } from './kafkaconsumer';
4
- import { KafkaProducer } from './kafkaproducer';
5
- export interface IKafkaManagerConfig {
6
- clientId: string;
7
- brokers: string[];
8
- adminTimeout: number;
9
- connectionTimeout: number;
10
- requestTimeout: number;
11
- logLevel: string;
12
- keepAlive?: number;
13
- useSSL: boolean;
14
- ssl?: {
15
- rejectUnauthorized: boolean;
16
- cafile: string;
17
- keyfile: string;
18
- certfileFile: string;
19
- };
20
- logger: ISTSLogger;
21
- }
22
- export declare class KafkaManager extends STSOptionsBase {
23
- #private;
24
- constructor(options: IKafkaManagerConfig);
25
- get kafka(): Kafka;
26
- CreateProducer(): KafkaProducer;
27
- CreateConsumer(groupId: string): KafkaConsumer;
28
- CreateTopic: (topic: string, partitions: number, errorCb: (error: any) => void) => Promise<boolean>;
29
- }
30
- //# sourceMappingURL=kafkamanager.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"kafkamanager.d.ts","sourceRoot":"","sources":["../../src/kafka/kafkamanager.ts"],"names":[],"mappings":"AAqDA,OAAO,EAAE,UAAU,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAA;AAE9D,OAAO,EAAE,KAAK,EAAyB,MAAM,SAAS,CAAA;AAKtD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC/C,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAU/C,MAAM,WAAW,mBAAmB;IAChC,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,YAAY,EAAE,MAAM,CAAA;IACpB,iBAAiB,EAAE,MAAM,CAAA;IACzB,cAAc,EAAE,MAAM,CAAA;IACtB,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,CAAC,EAAE;QACJ,kBAAkB,EAAE,OAAO,CAAA;QAC3B,MAAM,EAAE,MAAM,CAAA;QACd,OAAO,EAAE,MAAM,CAAA;QACf,YAAY,EAAE,MAAM,CAAA;KACrB,CAAA;IACD,MAAM,EAAE,UAAU,CAAA;CACrB;AAWD,qBAAa,YAAa,SAAQ,cAAc;;gBAGhC,OAAO,EAAE,mBAAmB;IAgExC,IAAI,KAAK,UAER;IAED,cAAc,IAAI,aAAa;IAO/B,cAAc,CAAC,OAAO,EAAE,MAAM;IAQ9B,WAAW,UAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,OAAO,CAAC,CAwBvG;CACJ"}
@@ -1,24 +0,0 @@
1
- import { Kafka, Producer, RecordMetadata } from 'kafkajs';
2
- import { ISTSLogger } from '@nsshunt/stsutils';
3
- export interface IKafkaProducerOptions {
4
- kafka: Kafka;
5
- id: string;
6
- logger: ISTSLogger;
7
- }
8
- export declare class KafkaProducer {
9
- #private;
10
- constructor(options: IKafkaProducerOptions);
11
- get producer(): Producer;
12
- get id(): string;
13
- Connect(errorCb: (error: any) => void): Promise<void>;
14
- Disconnect(errorCb: (error: any) => void): Promise<void>;
15
- SendMessage: (topic: string, message: {
16
- key: string;
17
- value: string;
18
- }, errorCb: (error: any) => void) => Promise<RecordMetadata[]>;
19
- SendMessages: (topic: string, messages: {
20
- key: string;
21
- value: string;
22
- }[], errorCb: (error: any) => void) => Promise<RecordMetadata[]>;
23
- }
24
- //# sourceMappingURL=kafkaproducer.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"kafkaproducer.d.ts","sourceRoot":"","sources":["../../src/kafka/kafkaproducer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAGzD,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAE/C,MAAM,WAAW,qBAAqB;IAClC,KAAK,EAAE,KAAK,CAAA;IACZ,EAAE,EAAE,MAAM,CAAA;IACV,MAAM,EAAE,UAAU,CAAA;CACrB;AAED,qBAAa,aAAa;;gBAOV,OAAO,EAAE,qBAAqB;IAgB1C,IAAI,QAAQ,aAEX;IAED,IAAI,EAAE,IAAI,MAAM,CAEf;IAEK,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAWrD,UAAU,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAW9D,WAAW,UAAgB,MAAM,WAAW;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAC,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,cAAc,EAAE,CAAC,CAiBnI;IAED,YAAY,UAAgB,MAAM,YAAY;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAC,EAAE,WAAW,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI,KAAG,OAAO,CAAC,cAAc,EAAE,CAAC,CAiBvI;CACJ"}
@@ -1,7 +0,0 @@
1
- export declare const TOPIC = "appframework-test-logs";
2
- export declare const PARTITIONS = 3;
3
- export declare const TIMEOUT = 5000;
4
- export declare const GROUP_ID = "my-group";
5
- export declare const CLIENT_ID = "my-app";
6
- export declare const BROKERS: string[];
7
- //# sourceMappingURL=config.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/config.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,KAAK,2BAA2B,CAAC;AAC9C,eAAO,MAAM,UAAU,IAAI,CAAC;AAC5B,eAAO,MAAM,OAAO,OAAO,CAAC;AAE5B,eAAO,MAAM,QAAQ,aAAa,CAAC;AAEnC,eAAO,MAAM,SAAS,WAAW,CAAC;AAElC,eAAO,MAAM,OAAO,UAAyB,CAAC"}
@@ -1,2 +0,0 @@
1
- export {};
2
- //# sourceMappingURL=consume.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"consume.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/consume.ts"],"names":[],"mappings":""}
@@ -1,2 +0,0 @@
1
- export {};
2
- //# sourceMappingURL=produce.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"produce.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/produce.ts"],"names":[],"mappings":""}