@nsshunt/stsappframework 3.0.104 → 3.0.106

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. package/dist/influxdb/influxDBManager.js +16 -17
  2. package/dist/influxdb/influxDBManager.js.map +1 -1
  3. package/dist/influxdb/influxDBManagerAgent.js +9 -13
  4. package/dist/influxdb/influxDBManagerAgent.js.map +1 -1
  5. package/dist/influxdb/influxDBManagerBase.js +2 -6
  6. package/dist/influxdb/influxDBManagerBase.js.map +1 -1
  7. package/dist/influxdb/influxDBManagerService.js +10 -14
  8. package/dist/influxdb/influxDBManagerService.js.map +1 -1
  9. package/dist/instrumentationsubscriber.js +11 -15
  10. package/dist/instrumentationsubscriber.js.map +1 -1
  11. package/dist/kafka/IMKafkaManager.js +2 -6
  12. package/dist/kafka/IMKafkaManager.js.map +1 -1
  13. package/dist/kafkatesting/produce.js +1 -5
  14. package/dist/kafkatesting/produce.js.map +1 -1
  15. package/dist/processbase.js +7 -8
  16. package/dist/processbase.js.map +1 -1
  17. package/dist/server.js +1 -1
  18. package/dist/server.js.map +1 -1
  19. package/dist/tcpclient/app2.js +2 -2
  20. package/dist/tcpserver/appmaster.js +39 -16
  21. package/dist/tcpserver/appmaster.js.map +1 -1
  22. package/package.json +7 -7
  23. package/src/influxdb/influxDBManager.ts +16 -18
  24. package/src/influxdb/influxDBManagerAgent.ts +9 -11
  25. package/src/influxdb/influxDBManagerBase.ts +2 -4
  26. package/src/influxdb/influxDBManagerService.ts +10 -12
  27. package/src/instrumentationsubscriber.ts +11 -14
  28. package/src/kafka/IMKafkaManager.ts +2 -4
  29. package/src/kafkatesting/produce.ts +1 -3
  30. package/src/processbase.ts +7 -9
  31. package/src/server.ts +1 -1
  32. package/src/tcpclient/app2.ts +2 -2
  33. package/src/tcpserver/appmaster.ts +39 -17
  34. package/types/influxdb/influxDBManager.d.ts.map +1 -1
  35. package/types/influxdb/influxDBManagerAgent.d.ts.map +1 -1
  36. package/types/influxdb/influxDBManagerBase.d.ts.map +1 -1
  37. package/types/influxdb/influxDBManagerService.d.ts.map +1 -1
  38. package/types/instrumentationsubscriber.d.ts.map +1 -1
  39. package/types/kafka/IMKafkaManager.d.ts.map +1 -1
  40. package/types/processbase.d.ts +2 -2
  41. package/types/processbase.d.ts.map +1 -1
  42. package/types/tcpserver/appmaster.d.ts.map +1 -1
  43. package/src_working/authDefs.ts +0 -37
  44. package/src_working/authutilsnode.ts +0 -373
  45. package/src_working/commonTypes.ts +0 -239
  46. package/src_working/index.ts +0 -22
  47. package/src_working/influxdb/influxDBManager.ts +0 -970
  48. package/src_working/influxdb/influxDBManagerAgent.ts +0 -314
  49. package/src_working/influxdb/influxDBManagerBase.ts +0 -109
  50. package/src_working/influxdb/influxDBManagerService.ts +0 -373
  51. package/src_working/instrumentationsubscriber.ts +0 -283
  52. package/src_working/kafka/IMKafkaManager.ts +0 -152
  53. package/src_working/kafka/kafkaconsumer.ts +0 -82
  54. package/src_working/kafka/kafkamanager.ts +0 -186
  55. package/src_working/kafka/kafkaproducer.ts +0 -58
  56. package/src_working/kafkatesting/config.ts +0 -10
  57. package/src_working/kafkatesting/consume.ts +0 -116
  58. package/src_working/kafkatesting/produce.ts +0 -153
  59. package/src_working/masterprocessbase.ts +0 -598
  60. package/src_working/middleware/serverNetworkMiddleware.ts +0 -240
  61. package/src_working/network.ts +0 -36
  62. package/src_working/processbase.ts +0 -411
  63. package/src_working/processoptions.ts +0 -164
  64. package/src_working/publishertransports/publishTransportDirect.ts +0 -45
  65. package/src_working/publishertransports/publishTransportUtils.ts +0 -53
  66. package/src_working/server.ts +0 -141
  67. package/src_working/serverprocessbase.ts +0 -393
  68. package/src_working/singleprocessbase.ts +0 -121
  69. package/src_working/socketIoServerHelper.ts +0 -177
  70. package/src_working/stscontrollerbase.ts +0 -15
  71. package/src_working/stslatencycontroller.ts +0 -27
  72. package/src_working/stslatencyroute.ts +0 -16
  73. package/src_working/stsrouterbase.ts +0 -22
  74. package/src_working/tcpclient/app.ts +0 -19
  75. package/src_working/tcpclient/app2.ts +0 -56
  76. package/src_working/tcpserver/app.ts +0 -11
  77. package/src_working/tcpserver/appConfig.ts +0 -65
  78. package/src_working/tcpserver/appmaster.ts +0 -544
  79. package/src_working/validation/errors.ts +0 -6
  80. package/src_working/webworkertesting/app.ts +0 -49
  81. package/src_working/webworkertesting/worker.ts +0 -24
  82. package/src_working/workerprocessbase.test.ts +0 -47
  83. package/src_working/workerprocessbase.ts +0 -185
  84. package/src_working2/authDefs.ts +0 -37
  85. package/src_working2/authutilsnode.ts +0 -375
  86. package/src_working2/commonTypes.ts +0 -239
  87. package/src_working2/index.ts +0 -22
  88. package/src_working2/influxdb/influxDBManager.ts +0 -972
  89. package/src_working2/influxdb/influxDBManagerAgent.ts +0 -316
  90. package/src_working2/influxdb/influxDBManagerBase.ts +0 -111
  91. package/src_working2/influxdb/influxDBManagerService.ts +0 -375
  92. package/src_working2/instrumentationsubscriber.ts +0 -286
  93. package/src_working2/kafka/IMKafkaManager.ts +0 -154
  94. package/src_working2/kafka/kafkaconsumer.ts +0 -82
  95. package/src_working2/kafka/kafkamanager.ts +0 -186
  96. package/src_working2/kafka/kafkaproducer.ts +0 -58
  97. package/src_working2/kafkatesting/config.ts +0 -10
  98. package/src_working2/kafkatesting/consume.ts +0 -116
  99. package/src_working2/kafkatesting/produce.ts +0 -155
  100. package/src_working2/masterprocessbase.ts +0 -590
  101. package/src_working2/middleware/serverNetworkMiddleware.ts +0 -240
  102. package/src_working2/network.ts +0 -36
  103. package/src_working2/processbase.ts +0 -415
  104. package/src_working2/processoptions.ts +0 -164
  105. package/src_working2/publishertransports/publishTransportDirect.ts +0 -45
  106. package/src_working2/publishertransports/publishTransportUtils.ts +0 -53
  107. package/src_working2/server.ts +0 -141
  108. package/src_working2/serverprocessbase.ts +0 -393
  109. package/src_working2/singleprocessbase.ts +0 -123
  110. package/src_working2/socketIoServerHelper.ts +0 -177
  111. package/src_working2/stscontrollerbase.ts +0 -15
  112. package/src_working2/stslatencycontroller.ts +0 -27
  113. package/src_working2/stslatencyroute.ts +0 -16
  114. package/src_working2/stsrouterbase.ts +0 -22
  115. package/src_working2/tcpclient/app.ts +0 -19
  116. package/src_working2/tcpclient/app2.ts +0 -56
  117. package/src_working2/tcpserver/app.ts +0 -11
  118. package/src_working2/tcpserver/appConfig.ts +0 -65
  119. package/src_working2/tcpserver/appmaster.ts +0 -522
  120. package/src_working2/validation/errors.ts +0 -6
  121. package/src_working2/webworkertesting/app.ts +0 -49
  122. package/src_working2/webworkertesting/worker.ts +0 -24
  123. package/src_working2/workerprocessbase.test.ts +0 -47
  124. package/src_working2/workerprocessbase.ts +0 -187
@@ -1,154 +0,0 @@
1
- /* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF
2
- import { InstrumentPayload } from './../commonTypes'
3
- import { Gauge } from '@nsshunt/stsinstrumentation'
4
- import { KAFKA_PREFIX } from '@nsshunt/stssocketio-client'
5
-
6
- import { KafkaManager, IKafkaManagerConfig } from './kafkamanager'
7
- import { KafkaProducer } from './kafkaproducer'
8
-
9
- import { JSONObject } from '@nsshunt/stsutils'
10
- import { v4 as uuidv4 } from 'uuid';
11
-
12
- import chalk from 'chalk';
13
-
14
- import { $Options } from '@nsshunt/stsconfig'
15
- const goptions = $Options()
16
-
17
- const _logPrefix = 'IMKafkaManager.'
18
-
19
- // Manage the publication of InstrumentPayload to Kafka
20
- export class IMKafkaManager {
21
- #shuttingDown: boolean = false;
22
- #producer: KafkaProducer | null = null;
23
-
24
- #km: KafkaManager;
25
- #topics: JSONObject = { }
26
-
27
- constructor()
28
- {
29
- //super(options);
30
-
31
- const kmc: IKafkaManagerConfig = {
32
- clientId: `${goptions.kafka_clientId}_${uuidv4()}`,
33
- brokers: goptions.kafka_brokers.split(','),
34
- keepAlive: goptions.kafka_keep_alive,
35
- adminTimeout: goptions.kafka_admin_timeout,
36
- connectionTimeout: goptions.kafka_connection_timeout,
37
- logLevel: goptions.kafka_log_level,
38
- useSSL: goptions.kafka_use_ssl,
39
- requestTimeout: goptions.kafka_request_timeout
40
- }
41
- if (goptions.kafka_use_ssl) {
42
- kmc.ssl = {
43
- rejectUnauthorized: goptions.kafka_ssl_rejectUnauthorized,
44
- cafile: goptions.kafka_ssl_cafile,
45
- certfileFile: goptions.kafka_ssl_certfile,
46
- keyfile: goptions.kafka_ssl_keyfile
47
- }
48
- }
49
-
50
- this.#km = new KafkaManager(kmc);
51
- }
52
-
53
- get km(): KafkaManager {
54
- return this.#km;
55
- }
56
-
57
- Start = async (): Promise<void> => {
58
- this.#producer = this.#km.CreateProducer();
59
- await this.#producer.Connect()
60
- }
61
-
62
- Terminate = async (): Promise<void> => {
63
- try {
64
- if (this.#shuttingDown) {
65
- console.log(`IMKafkaManager:Terminate: Terminate already called. Ignoring.`.yellow);
66
- } else {
67
- this.#shuttingDown = true;
68
- console.log(`ProducerDisconnect`.yellow);
69
- if (this.#producer) {
70
- await this.#producer.Disconnect();
71
- }
72
- }
73
- } catch (error) {
74
- console.error(chalk.red(`${_logPrefix}#Terminate: Error: [${error}]`));
75
- }
76
- }
77
-
78
- async #OutputLogsToKafkaTopic(topic: string, logMessages: string[]) {
79
- if (!this.#topics[topic]) {
80
- console.log(`Setting up kafka topic: [${topic}] Starting`.yellow);
81
- this.#topics[topic] = true;
82
- const result = await this.#km.CreateTopic(topic, 1);
83
- if (result) {
84
- console.log(`Setting up kafka topic: [${topic}] Completed`.yellow);
85
- } else {
86
- console.log(`Setting up kafka topic: [${topic}] Completed - topic already exists`.cyan);
87
- }
88
- }
89
-
90
- const messages: JSONObject[] = [ ];
91
- logMessages.forEach((m) => {
92
- messages.push({key: topic, value: m})
93
- });
94
-
95
- // Don't wait for these to return ...
96
- if (this.#producer) {
97
- this.#producer.SendMessages(topic, messages as any);
98
- }
99
- }
100
-
101
- async OutputLogs(instrumentPayload: InstrumentPayload): Promise<boolean> {
102
- try {
103
- if (this.#shuttingDown) {
104
- return false;
105
- }
106
-
107
- let topic: string = '';
108
- if (instrumentPayload.instruments[Gauge.LOGGER]) {
109
- const loggerMessages = [ ...instrumentPayload.instruments[Gauge.LOGGER].val as string[] ];
110
- if (loggerMessages.length > 0) {
111
- if (instrumentPayload.context.agentName) {
112
- // Processing agent payload
113
- const { agentName, threadId, asyncRunnerId } = instrumentPayload.context;
114
-
115
- topic = '';
116
- if (agentName && threadId && asyncRunnerId) {
117
- // Output logs for a specific asyncRunnerId
118
- topic = `${KAFKA_PREFIX}_${agentName}_${threadId}_${asyncRunnerId}`.replace(/@/g, '_').replace(/\|/g, '_');
119
- this.#OutputLogsToKafkaTopic(topic, loggerMessages); // no need to await this ...
120
- }
121
- if (agentName && threadId) {
122
- // Output logs for a specific thread (worker) within an agent
123
- topic = `${KAFKA_PREFIX}_${agentName}_${threadId}`.replace(/@/g, '_').replace(/\|/g, '_');
124
- this.#OutputLogsToKafkaTopic(topic, loggerMessages); // no need to await this ...
125
- }
126
- if (agentName) {
127
- // Output logs for a specific thread (worker) within an agent
128
- topic = `${KAFKA_PREFIX}_${agentName}`.replace(/@/g, '_').replace(/\|/g, '_');
129
- this.#OutputLogsToKafkaTopic(topic, loggerMessages); // no need to await this ...
130
- }
131
- } else {
132
- // Processing service payload
133
- const { serviceInstanceId, serviceInstanceProcessId, pid, ppid } = instrumentPayload.context;
134
- if (serviceInstanceId && serviceInstanceProcessId && pid && ppid) {
135
- // Output to the specific thread within an instance
136
- topic = `${KAFKA_PREFIX}_${serviceInstanceId}_${serviceInstanceProcessId}`.replace(/@/g, '_').replace(/\|/g, '_');
137
- this.#OutputLogsToKafkaTopic(topic, loggerMessages); // no need to await this ...
138
-
139
- if (pid === ppid) {
140
- // Output only to the main thread on the service, i.e. this is for O/A service logging
141
- topic = `${KAFKA_PREFIX}_${serviceInstanceId}`.replace(/@/g, '_').replace(/\|/g, '_');
142
- this.#OutputLogsToKafkaTopic(topic, loggerMessages); // no need to await this ...
143
- }
144
- }
145
- }
146
- }
147
- }
148
- return true;
149
- } catch (error: any) {
150
- console.error(chalk.red(`${_logPrefix}OutputLogs: Could not output log data to kafka: [${error}]`));
151
- return false;
152
- }
153
- }
154
- }
@@ -1,82 +0,0 @@
1
- import { Kafka, Consumer, IHeaders } from 'kafkajs'
2
- import { IKafkaConsumer } from './../commonTypes'
3
-
4
- export class KafkaConsumer implements IKafkaConsumer {
5
- #id: string
6
- #groupId: string
7
- #consumer: Consumer;
8
- #kafka: Kafka;
9
- #connected: boolean = false;
10
-
11
- constructor(kafka: Kafka, id: string, groupId: string) {
12
- this.#id = id;
13
- this.#groupId = groupId;
14
- this.#kafka = kafka;
15
- this.#consumer = this.#kafka.consumer({ groupId: this.#groupId })
16
- }
17
-
18
- get consumer() {
19
- return this.#consumer;
20
- }
21
-
22
- get id(): string {
23
- return this.#id;
24
- }
25
-
26
- async Connect(): Promise<void> {
27
- if (!this.#connected) {
28
- await this.#consumer.connect()
29
- this.#connected = true;
30
- }
31
- }
32
-
33
- async Disconnect(): Promise<void> {
34
- if (this.#connected) {
35
- await this.#consumer.disconnect()
36
- this.#connected = false;
37
- }
38
- }
39
-
40
- Subscribe = async(topics: string[], fromBeginning: boolean): Promise<void> => {
41
- if (this.#connected) {
42
- await this.#consumer.subscribe({ topics, fromBeginning })
43
- } else {
44
- throw new Error(`[Subscribe] Consumer not connected.`);
45
- }
46
- }
47
-
48
- Stop = async(): Promise<void> => {
49
- if (this.#connected) {
50
- await this.#consumer.stop();
51
- }
52
- }
53
-
54
- StartConsumingMessages = async (cb: (topic: string, key: string, partition: number, value: string, headers: IHeaders | undefined) => void): Promise<void> => {
55
- if (this.#connected) {
56
- await this.#consumer.run({
57
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
58
- eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
59
- try {
60
- if (message.key) {
61
- if (message.value) {
62
- cb(topic.toString(), message.key.toString(), partition, message.value.toString(), message.headers)
63
- } else {
64
- cb(topic.toString(), message.key.toString(), partition, "", message.headers)
65
- }
66
- } else {
67
- if (message.value) {
68
- cb(topic.toString(), "", partition, message.value?.toString(), message.headers)
69
- } else {
70
- cb(topic.toString(), "", partition, "", message.headers)
71
- }
72
- }
73
- } catch (err) {
74
- console.log(err);
75
- }
76
- }
77
- })
78
- } else {
79
- throw new Error(`[StartConsumingMessages] Consumer not connected.`);
80
- }
81
- }
82
- }
@@ -1,186 +0,0 @@
1
- /* eslint @typescript-eslint/no-explicit-any: 0, @typescript-eslint/no-unused-vars: 0 */ // --> OFF
2
- /*
3
-
4
- kafka example server #01 - Docker Compose File
5
- ----------------------------------------------
6
- Note: In this example, the log retention is set to 24 hours (rather than default to 1 week)
7
- https://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/
8
-
9
- version: '2'
10
- services:
11
- zookeeper:
12
- image: wurstmeister/zookeeper
13
- ports:
14
- - "2181:2181"
15
- restart: unless-stopped
16
-
17
- kafka:
18
- image: wurstmeister/kafka
19
- ports:
20
- - "9092:9092"
21
- environment:
22
- DOCKER_API_VERSION: 1.22
23
- KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92
24
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
25
- KAFKA_CREATE_TOPICS: "topic-name2:3:1"
26
- KAFKA_LOG_RETENTION_MS: 86400000
27
- KAFKA_LOG_RETENTION_BYTES: -1
28
- volumes:
29
- - /var/run/docker.sock:/var/run/docker.sock
30
- restart: unless-stopped
31
-
32
-
33
- kafka example server #02 - Docker Compose File
34
- ----------------------------------------------
35
- version: "3.9" # optional since v1.27.0
36
-
37
- networks:
38
- app-tier:
39
- driver: bridge
40
-
41
- services:
42
- kafka:
43
- image: 'bitnami/kafka:latest'
44
- ports:
45
- - '9092:9092'
46
- networks:
47
- - app-tier
48
- environment:
49
- - ALLOW_PLAINTEXT_LISTENER=yes
50
- - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
51
- - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092
52
-
53
- */
54
- import { STSOptionsBase } from '@nsshunt/stsutils'
55
-
56
- import { Kafka, KafkaConfig, logLevel } from 'kafkajs'
57
-
58
- import { v4 as uuidv4 } from 'uuid';
59
- import fs from 'node:fs'
60
-
61
- import { KafkaConsumer } from './kafkaconsumer'
62
- import { KafkaProducer } from './kafkaproducer'
63
-
64
- import net from 'net'
65
- import tls from 'tls'
66
-
67
- const KEEP_ALIVE_DELAY = 60000 //@@ in ms
68
-
69
- // https://kafka.js.org/docs/configuration
70
- export interface IKafkaManagerConfig {
71
- clientId: string // A logical identifier of an application. Can be used by brokers to apply quotas or trace requests to a specific application. Example: booking-events-processor.
72
- brokers: string[] // List of Kafka brokers
73
- adminTimeout: number // Time in milliseconds to wait for a successful admin operation. The default value is: 5000.
74
- connectionTimeout: number // Time in milliseconds to wait for a successful connection. The default value is: 1000.
75
- requestTimeout: number // Time in milliseconds to wait for a successful request. The default value is: 30000.
76
- logLevel: string // There are 5 log levels available: NOTHING, ERROR, WARN, INFO, and DEBUG. INFO is configured by default.
77
- keepAlive?: number // When specified, the number of ms for socket keep alive processing.
78
- useSSL: boolean // Use SSL
79
- ssl?: { // Must be specified if useSSL is true
80
- rejectUnauthorized: boolean
81
- cafile: string
82
- keyfile: string
83
- certfileFile: string
84
- }
85
- }
86
-
87
- // https://kafka.js.org/docs/configuration
88
- // https://github.com/tulios/kafkajs/blob/master/src/network/socketFactory.js
89
- declare interface ICustomSocketFactory {
90
- host: any,
91
- port: any,
92
- ssl: any,
93
- onConnect: any
94
- }
95
-
96
- export class KafkaManager extends STSOptionsBase {
97
- #kafka: Kafka
98
-
99
- constructor(options: IKafkaManagerConfig) {
100
- super(options);
101
-
102
- const kc: KafkaConfig = {
103
- clientId: options.clientId,
104
- brokers: options.brokers, //brokers: ['localhost:9092', 'kafka2:9092'],
105
- connectionTimeout: options.connectionTimeout,
106
- requestTimeout: options.requestTimeout
107
- }
108
- // NOTHING, ERROR, WARN, INFO, and DEBUG. INFO is configured by default.
109
- switch (options.logLevel) {
110
- case 'NOTHING' :
111
- kc.logLevel = logLevel.NOTHING;
112
- break;
113
- case 'ERROR' :
114
- kc.logLevel = logLevel.ERROR;
115
- break;
116
- case 'WARN' :
117
- kc.logLevel = logLevel.WARN;
118
- break;
119
- case 'INFO' :
120
- kc.logLevel = logLevel.INFO;
121
- break;
122
- case 'DEBUG' :
123
- kc.logLevel = logLevel.DEBUG;
124
- break;
125
- default :
126
- kc.logLevel = logLevel.NOTHING;
127
- }
128
- if (options.useSSL && options.ssl) {
129
- kc.ssl = {
130
- ca: [fs.readFileSync(options.ssl.cafile as string, { encoding: 'utf8'})],
131
- key: fs.readFileSync(options.ssl.keyfile, { encoding: 'utf8'}),
132
- cert: fs.readFileSync(options.ssl.certfileFile, { encoding: 'utf8'}),
133
- }
134
- }
135
- if (options.keepAlive) {
136
- //const myCustomSocketFactory = ({ host, port, ssl, onConnect }) => {
137
- const myCustomSocketFactory = (config: ICustomSocketFactory) => {
138
- const socket = config.ssl
139
- ? tls.connect(
140
- Object.assign({ host: config.host, port: config.port }, !net.isIP(config.host) ? { servername: config.host } : {}, config.ssl),
141
- config.onConnect
142
- )
143
- : net.connect({ host: config.host, port: config.port }, config.onConnect)
144
-
145
- socket.setKeepAlive(true, options.keepAlive)
146
- return socket
147
- }
148
- kc.socketFactory = myCustomSocketFactory;
149
- }
150
-
151
- this.#kafka = new Kafka(kc);
152
- }
153
-
154
- get kafka() {
155
- return this.#kafka;
156
- }
157
-
158
- CreateProducer(): KafkaProducer {
159
- return new KafkaProducer(this.#kafka, uuidv4());
160
- }
161
-
162
- CreateConsumer(groupId: string) {
163
- return new KafkaConsumer(this.#kafka, uuidv4(), groupId);
164
- }
165
-
166
- CreateTopic = async (topic: string, partitions: number): Promise<boolean> => {
167
- const admin = this.#kafka.admin()
168
- await admin.connect()
169
- const result = await admin.createTopics({
170
- validateOnly: false,
171
- waitForLeaders: true,
172
- timeout: this.options?.timeout,
173
- topics: [
174
- {
175
- topic: topic,
176
- numPartitions: partitions, // default: -1 (uses broker `num.partitions` configuration)
177
- //replicationFactor: <Number>, // default: -1 (uses broker `default.replication.factor` configuration)
178
- //replicaAssignment: <Array>, // Example: [{ partition: 0, replicas: [0,1,2] }] - default: []
179
- //configEntries: <Array> // Example: [{ name: 'cleanup.policy', value: 'compact' }] - default: []
180
- }
181
- ]
182
- })
183
- await admin.disconnect()
184
- return result;
185
- }
186
- }
@@ -1,58 +0,0 @@
1
- import { Kafka, Producer, RecordMetadata } from 'kafkajs'
2
-
3
- export class KafkaProducer {
4
- #id: string
5
- #producer: Producer;
6
- #kafka: Kafka;
7
- #connected: boolean = false;
8
-
9
- constructor(kafka: Kafka, id: string) {
10
- this.#id = id;
11
- this.#kafka = kafka;
12
- this.#producer = this.#kafka.producer()
13
- }
14
-
15
- get producer() {
16
- return this.#producer;
17
- }
18
-
19
- get id(): string {
20
- return this.#id;
21
- }
22
-
23
- async Connect(): Promise<void> {
24
- if (!this.#connected) {
25
- await this.#producer.connect();
26
- this.#connected = true;
27
- }
28
- }
29
-
30
- async Disconnect(): Promise<void> {
31
- if (this.#connected) {
32
- await this.#producer.disconnect()
33
- this.#connected = false;
34
- }
35
- }
36
-
37
- SendMessage = async(topic: string, message: { key: string, value: string} ): Promise<RecordMetadata[]> => {
38
- if (this.#connected) {
39
- return this.#producer.send({
40
- topic,
41
- messages: [ message ]
42
- })
43
- } else {
44
- throw new Error(`[SendMessage] Producer not connected.`);
45
- }
46
- }
47
-
48
- SendMessages = async(topic: string, messages: { key: string, value: string}[] ): Promise<RecordMetadata[]> => {
49
- if (this.#connected) {
50
- return this.#producer.send({
51
- topic,
52
- messages
53
- })
54
- } else {
55
- throw new Error(`[SendMessages] Producer not connected.`);
56
- }
57
- }
58
- }
@@ -1,10 +0,0 @@
1
- export const TOPIC = 'appframework-test-logs';
2
- export const PARTITIONS = 3;
3
- export const TIMEOUT = 5000;
4
-
5
- export const GROUP_ID = 'my-group';
6
-
7
- export const CLIENT_ID = 'my-app';
8
-
9
- export const BROKERS = ['192.168.14.92:9092'];
10
-
@@ -1,116 +0,0 @@
1
- /* eslint @typescript-eslint/no-unused-vars: 0 */ // --> OFF
2
- import { TOPIC, GROUP_ID, CLIENT_ID, BROKERS, TIMEOUT } from './config'
3
-
4
- import { KafkaManager } from './../kafka/kafkamanager'
5
- import { JSONObject } from '@nsshunt/stsutils';
6
-
7
- async function Sleep(milliseconds = 1000) {
8
- return new Promise(resolve => setTimeout(resolve, milliseconds))
9
- }
10
-
11
- const km = new KafkaManager({
12
- clientId: CLIENT_ID + process.env.CLIENT_ID,
13
- brokers: BROKERS,
14
- adminTimeout: TIMEOUT,
15
- connectionTimeout: TIMEOUT,
16
- requestTimeout: TIMEOUT,
17
- logLevel: 'NOTHING',
18
- useSSL: false
19
- });
20
-
21
- const runme = async () => {
22
-
23
- const fromBeginning = false;
24
-
25
- const consumer = km.CreateConsumer(GROUP_ID + process.env.GROUP_ID);
26
-
27
- await consumer.Connect();
28
-
29
- await consumer.Subscribe([TOPIC], fromBeginning);
30
-
31
- await consumer.StartConsumingMessages((topic: string, key: string, partition: number, value: string, headers: JSONObject | undefined) => {
32
- console.log({
33
- key,
34
- partition,
35
- value,
36
- headers
37
- });
38
- });
39
-
40
- await consumer.Stop();
41
-
42
- await consumer.Subscribe(['zzz'], fromBeginning);
43
-
44
- await consumer.StartConsumingMessages((topic: string, key: string, partition: number, value: string, headers: JSONObject | undefined) => {
45
- console.log({
46
- key,
47
- partition,
48
- value,
49
- headers
50
- });
51
- });
52
-
53
- await consumer.Stop();
54
-
55
- await consumer.Subscribe(['yyy'], fromBeginning);
56
-
57
- await consumer.StartConsumingMessages((topic: string, key: string, partition: number, value: string, headers: JSONObject | undefined) => {
58
- /*
59
- console.log({
60
- key,
61
- partition,
62
- value,
63
- headers
64
- });
65
- */
66
- console.log(`key: [${key}] value: [${value}] partition: [${partition}] headers: [${headers}]`);
67
- });
68
-
69
- //await km.Subscribe(['zzz'], cb);
70
-
71
- process.on("SIGINT", async () => {
72
- console.log('=========SIGTERM START =======================')
73
- await consumer.Disconnect();
74
- console.log('=========SIGTERM END =======================')
75
- process.exit();
76
- });
77
-
78
- let iteration = 0;
79
- for (;;) {
80
- console.log('sleep: ' + iteration++);
81
- await Sleep(1000);
82
- }
83
- }
84
-
85
- /*
86
- const errorTypes = ['unhandledRejection', 'uncaughtException']
87
- const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']
88
-
89
- errorTypes.forEach(type => {
90
- process.on(type, async () => {
91
- try {
92
- console.log(`process.on ${type}`)
93
- console.log('=========consumer.disconnect() START =======================')
94
- await consumer.disconnect()
95
- console.log('=========consumer.disconnect() END =======================')
96
- process.exit(0)
97
- } catch (_) {
98
- process.exit(1)
99
- }
100
- })
101
- })
102
-
103
- signalTraps.forEach(type => {
104
- process.once(type, async () => {
105
- try {
106
- console.log('=========consumer.disconnect() START [2] =======================')
107
- await consumer.disconnect()
108
- console.log('=========consumer.disconnect() END [2] =======================')
109
- } finally {
110
- process.kill(process.pid, type)
111
- }
112
- })
113
- })
114
- */
115
-
116
- runme().catch(e => console.error(`[example/producer] ${e.message}`, e))