@nsshunt/stsappframework 2.19.251 → 2.19.252
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/kafkamanager.js +156 -0
- package/dist/kafkamanager.js.map +1 -0
- package/dist/kafkatesting/config.js +10 -0
- package/dist/kafkatesting/config.js.map +1 -0
- package/dist/kafkatesting/consume.js +67 -0
- package/dist/kafkatesting/consume.js.map +1 -0
- package/dist/kafkatesting/produce.js +123 -0
- package/dist/kafkatesting/produce.js.map +1 -0
- package/package.json +1 -1
- package/runkafkaconsume01.sh +26 -0
- package/runkafkaconsume02.sh +26 -0
- package/src/kafkamanager.ts +164 -0
- package/src/kafkatesting/config.ts +10 -0
- package/src/kafkatesting/consume.ts +75 -0
- package/src/kafkatesting/produce.ts +133 -0
- package/types/kafkamanager.d.ts +26 -0
- package/types/kafkamanager.d.ts.map +1 -0
- package/types/kafkatesting/config.d.ts +7 -0
- package/types/kafkatesting/config.d.ts.map +1 -0
- package/types/kafkatesting/consume.d.ts +2 -0
- package/types/kafkatesting/consume.d.ts.map +1 -0
- package/types/kafkatesting/produce.d.ts +2 -0
- package/types/kafkatesting/produce.d.ts.map +1 -0
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.KafkaManager = void 0;
|
|
4
|
+
/*
|
|
5
|
+
|
|
6
|
+
kafka example server #01 - Docker Compose File
|
|
7
|
+
----------------------------------------------
|
|
8
|
+
Note: In this example, the log retention is set to 24 hours (rather than default to 1 week)
|
|
9
|
+
https://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/
|
|
10
|
+
|
|
11
|
+
version: '2'
|
|
12
|
+
services:
|
|
13
|
+
zookeeper:
|
|
14
|
+
image: wurstmeister/zookeeper
|
|
15
|
+
ports:
|
|
16
|
+
- "2181:2181"
|
|
17
|
+
restart: unless-stopped
|
|
18
|
+
|
|
19
|
+
kafka:
|
|
20
|
+
image: wurstmeister/kafka
|
|
21
|
+
ports:
|
|
22
|
+
- "9092:9092"
|
|
23
|
+
environment:
|
|
24
|
+
DOCKER_API_VERSION: 1.22
|
|
25
|
+
KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92
|
|
26
|
+
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
|
|
27
|
+
KAFKA_CREATE_TOPICS: "topic-name2:3:1"
|
|
28
|
+
KAFKA_LOG_RETENTION_MS: 86400000
|
|
29
|
+
KAFKA_LOG_RETENTION_BYTES: -1
|
|
30
|
+
volumes:
|
|
31
|
+
- /var/run/docker.sock:/var/run/docker.sock
|
|
32
|
+
restart: unless-stopped
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
kafka example server #02 - Docker Compose File
|
|
36
|
+
----------------------------------------------
|
|
37
|
+
version: "3.9" # optional since v1.27.0
|
|
38
|
+
|
|
39
|
+
networks:
|
|
40
|
+
app-tier:
|
|
41
|
+
driver: bridge
|
|
42
|
+
|
|
43
|
+
services:
|
|
44
|
+
kafka:
|
|
45
|
+
image: 'bitnami/kafka:latest'
|
|
46
|
+
ports:
|
|
47
|
+
- '9092:9092'
|
|
48
|
+
networks:
|
|
49
|
+
- app-tier
|
|
50
|
+
environment:
|
|
51
|
+
- ALLOW_PLAINTEXT_LISTENER=yes
|
|
52
|
+
- KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
|
|
53
|
+
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092
|
|
54
|
+
|
|
55
|
+
*/
|
|
56
|
+
const stsutils_1 = require("@nsshunt/stsutils");
|
|
57
|
+
const kafkajs_1 = require("kafkajs");
|
|
58
|
+
class KafkaManager extends stsutils_1.STSOptionsBase {
|
|
59
|
+
#kafka;
|
|
60
|
+
#producer;
|
|
61
|
+
#consumer;
|
|
62
|
+
constructor(options) {
|
|
63
|
+
super(options);
|
|
64
|
+
this.#kafka = new kafkajs_1.Kafka({
|
|
65
|
+
clientId: options.clientId,
|
|
66
|
+
brokers: options.brokers
|
|
67
|
+
//brokers: ['localhost:9092', 'kafka2:9092'],
|
|
68
|
+
});
|
|
69
|
+
}
|
|
70
|
+
ProducerConnect = async () => {
|
|
71
|
+
this.#producer = this.#kafka.producer();
|
|
72
|
+
return this.#producer.connect();
|
|
73
|
+
};
|
|
74
|
+
ProducerDisconnect = async () => {
|
|
75
|
+
await this.#producer?.disconnect();
|
|
76
|
+
};
|
|
77
|
+
ConsumerConnect = async (groupId) => {
|
|
78
|
+
this.#consumer = this.#kafka.consumer({ groupId });
|
|
79
|
+
return this.#consumer.connect();
|
|
80
|
+
};
|
|
81
|
+
ConsumerDisconnect = async () => {
|
|
82
|
+
return this.#consumer?.disconnect();
|
|
83
|
+
};
|
|
84
|
+
CreateTopic = async (topic, partitions) => {
|
|
85
|
+
const admin = this.#kafka.admin();
|
|
86
|
+
await admin.connect();
|
|
87
|
+
await admin.createTopics({
|
|
88
|
+
validateOnly: false,
|
|
89
|
+
waitForLeaders: true,
|
|
90
|
+
timeout: this.options?.timeout,
|
|
91
|
+
topics: [
|
|
92
|
+
{
|
|
93
|
+
topic: topic,
|
|
94
|
+
numPartitions: partitions, // default: -1 (uses broker `num.partitions` configuration)
|
|
95
|
+
//replicationFactor: <Number>, // default: -1 (uses broker `default.replication.factor` configuration)
|
|
96
|
+
//replicaAssignment: <Array>, // Example: [{ partition: 0, replicas: [0,1,2] }] - default: []
|
|
97
|
+
//configEntries: <Array> // Example: [{ name: 'cleanup.policy', value: 'compact' }] - default: []
|
|
98
|
+
}
|
|
99
|
+
]
|
|
100
|
+
});
|
|
101
|
+
await admin.disconnect();
|
|
102
|
+
};
|
|
103
|
+
SendMessage = async (topic, message) => {
|
|
104
|
+
if (this.#producer) {
|
|
105
|
+
return this.#producer.send({
|
|
106
|
+
topic,
|
|
107
|
+
messages: [message]
|
|
108
|
+
});
|
|
109
|
+
}
|
|
110
|
+
else {
|
|
111
|
+
return null;
|
|
112
|
+
}
|
|
113
|
+
};
|
|
114
|
+
SendMessages = async (topic, messages) => {
|
|
115
|
+
if (this.#producer) {
|
|
116
|
+
return this.#producer.send({
|
|
117
|
+
topic,
|
|
118
|
+
messages
|
|
119
|
+
});
|
|
120
|
+
}
|
|
121
|
+
else {
|
|
122
|
+
return null;
|
|
123
|
+
}
|
|
124
|
+
};
|
|
125
|
+
Subscribe = async (topics, cb) => {
|
|
126
|
+
await this.#consumer?.subscribe({ topics, fromBeginning: true });
|
|
127
|
+
await this.#consumer?.run({
|
|
128
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
129
|
+
eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
|
|
130
|
+
try {
|
|
131
|
+
if (message.key) {
|
|
132
|
+
if (message.value) {
|
|
133
|
+
cb(message.key.toString(), partition, message.value.toString(), message.headers);
|
|
134
|
+
}
|
|
135
|
+
else {
|
|
136
|
+
cb(message.key.toString(), partition, "", message.headers);
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
else {
|
|
140
|
+
if (message.value) {
|
|
141
|
+
cb("", partition, message.value?.toString(), message.headers);
|
|
142
|
+
}
|
|
143
|
+
else {
|
|
144
|
+
cb("", partition, "", message.headers);
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
catch (err) {
|
|
149
|
+
console.log(err);
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
});
|
|
153
|
+
};
|
|
154
|
+
}
|
|
155
|
+
exports.KafkaManager = KafkaManager;
|
|
156
|
+
//# sourceMappingURL=kafkamanager.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafkamanager.js","sourceRoot":"","sources":["../src/kafkamanager.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAmDE;AACF,gDAAkD;AAElD,qCAA6E;AAQ7E,MAAa,YAAa,SAAQ,yBAAc;IAC5C,MAAM,CAAO;IACb,SAAS,CAAuB;IAChC,SAAS,CAAuB;IAEhC,YAAY,OAA4B;QACpC,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,MAAM,GAAG,IAAI,eAAK,CAAC;YACpB,QAAQ,EAAE,OAAO,CAAC,QAAQ;YAC1B,OAAO,EAAE,OAAO,CAAC,OAAO;YACxB,6CAA6C;SAChD,CAAC,CAAA;IACN,CAAC;IAED,eAAe,GAAG,KAAK,IAAoB,EAAE;QACzC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAA;QACvC,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,CAAA;IACnC,CAAC,CAAA;IAED,kBAAkB,GAAG,KAAK,IAAI,EAAE;QAC5B,MAAM,IAAI,CAAC,SAAS,EAAE,UAAU,EAAE,CAAA;IACtC,CAAC,CAAA;IAED,eAAe,GAAG,KAAK,EAAE,OAAe,EAAiB,EAAE;QACvD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC,CAAA;QAClD,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,CAAA;IACnC,CAAC,CAAA;IAED,kBAAkB,GAAG,KAAK,IAAmB,EAAE;QAC3C,OAAO,IAAI,CAAC,SAAS,EAAE,UAAU,EAAE,CAAC;IACxC,CAAC,CAAA;IAED,WAAW,GAAG,KAAK,EAAE,KAAa,EAAE,UAAkB,EAAE,EAAE;QACtD,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACjC,MAAM,KAAK,CAAC,OAAO,EAAE,CAAA;QACrB,MAAM,KAAK,CAAC,YAAY,CAAC;YACrB,YAAY,EAAE,KAAK;YACnB,cAAc,EAAE,IAAI;YACpB,OAAO,EAAE,IAAI,CAAC,OAAO,EAAE,OAAO;YAC9B,MAAM,EAAE;gBACJ;oBACI,KAAK,EAAE,KAAK;oBACZ,aAAa,EAAE,UAAU,EAAM,2DAA2D;oBAC1F,sGAAsG;oBACtG,8FAA8F;oBAC9F,uGAAuG;iBAC1G;aACJ;SACJ,CAAC,CAAA;QACF,MAAM,KAAK,CAAC,UAAU,EAAE,CAAA;IAC5B,CAAC,CAAA;IAED,WAAW,GAAG,KAAK,EAAC,KAAa,EAAE,OAAsC,EAAqC,EAAE;QAC5G,IAAI,IAAI,CAAC,SAAS,EAAE;YAChB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;gBACvB,KAAK;gBACL,QAAQ,EAAE,CAAE,OAAO,CAAE;aACxB,CAAC,CAAA;SACL;aAAM;YACH,OAAO,IAAI,CAAC;SACf;IACL,CAAC,CAAA;IAED,YAAY,GAAG,KAAK,EAAC,KAAa,EAAE,QAAyC,EAAqC,EAAE;QAChH,IAAI,IAAI,CAAC,SAAS,EAAE;YAChB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;gBACvB,KAAK;gBACL,QAAQ;aACX,CAAC,CAAA;SACL;aAAM;YACH,OAAO,IAAI,CAAC;SACf;IACL,CAAC,CAAA;IAED,SAAS,GAAG,KAAK,EAAC,MAAgB,EAAE,EAA0F,EAAE,EAAE;QAC9H,MAAM,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,EAAE,MAAM,EAAE
,aAAa,EAAE,IAAI,EAAE,CAAC,CAAA;QAEhE,MAAM,IAAI,CAAC,SAAS,EAAE,GAAG,CAAC;YACtB,6DAA6D;YAC7D,WAAW,EAAE,KAAK,EAAE,EAAE,KAAK,EAAE,SAAS,EAAE,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,EAAE,EAAE;gBACnE,IAAI;oBACA,IAAI,OAAO,CAAC,GAAG,EAAE;wBACb,IAAI,OAAO,CAAC,KAAK,EAAE;4BACf,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAA;yBACnF;6BAAM;4BACH,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAA;yBAC7D;qBACJ;yBAAM;wBACH,IAAI,OAAO,CAAC,KAAK,EAAE;4BACf,EAAE,CAAC,EAAE,EAAE,SAAS,EAAE,OAAO,CAAC,KAAK,EAAE,QAAQ,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAA;yBAChE;6BAAM;4BACH,EAAE,CAAC,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAA;yBACzC;qBACJ;iBACJ;gBAAC,OAAO,GAAG,EAAE;oBACV,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;iBACpB;YACL,CAAC;SACJ,CAAC,CAAA;IACN,CAAC,CAAA;CACJ;AArGD,oCAqGC"}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.BROKERS = exports.CLIENT_ID = exports.GROUP_ID = exports.TIMEOUT = exports.PARTITIONS = exports.TOPIC = void 0;
|
|
4
|
+
exports.TOPIC = 'appframework-test-logs';
|
|
5
|
+
exports.PARTITIONS = 3;
|
|
6
|
+
exports.TIMEOUT = 5000;
|
|
7
|
+
exports.GROUP_ID = 'my-group';
|
|
8
|
+
exports.CLIENT_ID = 'my-app';
|
|
9
|
+
exports.BROKERS = ['192.168.14.92:9092'];
|
|
10
|
+
//# sourceMappingURL=config.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/kafkatesting/config.ts"],"names":[],"mappings":";;;AAAa,QAAA,KAAK,GAAG,wBAAwB,CAAC;AACjC,QAAA,UAAU,GAAG,CAAC,CAAC;AACf,QAAA,OAAO,GAAG,IAAI,CAAC;AAEf,QAAA,QAAQ,GAAG,UAAU,CAAC;AAEtB,QAAA,SAAS,GAAG,QAAQ,CAAC;AAErB,QAAA,OAAO,GAAG,CAAC,oBAAoB,CAAC,CAAC"}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
/* eslint @typescript-eslint/no-unused-vars: 0 */ // --> OFF
|
|
4
|
+
const config_1 = require("./config");
|
|
5
|
+
const kafkamanager_1 = require("./../kafkamanager");
|
|
6
|
+
async function Sleep(milliseconds = 1000) {
|
|
7
|
+
return new Promise(resolve => setTimeout(resolve, milliseconds));
|
|
8
|
+
}
|
|
9
|
+
const km = new kafkamanager_1.KafkaManager({
|
|
10
|
+
clientId: config_1.CLIENT_ID + process.env.CLIENT_ID,
|
|
11
|
+
brokers: config_1.BROKERS,
|
|
12
|
+
timeout: config_1.TIMEOUT
|
|
13
|
+
});
|
|
14
|
+
const runme = async () => {
|
|
15
|
+
await km.ConsumerConnect(config_1.GROUP_ID + process.env.GROUP_ID);
|
|
16
|
+
await km.Subscribe([config_1.TOPIC], (key, partition, value, headers) => {
|
|
17
|
+
console.log({
|
|
18
|
+
key,
|
|
19
|
+
partition,
|
|
20
|
+
value,
|
|
21
|
+
headers
|
|
22
|
+
});
|
|
23
|
+
});
|
|
24
|
+
process.on("SIGINT", async () => {
|
|
25
|
+
console.log('=========SIGTERM START =======================');
|
|
26
|
+
await km.ConsumerDisconnect();
|
|
27
|
+
console.log('=========SIGTERM END =======================');
|
|
28
|
+
process.exit();
|
|
29
|
+
});
|
|
30
|
+
let iteration = 0;
|
|
31
|
+
for (;;) {
|
|
32
|
+
console.log('sleep: ' + iteration++);
|
|
33
|
+
await Sleep(1000);
|
|
34
|
+
}
|
|
35
|
+
};
|
|
36
|
+
/*
|
|
37
|
+
const errorTypes = ['unhandledRejection', 'uncaughtException']
|
|
38
|
+
const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']
|
|
39
|
+
|
|
40
|
+
errorTypes.forEach(type => {
|
|
41
|
+
process.on(type, async () => {
|
|
42
|
+
try {
|
|
43
|
+
console.log(`process.on ${type}`)
|
|
44
|
+
console.log('=========consumer.disconnect() START =======================')
|
|
45
|
+
await consumer.disconnect()
|
|
46
|
+
console.log('=========consumer.disconnect() END =======================')
|
|
47
|
+
process.exit(0)
|
|
48
|
+
} catch (_) {
|
|
49
|
+
process.exit(1)
|
|
50
|
+
}
|
|
51
|
+
})
|
|
52
|
+
})
|
|
53
|
+
|
|
54
|
+
signalTraps.forEach(type => {
|
|
55
|
+
process.once(type, async () => {
|
|
56
|
+
try {
|
|
57
|
+
console.log('=========consumer.disconnect() START [2] =======================')
|
|
58
|
+
await consumer.disconnect()
|
|
59
|
+
console.log('=========consumer.disconnect() END [2] =======================')
|
|
60
|
+
} finally {
|
|
61
|
+
process.kill(process.pid, type)
|
|
62
|
+
}
|
|
63
|
+
})
|
|
64
|
+
})
|
|
65
|
+
*/
|
|
66
|
+
runme().catch(e => console.error(`[example/producer] ${e.message}`, e));
|
|
67
|
+
//# sourceMappingURL=consume.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"consume.js","sourceRoot":"","sources":["../../src/kafkatesting/consume.ts"],"names":[],"mappings":";;AAAA,iDAAiD,CAAE,UAAU;AAC7D,qCAAuE;AAEvE,oDAAgD;AAGhD,KAAK,UAAU,KAAK,CAAC,YAAY,GAAG,IAAI;IACpC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;AACpE,CAAC;AAED,MAAM,EAAE,GAAG,IAAI,2BAAY,CAAC;IACxB,QAAQ,EAAE,kBAAS,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS;IAC3C,OAAO,EAAE,gBAAO;IAChB,OAAO,EAAE,gBAAO;CACnB,CAAC,CAAC;AAEH,MAAM,KAAK,GAAG,KAAK,IAAI,EAAE;IAErB,MAAM,EAAE,CAAC,eAAe,CAAC,iBAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IAE1D,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,cAAK,CAAC,EAAE,CAAC,GAAW,EAAE,SAAiB,EAAE,KAAa,EAAE,OAA+B,EAAE,EAAE;QAC3G,OAAO,CAAC,GAAG,CAAC;YACR,GAAG;YACH,SAAS;YACT,KAAK;YACL,OAAO;SACV,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,KAAK,IAAI,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,gDAAgD,CAAC,CAAA;QAC7D,MAAM,EAAE,CAAC,kBAAkB,EAAE,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,8CAA8C,CAAC,CAAA;QAC3D,OAAO,CAAC,IAAI,EAAE,CAAC;IACnB,CAAC,CAAC,CAAC;IAEH,IAAI,SAAS,GAAG,CAAC,CAAC;IAClB,SAAS;QACL,OAAO,CAAC,GAAG,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC,CAAC;QACrC,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC;KACrB;AACL,CAAC,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA6BE;AAEF,KAAK,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA"}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
/*
|
|
4
|
+
|
|
5
|
+
kafka example server #01 - Docker Compose File
|
|
6
|
+
----------------------------------------------
|
|
7
|
+
Note: In this example, the log retention is set to 24 hours (rather than default to 1 week)
|
|
8
|
+
https://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/
|
|
9
|
+
|
|
10
|
+
version: '2'
|
|
11
|
+
services:
|
|
12
|
+
zookeeper:
|
|
13
|
+
image: wurstmeister/zookeeper
|
|
14
|
+
ports:
|
|
15
|
+
- "2181:2181"
|
|
16
|
+
restart: unless-stopped
|
|
17
|
+
|
|
18
|
+
kafka:
|
|
19
|
+
image: wurstmeister/kafka
|
|
20
|
+
ports:
|
|
21
|
+
- "9092:9092"
|
|
22
|
+
environment:
|
|
23
|
+
DOCKER_API_VERSION: 1.22
|
|
24
|
+
KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92
|
|
25
|
+
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
|
|
26
|
+
KAFKA_CREATE_TOPICS: "topic-name2:3:1"
|
|
27
|
+
KAFKA_LOG_RETENTION_MS: 86400000
|
|
28
|
+
KAFKA_LOG_RETENTION_BYTES: -1
|
|
29
|
+
volumes:
|
|
30
|
+
- /var/run/docker.sock:/var/run/docker.sock
|
|
31
|
+
restart: unless-stopped
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
kafka example server #02 - Docker Compose File
|
|
35
|
+
----------------------------------------------
|
|
36
|
+
version: "3.9" # optional since v1.27.0
|
|
37
|
+
|
|
38
|
+
networks:
|
|
39
|
+
app-tier:
|
|
40
|
+
driver: bridge
|
|
41
|
+
|
|
42
|
+
services:
|
|
43
|
+
kafka:
|
|
44
|
+
image: 'bitnami/kafka:latest'
|
|
45
|
+
ports:
|
|
46
|
+
- '9092:9092'
|
|
47
|
+
networks:
|
|
48
|
+
- app-tier
|
|
49
|
+
environment:
|
|
50
|
+
- ALLOW_PLAINTEXT_LISTENER=yes
|
|
51
|
+
- KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
|
|
52
|
+
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092
|
|
53
|
+
|
|
54
|
+
*/
|
|
55
|
+
const config_1 = require("./config");
|
|
56
|
+
const kafkamanager_1 = require("./../kafkamanager");
|
|
57
|
+
async function Sleep(milliseconds = 1000) {
|
|
58
|
+
return new Promise(resolve => setTimeout(resolve, milliseconds));
|
|
59
|
+
}
|
|
60
|
+
const km = new kafkamanager_1.KafkaManager({
|
|
61
|
+
clientId: config_1.CLIENT_ID + process.env.CLIENT_ID,
|
|
62
|
+
brokers: config_1.BROKERS,
|
|
63
|
+
timeout: config_1.TIMEOUT
|
|
64
|
+
});
|
|
65
|
+
const runme = async () => {
|
|
66
|
+
await km.CreateTopic(config_1.TOPIC, config_1.PARTITIONS);
|
|
67
|
+
await km.ProducerConnect();
|
|
68
|
+
const count = 100000;
|
|
69
|
+
const sleepTime = 1000;
|
|
70
|
+
process.on("SIGINT", async () => {
|
|
71
|
+
console.log('=========SIGTERM START =======================');
|
|
72
|
+
await km.ProducerDisconnect();
|
|
73
|
+
console.log('=========SIGTERM END =======================');
|
|
74
|
+
process.exit();
|
|
75
|
+
});
|
|
76
|
+
for (let i = 0; i < count; i++) {
|
|
77
|
+
if (i % 100 === 0)
|
|
78
|
+
console.log(i);
|
|
79
|
+
const retVal = await km.SendMessages(config_1.TOPIC, [
|
|
80
|
+
{ key: 'key1', value: `hello world - ${i}` },
|
|
81
|
+
{ key: 'key2', value: 'hey hey! -2' },
|
|
82
|
+
{ key: 'key3', value: 'hey hey! -3' },
|
|
83
|
+
{ key: 'key4', value: 'hey hey! -4' },
|
|
84
|
+
{ key: 'key5', value: 'hey hey! -5' }
|
|
85
|
+
]);
|
|
86
|
+
if (i % 100 === 0) {
|
|
87
|
+
console.log(retVal);
|
|
88
|
+
console.log(` ------------=================> ${i}`);
|
|
89
|
+
}
|
|
90
|
+
if (sleepTime >= 0) {
|
|
91
|
+
await Sleep(sleepTime);
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
await km.ProducerDisconnect();
|
|
95
|
+
};
|
|
96
|
+
/*
|
|
97
|
+
const errorTypes = ['unhandledRejection', 'uncaughtException']
|
|
98
|
+
const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']
|
|
99
|
+
|
|
100
|
+
errorTypes.forEach(type => {
|
|
101
|
+
process.on(type, async () => {
|
|
102
|
+
try {
|
|
103
|
+
console.log(`process.on ${type}`)
|
|
104
|
+
await producer.disconnect()
|
|
105
|
+
process.exit(0)
|
|
106
|
+
} catch (_) {
|
|
107
|
+
process.exit(1)
|
|
108
|
+
}
|
|
109
|
+
})
|
|
110
|
+
})
|
|
111
|
+
|
|
112
|
+
signalTraps.forEach(type => {
|
|
113
|
+
process.once(type, async () => {
|
|
114
|
+
try {
|
|
115
|
+
await producer.disconnect()
|
|
116
|
+
} finally {
|
|
117
|
+
process.kill(process.pid, type)
|
|
118
|
+
}
|
|
119
|
+
})
|
|
120
|
+
})
|
|
121
|
+
*/
|
|
122
|
+
runme().catch(e => console.error(`[example/producer] ${e.message}`, e));
|
|
123
|
+
//# sourceMappingURL=produce.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"produce.js","sourceRoot":"","sources":["../../src/kafkatesting/produce.ts"],"names":[],"mappings":";;AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAmDE;AACF,qCAAyE;AAEzE,oDAAgD;AAEhD,KAAK,UAAU,KAAK,CAAC,YAAY,GAAG,IAAI;IACpC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;AACpE,CAAC;AAED,MAAM,EAAE,GAAG,IAAI,2BAAY,CAAC;IACxB,QAAQ,EAAE,kBAAS,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS;IAC3C,OAAO,EAAE,gBAAO;IAChB,OAAO,EAAE,gBAAO;CACnB,CAAC,CAAC;AAEH,MAAM,KAAK,GAAG,KAAK,IAAI,EAAE;IACrB,MAAM,EAAE,CAAC,WAAW,CAAC,cAAK,EAAE,mBAAU,CAAC,CAAC;IAExC,MAAM,EAAE,CAAC,eAAe,EAAE,CAAC;IAE3B,MAAM,KAAK,GAAG,MAAM,CAAC;IACrB,MAAM,SAAS,GAAG,IAAI,CAAC;IAEvB,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,KAAK,IAAI,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,gDAAgD,CAAC,CAAA;QAC7D,MAAM,EAAE,CAAC,kBAAkB,EAAE,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,8CAA8C,CAAC,CAAA;QAC3D,OAAO,CAAC,IAAI,EAAE,CAAC;IACnB,CAAC,CAAC,CAAC;IAEH,KAAK,IAAI,CAAC,GAAC,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1B,IAAI,CAAC,GAAG,GAAG,KAAK,CAAC;YAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,YAAY,CAAC,cAAK,EAAE;YACxC,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,iBAAiB,CAAC,EAAE,EAAE;YAC5C,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE;YACrC,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE;YACrC,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE;YACrC,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE;SACxC,CACA,CAAC;QAEF,IAAI,CAAC,GAAG,GAAG,KAAK,CAAC,EAAE;YACf,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACpB,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,EAAE,CAAC,CAAC;SACvD;QACD,IAAI,SAAS,IAAI,CAAC,EAAE;YAChB,MAAM,KAAK,CAAC,SAAS,CAAC,CAAC;SAC1B;KACJ;IAED,MAAM,EAAE,CAAC,kBAAkB,EAAE,CAAC;AAClC,CAAC,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;EAyBE;AAEF,KAAK,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA"}
|
package/package.json
CHANGED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
#!/bin/sh
|
|
2
|
+
# openssl req -nodes -new -x509 -keyout server.key -out server.cert
|
|
3
|
+
clear; \
|
|
4
|
+
export STS_PROJ_ROOT=./..; \
|
|
5
|
+
STSENVFILE=$STS_PROJ_ROOT/stsglobalresources/.env \
|
|
6
|
+
DB_SCRIPT_FOLDER=$STS_PROJ_ROOT/stsglobalresources/db-scripts \
|
|
7
|
+
REST01_PORT=3003 \
|
|
8
|
+
REST01_HOST_PORT=3003 \
|
|
9
|
+
REST01_SERVICE_NAME="STSRest01-3003" \
|
|
10
|
+
REST01_API_IDENTIFIER="https://stsmda.com.au/stsrest01api/v1.0/" \
|
|
11
|
+
REST01_ENDPOINT="https://stsrest.stsmda.org" \
|
|
12
|
+
MAX_CPU=1 \
|
|
13
|
+
AS_ENDPOINT=https://stscore.stsmda.org \
|
|
14
|
+
AS_HOST_PORT=3002 \
|
|
15
|
+
AS_PORT=3002 \
|
|
16
|
+
DB_HOST=localhost \
|
|
17
|
+
DB_PORT=5432 \
|
|
18
|
+
DB_PASSWORD=postgres \
|
|
19
|
+
HTTPS_SERVER_KEY_PATH=/etc/letsencrypt/live/stsmda.org/privkey.pem \
|
|
20
|
+
HTTPS_SERVER_CERT_PATH=/etc/letsencrypt/live/stsmda.org/fullchain.pem \
|
|
21
|
+
DEBUG=proc* \
|
|
22
|
+
PUBLISH_DEBUG=false \
|
|
23
|
+
UV_THREADPOOL_SIZE=64 \
|
|
24
|
+
GROUP_ID=01 \
|
|
25
|
+
CLIENT_ID=01 \
|
|
26
|
+
node ./dist/kafkatesting/consume.js
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
#!/bin/sh
|
|
2
|
+
# openssl req -nodes -new -x509 -keyout server.key -out server.cert
|
|
3
|
+
clear; \
|
|
4
|
+
export STS_PROJ_ROOT=./..; \
|
|
5
|
+
STSENVFILE=$STS_PROJ_ROOT/stsglobalresources/.env \
|
|
6
|
+
DB_SCRIPT_FOLDER=$STS_PROJ_ROOT/stsglobalresources/db-scripts \
|
|
7
|
+
REST01_PORT=3003 \
|
|
8
|
+
REST01_HOST_PORT=3003 \
|
|
9
|
+
REST01_SERVICE_NAME="STSRest01-3003" \
|
|
10
|
+
REST01_API_IDENTIFIER="https://stsmda.com.au/stsrest01api/v1.0/" \
|
|
11
|
+
REST01_ENDPOINT="https://stsrest.stsmda.org" \
|
|
12
|
+
MAX_CPU=1 \
|
|
13
|
+
AS_ENDPOINT=https://stscore.stsmda.org \
|
|
14
|
+
AS_HOST_PORT=3002 \
|
|
15
|
+
AS_PORT=3002 \
|
|
16
|
+
DB_HOST=localhost \
|
|
17
|
+
DB_PORT=5432 \
|
|
18
|
+
DB_PASSWORD=postgres \
|
|
19
|
+
HTTPS_SERVER_KEY_PATH=/etc/letsencrypt/live/stsmda.org/privkey.pem \
|
|
20
|
+
HTTPS_SERVER_CERT_PATH=/etc/letsencrypt/live/stsmda.org/fullchain.pem \
|
|
21
|
+
DEBUG=proc* \
|
|
22
|
+
PUBLISH_DEBUG=false \
|
|
23
|
+
UV_THREADPOOL_SIZE=64 \
|
|
24
|
+
GROUP_ID=02 \
|
|
25
|
+
CLIENT_ID=02 \
|
|
26
|
+
node ./dist/kafkatesting/consume.js
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
/*
|
|
2
|
+
|
|
3
|
+
kafka example server #01 - Docker Compose File
|
|
4
|
+
----------------------------------------------
|
|
5
|
+
Note: In this example, the log retention is set to 24 hours (rather than default to 1 week)
|
|
6
|
+
https://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/
|
|
7
|
+
|
|
8
|
+
version: '2'
|
|
9
|
+
services:
|
|
10
|
+
zookeeper:
|
|
11
|
+
image: wurstmeister/zookeeper
|
|
12
|
+
ports:
|
|
13
|
+
- "2181:2181"
|
|
14
|
+
restart: unless-stopped
|
|
15
|
+
|
|
16
|
+
kafka:
|
|
17
|
+
image: wurstmeister/kafka
|
|
18
|
+
ports:
|
|
19
|
+
- "9092:9092"
|
|
20
|
+
environment:
|
|
21
|
+
DOCKER_API_VERSION: 1.22
|
|
22
|
+
KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92
|
|
23
|
+
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
|
|
24
|
+
KAFKA_CREATE_TOPICS: "topic-name2:3:1"
|
|
25
|
+
KAFKA_LOG_RETENTION_MS: 86400000
|
|
26
|
+
KAFKA_LOG_RETENTION_BYTES: -1
|
|
27
|
+
volumes:
|
|
28
|
+
- /var/run/docker.sock:/var/run/docker.sock
|
|
29
|
+
restart: unless-stopped
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
kafka example server #02 - Docker Compose File
|
|
33
|
+
----------------------------------------------
|
|
34
|
+
version: "3.9" # optional since v1.27.0
|
|
35
|
+
|
|
36
|
+
networks:
|
|
37
|
+
app-tier:
|
|
38
|
+
driver: bridge
|
|
39
|
+
|
|
40
|
+
services:
|
|
41
|
+
kafka:
|
|
42
|
+
image: 'bitnami/kafka:latest'
|
|
43
|
+
ports:
|
|
44
|
+
- '9092:9092'
|
|
45
|
+
networks:
|
|
46
|
+
- app-tier
|
|
47
|
+
environment:
|
|
48
|
+
- ALLOW_PLAINTEXT_LISTENER=yes
|
|
49
|
+
- KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
|
|
50
|
+
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092
|
|
51
|
+
|
|
52
|
+
*/
|
|
53
|
+
import { STSOptionsBase } from '@nsshunt/stsutils'
|
|
54
|
+
|
|
55
|
+
import { Kafka, Producer, RecordMetadata, Consumer, IHeaders } from 'kafkajs'
|
|
56
|
+
|
|
57
|
+
export interface IKafkaManagerConfig {
|
|
58
|
+
clientId: string
|
|
59
|
+
brokers: string[]
|
|
60
|
+
timeout: number
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export class KafkaManager extends STSOptionsBase {
|
|
64
|
+
#kafka: Kafka
|
|
65
|
+
#producer: Producer | undefined;
|
|
66
|
+
#consumer: Consumer | undefined;
|
|
67
|
+
|
|
68
|
+
constructor(options: IKafkaManagerConfig) {
|
|
69
|
+
super(options);
|
|
70
|
+
|
|
71
|
+
this.#kafka = new Kafka({
|
|
72
|
+
clientId: options.clientId,
|
|
73
|
+
brokers: options.brokers
|
|
74
|
+
//brokers: ['localhost:9092', 'kafka2:9092'],
|
|
75
|
+
})
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
ProducerConnect = async (): Promise<void> => {
|
|
79
|
+
this.#producer = this.#kafka.producer()
|
|
80
|
+
return this.#producer.connect()
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
ProducerDisconnect = async () => {
|
|
84
|
+
await this.#producer?.disconnect()
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
ConsumerConnect = async (groupId: string): Promise<void> => {
|
|
88
|
+
this.#consumer = this.#kafka.consumer({ groupId })
|
|
89
|
+
return this.#consumer.connect()
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
ConsumerDisconnect = async (): Promise<void> => {
|
|
93
|
+
return this.#consumer?.disconnect();
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
CreateTopic = async (topic: string, partitions: number) => {
|
|
97
|
+
const admin = this.#kafka.admin()
|
|
98
|
+
await admin.connect()
|
|
99
|
+
await admin.createTopics({
|
|
100
|
+
validateOnly: false,
|
|
101
|
+
waitForLeaders: true,
|
|
102
|
+
timeout: this.options?.timeout,
|
|
103
|
+
topics: [
|
|
104
|
+
{
|
|
105
|
+
topic: topic,
|
|
106
|
+
numPartitions: partitions, // default: -1 (uses broker `num.partitions` configuration)
|
|
107
|
+
//replicationFactor: <Number>, // default: -1 (uses broker `default.replication.factor` configuration)
|
|
108
|
+
//replicaAssignment: <Array>, // Example: [{ partition: 0, replicas: [0,1,2] }] - default: []
|
|
109
|
+
//configEntries: <Array> // Example: [{ name: 'cleanup.policy', value: 'compact' }] - default: []
|
|
110
|
+
}
|
|
111
|
+
]
|
|
112
|
+
})
|
|
113
|
+
await admin.disconnect()
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
SendMessage = async(topic: string, message: { key: string, value: string} ): Promise<RecordMetadata[] | null> => {
|
|
117
|
+
if (this.#producer) {
|
|
118
|
+
return this.#producer.send({
|
|
119
|
+
topic,
|
|
120
|
+
messages: [ message ]
|
|
121
|
+
})
|
|
122
|
+
} else {
|
|
123
|
+
return null;
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
SendMessages = async(topic: string, messages: { key: string, value: string}[] ): Promise<RecordMetadata[] | null> => {
|
|
128
|
+
if (this.#producer) {
|
|
129
|
+
return this.#producer.send({
|
|
130
|
+
topic,
|
|
131
|
+
messages
|
|
132
|
+
})
|
|
133
|
+
} else {
|
|
134
|
+
return null;
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
Subscribe = async(topics: string[], cb: (key: string, partition: number, value: string, headers: IHeaders | undefined) => void) => {
|
|
139
|
+
await this.#consumer?.subscribe({ topics, fromBeginning: true })
|
|
140
|
+
|
|
141
|
+
await this.#consumer?.run({
|
|
142
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
143
|
+
eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
|
|
144
|
+
try {
|
|
145
|
+
if (message.key) {
|
|
146
|
+
if (message.value) {
|
|
147
|
+
cb(message.key.toString(), partition, message.value.toString(), message.headers)
|
|
148
|
+
} else {
|
|
149
|
+
cb(message.key.toString(), partition, "", message.headers)
|
|
150
|
+
}
|
|
151
|
+
} else {
|
|
152
|
+
if (message.value) {
|
|
153
|
+
cb("", partition, message.value?.toString(), message.headers)
|
|
154
|
+
} else {
|
|
155
|
+
cb("", partition, "", message.headers)
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
} catch (err) {
|
|
159
|
+
console.log(err);
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
})
|
|
163
|
+
}
|
|
164
|
+
}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/* eslint @typescript-eslint/no-unused-vars: 0 */ // --> OFF
|
|
2
|
+
import { TOPIC, GROUP_ID, CLIENT_ID, BROKERS, TIMEOUT } from './config'
|
|
3
|
+
|
|
4
|
+
import { KafkaManager } from './../kafkamanager'
|
|
5
|
+
import { JSONObject } from '@nsshunt/stsutils';
|
|
6
|
+
|
|
7
|
+
async function Sleep(milliseconds = 1000) {
|
|
8
|
+
return new Promise(resolve => setTimeout(resolve, milliseconds))
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
const km = new KafkaManager({
|
|
12
|
+
clientId: CLIENT_ID + process.env.CLIENT_ID,
|
|
13
|
+
brokers: BROKERS,
|
|
14
|
+
timeout: TIMEOUT
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
const runme = async () => {
|
|
18
|
+
|
|
19
|
+
await km.ConsumerConnect(GROUP_ID + process.env.GROUP_ID);
|
|
20
|
+
|
|
21
|
+
await km.Subscribe([TOPIC], (key: string, partition: number, value: string, headers: JSONObject | undefined) => {
|
|
22
|
+
console.log({
|
|
23
|
+
key,
|
|
24
|
+
partition,
|
|
25
|
+
value,
|
|
26
|
+
headers
|
|
27
|
+
});
|
|
28
|
+
});
|
|
29
|
+
|
|
30
|
+
process.on("SIGINT", async () => {
|
|
31
|
+
console.log('=========SIGTERM START =======================')
|
|
32
|
+
await km.ConsumerDisconnect();
|
|
33
|
+
console.log('=========SIGTERM END =======================')
|
|
34
|
+
process.exit();
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
let iteration = 0;
|
|
38
|
+
for (;;) {
|
|
39
|
+
console.log('sleep: ' + iteration++);
|
|
40
|
+
await Sleep(1000);
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/*
|
|
45
|
+
const errorTypes = ['unhandledRejection', 'uncaughtException']
|
|
46
|
+
const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']
|
|
47
|
+
|
|
48
|
+
errorTypes.forEach(type => {
|
|
49
|
+
process.on(type, async () => {
|
|
50
|
+
try {
|
|
51
|
+
console.log(`process.on ${type}`)
|
|
52
|
+
console.log('=========consumer.disconnect() START =======================')
|
|
53
|
+
await consumer.disconnect()
|
|
54
|
+
console.log('=========consumer.disconnect() END =======================')
|
|
55
|
+
process.exit(0)
|
|
56
|
+
} catch (_) {
|
|
57
|
+
process.exit(1)
|
|
58
|
+
}
|
|
59
|
+
})
|
|
60
|
+
})
|
|
61
|
+
|
|
62
|
+
signalTraps.forEach(type => {
|
|
63
|
+
process.once(type, async () => {
|
|
64
|
+
try {
|
|
65
|
+
console.log('=========consumer.disconnect() START [2] =======================')
|
|
66
|
+
await consumer.disconnect()
|
|
67
|
+
console.log('=========consumer.disconnect() END [2] =======================')
|
|
68
|
+
} finally {
|
|
69
|
+
process.kill(process.pid, type)
|
|
70
|
+
}
|
|
71
|
+
})
|
|
72
|
+
})
|
|
73
|
+
*/
|
|
74
|
+
|
|
75
|
+
runme().catch(e => console.error(`[example/producer] ${e.message}`, e))
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
/*
|
|
2
|
+
|
|
3
|
+
kafka example server #01 - Docker Compose File
|
|
4
|
+
----------------------------------------------
|
|
5
|
+
Note: In this example, the log retention is set to 24 hours (rather than the default of 1 week)
|
|
6
|
+
https://www.conduktor.io/kafka/kafka-topic-configuration-log-retention/
|
|
7
|
+
|
|
8
|
+
version: '2'
|
|
9
|
+
services:
|
|
10
|
+
zookeeper:
|
|
11
|
+
image: wurstmeister/zookeeper
|
|
12
|
+
ports:
|
|
13
|
+
- "2181:2181"
|
|
14
|
+
restart: unless-stopped
|
|
15
|
+
|
|
16
|
+
kafka:
|
|
17
|
+
image: wurstmeister/kafka
|
|
18
|
+
ports:
|
|
19
|
+
- "9092:9092"
|
|
20
|
+
environment:
|
|
21
|
+
DOCKER_API_VERSION: 1.22
|
|
22
|
+
KAFKA_ADVERTISED_HOST_NAME: 192.168.14.92
|
|
23
|
+
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
|
|
24
|
+
KAFKA_CREATE_TOPICS: "topic-name2:3:1"
|
|
25
|
+
KAFKA_LOG_RETENTION_MS: 86400000
|
|
26
|
+
KAFKA_LOG_RETENTION_BYTES: -1
|
|
27
|
+
volumes:
|
|
28
|
+
- /var/run/docker.sock:/var/run/docker.sock
|
|
29
|
+
restart: unless-stopped
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
kafka example server #02 - Docker Compose File
|
|
33
|
+
----------------------------------------------
|
|
34
|
+
version: "3.9" # optional since v1.27.0
|
|
35
|
+
|
|
36
|
+
networks:
|
|
37
|
+
app-tier:
|
|
38
|
+
driver: bridge
|
|
39
|
+
|
|
40
|
+
services:
|
|
41
|
+
kafka:
|
|
42
|
+
image: 'bitnami/kafka:latest'
|
|
43
|
+
ports:
|
|
44
|
+
- '9092:9092'
|
|
45
|
+
networks:
|
|
46
|
+
- app-tier
|
|
47
|
+
environment:
|
|
48
|
+
- ALLOW_PLAINTEXT_LISTENER=yes
|
|
49
|
+
- KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
|
|
50
|
+
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://192.168.14.92:9092
|
|
51
|
+
|
|
52
|
+
*/
|
|
53
|
+
import { TOPIC, CLIENT_ID, BROKERS, PARTITIONS, TIMEOUT } from './config'
|
|
54
|
+
|
|
55
|
+
import { KafkaManager } from './../kafkamanager'
|
|
56
|
+
|
|
57
|
+
async function Sleep(milliseconds = 1000) {
|
|
58
|
+
return new Promise(resolve => setTimeout(resolve, milliseconds))
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
const km = new KafkaManager({
|
|
62
|
+
clientId: CLIENT_ID + process.env.CLIENT_ID,
|
|
63
|
+
brokers: BROKERS,
|
|
64
|
+
timeout: TIMEOUT
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
const runme = async () => {
|
|
68
|
+
await km.CreateTopic(TOPIC, PARTITIONS);
|
|
69
|
+
|
|
70
|
+
await km.ProducerConnect();
|
|
71
|
+
|
|
72
|
+
const count = 100000;
|
|
73
|
+
const sleepTime = 1000;
|
|
74
|
+
|
|
75
|
+
process.on("SIGINT", async () => {
|
|
76
|
+
console.log('=========SIGTERM START =======================')
|
|
77
|
+
await km.ProducerDisconnect();
|
|
78
|
+
console.log('=========SIGTERM END =======================')
|
|
79
|
+
process.exit();
|
|
80
|
+
});
|
|
81
|
+
|
|
82
|
+
for (let i=0; i < count; i++) {
|
|
83
|
+
if (i % 100 === 0) console.log(i);
|
|
84
|
+
const retVal = await km.SendMessages(TOPIC, [
|
|
85
|
+
{ key: 'key1', value: `hello world - ${i}` },
|
|
86
|
+
{ key: 'key2', value: 'hey hey! -2' },
|
|
87
|
+
{ key: 'key3', value: 'hey hey! -3' },
|
|
88
|
+
{ key: 'key4', value: 'hey hey! -4' },
|
|
89
|
+
{ key: 'key5', value: 'hey hey! -5' }
|
|
90
|
+
]
|
|
91
|
+
);
|
|
92
|
+
|
|
93
|
+
if (i % 100 === 0) {
|
|
94
|
+
console.log(retVal);
|
|
95
|
+
console.log(` ------------=================> ${i}`);
|
|
96
|
+
}
|
|
97
|
+
if (sleepTime >= 0) {
|
|
98
|
+
await Sleep(sleepTime);
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
await km.ProducerDisconnect();
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
/*
|
|
106
|
+
const errorTypes = ['unhandledRejection', 'uncaughtException']
|
|
107
|
+
const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']
|
|
108
|
+
|
|
109
|
+
errorTypes.forEach(type => {
|
|
110
|
+
process.on(type, async () => {
|
|
111
|
+
try {
|
|
112
|
+
console.log(`process.on ${type}`)
|
|
113
|
+
await producer.disconnect()
|
|
114
|
+
process.exit(0)
|
|
115
|
+
} catch (_) {
|
|
116
|
+
process.exit(1)
|
|
117
|
+
}
|
|
118
|
+
})
|
|
119
|
+
})
|
|
120
|
+
|
|
121
|
+
signalTraps.forEach(type => {
|
|
122
|
+
process.once(type, async () => {
|
|
123
|
+
try {
|
|
124
|
+
await producer.disconnect()
|
|
125
|
+
} finally {
|
|
126
|
+
process.kill(process.pid, type)
|
|
127
|
+
}
|
|
128
|
+
})
|
|
129
|
+
})
|
|
130
|
+
*/
|
|
131
|
+
|
|
132
|
+
// Kick off the producer loop; surface any unhandled failure on stderr.
runme().catch((err) => {
    console.error(`[example/producer] ${err.message}`, err);
});
|
|
133
|
+
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { STSOptionsBase } from '@nsshunt/stsutils';
|
|
2
|
+
import { RecordMetadata, IHeaders } from 'kafkajs';
|
|
3
|
+
export interface IKafkaManagerConfig {
|
|
4
|
+
clientId: string;
|
|
5
|
+
brokers: string[];
|
|
6
|
+
timeout: number;
|
|
7
|
+
}
|
|
8
|
+
export declare class KafkaManager extends STSOptionsBase {
|
|
9
|
+
#private;
|
|
10
|
+
constructor(options: IKafkaManagerConfig);
|
|
11
|
+
ProducerConnect: () => Promise<void>;
|
|
12
|
+
ProducerDisconnect: () => Promise<void>;
|
|
13
|
+
ConsumerConnect: (groupId: string) => Promise<void>;
|
|
14
|
+
ConsumerDisconnect: () => Promise<void>;
|
|
15
|
+
CreateTopic: (topic: string, partitions: number) => Promise<void>;
|
|
16
|
+
SendMessage: (topic: string, message: {
|
|
17
|
+
key: string;
|
|
18
|
+
value: string;
|
|
19
|
+
}) => Promise<RecordMetadata[] | null>;
|
|
20
|
+
SendMessages: (topic: string, messages: {
|
|
21
|
+
key: string;
|
|
22
|
+
value: string;
|
|
23
|
+
}[]) => Promise<RecordMetadata[] | null>;
|
|
24
|
+
Subscribe: (topics: string[], cb: (key: string, partition: number, value: string, headers: IHeaders | undefined) => void) => Promise<void>;
|
|
25
|
+
}
|
|
26
|
+
//# sourceMappingURL=kafkamanager.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafkamanager.d.ts","sourceRoot":"","sources":["../src/kafkamanager.ts"],"names":[],"mappings":"AAoDA,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAA;AAElD,OAAO,EAAmB,cAAc,EAAY,QAAQ,EAAE,MAAM,SAAS,CAAA;AAE7E,MAAM,WAAW,mBAAmB;IAChC,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,OAAO,EAAE,MAAM,CAAA;CAClB;AAED,qBAAa,YAAa,SAAQ,cAAc;;gBAKhC,OAAO,EAAE,mBAAmB;IAUxC,eAAe,QAAa,QAAQ,IAAI,CAAC,CAGxC;IAED,kBAAkB,sBAEjB;IAED,eAAe,YAAmB,MAAM,KAAG,QAAQ,IAAI,CAAC,CAGvD;IAED,kBAAkB,QAAa,QAAQ,IAAI,CAAC,CAE3C;IAED,WAAW,UAAiB,MAAM,cAAc,MAAM,mBAkBrD;IAED,WAAW,UAAgB,MAAM,WAAW;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAC,KAAI,QAAQ,cAAc,EAAE,GAAG,IAAI,CAAC,CAS5G;IAED,YAAY,UAAgB,MAAM,YAAY;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAC,EAAE,KAAI,QAAQ,cAAc,EAAE,GAAG,IAAI,CAAC,CAShH;IAED,SAAS,WAAiB,MAAM,EAAE,YAAY,MAAM,aAAa,MAAM,SAAS,MAAM,WAAW,QAAQ,GAAG,SAAS,KAAK,IAAI,mBAyB7H;CACJ"}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
export declare const TOPIC = "appframework-test-logs";
|
|
2
|
+
export declare const PARTITIONS = 3;
|
|
3
|
+
export declare const TIMEOUT = 5000;
|
|
4
|
+
export declare const GROUP_ID = "my-group";
|
|
5
|
+
export declare const CLIENT_ID = "my-app";
|
|
6
|
+
export declare const BROKERS: string[];
|
|
7
|
+
//# sourceMappingURL=config.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/config.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,KAAK,2BAA2B,CAAC;AAC9C,eAAO,MAAM,UAAU,IAAI,CAAC;AAC5B,eAAO,MAAM,OAAO,OAAO,CAAC;AAE5B,eAAO,MAAM,QAAQ,aAAa,CAAC;AAEnC,eAAO,MAAM,SAAS,WAAW,CAAC;AAElC,eAAO,MAAM,OAAO,UAAyB,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"consume.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/consume.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"produce.d.ts","sourceRoot":"","sources":["../../src/kafkatesting/produce.ts"],"names":[],"mappings":""}
|