@blazedpath/commons 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -0
- package/blz-base/health/index.js +215 -0
- package/blz-base/index.js +1466 -0
- package/blz-cache/LruCache.js +44 -0
- package/blz-cache/index.js +29 -0
- package/blz-config/index.js +434 -0
- package/blz-core/index.js +364 -0
- package/blz-cryptography/index.js +54 -0
- package/blz-datetimes/index.js +356 -0
- package/blz-file/example.dat +2545 -0
- package/blz-file/fileService.js +205 -0
- package/blz-file/index.js +94 -0
- package/blz-file/index.test.js +31 -0
- package/blz-file/lab.js +33 -0
- package/blz-hazelcast/index.js +189 -0
- package/blz-hazelcast/lib/credentials.js +25 -0
- package/blz-hazelcast/lib/credentialsFactory.js +12 -0
- package/blz-hazelcast/lib/hazelcastCache.js +234 -0
- package/blz-iterable/index.js +446 -0
- package/blz-json-schema/index.js +11 -0
- package/blz-jwt/index.js +121 -0
- package/blz-kafka/index.js +522 -0
- package/blz-math/index.js +131 -0
- package/blz-mongodb/index.js +326 -0
- package/blz-rds/__test__/scape.test.js +58 -0
- package/blz-rds/blz-rds-executor.js +578 -0
- package/blz-rds/blz-rds-helper.js +310 -0
- package/blz-rds/commands/core/add.js +13 -0
- package/blz-rds/commands/core/and.js +18 -0
- package/blz-rds/commands/core/asc.js +10 -0
- package/blz-rds/commands/core/avg.js +10 -0
- package/blz-rds/commands/core/column-ref.js +8 -0
- package/blz-rds/commands/core/count-distinct.js +10 -0
- package/blz-rds/commands/core/count.js +10 -0
- package/blz-rds/commands/core/decimal.js +8 -0
- package/blz-rds/commands/core/desc.js +10 -0
- package/blz-rds/commands/core/distinct.js +10 -0
- package/blz-rds/commands/core/divide.js +11 -0
- package/blz-rds/commands/core/embedded-exists.js +17 -0
- package/blz-rds/commands/core/embedded-select.js +17 -0
- package/blz-rds/commands/core/equals.js +9 -0
- package/blz-rds/commands/core/false.js +8 -0
- package/blz-rds/commands/core/greater-or-equal.js +9 -0
- package/blz-rds/commands/core/greater.js +9 -0
- package/blz-rds/commands/core/in.js +9 -0
- package/blz-rds/commands/core/integer.js +8 -0
- package/blz-rds/commands/core/is-not-null.js +11 -0
- package/blz-rds/commands/core/is-null-or-value.js +10 -0
- package/blz-rds/commands/core/is-null.js +11 -0
- package/blz-rds/commands/core/less-or-equal.js +9 -0
- package/blz-rds/commands/core/less-unary.js +12 -0
- package/blz-rds/commands/core/less.js +9 -0
- package/blz-rds/commands/core/like.js +12 -0
- package/blz-rds/commands/core/max.js +10 -0
- package/blz-rds/commands/core/min.js +10 -0
- package/blz-rds/commands/core/multiply.js +13 -0
- package/blz-rds/commands/core/not-equals.js +9 -0
- package/blz-rds/commands/core/not-in.js +9 -0
- package/blz-rds/commands/core/not.js +13 -0
- package/blz-rds/commands/core/null.js +8 -0
- package/blz-rds/commands/core/nvl.js +11 -0
- package/blz-rds/commands/core/or.js +13 -0
- package/blz-rds/commands/core/parameter.js +34 -0
- package/blz-rds/commands/core/remainder.js +16 -0
- package/blz-rds/commands/core/string.js +8 -0
- package/blz-rds/commands/core/subtract.js +13 -0
- package/blz-rds/commands/core/sum.js +10 -0
- package/blz-rds/commands/core/true.js +8 -0
- package/blz-rds/commands/core/tuple.js +13 -0
- package/blz-rds/commands/datetimes/add-days.js +11 -0
- package/blz-rds/commands/datetimes/add-hours.js +11 -0
- package/blz-rds/commands/datetimes/add-milliseconds.js +11 -0
- package/blz-rds/commands/datetimes/add-minutes.js +11 -0
- package/blz-rds/commands/datetimes/add-months.js +11 -0
- package/blz-rds/commands/datetimes/add-seconds.js +11 -0
- package/blz-rds/commands/datetimes/add-years.js +11 -0
- package/blz-rds/commands/datetimes/date-diff.js +11 -0
- package/blz-rds/commands/datetimes/date.js +12 -0
- package/blz-rds/commands/datetimes/datetime-diff.js +11 -0
- package/blz-rds/commands/datetimes/datetime.js +15 -0
- package/blz-rds/commands/datetimes/day.js +10 -0
- package/blz-rds/commands/datetimes/hour.js +10 -0
- package/blz-rds/commands/datetimes/millisecond.js +10 -0
- package/blz-rds/commands/datetimes/minute.js +10 -0
- package/blz-rds/commands/datetimes/month-text.js +10 -0
- package/blz-rds/commands/datetimes/month.js +10 -0
- package/blz-rds/commands/datetimes/now.js +9 -0
- package/blz-rds/commands/datetimes/second.js +10 -0
- package/blz-rds/commands/datetimes/subtract-days.js +11 -0
- package/blz-rds/commands/datetimes/subtract-hours.js +11 -0
- package/blz-rds/commands/datetimes/subtract-milliseconds.js +11 -0
- package/blz-rds/commands/datetimes/subtract-minutes.js +11 -0
- package/blz-rds/commands/datetimes/subtract-seconds.js +11 -0
- package/blz-rds/commands/datetimes/time-diff.js +11 -0
- package/blz-rds/commands/datetimes/time.js +13 -0
- package/blz-rds/commands/datetimes/today.js +9 -0
- package/blz-rds/commands/datetimes/week-day-text.js +10 -0
- package/blz-rds/commands/datetimes/week-day.js +10 -0
- package/blz-rds/commands/datetimes/week.js +10 -0
- package/blz-rds/commands/datetimes/year.js +10 -0
- package/blz-rds/commands/math/abs.js +10 -0
- package/blz-rds/commands/math/acos.js +10 -0
- package/blz-rds/commands/math/asin.js +10 -0
- package/blz-rds/commands/math/atan.js +10 -0
- package/blz-rds/commands/math/atan2.js +11 -0
- package/blz-rds/commands/math/ceil.js +10 -0
- package/blz-rds/commands/math/cos.js +10 -0
- package/blz-rds/commands/math/cosh.js +10 -0
- package/blz-rds/commands/math/exp.js +10 -0
- package/blz-rds/commands/math/floor.js +10 -0
- package/blz-rds/commands/math/log.js +18 -0
- package/blz-rds/commands/math/log10.js +10 -0
- package/blz-rds/commands/math/pow.js +11 -0
- package/blz-rds/commands/math/random.js +9 -0
- package/blz-rds/commands/math/round.js +18 -0
- package/blz-rds/commands/math/sign.js +10 -0
- package/blz-rds/commands/math/sin.js +10 -0
- package/blz-rds/commands/math/sinh.js +10 -0
- package/blz-rds/commands/math/sqrt.js +10 -0
- package/blz-rds/commands/math/tan.js +10 -0
- package/blz-rds/commands/math/tanh.js +10 -0
- package/blz-rds/commands/math/trunc.js +18 -0
- package/blz-rds/commands/strings/concat.js +20 -0
- package/blz-rds/commands/strings/contains.js +12 -0
- package/blz-rds/commands/strings/ends-with.js +12 -0
- package/blz-rds/commands/strings/index-of.js +11 -0
- package/blz-rds/commands/strings/is-null-or-empty.js +11 -0
- package/blz-rds/commands/strings/is-null-or-white-space.js +11 -0
- package/blz-rds/commands/strings/join.js +22 -0
- package/blz-rds/commands/strings/last-index-of.js +11 -0
- package/blz-rds/commands/strings/length.js +10 -0
- package/blz-rds/commands/strings/pad-left.js +20 -0
- package/blz-rds/commands/strings/pad-right.js +20 -0
- package/blz-rds/commands/strings/replace.js +12 -0
- package/blz-rds/commands/strings/starts-with.js +12 -0
- package/blz-rds/commands/strings/substring.js +12 -0
- package/blz-rds/commands/strings/to-lower.js +10 -0
- package/blz-rds/commands/strings/to-upper.js +10 -0
- package/blz-rds/commands/strings/trim-end.js +10 -0
- package/blz-rds/commands/strings/trim-start.js +10 -0
- package/blz-rds/commands/strings/trim.js +10 -0
- package/blz-rds/index.js +744 -0
- package/blz-rds-mysql/base.js +857 -0
- package/blz-rds-mysql/connection-manager.js +129 -0
- package/blz-rds-mysql/execute-bulk-insert.js +35 -0
- package/blz-rds-mysql/execute-bulk-merge.js +45 -0
- package/blz-rds-mysql/execute-non-query.js +34 -0
- package/blz-rds-mysql/execute-query.js +50 -0
- package/blz-rds-mysql/index.js +41 -0
- package/blz-rds-mysql/stored-procedure.js +207 -0
- package/blz-rds-mysql/syntaxis.json +114 -0
- package/blz-rds-mysqlx/base.js +846 -0
- package/blz-rds-mysqlx/connection-manager.js +141 -0
- package/blz-rds-mysqlx/execute-bulk-insert.js +35 -0
- package/blz-rds-mysqlx/execute-bulk-merge.js +45 -0
- package/blz-rds-mysqlx/execute-non-query.js +29 -0
- package/blz-rds-mysqlx/execute-query.js +39 -0
- package/blz-rds-mysqlx/index.js +41 -0
- package/blz-rds-mysqlx/stored-procedure.js +179 -0
- package/blz-rds-mysqlx/syntaxis.json +105 -0
- package/blz-rds-oracle/index.js +540 -0
- package/blz-rds-oracle/syntaxis.json +112 -0
- package/blz-rds-postgres/base.js +861 -0
- package/blz-rds-postgres/connection-manager.js +225 -0
- package/blz-rds-postgres/execute-bulk-insert.js +81 -0
- package/blz-rds-postgres/execute-bulk-merge.js +93 -0
- package/blz-rds-postgres/execute-non-query.js +23 -0
- package/blz-rds-postgres/execute-query.js +37 -0
- package/blz-rds-postgres/index.js +41 -0
- package/blz-rds-postgres/result-set.js +51 -0
- package/blz-rds-postgres/stored-procedure.js +116 -0
- package/blz-rds-postgres/syntaxis.json +114 -0
- package/blz-redis/index.js +217 -0
- package/blz-redis/lib/redisCache.js +265 -0
- package/blz-regex/index.js +25 -0
- package/blz-security/.eslintrc.js +15 -0
- package/blz-security/__test__/AuthorizationKpn.yaml +1043 -0
- package/blz-security/__test__/FinancingSetting.yaml +177 -0
- package/blz-security/__test__/KpnConfigPortal.yaml +330 -0
- package/blz-security/__test__/OrderManagement.yaml +5190 -0
- package/blz-security/__test__/Security.yaml +128 -0
- package/blz-security/__test__/autorization.test.js +105 -0
- package/blz-security/__test__/orderManagement.test.js +26 -0
- package/blz-security/__test__/secureUrl.test.js +79 -0
- package/blz-security/__test__/solveMergeRule.test.js +109 -0
- package/blz-security/__test__/sqlInjectionGuard.test.js +203 -0
- package/blz-security/__test__/xssGuard.test.js +204 -0
- package/blz-security/authorizationService.js +536 -0
- package/blz-security/config/global.js +8 -0
- package/blz-security/config/welcome +8 -0
- package/blz-security/doc/README.md +75 -0
- package/blz-security/filescanner/index.js +46 -0
- package/blz-security/helpers/consts.js +229 -0
- package/blz-security/helpers/utils.js +267 -0
- package/blz-security/implementations/cache.js +90 -0
- package/blz-security/implementations/oidc.js +404 -0
- package/blz-security/implementations/pkceCacheStore.js +23 -0
- package/blz-security/implementations/saml.js +10 -0
- package/blz-security/implementations/uma.js +63 -0
- package/blz-security/implementations/webAuthn.js +9 -0
- package/blz-security/implementations/wstg.js +72 -0
- package/blz-security/index.js +77 -0
- package/blz-security/lab/index.js +27 -0
- package/blz-security/middleware/HapiServerAzureAd.js +641 -0
- package/blz-security/middleware/HapiServerKeycloak.js +840 -0
- package/blz-security/middleware/HapiServerSimToken.js +247 -0
- package/blz-security/middleware/hapi.js +515 -0
- package/blz-security/middleware/hapiServer.js +974 -0
- package/blz-security/navigationMemoryRepository.js +15 -0
- package/blz-security/navigationMongoDbRepository.js +73 -0
- package/blz-security/secureUrlService.js +47 -0
- package/blz-security/securityService.js +409 -0
- package/blz-security/sqlInjectionGuard.js +162 -0
- package/blz-security/templates/forbidden.html +0 -0
- package/blz-security/templates/session-iframe-azure-ad.html +7 -0
- package/blz-security/templates/session-iframe.html +73 -0
- package/blz-security/templates/unauthorized.html +1 -0
- package/blz-security/xssGuard.js +87 -0
- package/blz-strings/index.js +167 -0
- package/blz-uuid/index.js +7 -0
- package/blz-yaml/index.js +19 -0
- package/index.js +84 -0
- package/package.json +97 -0
- package/process-managers/index.js +422 -0
package/blz-jwt/index.js
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
const jwt = require('jsonwebtoken');
|
|
2
|
+
|
|
3
|
+
/**
 * Error thrown when the JWT_SECRET_KEY environment variable is missing or blank.
 * Exposes both `name` and `code` as 'JwtSecretKeyNotDefined' so callers can
 * branch on either property.
 */
class JwtSecretKeyNotDefined extends Error {
    constructor() {
        super('JWT_SECRET_KEY is not defined.');
        Object.assign(this, {
            name: 'JwtSecretKeyNotDefined',
            code: 'JwtSecretKeyNotDefined',
        });
    }
}
|
|
10
|
+
|
|
11
|
+
/**
 * Reads the JWT signing secret from the JWT_SECRET_KEY environment variable.
 * @returns {string} The configured secret.
 * @throws {JwtSecretKeyNotDefined} When the variable is unset or blank.
 */
function getJwtSecretKey() {
    const secret = process.env.JWT_SECRET_KEY;
    const isBlank = !secret || secret.trim() === '';
    if (isBlank) {
        throw new JwtSecretKeyNotDefined();
    }
    return secret;
}
|
|
18
|
+
|
|
19
|
+
module.exports = {
|
|
20
|
+
JwtSecretKeyNotDefined,
|
|
21
|
+
|
|
22
|
+
jwtSign: function (payload, secret, algorithm) {
|
|
23
|
+
try {
|
|
24
|
+
switch (algorithm) {
|
|
25
|
+
case 'HMAC-SHA256':
|
|
26
|
+
return jwt.sign(payload, secret, { algorithm: 'HS256' });
|
|
27
|
+
case 'HMAC-SHA384':
|
|
28
|
+
return jwt.sign(payload, secret, { algorithm: 'HS384' });
|
|
29
|
+
case 'HMAC-SHA512':
|
|
30
|
+
return jwt.sign(payload, secret, { algorithm: 'HS512' });
|
|
31
|
+
default:
|
|
32
|
+
throw new Error('Invalid encryption algorithm');
|
|
33
|
+
}
|
|
34
|
+
} catch (jwterr) {
|
|
35
|
+
if (jwterr.name === 'TokenExpiredError') {
|
|
36
|
+
let err = new Error();
|
|
37
|
+
err.code = 'JwtExpired';
|
|
38
|
+
err.data = { expiredAt: jwterr.expiredAt };
|
|
39
|
+
throw err;
|
|
40
|
+
} else {
|
|
41
|
+
throw jwterr;
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
},
|
|
45
|
+
|
|
46
|
+
jwtVerify: function (token, secret) {
|
|
47
|
+
try {
|
|
48
|
+
return jwt.verify(token, secret);
|
|
49
|
+
} catch (jwterr) {
|
|
50
|
+
if (jwterr.name === 'TokenExpiredError') {
|
|
51
|
+
let err = new Error();
|
|
52
|
+
err.code = 'JwtExpired';
|
|
53
|
+
err.data = { expiredAt: jwterr.expiredAt };
|
|
54
|
+
throw err;
|
|
55
|
+
} else {
|
|
56
|
+
let err = new Error();
|
|
57
|
+
err.code = 'JwtError';
|
|
58
|
+
err.data = {};
|
|
59
|
+
throw err;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
},
|
|
63
|
+
|
|
64
|
+
jwtDecode: function (token) {
|
|
65
|
+
try {
|
|
66
|
+
return jwt.decode(token);
|
|
67
|
+
} catch (jwterr) {
|
|
68
|
+
if (jwterr.name === 'TokenExpiredError') {
|
|
69
|
+
let err = new Error();
|
|
70
|
+
err.code = 'JwtExpired';
|
|
71
|
+
err.data = { expiredAt: jwterr.expiredAt };
|
|
72
|
+
throw err;
|
|
73
|
+
} else {
|
|
74
|
+
let err = new Error();
|
|
75
|
+
err.code = 'JwtError';
|
|
76
|
+
err.data = {};
|
|
77
|
+
throw err;
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
},
|
|
81
|
+
|
|
82
|
+
jwtCreateToken: function (id, expiresIn = '10s') {
|
|
83
|
+
return jwt.sign({ id }, getJwtSecretKey(), { expiresIn });
|
|
84
|
+
},
|
|
85
|
+
|
|
86
|
+
jwtGetId: function (token) {
|
|
87
|
+
try {
|
|
88
|
+
const decoded = jwt.verify(token, getJwtSecretKey());
|
|
89
|
+
return decoded.id;
|
|
90
|
+
} catch (err) {
|
|
91
|
+
const error = new Error('Invalid or expired token');
|
|
92
|
+
error.code = 'JwtInvalid';
|
|
93
|
+
error.data = err.message;
|
|
94
|
+
throw error;
|
|
95
|
+
}
|
|
96
|
+
},
|
|
97
|
+
|
|
98
|
+
jwtEncodeUrl: function (url, expiresIn = '10s') {
|
|
99
|
+
const [path, query] = url.split('?');
|
|
100
|
+
const parts = path.split('/').map(part => {
|
|
101
|
+
if (/^\d+$/.test(part)) {
|
|
102
|
+
return jwt.sign({ id: parseInt(part) }, getJwtSecretKey(), { expiresIn });
|
|
103
|
+
}
|
|
104
|
+
return part;
|
|
105
|
+
});
|
|
106
|
+
return parts.join('/') + (query ? `?${query}` : '');
|
|
107
|
+
},
|
|
108
|
+
|
|
109
|
+
jwtDecodeUrl: function (url) {
|
|
110
|
+
const [path, query] = url.split('?');
|
|
111
|
+
const parts = path.split('/').map(part => {
|
|
112
|
+
try {
|
|
113
|
+
const decoded = jwt.verify(part, getJwtSecretKey());
|
|
114
|
+
return decoded.id?.toString() ?? part;
|
|
115
|
+
} catch {
|
|
116
|
+
return part;
|
|
117
|
+
}
|
|
118
|
+
});
|
|
119
|
+
return parts.join('/') + (query ? `?${query}` : '');
|
|
120
|
+
}
|
|
121
|
+
};
|
|
@@ -0,0 +1,522 @@
|
|
|
1
|
+
// Backend Kafka helpers: client config building, cached producer/admin
// clients, and consumer bootstrap for blz-suite/blz-builder.
|
|
2
|
+
const Kafkajs = require('kafkajs');
|
|
3
|
+
const Fs = require('fs-extra');
|
|
4
|
+
const retry = require('async-retry');
|
|
5
|
+
const logger = require('pino')();
|
|
6
|
+
// let blzCorepath= (true) ? '../../blz-core/dual/index' : '../../blz-core/sources-dual/index';
|
|
7
|
+
// const BlzCore = require(blzCorepath);
|
|
8
|
+
/**
 * Converts a Kafka message key/value to a string.
 * Handles: null (passed through), strings, numbers, Uint8Array (comma-joined
 * via its toString), booleans ('true'/'false'), Dates (ISO via toJSON), and
 * serialized Node Buffers ({ type: 'Buffer', data: [...] }).
 * @param {*} value - The value to convert.
 * @returns {string|null} The string form, or null when value is null.
 * @throws {Error} For any unsupported type. (The original threw via
 *   `this.errorInvalidConversion`, but `this` is not bound here, so the
 *   error path itself crashed with a TypeError.)
 */
let convertToString = function (value) {
    if (value === null)
        return null;
    // Use Object.prototype.toString explicitly instead of the bare global
    // `toString`, which only works by accident via globalThis.
    const valueType = Object.prototype.toString.call(value);
    if (valueType === '[object String]')
        return value;
    if (valueType === '[object Number]')
        return value.toString();
    if (valueType === '[object Uint8Array]')
        return value.toString();
    if (valueType === '[object Boolean]')
        return value ? 'true' : 'false';
    if (valueType === '[object Date]')
        return value.toJSON();
    if (valueType === '[object Object]' && value.type === 'Buffer' && value.data)
        return Buffer.from(value.data).toString();
    throw new Error(`Invalid conversion to string for value of type ${valueType}`);
};
|
|
26
|
+
|
|
27
|
+
let producers = {};
|
|
28
|
+
|
|
29
|
+
/**
 * Translates a blz connection definition into a kafkajs client configuration.
 * Timeout settings fall back to defaults; the retry, ssl and sasl sections are
 * only materialized when the connection requests them. File-based SSL
 * credentials (pfx/key/cert/ca/crl) are loaded from disk via convertFileArg.
 * @param {Object} connection - Connection definition from config.connections.
 * @returns {Promise<Object>} A kafkajs-compatible config object.
 */
const getConfig = async function (connection) {
    const kafkaConfig = {};

    if (connection.brokers)
        kafkaConfig.brokers = connection.brokers.split(',');

    kafkaConfig.connectionTimeout = connection.connectionTimeout || 3000;
    kafkaConfig.authenticationTimeout = connection.authenticationTimeout || 1000;
    kafkaConfig.reauthenticationThreshold = connection.reauthenticationThreshold || 10000;
    if (connection.requestTimeout)
        kafkaConfig.requestTimeout = connection.requestTimeout;

    const wantsRetry = connection.connectionRetries
        || connection.connectionInitialRetryTime
        || connection.connectionMaxRetryTime
        || connection.connectionRetryFactor;
    if (wantsRetry) {
        const retry = {};
        if (connection.connectionRetries)
            retry.retries = connection.connectionRetries;
        if (connection.connectionInitialRetryTime)
            retry.initialRetryTime = connection.connectionInitialRetryTime;
        if (connection.connectionMaxRetryTime)
            retry.maxRetryTime = connection.connectionMaxRetryTime;
        if (connection.connectionRetryFactor)
            retry.factor = connection.connectionRetryFactor;
        kafkaConfig.retry = retry;
    }

    if (connection.ssl) {
        const ssl = {};
        if (connection.sslPfx)
            ssl.pfx = await convertFileArg(connection.sslPfx);
        if (connection.sslKey)
            ssl.key = await convertFileArg(connection.sslKey);
        if (connection.sslPassphrase)
            ssl.passphrase = connection.sslPassphrase;
        if (connection.sslCert)
            ssl.cert = await convertFileArg(connection.sslCert);
        if (connection.sslCa)
            ssl.ca = await convertFileArg(connection.sslCa);
        if (connection.sslCrl)
            ssl.crl = await convertFileArg(connection.sslCrl);
        if (connection.sslCiphers)
            ssl.ciphers = connection.sslCiphers;
        if (connection.sslSecureProtocol)
            ssl.secureProtocol = connection.sslSecureProtocol;
        kafkaConfig.ssl = ssl;
    }

    if (connection.sasl) {
        kafkaConfig.sasl = {
            mechanism: connection.saslMechanism,
            username: connection.saslUsername,
            password: connection.saslPassword,
        };
    }

    kafkaConfig.logLevel = Kafkajs.logLevel.ERROR;
    return kafkaConfig;
};
|
|
78
|
+
|
|
79
|
+
/**
 * Returns a connected kafkajs producer for the connection, creating and
 * caching one (keyed by connection.name) on first use.
 * @param {Object} connection - Connection definition from config.connections.
 * @returns {Promise<Object>} A connected kafkajs producer.
 */
let getOrCreateProducer = async function (connection) {
    const cached = producers[connection.name];
    if (cached) {
        return cached;
    }
    const kafka = new Kafkajs.Kafka(await getConfig(connection));
    const producer = kafka.producer({ createPartitioner: Kafkajs.Partitioners.LegacyPartitioner });
    await producer.connect();
    producers[connection.name] = producer;
    return producer;
};
|
|
91
|
+
|
|
92
|
+
/**
 * Creates and connects a kafkajs admin client for the given connection.
 * @param {Object} connection - Connection definition from config.connections.
 * @param {Object} config - Unused; kept for interface compatibility.
 * @returns {Promise<Object>} A connected kafkajs admin client.
 *
 * NOTE: the original implementation leaked `admin` as an implicit global and
 * then cached/returned an undefined `producer` variable (a guaranteed
 * ReferenceError). It now returns the admin client. Caching into the
 * `producers` map is intentionally dropped: storing an admin client under the
 * connection name would corrupt the producer cache used by getOrCreateProducer.
 */
let getOrCreateAdmin = async function (connection, config) {
    const kafkaConfig = await getConfig(connection);
    const kafka = new Kafkajs.Kafka(kafkaConfig);
    const admin = kafka.admin();
    await admin.connect();
    return admin;
};
|
|
100
|
+
|
|
101
|
+
/**
 * Loads file content for an SSL credential argument.
 * @param {string|null|undefined} fileArg - A path, or a comma-separated list
 *   of paths.
 * @returns {Promise<string|string[]|null>} The file content for a single
 *   path, an array of contents (read in parallel, order preserved) for
 *   multiple paths, or null when fileArg is falsy.
 */
let convertFileArg = async function (fileArg) {
    if (!fileArg) {
        return null;
    }
    const paths = fileArg.split(',');
    if (paths.length === 1) {
        return await Fs.readFile(paths[0], 'utf-8');
    }
    // Read all files concurrently; the original also declared (but never
    // used) a stray `fileArgPart` variable inside its loop — removed.
    return await Promise.all(paths.map(p => Fs.readFile(p, 'utf-8')));
};
|
|
118
|
+
|
|
119
|
+
// createKafkaInstances: Returns an object of configured `new Kafkajs.Kafka(kafkaConfig)`
// instances, keyed by systemName. (The returned object may only log fully via console.dir().)
// Parameters:
// - kafkaConsumers: object with the serialized JSON of each kafka consumer.
// - connections: (blz-suite) comes from config.connections, where config is the full
//   environment config at start time. Each consumer holds the name of the connection to use.
// - buildConfigItems: config.buildConfigItems.
// - runHelper: runtime helper used for logging from the kafkajs logCreator hook.
async function createKafkaInstances(kafkaConsumers, connections, buildConfigItems, runHelper) {
    let kafkas = {};
    // Iterate all consumers to make sure all kafka connections will be up and running exactly 1 time
    for (let kafkaConsumerName in kafkaConsumers) {
        let buildKafkaConsumer = kafkaConsumers[kafkaConsumerName];
        let kafka = kafkas[buildKafkaConsumer.systemName];
        // If connection is present, skip
        if (kafka) {
            continue;
        }

        let connection = connections[buildKafkaConsumer.systemName];
        // Check if kafka connection info is undefined
        if (!connection) {
            const kafkaBuildSystemConfig = buildConfigItems[buildKafkaConsumer.systemName];
            if (kafkaBuildSystemConfig && kafkaBuildSystemConfig.action !== 'omit') {
                console.log(`Connection ${buildKafkaConsumer.systemName} undefined`);
            } else if (!kafkaBuildSystemConfig) {
                console.log(`Config ${buildKafkaConsumer.systemName} undefined`);
            }
            continue;
        }
        if (connection.provider) {
            // Mock provider: the connection object itself acts as the "kafka" instance.
            kafka = connection;
        }
        else {
            // Reuse getConfig() — the original duplicated its ~60 lines of
            // config-building (brokers/timeouts/retry/ssl/sasl/logLevel) inline.
            let kafkaConfig = await getConfig(connection);
            kafkaConfig.logCreator = () => {
                return ({ namespace, level, label, log }) => {
                    if (level === 1 && log.message && log.message.includes('The group is rebalancing')) {
                        // Do nothing or log at a different level for rebalancing errors
                        return;
                    } else if (level === 1) {
                        runHelper.logError(`[${namespace}] : ${JSON.stringify(log.message)}`, namespace, log);
                    } else {
                        runHelper.log(`[${namespace}] : ${log.message}`);
                    }
                };
            };
            kafka = new Kafkajs.Kafka(kafkaConfig);
        }
        kafkas[buildKafkaConsumer.systemName] = kafka;
    }
    return kafkas;
}
|
|
216
|
+
// Function to start the Kafka consumers.
// - kafkas: object of configured `new Kafkajs.Kafka(kafkaConfig)` instances, keyed by systemName
// - config: in blz-suite, the full environment config of the project at start time
//           (in blz-builder: ... TODO)
// - kafkaConsumers: object with all the consumers; each property of the object is one consumer
// - connections: object with all the connections, keyed by name
|
|
222
|
+
async function startKafkaConsumers (kafkas,config, kafkaConsumers, connections, runHelper) {
|
|
223
|
+
|
|
224
|
+
for (let kafkaConsumerName in kafkaConsumers) {
|
|
225
|
+
let buildKafkaConsumer = kafkaConsumers[kafkaConsumerName];
|
|
226
|
+
// Probe kafka connection configuration for message error options
|
|
227
|
+
let connection = connections[buildKafkaConsumer.systemName];
|
|
228
|
+
if (!connection) {
|
|
229
|
+
console.log(`Kafka connection : ${buildKafkaConsumer.systemName} not defined or ommited`);
|
|
230
|
+
continue;
|
|
231
|
+
}
|
|
232
|
+
let kafkaErrorOptions = {};
|
|
233
|
+
kafkaErrorOptions.errorOptions = connection.errorOptions ?? false;
|
|
234
|
+
kafkaErrorOptions.manualCommit = connection.messageManualCommit ?? false;
|
|
235
|
+
kafkaErrorOptions.retries = connection.messageRetryCount ?? 5;
|
|
236
|
+
kafkaErrorOptions.initialRetryTime = connection.initialRetryTime ?? 300;
|
|
237
|
+
kafkaErrorOptions.factor = connection.messageRetryFactor ?? 2;
|
|
238
|
+
kafkaErrorOptions.multiplier = connection.messageRetryMultiplier ?? 2;
|
|
239
|
+
kafkaErrorOptions.traceAll = connection.traceAll ?? false;
|
|
240
|
+
kafkaErrorOptions.retryTopic = (buildKafkaConsumer.useErrorTopic && buildKafkaConsumer.parameterizedErrorTopic)? config.parameters[ buildKafkaConsumer.parameterizedErrorTopic.configParameterName ] : '';
|
|
241
|
+
|
|
242
|
+
let topics = [];
|
|
243
|
+
if (buildKafkaConsumer.topics){
|
|
244
|
+
for (let i = 0; i < buildKafkaConsumer.topics.length; i++) {
|
|
245
|
+
let topic = buildKafkaConsumer.topics[i];
|
|
246
|
+
topics.push(topic);
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
if (buildKafkaConsumer.parameterizedTopics){
|
|
250
|
+
for (let i = 0; i < buildKafkaConsumer.parameterizedTopics.length; i++) {
|
|
251
|
+
let parameterizedTopic = buildKafkaConsumer.parameterizedTopics[i];
|
|
252
|
+
const topic = config.parameters[parameterizedTopic.configParameterName];
|
|
253
|
+
if (!topic) {
|
|
254
|
+
console.error(`Kafka: ${parameterizedTopic.configParameterName} parameter not had topic defined`)
|
|
255
|
+
continue
|
|
256
|
+
}
|
|
257
|
+
topics.push(config.parameters[parameterizedTopic.configParameterName]);
|
|
258
|
+
}
|
|
259
|
+
}
|
|
260
|
+
let kafka = kafkas[buildKafkaConsumer.systemName];
|
|
261
|
+
if (!kafka) {
|
|
262
|
+
console.error(`Kafka ${buildKafkaConsumer.systemName} not found`)
|
|
263
|
+
continue
|
|
264
|
+
}
|
|
265
|
+
const groupId = buildKafkaConsumer.isParameterizedGroupId
|
|
266
|
+
? config.parameters[ buildKafkaConsumer.parameterizedGroupId.configParameterName ]
|
|
267
|
+
: buildKafkaConsumer.groupId
|
|
268
|
+
|
|
269
|
+
if (groupId === undefined || groupId === null || groupId.trim() === '') {
|
|
270
|
+
console.error(`Kafka: ${kafkaConsumerName} consumer not had groupId defined`)
|
|
271
|
+
continue
|
|
272
|
+
}
|
|
273
|
+
if (topics.length === 0) {
|
|
274
|
+
console.error(`Kafka: ${kafkaConsumerName} consumer not had topics defined`)
|
|
275
|
+
continue
|
|
276
|
+
}
|
|
277
|
+
// When Kafka = mock, use this section
|
|
278
|
+
if (kafka.provider) {
|
|
279
|
+
setInterval(async function () {
|
|
280
|
+
let consumeData = await kafka.provider.consume(kafka, groupId, topics);
|
|
281
|
+
while (consumeData) {
|
|
282
|
+
let callContext = runHelper.createCallContext('Running Kafka Consumer [' + kafkaConsumerName + ']...');
|
|
283
|
+
try {
|
|
284
|
+
if (kafkaErrorOptions.traceAll) {
|
|
285
|
+
console.log(
|
|
286
|
+
'KAFKA CONSUMER | ' + kafkaConsumerName +
|
|
287
|
+
' | TOPIC: ' + consumeData.topic +
|
|
288
|
+
(consumeData.partition ? ' | PARTITION: ' + consumeData.partition : '') +
|
|
289
|
+
(consumeData.message.key ? ' | MESSAGE KEY: ' + convertToString(consumeData.message.key) : '') +
|
|
290
|
+
(consumeData.message.value ? ' | MESSAGE VALUE: ' + convertToString(consumeData.message.value) : '')
|
|
291
|
+
);
|
|
292
|
+
}
|
|
293
|
+
let ctx0 = {
|
|
294
|
+
topic: consumeData.topic,
|
|
295
|
+
partition: consumeData.partition,
|
|
296
|
+
message: { key: consumeData.message.key, value: consumeData.message.value, headers: consumeData.message.headers }
|
|
297
|
+
};
|
|
298
|
+
await runHelper.runAlgorithm(callContext, { elementType: 'KafkaConsumer', elementName: kafkaConsumerName, path: buildKafkaConsumer.path, aliasesMatches: buildKafkaConsumer.aliasesMatches }, null, buildKafkaConsumer.algorithm, ctx0);
|
|
299
|
+
if (kafkaErrorOptions.manualCommit) {
|
|
300
|
+
await consumer.commitOffsets([{ topic, partition, offset: (Number(message.offset) + 1).toString() }]);
|
|
301
|
+
}
|
|
302
|
+
}
|
|
303
|
+
catch (err) {
|
|
304
|
+
runHelper.logError(err);
|
|
305
|
+
}
|
|
306
|
+
await runHelper.terminateCallContext(callContext);
|
|
307
|
+
consumeData = await kafka.provider.consume(kafka, groupId, topics);
|
|
308
|
+
}
|
|
309
|
+
}, 4000);
|
|
310
|
+
}
|
|
311
|
+
// If Kafka is an actual server (!= mock). Use this section
|
|
312
|
+
else {
|
|
313
|
+
try{
|
|
314
|
+
let consumerOptions = {}
|
|
315
|
+
let consumerOptionsDefault = {
|
|
316
|
+
sessionTimeout: 60000, // 60 seconds
|
|
317
|
+
heartbeatInterval: 6000, // 6 seconds
|
|
318
|
+
maxBytesPerPartition: 2097152, // 2 MB
|
|
319
|
+
maxBytes: 20971520, // 20 MB
|
|
320
|
+
maxWaitTimeInMs: 5000, // 1 second
|
|
321
|
+
minBytes: 1, // 1 byte
|
|
322
|
+
maxInFlightRequests: 10, // 10 in-flight requests
|
|
323
|
+
retry: {
|
|
324
|
+
initialRetryTime: 100, // 100 milliseconds
|
|
325
|
+
retries: 5, // Increase to 5 retries
|
|
326
|
+
factor: 2, // Exponential backoff factor
|
|
327
|
+
multiplier: 2 // Backoff multiplier
|
|
328
|
+
}
|
|
329
|
+
};
|
|
330
|
+
if (connection.consumerOptions){
|
|
331
|
+
consumerOptions = { ...consumerOptionsDefault, ...connection.consumerOptions };
|
|
332
|
+
}
|
|
333
|
+
let consumer = kafka.consumer({ groupId: groupId });
|
|
334
|
+
await consumer.connect(consumerOptions);
|
|
335
|
+
for (let i = 0; i < topics.length; i++) {
|
|
336
|
+
const topic = topics[i];
|
|
337
|
+
if (!topic || topic.trim() === '') {
|
|
338
|
+
runHelper.logError(new Error(`Kafka: ${kafkaConsumerName} consumer not had topic defined`));
|
|
339
|
+
continue
|
|
340
|
+
}
|
|
341
|
+
try {
|
|
342
|
+
await consumer.subscribe({ topic: topic, fromBeginning: buildKafkaConsumer.fromBeginning });
|
|
343
|
+
} catch(err) {
|
|
344
|
+
runHelper.logError(new Error(`Kafka: error whit topic ${topic}: ${err.message} `,err));
|
|
345
|
+
}
|
|
346
|
+
}
|
|
347
|
+
// Handle group join and leave events
|
|
348
|
+
consumer.on(consumer.events.GROUP_JOIN, async e => {
|
|
349
|
+
try {
|
|
350
|
+
const group = (e && e.payload && e.payload.groupId) ? e.payload.groupId : 'undefined';
|
|
351
|
+
const memberId = (e && e.payload && e.payload.memberId) ? e.payload.memberId : 'undefined';
|
|
352
|
+
console.log(`Consumer with member ID ${memberId} has joined the group: ${group}`);
|
|
353
|
+
} catch (error) {
|
|
354
|
+
console.error('Error in GROUP_JOIN event handler:', error);
|
|
355
|
+
}
|
|
356
|
+
});
|
|
357
|
+
consumer.on(consumer.events.REBALANCING, async e => {
|
|
358
|
+
try {
|
|
359
|
+
const group = (e && e.payload && e.payload.groupId) ? e.payload.groupId : 'undefined';
|
|
360
|
+
const memberId = (e && e.payload && e.payload.memberId) ? e.payload.memberId : 'undefined';
|
|
361
|
+
console.log(`Consumer with member ID ${memberId} has joined the group: ${group}`);
|
|
362
|
+
} catch (error) {
|
|
363
|
+
console.error('Error in GROUP_JOIN event handler:', error);
|
|
364
|
+
}
|
|
365
|
+
});
|
|
366
|
+
// consumer.on(consumer.events.CONNECT, async e => {
|
|
367
|
+
// runHelper.log(`Consumer connected: ${JSON.stringify(e)}`);
|
|
368
|
+
// });
|
|
369
|
+
consumer.on(consumer.events.DISCONNECT, async e => {
|
|
370
|
+
runHelper.log(`Consumer disconnected: ${JSON.stringify(e)}`);
|
|
371
|
+
await consumer.connect();
|
|
372
|
+
});
|
|
373
|
+
consumer.on(consumer.events.CRASH, async e => {
|
|
374
|
+
runHelper.logError(`Consumer crashed: ${e.error.message}`, e.error);
|
|
375
|
+
await consumer.disconnect();
|
|
376
|
+
await consumer.connect();
|
|
377
|
+
});
|
|
378
|
+
// consumer run configuration
|
|
379
|
+
await consumer.run({
|
|
380
|
+
autoCommit: !kafkaErrorOptions.manualCommit,
|
|
381
|
+
eachMessage: async function ({ topic, partition, message }) {
|
|
382
|
+
let callContext = runHelper.createCallContext('Running Kafka Consumer [' + kafkaConsumerName + ']...');
|
|
383
|
+
try {
|
|
384
|
+
await retry(async (bail, attempt) => {
|
|
385
|
+
if (kafkaErrorOptions.traceAll) {
|
|
386
|
+
console.log(`Attempt ${attempt} | ` + 'KAFKA CONSUMER | ' + kafkaConsumerName + ' | TOPIC: ' + topic + (partition ? ' | PARTITION: ' + partition : '') + (message.key ? ' | MESSAGE KEY: ' + convertToString(message.key) : '') + (message.value ? ' | MESSAGE VALUE: ' + convertToString(message.value) : ''));
|
|
387
|
+
}
|
|
388
|
+
let ctx0 = {
|
|
389
|
+
topic: topic,
|
|
390
|
+
partition: partition,
|
|
391
|
+
message: { key: message.key, value: message.value, headers: message.headers }
|
|
392
|
+
};
|
|
393
|
+
await runHelper.runAlgorithm(callContext, { elementType: 'KafkaConsumer', elementName: kafkaConsumerName, path: buildKafkaConsumer.path, aliasesMatches: buildKafkaConsumer.aliasesMatches }, null, buildKafkaConsumer.algorithm, ctx0);
|
|
394
|
+
}, {
|
|
395
|
+
retries: kafkaErrorOptions.retries, // Maximum number of retries
|
|
396
|
+
minTimeout: kafkaErrorOptions.initialRetryTime, // Minimum wait time between retries (in ms)
|
|
397
|
+
});
|
|
398
|
+
if (kafkaErrorOptions.manualCommit) {
|
|
399
|
+
await consumer.commitOffsets([{ topic, partition, offset: (Number(message.offset) + 1).toString() }]);
|
|
400
|
+
}
|
|
401
|
+
} catch (err) {
|
|
402
|
+
if(kafkaErrorOptions.traceAll) {
|
|
403
|
+
console.error(err);
|
|
404
|
+
}
|
|
405
|
+
if(kafkaErrorOptions.retryTopic && kafkaErrorOptions.retryTopic !== '') {
|
|
406
|
+
// sends message with error to retry topic
|
|
407
|
+
await module.exports.kafkaSend(connection, kafkaErrorOptions.retryTopic, [message]);
|
|
408
|
+
// marks the message as read on original topic
|
|
409
|
+
await consumer.commitOffsets([{ topic, partition, offset: (Number(message.offset) + 1).toString() }]);
|
|
410
|
+
}
|
|
411
|
+
}
|
|
412
|
+
await runHelper.terminateCallContext(callContext);
|
|
413
|
+
},
|
|
414
|
+
retry: {
|
|
415
|
+
retries: kafkaErrorOptions.retries,
|
|
416
|
+
initialRetryTime: kafkaErrorOptions.initialRetryTime,
|
|
417
|
+
factor: kafkaErrorOptions.factor,
|
|
418
|
+
multiplier: kafkaErrorOptions.multiplier,
|
|
419
|
+
},
|
|
420
|
+
rebalance: {
|
|
421
|
+
eachGroupJoin: async ({ groupId, memberId, groupAssignment }) => {
|
|
422
|
+
console.log(`KAFKA CONSUMER | ${kafkaConsumerName} | memberId: ${memberId} joined the group ${groupId}`);
|
|
423
|
+
},
|
|
424
|
+
eachGroupLeave: async ({ groupId, memberId }) => {
|
|
425
|
+
console.log(`KAFKA CONSUMER | ${kafkaConsumerName} | memberId: ${memberId} left the group ${groupId}`);
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
});
|
|
429
|
+
} catch (err) {
|
|
430
|
+
runHelper.logError(err);
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
if (process.env.blz_traceAll === 'true' || process.env.blz_traceKafkaConsumers === 'true') {
|
|
434
|
+
console.log('KAFKA CONSUMER | ' + kafkaConsumerName + ' | Start');
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
}
|
|
438
|
+
}
|
|
439
|
+
// END: functions to initialize kafka consumers
|
|
440
|
+
|
|
441
|
+
module.exports = {
|
|
442
|
+
kafkaConfig: async function (connection, config) {
|
|
443
|
+
if(!config || !config.topicsConfig || !config.topicsConfig.topics || !config.topicsConfig.topics.length || config.topicsConfig.topics.length === 0) {
|
|
444
|
+
throw new Error('Invalid config');
|
|
445
|
+
}
|
|
446
|
+
for (let i = 0; i < config.topicsConfig.topics.length; i++) {
|
|
447
|
+
let topic = config.topicsConfig.topics[i];
|
|
448
|
+
if (!topic.topic || topic.topic.trim().length === 0) {
|
|
449
|
+
throw new Error(`Invalid topic ${i+1} config`);
|
|
450
|
+
}
|
|
451
|
+
}
|
|
452
|
+
let result = false
|
|
453
|
+
let error = null;
|
|
454
|
+
let admin = null;
|
|
455
|
+
try {
|
|
456
|
+
let kafkaConfig = await getConfig(connection);
|
|
457
|
+
let kafka = new Kafkajs.Kafka(kafkaConfig);
|
|
458
|
+
admin = kafka.admin();
|
|
459
|
+
await admin.connect();
|
|
460
|
+
const currentTopics = await admin.listTopics();
|
|
461
|
+
let newsTopics = config.topicsConfig.topics.filter(topic => !currentTopics.includes(topic.topic));
|
|
462
|
+
if(newsTopics && newsTopics.length > 0) {
|
|
463
|
+
config.topicsConfig.topics = newsTopics;
|
|
464
|
+
result = await admin.createTopics(config.topicsConfig);
|
|
465
|
+
}
|
|
466
|
+
}catch(e) {
|
|
467
|
+
error = e;
|
|
468
|
+
} finally {
|
|
469
|
+
if(admin) {
|
|
470
|
+
await admin.disconnect();
|
|
471
|
+
}
|
|
472
|
+
}
|
|
473
|
+
if(error) {
|
|
474
|
+
throw error;
|
|
475
|
+
}
|
|
476
|
+
return result;
|
|
477
|
+
},
|
|
478
|
+
kafkaGetTopics: async function (connection) {
|
|
479
|
+
let kafkaConfig = await getConfig(connection);
|
|
480
|
+
let kafka = new Kafkajs.Kafka(kafkaConfig);
|
|
481
|
+
const admin = kafka.admin();
|
|
482
|
+
await admin.connect();
|
|
483
|
+
const topics = await admin.listTopics();
|
|
484
|
+
await admin.disconnect();
|
|
485
|
+
return topics;
|
|
486
|
+
},
|
|
487
|
+
kafkaSend: async function (connection, topic, messages) {
|
|
488
|
+
if (connection.provider) {
|
|
489
|
+
await connection.provider.produce(connection, topic, messages);
|
|
490
|
+
return null;
|
|
491
|
+
}
|
|
492
|
+
let producer = await getOrCreateProducer(connection);
|
|
493
|
+
await producer.send({ topic: topic, messages: messages });
|
|
494
|
+
return null;
|
|
495
|
+
},
|
|
496
|
+
kafkaConnections: async function(kafkaConsumers, connections, buildConfigItems, runHelper){
|
|
497
|
+
return createKafkaInstances(kafkaConsumers, connections, buildConfigItems, runHelper);
|
|
498
|
+
},
|
|
499
|
+
startKafkaConsumers: async function(kafkas,config, kafkaConsumers, connections, runHelper){
|
|
500
|
+
startKafkaConsumers(kafkas, config, kafkaConsumers, connections, runHelper);
|
|
501
|
+
},
|
|
502
|
+
kafkaConnectionHealthCheck: async function (connection) {
|
|
503
|
+
if (!connection.brokers) {
|
|
504
|
+
return { status: "error", message: "Kafka broker list not defined" }
|
|
505
|
+
}
|
|
506
|
+
const kafka = new Kafkajs.Kafka({ brokers: [connection.brokers] });
|
|
507
|
+
try {
|
|
508
|
+
// Try to get metadata from Kafka to check the connection
|
|
509
|
+
const admin = kafka.admin();
|
|
510
|
+
await admin.connect();
|
|
511
|
+
// Fetch metadata (this will trigger a request to Kafka to see if it's reachable)
|
|
512
|
+
const metadata = await admin.fetchTopicMetadata({ topics: [] });
|
|
513
|
+
logger.info({message: "Kafka is reachable!", metadata:metadata});
|
|
514
|
+
await admin.disconnect(); // Disconnect after metadata check
|
|
515
|
+
return { status: "ok", message: "Kafka connected, metadata vissible" };
|
|
516
|
+
} catch (error) {
|
|
517
|
+
logger.info({ message: "Kafka is not reachable", error: error });
|
|
518
|
+
return { status: "error", message: error.message };
|
|
519
|
+
}
|
|
520
|
+
|
|
521
|
+
}
|
|
522
|
+
};
|