@xrystal/core 3.26.8 → 3.26.9

This diff shows the changes between package versions as they were published to one of the supported public registries. It is generated from publicly released package content and is provided for informational purposes only.
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "author": "Yusuf Yasir KAYGUSUZ",
3
3
  "name": "@xrystal/core",
4
- "version": "3.26.8",
4
+ "version": "3.26.9",
5
5
  "description": "Project core for xrystal",
6
6
  "publishConfig": {
7
7
  "access": "public",
@@ -41,7 +41,6 @@ export default class Configs implements IProvide<any> {
41
41
  system: System;
42
42
  });
43
43
  onInit: ({}: {}) => Promise<void>;
44
- private initializeKafkaInfrastructure;
45
44
  setConfig(newConfigs: any): void;
46
45
  _<K extends keyof IConfig>(key: K): IConfig[K];
47
46
  get all(): IConfig;
@@ -1,7 +1,6 @@
1
1
  import path from 'node:path';
2
2
  import { merge } from 'lodash';
3
3
  import { pathToFileURL } from 'node:url';
4
- import { Kafka, logLevel } from 'kafkajs';
5
4
  import { Constants } from '../../utils';
6
5
  export default class Configs {
7
6
  publicFolderName = Constants.publicFolderName;
@@ -68,54 +67,6 @@ export default class Configs {
68
67
  catch (e) {
69
68
  // error
70
69
  }
71
- await this.initializeKafkaInfrastructure();
72
- };
73
- initializeKafkaInfrastructure = async () => {
74
- const { kafkaBrokers, kafkaTopics, isKafkaPassive, serviceName } = {
75
- isKafkaPassive: process.env.IS_KAFKA_PASSIVE === 'true' ? true : false,
76
- kafkaBrokers: process.env?.KAFKA_BROKERS,
77
- kafkaTopics: [
78
- ...new Set([
79
- ...this.#config.kafkaTopics
80
- ])
81
- ],
82
- serviceName: this.#system.tmp.configs.service
83
- };
84
- if (isKafkaPassive === true || !kafkaBrokers)
85
- return;
86
- const brokers = String(kafkaBrokers).split(",").map(b => b.trim());
87
- const topicsToCreate = Array.isArray(kafkaTopics) ? kafkaTopics : [];
88
- if (topicsToCreate.length === 0)
89
- return;
90
- const kafka = new Kafka({
91
- clientId: `${serviceName}`,
92
- brokers,
93
- logLevel: logLevel.NOTHING
94
- });
95
- const admin = kafka.admin();
96
- try {
97
- await admin.connect();
98
- const existingTopics = await admin.listTopics();
99
- const newTopics = topicsToCreate
100
- .filter(topic => !existingTopics.includes(topic))
101
- .map(topic => ({
102
- topic,
103
- numPartitions: 1,
104
- replicationFactor: 1
105
- }));
106
- if (newTopics.length > 0) {
107
- await admin.createTopics({
108
- waitForLeaders: true,
109
- topics: newTopics
110
- });
111
- }
112
- }
113
- catch (error) {
114
- // => Error
115
- }
116
- finally {
117
- await admin.disconnect();
118
- }
119
70
  };
120
71
  setConfig(newConfigs) {
121
72
  const mergedDObj = Object.assign(this.#config, newConfigs);
@@ -0,0 +1,29 @@
1
+ import Configs from "../configs";
2
+ import System from "../system";
3
+ import { IProvide } from "source/utils";
4
+ type KafkaInstanceType = {
5
+ clientId: string;
6
+ brokers: string[];
7
+ username?: string | null;
8
+ password?: string | null;
9
+ };
10
+ export default class KafkaForCore implements IProvide<any> {
11
+ #private;
12
+ private _instance;
13
+ private _producer;
14
+ private clientId;
15
+ private brokers;
16
+ private username;
17
+ private password;
18
+ constructor({ system, configs, }: {
19
+ system: System;
20
+ configs: Configs;
21
+ });
22
+ onInit: ({}: {}) => Promise<void>;
23
+ kafkaLoader: ({ clientId, brokers, username, password }: KafkaInstanceType) => Promise<any>;
24
+ private initializeKafkaInfrastructure;
25
+ sendMessage: (topic: string, message: any) => Promise<void>;
26
+ get producer(): any;
27
+ get instance(): any;
28
+ }
29
+ export {};
@@ -0,0 +1,128 @@
1
+ import { Kafka as KafkaClient, logLevel } from "kafkajs";
2
+ import { LoggerLayerEnum } from "source/utils";
3
+ export default class KafkaForCore {
4
+ _instance;
5
+ _producer;
6
+ clientId = "";
7
+ brokers = "";
8
+ username = null;
9
+ password = null;
10
+ #system;
11
+ #configs;
12
+ constructor({ system, configs, }) {
13
+ this.#system = system;
14
+ this.#configs = configs;
15
+ }
16
+ onInit = async ({}) => {
17
+ this.clientId = this.#configs.all?.kafkaClientId;
18
+ this.brokers = this.#configs.all?.kafkaBrokers;
19
+ this.username = this.#configs.all?.kafkaUsername;
20
+ this.password = this.#configs.all?.kafkaPassword;
21
+ if (this.#configs.all.isKafkaPassive) {
22
+ return;
23
+ }
24
+ const brokersArray = (Array.isArray(this.brokers) ? this.brokers : (this.brokers?.split(',') ?? []))
25
+ .map(b => b?.trim())
26
+ .filter(Boolean);
27
+ if (brokersArray?.length === 0) {
28
+ throw new Error('Broker list error');
29
+ }
30
+ await this.kafkaLoader({
31
+ clientId: this.clientId,
32
+ brokers: brokersArray,
33
+ username: this.username,
34
+ password: this.password
35
+ });
36
+ await this.initializeKafkaInfrastructure();
37
+ };
38
+ kafkaLoader = async ({ clientId, brokers, username, password }) => {
39
+ const kafkaConfig = {
40
+ clientId,
41
+ brokers,
42
+ sasl: (username && password) ? { mechanism: 'plain', username, password } : undefined,
43
+ ssl: false,
44
+ logLevel: logLevel.ERROR,
45
+ logCreator: () => {
46
+ return ({ level, log }) => {
47
+ const winstonLevel = level === 1 ? LoggerLayerEnum.CRITICAL :
48
+ level === 2 ? LoggerLayerEnum.INFO :
49
+ level === 4 ? LoggerLayerEnum.INFO :
50
+ level === 5 ? LoggerLayerEnum.DEBUG :
51
+ LoggerLayerEnum.CRITICAL;
52
+ console.error(`Kafka ${log.message}`);
53
+ };
54
+ },
55
+ retry: {
56
+ initialRetryTime: 100,
57
+ retries: 8
58
+ }
59
+ };
60
+ try {
61
+ const kafka = new KafkaClient(kafkaConfig);
62
+ this._instance = kafka;
63
+ this._producer = kafka.producer();
64
+ await this.producer.connect();
65
+ return kafka;
66
+ }
67
+ catch (error) {
68
+ console.error(`Kafka ${error}`);
69
+ }
70
+ };
71
+ initializeKafkaInfrastructure = async () => {
72
+ const { kafkaBrokers, kafkaTopics, isKafkaPassive, } = {
73
+ isKafkaPassive: process.env.IS_KAFKA_PASSIVE === 'true' ? true : false,
74
+ kafkaBrokers: process.env?.KAFKA_BROKERS,
75
+ kafkaTopics: [
76
+ ...new Set([
77
+ ...this.#configs.all.kafkaTopics
78
+ ])
79
+ ],
80
+ };
81
+ if (isKafkaPassive === true || !kafkaBrokers)
82
+ return;
83
+ const brokers = String(kafkaBrokers).split(",").map(b => b.trim());
84
+ const topicsToCreate = Array.isArray(kafkaTopics) ? kafkaTopics : [];
85
+ if (topicsToCreate.length === 0)
86
+ return;
87
+ const admin = this._instance.admin();
88
+ try {
89
+ await admin.connect();
90
+ const existingTopics = await admin.listTopics();
91
+ const newTopics = topicsToCreate
92
+ .filter(topic => !existingTopics.includes(topic))
93
+ .map(topic => ({
94
+ topic,
95
+ numPartitions: 1,
96
+ replicationFactor: 1
97
+ }));
98
+ if (newTopics.length > 0) {
99
+ await admin.createTopics({
100
+ waitForLeaders: true,
101
+ topics: newTopics
102
+ });
103
+ }
104
+ }
105
+ catch (error) {
106
+ // => Error
107
+ }
108
+ finally {
109
+ await admin.disconnect();
110
+ }
111
+ };
112
+ // => Helpers
113
+ sendMessage = async (topic, message) => {
114
+ if (!this.producer)
115
+ throw new Error("Producer not ready");
116
+ await this.producer.send({
117
+ topic,
118
+ messages: [{ value: JSON.stringify(message) }],
119
+ acks: 0
120
+ });
121
+ };
122
+ get producer() {
123
+ return this._producer;
124
+ }
125
+ get instance() {
126
+ return this._instance;
127
+ }
128
+ }
@@ -5,6 +5,7 @@ import System from "../system";
5
5
  import Configs from "../configs";
6
6
  import { LoggerLayerEnum } from '../../utils/models/enums/index';
7
7
  import { IProvide } from "../../utils";
8
+ import KafkaForCore from "../kafka";
8
9
  export interface ICustomLogger extends winston.Logger {
9
10
  critical: winston.LeveledLogMethod;
10
11
  http: winston.LeveledLogMethod;
@@ -23,9 +24,10 @@ export default class Logger implements IProvide<any> {
23
24
  private kafkaLogsTopic;
24
25
  private isKafkaReady;
25
26
  winston: ICustomLogger;
26
- constructor({ system, configs }: {
27
+ constructor({ system, configs, kafka, }: {
27
28
  system: System;
28
29
  configs: Configs;
30
+ kafka: KafkaForCore;
29
31
  });
30
32
  onInit: () => Promise<void>;
31
33
  winstonLoader: ({ loadPath, loggerLevel }: {
@@ -3,7 +3,7 @@ import Transport from "winston-transport";
3
3
  import "winston-daily-rotate-file";
4
4
  import path from "node:path";
5
5
  import { AsyncLocalStorage } from "node:async_hooks";
6
- import { Kafka, Partitioners, logLevel } from "kafkajs";
6
+ import { Partitioners } from "kafkajs";
7
7
  import { LoggerLayerEnum } from '../../utils/models/enums/index';
8
8
  class KafkaTransport extends Transport {
9
9
  service;
@@ -41,9 +41,11 @@ export default class Logger {
41
41
  winston;
42
42
  #system;
43
43
  #configs;
44
- constructor({ system, configs }) {
44
+ #kafka;
45
+ constructor({ system, configs, kafka, }) {
45
46
  this.#system = system;
46
47
  this.#configs = configs;
48
+ this.#kafka = kafka;
47
49
  }
48
50
  onInit = async () => {
49
51
  this.serviceName = this.#system?.tmp?.configs?.service;
@@ -60,18 +62,7 @@ export default class Logger {
60
62
  const brokers = kafkaBrokers ? String(kafkaBrokers).split(",").map((b) => b.trim()) : [];
61
63
  const isKafkaEnabled = isKafkaPassive === false && brokers.length > 0;
62
64
  if (isKafkaEnabled) {
63
- const kafka = new Kafka({
64
- clientId: kafkaClientId,
65
- brokers,
66
- logLevel: logLevel.NOTHING,
67
- sasl: {
68
- mechanism: 'plain',
69
- username: kafkaUsername,
70
- password: kafkaPassword
71
- },
72
- ssl: false,
73
- });
74
- this.kafkaProducer = kafka.producer({
65
+ this.kafkaProducer = this.#kafka.producer({
75
66
  createPartitioner: Partitioners.DefaultPartitioner,
76
67
  retry: { initialRetryTime: 500, retries: 5 }
77
68
  });
@@ -1,6 +1,7 @@
1
1
  import path from 'path';
2
2
  import { System, Configs, Logger, Events, Localizations, Clients, Controller } from '../loader';
3
3
  import { Constants, getCore, x, } from '../utils';
4
+ import KafkaForCore from 'source/loader/kafka';
4
5
  //
5
6
  export const core = getCore();
6
7
  export const coreInit = async (params) => {
@@ -31,6 +32,10 @@ const coreLoader = async ({}) => {
31
32
  service: Configs,
32
33
  props: {}
33
34
  },
35
+ {
36
+ service: KafkaForCore,
37
+ props: {}
38
+ },
34
39
  {
35
40
  service: Logger,
36
41
  props: {}