@xrystal/core 3.5.2 → 3.5.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "author": "Yusuf Yasir KAYGUSUZ",
   "name": "@xrystal/core",
-  "version": "3.5.2",
+  "version": "3.5.4",
   "description": "Project core for xrystal",
   "publishConfig": {
     "access": "public",

@@ -1,18 +1,22 @@
-import winston from 'winston';
-import 'winston-daily-rotate-file';
+import winston from "winston";
+import "winston-daily-rotate-file";
+import { AsyncLocalStorage } from "node:async_hooks";
 interface CustomLogger extends winston.Logger {
     critical: winston.LeveledLogMethod;
     http: winston.LeveledLogMethod;
 }
 export default class LoggerService {
+    static readonly storage: AsyncLocalStorage<Map<string, string>>;
     private serviceName;
     private environment;
     private kafkaProducer;
     private kafkaTopic;
     private isKafkaReady;
+    private getTracingFormat;
     private getConsoleFormat;
     winston: CustomLogger;
     constructor();
+    runWithId: (id: string, callback: () => void) => void;
     load: (config: Record<string, any>) => Promise<void>;
     winstonLoader: ({ loadPath, loggerLevel }: {
         loadPath: string;
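
The updated type declarations expose a runWithId method backed by a static AsyncLocalStorage store, so every log emitted inside the callback can be tagged with a correlation ID. A minimal usage sketch in TypeScript; the root import path is an assumption for illustration, while the serviceName/env/loggerLevel keys appear in the implementation diff below:

import LoggerService from "@xrystal/core"; // assumed entry point, adjust to your setup

const logger = new LoggerService();

async function main() {
    // Config keys taken from the implementation diff; values are illustrative
    await logger.load({ serviceName: "orders-api", env: "dev", loggerLevel: "debug" });

    // Everything logged inside the callback carries correlationId "req-123"
    logger.runWithId("req-123", () => {
        logger.winston.info("started processing order");
        logger.winston.critical("payment gateway unreachable");
    });
}

main().catch(console.error);
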
@@ -1,37 +1,47 @@
-import winston, { format } from 'winston';
-import 'winston-daily-rotate-file';
-import path from 'path';
-import { Kafka, Partitioners, logLevel } from 'kafkajs';
-import { LoggerLayerEnum } from '../../utils';
+import winston, { format } from "winston";
+import "winston-daily-rotate-file";
+import path from "node:path";
+import { AsyncLocalStorage } from "node:async_hooks";
+import { Kafka, Partitioners, logLevel } from "kafkajs";
+import { LoggerLayerEnum } from "../../utils";
 const customLevels = {
-    critical: LoggerLayerEnum.CRITICAL, // 0
-    error: LoggerLayerEnum.ERROR, // 1
-    info: LoggerLayerEnum.INFO, // 2
-    http: LoggerLayerEnum.HTTP, // 4
-    debug: LoggerLayerEnum.DEBUG, // 5
+    critical: LoggerLayerEnum.CRITICAL,
+    error: LoggerLayerEnum.ERROR,
+    info: LoggerLayerEnum.INFO,
+    http: LoggerLayerEnum.HTTP,
+    debug: LoggerLayerEnum.DEBUG,
 };
 const customColors = {
-    critical: 'red',
-    error: 'magenta',
-    warn: 'yellow',
-    info: 'green',
-    http: 'cyan',
-    debug: 'blue'
+    critical: "red",
+    error: "magenta",
+    warn: "yellow",
+    info: "green",
+    http: "cyan",
+    debug: "blue"
 };
 export default class LoggerService {
+    static storage = new AsyncLocalStorage();
     serviceName = "";
     environment = "";
     kafkaProducer = null;
     kafkaTopic = "";
     isKafkaReady = false;
+    getTracingFormat = format((info) => {
+        const store = LoggerService.storage.getStore();
+        if (store) {
+            info.correlationId = store.get("correlationId");
+        }
+        return info;
+    });
     getConsoleFormat() {
-        return format.combine(format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.colorize({ all: true }), format.printf((info) => {
-            const msg = typeof info.message === 'object' ? JSON.stringify(info.message) : info.message;
-            return `${info.timestamp} [${this.serviceName}] ${info.level}: ${msg}`;
+        return format.combine(this.getTracingFormat(), format.timestamp({ format: "YYYY-MM-DD HH:mm:ss" }), format.colorize({ all: true }), format.printf((info) => {
+            const msg = typeof info.message === "object" ? JSON.stringify(info.message) : info.message;
+            const id = info.correlationId ? ` [ID: ${info.correlationId}]` : "";
+            return `${info.timestamp} [${this.serviceName}]${info.level}: ${msg} : ${id}`;
         }));
     }
     winston = winston.createLogger({
-        level: 'debug',
+        level: "debug",
         levels: customLevels,
         transports: [
             new winston.transports.Console({
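
The new static storage field and the getTracingFormat wrapper rely on Node's AsyncLocalStorage: getStore() returns whatever value was bound by storage.run() for the current asynchronous execution context, so the correlation ID survives awaits and callbacks without being threaded through arguments. A standalone TypeScript sketch of that mechanism (illustrative only, not code from the package):

import { AsyncLocalStorage } from "node:async_hooks";

const storage = new AsyncLocalStorage<Map<string, string>>();

function currentCorrelationId(): string | undefined {
    // The Map bound to the current async context by storage.run(), if any
    return storage.getStore()?.get("correlationId");
}

storage.run(new Map([["correlationId", "abc-1"]]), async () => {
    console.log(currentCorrelationId()); // "abc-1"
    await new Promise((resolve) => setTimeout(resolve, 10));
    console.log(currentCorrelationId()); // still "abc-1" after the await
});

console.log(currentCorrelationId()); // undefined: outside any run() scope
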
@@ -42,6 +52,11 @@ export default class LoggerService {
     constructor() {
         winston.addColors(customColors);
     }
+    runWithId = (id, callback) => {
+        const store = new Map();
+        store.set("correlationId", id);
+        LoggerService.storage.run(store, callback);
+    };
     load = async (config) => {
         this.serviceName = config?.serviceName || "service";
         this.environment = config?.env || "dev";
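
runWithId binds a fresh Map containing the correlation ID to everything executed inside the callback. In an HTTP service the natural call site is per-request middleware, so every log line produced while handling a request shares one ID. A hedged Express-style sketch; the header name, crypto.randomUUID(), and the root import path are assumptions for illustration, not part of the package:

import { randomUUID } from "node:crypto";
import express from "express";
import LoggerService from "@xrystal/core"; // assumed entry point

const app = express();
const logger = new LoggerService();
// logger.load({ ... }) is assumed to have been awaited at startup

app.use((req, _res, next) => {
    // Reuse an incoming ID when the caller supplies one, otherwise mint a new one
    const id = req.header("x-correlation-id") ?? randomUUID();
    logger.runWithId(id, next);
});

app.get("/health", (_req, res) => {
    logger.winston.http("health check"); // this record now carries correlationId
    res.send("ok");
});

app.listen(3000);
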
@@ -51,7 +66,7 @@
             loggerLevel: config?.loggerLevel || "debug"
         });
         const rawBrokers = config?.kafkaBrokers;
-        const brokers = rawBrokers ? String(rawBrokers).split(',').map((b) => b.trim()) : [];
+        const brokers = rawBrokers ? String(rawBrokers).split(",").map((b) => b.trim()) : [];
         if (brokers.length > 0) {
             const kafka = new Kafka({
                 clientId: this.serviceName,
@@ -67,7 +82,7 @@
             this.kafkaProducer = kafka.producer({ createPartitioner: Partitioners.LegacyPartitioner });
             const connectWithRetry = async () => {
                 try {
-                    await this.kafkaProducer.connect();
+                    await this.kafkaProducer?.connect();
                     this.isKafkaReady = true;
                 }
                 catch (err) {
@@ -80,7 +95,7 @@
     };
     winstonLoader = ({ loadPath, loggerLevel }) => {
         const { combine, timestamp, json, errors } = format;
-        const jsonFileFormat = combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), errors({ stack: true }), json());
+        const jsonFileFormat = combine(this.getTracingFormat(), timestamp({ format: "YYYY-MM-DD HH:mm:ss" }), errors({ stack: true }), json());
         const winstonLogger = winston.createLogger({
             level: loggerLevel,
             levels: customLevels,
@@ -89,22 +104,22 @@ export default class LoggerService {
                     format: this.getConsoleFormat()
                 }),
                 new winston.transports.DailyRotateFile({
-                    filename: path.resolve(loadPath, 'error', '%DATE%_error.log'),
-                    level: 'error',
+                    filename: path.resolve(loadPath, "error", "%DATE%_error.log"),
+                    level: "error",
                     format: jsonFileFormat,
-                    maxSize: '2mb',
-                    maxFiles: '7d'
+                    maxSize: "2mb",
+                    maxFiles: "7d"
                 }),
                 new winston.transports.DailyRotateFile({
-                    filename: path.resolve(loadPath, 'critical', '%DATE%_critical.log'),
-                    level: 'critical',
+                    filename: path.resolve(loadPath, "critical", "%DATE%_critical.log"),
+                    level: "critical",
                     format: jsonFileFormat,
-                    maxSize: '2mb',
-                    maxFiles: '14d'
+                    maxSize: "2mb",
+                    maxFiles: "14d"
                 })
             ]
         });
-        winstonLogger.on('data', (info) => {
+        winstonLogger.on("data", (info) => {
            if (this.isKafkaReady) {
                this.logToKafka(info);
            }

@@ -1,4 +1,4 @@
-export { default as icons } from "picocolors";
+export { default as pic } from "picocolors";
 export * from './tmp/index';
 export * from './path/index';
 export * from './is/index';

@@ -1,4 +1,4 @@
-export { default as icons } from "picocolors";
+export { default as pic } from "picocolors";
 export * from './tmp/index';
 export * from './path/index';
 export * from './is/index';
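
Both index hunks rename the re-exported picocolors default from icons to pic, which is a breaking rename for any consumer importing the old name. A before/after sketch, assuming the utilities are re-exported from the package root (the import path is an assumption):

// Before (3.5.2), assumed import path:
// import { icons } from "@xrystal/core";
// console.log(icons.green("ready"));

// After (3.5.4):
import { pic } from "@xrystal/core"; // assumed import path
console.log(pic.green("ready"));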