@mimik/sumologic-winston-logger 1.6.21 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,10 +1,37 @@
  /* eslint no-process-env: "off" */
- const fs = require('fs');
- const isUndefined = require('lodash.isundefined');
- const isNil = require('lodash.isnil');
- const trim = require('lodash.trim');
- const split = require('lodash.split');
- const difference = require('lodash.difference');
+ import {
+ ALL_MODE,
+ ALL_MODES,
+ AWS_KINESIS,
+ AWS_S3,
+ DEFAULT_ENV,
+ DEFAULT_EXIT_DELAY,
+ DEFAULT_FILTER_FILE,
+ DEFAULT_KINESIS_MAX_EVENTS,
+ DEFAULT_KINESIS_MAX_SIZE,
+ DEFAULT_KINESIS_TIMEOUT,
+ DEFAULT_LEVEL,
+ DEFAULT_MODE,
+ DEFAULT_NO_STACK,
+ DEFAULT_S3_MAX_EVENTS,
+ DEFAULT_S3_MAX_SIZE,
+ DEFAULT_S3_TIMEOUT,
+ NONE_MODE,
+ SUMOLOGIC,
+ } from '../lib/common.js';
+ import difference from 'lodash.difference';
+ import fs from 'fs';
+ import isNil from 'lodash.isnil';
+ import isUndefined from 'lodash.isundefined';
+ import process from 'process';
+ import split from 'lodash.split';
+ import trim from 'lodash.trim';
+
+ const DECIMAL = 10;
+ const EXISTING_ERRORS = 1;
+ const NO_MODE = 0;
+ const KNOWN_MODE = 0;
+ const SINGLE_MODE = 1;

  /**
  *
@@ -66,41 +93,21 @@ const difference = require('lodash.difference');
  * If `global.serverType` is set, this value is used over SERVER_TYPE.
  * If `global.serverType` is set, the value of `global.serverId` is used over `SERVER_ID`.
  */
- const {
- DEFAULT_LEVEL,
- DEFAULT_ENV,
- DEFAULT_FILTER_FILE,
- DEFAULT_S3_MAX_SIZE,
- DEFAULT_S3_MAX_EVENTS,
- DEFAULT_S3_TIMEOUT,
- DEFAULT_KINESIS_MAX_SIZE,
- DEFAULT_KINESIS_MAX_EVENTS,
- DEFAULT_KINESIS_TIMEOUT,
- DEFAULT_MODE,
- DEFAULT_EXIT_DELAY,
- DEFAULT_NO_STACK,
- SUMOLOGIC,
- AWS_S3,
- AWS_KINESIS,
- ALL,
- NONE,
- ALL_MODES,
- } = require('../lib/common');

  const checkConfig = (config) => {
  const errs = [];

- function traverseNodeSync(node, path) {
+ const traverseNodeSync = (node, path) => {
  Object.keys(node).forEach((prop) => {
  if (typeof node[prop] === 'object' && node[prop]) {
  traverseNodeSync(node[prop], `${path}.${prop}`);
  }
  else if (isUndefined(node[prop])) errs.push(`${path}.${prop}`);
  });
- }
+ };

  traverseNodeSync(config, 'configuration');
- if (errs.length > 0) {
+ if (errs.length > EXISTING_ERRORS) {
  throw new Error(`Missing values for ${errs}`);
  }
  };
@@ -109,11 +116,11 @@ const checkMode = (mode) => {
  let logMode = null;

  if (mode) {
- logMode = split(trim(mode), /\s*,\s*/);
- if (logMode.length === 0) throw new Error('Invalid LOG_MODE: cannot be an empty array');
- if (difference(logMode, ALL_MODES).length !== 0) throw new Error(`Invalid items in LOG_MODE: ${mode}`);
- if (logMode.includes(NONE) && logMode.length !== 1) throw new Error(`Cannot have multiple modes when ${NONE} is selected`);
- if (logMode.includes(ALL)) logMode = [SUMOLOGIC, AWS_S3]; // legacy support
+ logMode = split(trim(mode), /\s*,\s*/u);
+ if (logMode.length === NO_MODE) throw new Error('Invalid LOG_MODE: cannot be an empty array');
+ if (difference(logMode, ALL_MODES).length !== KNOWN_MODE) throw new Error(`Invalid items in LOG_MODE: ${mode}`);
+ if (logMode.includes(NONE_MODE) && logMode.length !== SINGLE_MODE) throw new Error(`Cannot have multiple modes when ${NONE_MODE} is selected`);
+ if (logMode.includes(ALL_MODE)) logMode = [SUMOLOGIC, AWS_S3]; // legacy support
  }
  return logMode;
  };
@@ -131,7 +138,7 @@ const configuration = {
  filter: {
  file: process.env.FILTER_FILE || DEFAULT_FILTER_FILE,
  },
- exitDelay: parseInt(process.env.EXIT_DELAY, 10) || DEFAULT_EXIT_DELAY, // in ms
+ exitDelay: parseInt(process.env.EXIT_DELAY, DECIMAL) || DEFAULT_EXIT_DELAY, // in ms
  noStack: process.env.NO_STACK || DEFAULT_NO_STACK,
  };
  configuration.mode = checkMode(process.env.LOG_MODE) || DEFAULT_MODE;
@@ -148,9 +155,9 @@ if (configuration.mode.includes(AWS_KINESIS)) {
  streamNameError: process.env.KINESIS_AWS_STREAM_NAME_ERROR,
  streamNameOther: process.env.KINESIS_AWS_STREAM_NAME_OTHER,
  region: process.env.KINESIS_AWS_REGION,
- timeout: parseInt(process.env.KINESIS_AWS_TIMEOUT, 10) || DEFAULT_KINESIS_TIMEOUT, // in ms
- maxSize: parseInt(process.env.KINESIS_AWS_MAX_SIZE, 10) || DEFAULT_KINESIS_MAX_SIZE, // in mB
- maxEvents: parseInt(process.env.KINESIS_AWS_MAX_EVENTS, 10) || DEFAULT_KINESIS_MAX_EVENTS,
+ timeout: parseInt(process.env.KINESIS_AWS_TIMEOUT, DECIMAL) || DEFAULT_KINESIS_TIMEOUT, // in ms
+ maxSize: parseInt(process.env.KINESIS_AWS_MAX_SIZE, DECIMAL) || DEFAULT_KINESIS_MAX_SIZE, // in mB
+ maxEvents: parseInt(process.env.KINESIS_AWS_MAX_EVENTS, DECIMAL) || DEFAULT_KINESIS_MAX_EVENTS,
  };

  if (!isNil(process.env.KINESIS_AWS_ACCESS_KEY_ID)) configuration[AWS_KINESIS].accessKeyId = process.env.KINESIS_AWS_ACCESS_KEY_ID;
@@ -160,9 +167,9 @@ if (configuration.mode.includes(AWS_S3)) {
  configuration[AWS_S3] = {
  bucketname: process.env.S3_AWS_BUCKET_NAME,
  region: process.env.S3_AWS_REGION,
- timeout: parseInt(process.env.S3_AWS_TIMEOUT, 10) || DEFAULT_S3_TIMEOUT, // in minutes
- maxSize: parseInt(process.env.S3_AWS_MAX_SIZE, 10) || DEFAULT_S3_MAX_SIZE, // in mB
- maxEvents: parseInt(process.env.S3_AWS_MAX_EVENTS, 10) || DEFAULT_S3_MAX_EVENTS,
+ timeout: parseInt(process.env.S3_AWS_TIMEOUT, DECIMAL) || DEFAULT_S3_TIMEOUT, // in minutes
+ maxSize: parseInt(process.env.S3_AWS_MAX_SIZE, DECIMAL) || DEFAULT_S3_MAX_SIZE, // in mB
+ maxEvents: parseInt(process.env.S3_AWS_MAX_EVENTS, DECIMAL) || DEFAULT_S3_MAX_EVENTS,
  };

  if (!isNil(process.env.S3_AWS_ACCESS_KEY_ID)) configuration[AWS_S3].accessKeyId = process.env.S3_AWS_ACCESS_KEY_ID;
@@ -172,7 +179,9 @@ const { filter } = configuration;
  let filterConfig = [];

  if (filter.file) {
- try { filterConfig = JSON.parse(fs.readFileSync(filter.file).toString()); }
+ try {
+ filterConfig = JSON.parse(fs.readFileSync(filter.file).toString());
+ }
  catch (err) {
  throw new Error(`Invalid file for logger config: ${filter.file}, error: ${err.message}`);
  }
@@ -180,4 +189,4 @@ if (filter.file) {
  filter.config = filterConfig;
  checkConfig(configuration);

- module.exports = configuration;
+ export default configuration;
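
The configuration module now uses an ES module default export in place of module.exports. A minimal consumer sketch, assuming Node.js loads the package in ESM mode (the same import appears in package/index.js further down):

import config from './configuration/config.js'; // path as laid out in this package

console.log(config.mode); // the validated LOG_MODE array, or DEFAULT_MODE when LOG_MODE is unset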
@@ -0,0 +1,60 @@
+ import importPlugin from 'eslint-plugin-import';
+ import js from '@eslint/js';
+ import processDoc from '@mimik/eslint-plugin-document-env';
+ import stylistic from '@stylistic/eslint-plugin';
+
+ const MAX_LENGTH_LINE = 180;
+ const MAX_FUNCTION_PARAMETERS = 6;
+ const MAX_LINES_IN_FILES = 400;
+ const MAX_LINES_IN_FUNCTION = 100;
+ const MAX_STATEMENTS_IN_FUNCTION = 45;
+ const MIN_KEYS_IN_OBJECT = 10;
+
+ export default [
+ {
+ ignores: ['mochawesome-report/**', 'node_modules/**', 'dist/**'],
+ },
+ importPlugin.flatConfigs.recommended,
+ stylistic.configs['recommended-flat'],
+ js.configs.all,
+ {
+ plugins: {
+ processDoc,
+ },
+ languageOptions: {
+ ecmaVersion: 2022,
+ globals: {
+ console: 'readonly',
+ describe: 'readonly',
+ it: 'readonly',
+ require: 'readonly',
+ },
+ sourceType: 'module',
+ },
+ rules: {
+ '@stylistic/brace-style': ['warn', 'stroustrup', { allowSingleLine: true }],
+ '@stylistic/line-comment-position': ['off'],
+ '@stylistic/semi': ['error', 'always'],
+ 'capitalized-comments': ['off'],
+ 'curly': ['off'],
+ 'id-length': ['error', { exceptions: ['x', 'y', 'z', 'i', 'j', 'k'] }],
+ 'import/no-extraneous-dependencies': ['error', { devDependencies: true }],
+ 'import/no-unresolved': ['error', { amd: true, caseSensitiveStrict: true, commonjs: true }],
+ 'init-declarations': ['off'],
+ 'linebreak-style': ['off'],
+ 'max-len': ['warn', MAX_LENGTH_LINE, { ignoreComments: true }],
+ 'max-lines': ['warn', MAX_LINES_IN_FILES],
+ 'max-lines-per-function': ['warn', MAX_LINES_IN_FUNCTION],
+ 'max-params': ['error', MAX_FUNCTION_PARAMETERS],
+ 'max-statements': ['warn', MAX_STATEMENTS_IN_FUNCTION],
+ 'no-confusing-arrow': ['off'], // arrow isnt confusing
+ 'no-inline-comments': ['off'],
+ 'no-process-env': ['error'],
+ 'no-undefined': ['off'],
+ 'one-var': ['error', 'never'],
+ 'processDoc/validate-document-env': ['error'],
+ 'quotes': ['warn', 'single'],
+ 'sort-keys': ['error', 'asc', { caseSensitive: true, minKeys: MIN_KEYS_IN_OBJECT, natural: false }],
+ },
+ },
+ ];
package/index.js CHANGED
@@ -1,24 +1,30 @@
- const { createLogger, format, transports } = require('winston');
-
- const Sumologic = require('./lib/sumologicTransport');
- const AwsS3 = require('./lib/awsS3Transport');
- const AwsKinesis = require('./lib/awsKinesisTransport');
- const {
- stackInfo,
- correlationId,
- filterMeta,
- } = require('./lib/formatLib');
- const config = require('./configuration/config');
- const {
- SUMOLOGIC,
- AWS_S3,
+ import {
  AWS_KINESIS,
- NONE,
- WARN,
- // LOG,
+ AWS_S3,
  FLUSH,
  FLUSH_EXIT,
- } = require('./lib/common');
+ // LOG,
+ NONE_MODE,
+ SUMOLOGIC,
+ WARN,
+ } from './lib/common.js';
+ import {
+ correlationId,
+ filterMeta,
+ stackInfo,
+ } from './lib/formatLib.js';
+ import {
+ createLogger,
+ format,
+ transports,
+ } from 'winston';
+
+ import AwsKinesis from './lib/awsKinesisTransport.js';
+ import AwsS3 from './lib/awsS3Transport.js';
+ import Sumologic from './lib/sumologicTransport.js';
+ import config from './configuration/config.js';
+ import process from 'process';
+ import { setTimeout } from 'timers';

  let sumo;
  let awsS3;
@@ -56,45 +62,45 @@ if (config.mode.includes(SUMOLOGIC)) {
  serverType: config.server.type,
  serverId: config.server.id,
  });
- sumo.on(WARN, (data) => miniLog.warn('Could not send logs: ', data, data.correlationId));
+ sumo.on(WARN, data => miniLog.warn('Could not send logs: ', data, data.correlationId));
  // sumo.on(LOG, (data) => console.log('sumo done')); // miniLog.info('Information sent: ', data));
  logger.add(sumo);
  }
  if (config.mode.includes(AWS_S3)) {
  awsS3 = new AwsS3({
- level: config.level.log,
- bucketname: config[AWS_S3].bucketname,
- region: config[AWS_S3].region,
  accessKeyId: config[AWS_S3].accessKeyId,
- secretAccessKey: config[AWS_S3].secretAccessKey,
- timeout: config[AWS_S3].timeout,
+ bucketname: config[AWS_S3].bucketname,
+ format: format.combine(format.timestamp(), format.json()),
+ level: config.level.log,
  maxEvents: config[AWS_S3].maxEvents,
  maxSize: config[AWS_S3].maxSize,
- serverType: config.server.type,
+ region: config[AWS_S3].region,
+ secretAccessKey: config[AWS_S3].secretAccessKey,
  serverId: config.server.id,
- format: format.combine(format.timestamp(), format.json()),
+ serverType: config.server.type,
+ timeout: config[AWS_S3].timeout,
  });
- awsS3.on(WARN, (data) => miniLog.warn('Could not send logs: ', data));
+ awsS3.on(WARN, data => miniLog.warn('Could not send logs: ', data));
  // awsS3.on(LOG, (data) => console.log('awsS3 done')); // miniLog.info('Information sent : ', data));
  logger.add(awsS3);
  }
  if (config.mode.includes(AWS_KINESIS)) {
  awsKinesis = new AwsKinesis({
- level: config.level.log,
- streamNameInfo: config[AWS_KINESIS].streamNameInfo,
- streamNameError: config[AWS_KINESIS].streamNameError,
- streamNameOther: config[AWS_KINESIS].streamNameOther,
- region: config[AWS_KINESIS].region,
  accessKeyId: config[AWS_KINESIS].accessKeyId,
- secretAccessKey: config[AWS_KINESIS].secretAccessKey,
- timeout: config[AWS_KINESIS].timeout,
+ format: format.combine(format.timestamp(), format.json()),
+ level: config.level.log,
  maxEvents: config[AWS_KINESIS].maxEvents,
  maxSize: config[AWS_KINESIS].maxSize,
- serverType: config.server.type,
+ region: config[AWS_KINESIS].region,
+ secretAccessKey: config[AWS_KINESIS].secretAccessKey,
  serverId: config.server.id,
- format: format.combine(format.timestamp(), format.json()),
+ serverType: config.server.type,
+ streamNameError: config[AWS_KINESIS].streamNameError,
+ streamNameInfo: config[AWS_KINESIS].streamNameInfo,
+ streamNameOther: config[AWS_KINESIS].streamNameOther,
+ timeout: config[AWS_KINESIS].timeout,
  });
- awsKinesis.on(WARN, (data) => miniLog.warn('Could not send logs: ', data));
+ awsKinesis.on(WARN, data => miniLog.warn('Could not send logs: ', data));
  // awsKinesis.on(LOG, (data) => console.log('awsKinesis done')); // miniLog.info('Information sent: ', data));
  logger.add(awsKinesis);
  }
@@ -105,7 +111,7 @@ logger.flushAndExit = (code) => {
  let awsS3Done = false;
  let awsKinesisDone = false;

- if (config.mode.includes(NONE)) return process.exit(code);
+ if (config.mode.includes(NONE_MODE)) return process.exit(code);
  if (awsS3) {
  awsS3.flush(FLUSH_EXIT);
  awsS3.on(FLUSH_EXIT, () => {
@@ -142,7 +148,7 @@ logger.flush = () => {
  let awsS3Done = false;
  let awsKinesisDone = false;

- if (config.mode.includes(NONE)) return null;
+ if (config.mode.includes(NONE_MODE)) return null;
  if (awsS3) {
  awsS3.flush(FLUSH);
  awsS3.on(FLUSH, () => {
@@ -175,4 +181,4 @@ logger.flush = () => {

  logger.LEVELS = ['error', 'warn', 'info', 'verbose', 'debug', 'silly'];

- module.exports = logger;
+ export default logger;
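
The package entry point now exposes the winston logger as an ES module default export instead of module.exports. A minimal usage sketch, assuming the published entry point resolves to this index.js:

import logger from '@mimik/sumologic-winston-logger';

logger.info('service started'); // the winston logger created above exposes the usual level methods; see logger.LEVELS
logger.flushAndExit(0); // flush any buffered S3/Kinesis batches, then exit with the given code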
@@ -1,36 +1,44 @@
- const Transport = require('winston-transport');
- const Promise = require('bluebird');
- const { KinesisClient, PutRecordsCommand } = require('@aws-sdk/client-kinesis');
-
- const {
+ import {
  AWS_KINESIS,
- LOG,
- WARN,
- MESSAGE,
- UNKNOWN_TYPE,
- UNKNOWN_ID,
  CLIENTS,
- PARTITION_KEY,
- INFO,
  ERROR,
+ INFO,
+ LOG,
+ MESSAGE,
  OTHER,
- } = require('./common');
+ PARTITION_KEY,
+ UNKNOWN_ID,
+ UNKNOWN_TYPE,
+ WARN,
+ } from './common.js';
+ import {
+ KinesisClient,
+ PutRecordsCommand,
+ } from '@aws-sdk/client-kinesis';
+ import {
+ setImmediate,
+ setInterval,
+ } from 'timers';
+ import Buffer from 'buffer';
+ import Promise from 'bluebird';
+ import Transport from 'winston-transport';

+ const RANDOM_MIN = 0;
  const RANDOM_LIMIT = 100;
+ const MEGA = 1048576; // 2^20 conversion to mega
+ const SYSTEM_ERROR = 500;

  const events = {};

- function randomInt(low, high) {
- return Math.floor(Math.random() * (high - low) + low);
- }
+ const randomInt = (low, high) => Math.floor(Math.random() * (high - low) + low);

- module.exports = class AwsKinesis extends Transport {
+ export default class AwsKinesis extends Transport {
  constructor(options = {}) {
  super(options);
  this.serverType = options.serverType || UNKNOWN_TYPE;
  this.serverId = `${options.serverId || UNKNOWN_ID}${CLIENTS}`; // adding CLIENTS to conform to oauth tokens
  this.name = AWS_KINESIS;
- this.maxSize = 1024 * 1024 * options.maxSize; // in mB
+ this.maxSize = MEGA * options.maxSize; // in mB
  this.maxEvents = options.maxEvents;
  this.timeInterval = options.timeout; // in ms
  this.streamNameInfo = options.streamNameInfo;
@@ -75,10 +83,10 @@ module.exports = class AwsKinesis extends Transport {
  send(Records, lvl) {
  return this.put(Records, lvl)
  .then(() => this.emit(LOG, { message: `log sent to ${AWS_KINESIS}` }))
- .catch((err) => this.emit(WARN, {
+ .catch(err => this.emit(WARN, {
  data: Records,
  message: `could not log to ${AWS_KINESIS}`,
- error: { message: err.message, statusCode: err.statusCode || 500, details: err },
+ error: { message: err.message, statusCode: err.statusCode || SYSTEM_ERROR, details: err },
  }));
  }

@@ -90,10 +98,10 @@ module.exports = class AwsKinesis extends Transport {
  events[level].data = []; // we may lose some logs due to concurrency issues
  events[level].size = 0;
  })
- .catch((err) => this.emit(WARN, {
+ .catch(err => this.emit(WARN, {
  data: events[level].Records,
  message: `could not log to ${AWS_KINESIS}`,
- error: { message: err.message, statusCode: err.statusCode || 500, details: err },
+ error: { message: err.message, statusCode: err.statusCode || SYSTEM_ERROR, details: err },
  }));
  }
  return Promise.resolve();
@@ -105,8 +113,8 @@ module.exports = class AwsKinesis extends Transport {
  const messageInfo = info[MESSAGE];
  const infoSize = messageInfo.length;
  let { level } = info;
- const { serverType } = global;
- let { serverId } = global;
+ const { serverType } = globalThis;
+ let { serverId } = globalThis;
  const data = JSON.parse(messageInfo);

  if (serverType) {
@@ -126,7 +134,7 @@ module.exports = class AwsKinesis extends Transport {
  levelData = events[level];
  }
  levelData.size += infoSize;
- levelData.Records.push({ Data: Buffer.from(JSON.stringify(data)), PartitionKey: `${PARTITION_KEY}-${randomInt(0, RANDOM_LIMIT)}` });
+ levelData.Records.push({ Data: Buffer.from(JSON.stringify(data)), PartitionKey: `${PARTITION_KEY}-${randomInt(RANDOM_MIN, RANDOM_LIMIT)}` });
  if (levelData.Records.length >= this.maxEvents || levelData.size >= this.maxSize) {
  this.send(levelData.Records, level);
  levelData.Records = [];
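
The Kinesis transport (and the S3 transport below) replaces the inline 1024 * 1024 factor with a MEGA constant; the size math is unchanged, so options.maxSize is still interpreted in megabytes of 1,048,576 bytes. A quick check of the arithmetic used above:

const MEGA = 1048576; // 2 ** 20
console.log(MEGA === 1024 * 1024); // true: a maxSize of 5 still means 5 * 1,048,576 bytes before a batch is flushed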
@@ -1,30 +1,42 @@
- const Transport = require('winston-transport');
- const Promise = require('bluebird');
- const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
-
- const {
+ import {
  AWS_S3,
+ CLIENTS,
  LOG,
- WARN,
  MESSAGE,
- UNKNOWN_TYPE,
  UNKNOWN_ID,
- CLIENTS,
- } = require('./common');
+ UNKNOWN_TYPE,
+ WARN,
+ } from './common.js';
+ import {
+ PutObjectCommand,
+ S3Client,
+ } from '@aws-sdk/client-s3';
+ import {
+ setImmediate,
+ setInterval,
+ } from 'timers';
+ import Promise from 'bluebird';
+ import Transport from 'winston-transport';

  const events = {};
  const typeEvents = {};

- module.exports = class AwsS3 extends Transport {
+ const INCR = 1;
+ const NONE = 0;
+ const MEGA = 1048576; // 2^20 conversion to mega
+ const MILLI_MIN = 60000; // 1000*60 conversion to minute
+ const SYSTEM_ERROR = 500;
+
+ export default class AwsS3 extends Transport {
  constructor(options = {}) {
  super(options);
  this.serverType = options.serverType || UNKNOWN_TYPE;
  this.serverId = `${options.serverId || UNKNOWN_ID}${CLIENTS}`; // adding CLIENTS to conform to oauth tokens
  this.server = `${this.serverType}/${this.serverId}`;
  this.name = AWS_S3;
- this.maxSize = 1024 * 1024 * options.maxSize; // in mB
+ this.maxSize = MEGA * options.maxSize; // in mB
  this.maxEvents = options.maxEvents;
- this.timeInterval = 1000 * 60 * options.timeout; // in minutes
+ this.timeInterval = MILLI_MIN * options.timeout; // in minutes
  this.bucketname = options.bucketname;
  this.s3ClientConfig = {
  region: options.region,
@@ -59,7 +71,7 @@ module.exports = class AwsS3 extends Transport {
  put(data, lvl, date) {
  const command = new PutObjectCommand({
  Bucket: this.bucketname,
- Key: `${lvl}/${this.server}/${date.getFullYear()}/${date.getMonth() + 1}/${date.getDate()}/${date.toISOString()}.json`,
+ Key: `${lvl}/${this.server}/${date.getFullYear()}/${date.getMonth() + INCR}/${date.getDate()}/${date.toISOString()}.json`,
  Body: JSON.stringify(data),
  });

@@ -69,10 +81,10 @@ module.exports = class AwsS3 extends Transport {
  send(data, lvl, date) {
  return this.put(data, lvl, date)
  .then(() => this.emit(LOG, { message: `log sent to ${AWS_S3}` }))
- .catch((err) => this.emit(WARN, {
+ .catch(err => this.emit(WARN, {
  data,
  message: `could not log to ${AWS_S3}`,
- error: { message: err.message, statusCode: err.statusCode || 500, details: err },
+ error: { message: err.message, statusCode: err.statusCode || SYSTEM_ERROR, details: err },
  }));
  }

@@ -80,20 +92,20 @@ module.exports = class AwsS3 extends Transport {
  const errors = [];
  let count = 0;

- return Promise.map(Object.keys(data), (sType) => Promise.each(Object.keys(data[sType]), (sId) => {
+ return Promise.map(Object.keys(data), sType => Promise.each(Object.keys(data[sType]), (sId) => {
  const command = new PutObjectCommand({
  Bucket: this.bucketname,
- Key: `${lvl}/${sType}/${sId}/${date.getFullYear()}/${date.getMonth() + 1}/${date.getDate()}/${date.toISOString()}.json`,
+ Key: `${lvl}/${sType}/${sId}/${date.getFullYear()}/${date.getMonth() + INCR}/${date.getDate()}/${date.toISOString()}.json`,
  Body: JSON.stringify(data[sType][sId]),
  });

  return this.s3.send(command)
  .then(() => {
- count += 1;
+ count += INCR;
  })
- .catch((err) => errors.push({
+ .catch(err => errors.push({
  data: data[sType][sId],
- error: { message: err.message, statusCode: err.statusCode || 500, details: err },
+ error: { message: err.message, statusCode: err.statusCode || SYSTEM_ERROR, details: err },
  }));
  }))
  .then(() => ({ count, errors }));
@@ -105,8 +117,8 @@ module.exports = class AwsS3 extends Transport {
  const { count, errors } = result;
  const errorCount = errors.length;

- if (errorCount === 0) return this.emit(LOG, { message: `${count} logs sent to ${AWS_S3}` });
- if (count === 0) {
+ if (errorCount === NONE) return this.emit(LOG, { message: `${count} logs sent to ${AWS_S3}` });
+ if (count === NONE) {
  return this.emit(WARN, {
  errors,
  nblogs: errorCount,
@@ -131,7 +143,7 @@ module.exports = class AwsS3 extends Transport {
  events[level].data = []; // we may lose some logs due to concurrency issues
  events[level].size = 0;
  })
- .catch((err) => this.emit(WARN, { error: err.message, message: `could not log to ${AWS_S3}` }));
+ .catch(err => this.emit(WARN, { error: err.message, message: `could not log to ${AWS_S3}` }));
  }
  return Promise.resolve();
  })
@@ -149,8 +161,8 @@ module.exports = class AwsS3 extends Transport {
  const { count, errors } = result;
  const errorCount = errors.length;

- if (errorCount === 0) return this.emit(LOG, { message: `${count} logs sent to ${AWS_S3}` });
- if (count === 0) {
+ if (errorCount === NONE) return this.emit(LOG, { message: `${count} logs sent to ${AWS_S3}` });
+ if (count === NONE) {
  return this.emit(WARN, {
  errors,
  nblogs: errorCount,
@@ -174,8 +186,8 @@ module.exports = class AwsS3 extends Transport {
  const messageInfo = info[MESSAGE];
  const infoSize = messageInfo.length;
  const { level } = info;
- const { serverType } = global;
- let { serverId } = global;
+ const { serverType } = globalThis;
+ let { serverId } = globalThis;
  const data = JSON.parse(messageInfo);

  if (serverType) {
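
Both transports now read serverType and serverId from globalThis instead of global; in Node.js these refer to the same object, so the override described in the configuration comments keeps working. A minimal sketch with illustrative values:

globalThis.serverType = 'edge-node'; // hypothetical value; takes precedence over SERVER_TYPE, per the config.js comment
globalThis.serverId = 'node-0042'; // hypothetical value; takes precedence over SERVER_ID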