@mhmdhammoud/meritt-utils 1.5.1 → 1.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,41 +3,82 @@ name: NPM Publish on Release
3
3
  on:
4
4
  push:
5
5
  branches: [master]
6
+
6
7
  jobs:
8
+ test:
9
+ runs-on: ubuntu-latest
10
+ steps:
11
+ - name: Checkout code
12
+ uses: actions/checkout@v4
13
+
14
+ - name: Setup Node.js
15
+ uses: actions/setup-node@v4
16
+ with:
17
+ node-version: '20'
18
+ cache: 'npm'
19
+
20
+ - name: Install dependencies
21
+ run: npm ci
22
+
23
+ - name: Run type check
24
+ run: npm run test:types
25
+
26
+ - name: Run linting
27
+ run: npm run lint
28
+
29
+ - name: Run tests
30
+ run: npm test
31
+
7
32
  build:
33
+ needs: test
8
34
  runs-on: ubuntu-latest
9
35
  steps:
10
- - uses: actions/checkout@v3
11
- - uses: actions/setup-node@v3
36
+ - name: Checkout code
37
+ uses: actions/checkout@v4
38
+
39
+ - name: Setup Node.js
40
+ uses: actions/setup-node@v4
12
41
  with:
13
- node-version: 16
14
- - run: npm ci
42
+ node-version: '20'
43
+ cache: 'npm'
44
+
45
+ - name: Install dependencies
46
+ run: npm ci
47
+
48
+ - name: Build project
49
+ run: npm run build
50
+
51
+ - name: Upload build artifacts
52
+ uses: actions/upload-artifact@v4
53
+ with:
54
+ name: dist
55
+ path: dist/
15
56
 
16
57
  publish-npm:
17
- needs: build
58
+ needs: [test, build]
18
59
  runs-on: ubuntu-latest
60
+ if: success()
19
61
  steps:
20
- - uses: actions/checkout@v3
21
- - uses: actions/setup-node@v3
62
+ - name: Checkout code
63
+ uses: actions/checkout@v4
64
+
65
+ - name: Setup Node.js
66
+ uses: actions/setup-node@v4
22
67
  with:
23
- node-version: 16
68
+ node-version: '20'
24
69
  registry-url: https://registry.npmjs.org/
25
- - run: npm ci
26
- - run: npm publish
27
- env:
28
- NODE_AUTH_TOKEN: ${{secrets.npm_token}}
29
- notify:
30
- needs: [publish-npm]
31
- runs-on: ubuntu-latest
32
- steps:
33
- - name: Notify by Email
34
- uses: dawidd6/action-send-mail@v2
70
+ cache: 'npm'
71
+
72
+ - name: Install dependencies
73
+ run: npm ci
74
+
75
+ - name: Download build artifacts
76
+ uses: actions/download-artifact@v4
35
77
  with:
36
- server_address: ${{ secrets.EMAIL_HOST }}
37
- server_port: 465
38
- username: ${{ secrets.EMAIL_USERNAME }}
39
- password: ${{ secrets.EMAIL_PASSWORD }}
40
- subject: ${{ github.event.head_commit.message }} ${{ github.job }} job of ${{ github.repository }} has ${{ job.status }}
41
- body: ${{ github.job }} job in workflow ${{ github.workflow }} of ${{ github.repository }} has ${{ job.status }}
42
- to: mohammad.hammoud.lb@hotmail.com,steef12009@gmail.com
43
- from: Github Action
78
+ name: dist
79
+ path: dist/
80
+
81
+ - name: Publish to NPM
82
+ run: npm publish
83
+ env:
84
+ NODE_AUTH_TOKEN: ${{ secrets.npm_token }}
@@ -1,29 +1,83 @@
1
- name: Changes
1
+ name: CI Pipeline
2
+
2
3
  on:
3
4
  push:
4
5
  branches: [dev]
6
+ pull_request:
7
+ branches: [dev, master]
8
+
5
9
  jobs:
6
- type-check:
10
+ lint-and-test:
7
11
  runs-on: ubuntu-latest
8
12
  steps:
9
- - uses: actions/checkout@v3
10
- - uses: actions/setup-node@v3
13
+ - name: Checkout code
14
+ uses: actions/checkout@v4
15
+
16
+ - name: Setup Node.js
17
+ uses: actions/setup-node@v4
11
18
  with:
12
- node-version: 16
13
- - run: npm ci
14
- - run: npm run test:types
15
- notify:
16
- needs: [type-check]
19
+ node-version: '20'
20
+ cache: 'npm'
21
+
22
+ - name: Install dependencies
23
+ run: npm ci
24
+
25
+ - name: Run type check
26
+ run: npm run test:types
27
+
28
+ - name: Run linting
29
+ run: npm run lint
30
+
31
+ - name: Run tests with coverage
32
+ run: npm run test:coverage
33
+
34
+ - name: Upload coverage reports
35
+ uses: actions/upload-artifact@v4
36
+ if: always()
37
+ with:
38
+ name: coverage-report
39
+ path: coverage/
40
+
41
+ build:
42
+ needs: lint-and-test
43
+ runs-on: ubuntu-latest
44
+ steps:
45
+ - name: Checkout code
46
+ uses: actions/checkout@v4
47
+
48
+ - name: Setup Node.js
49
+ uses: actions/setup-node@v4
50
+ with:
51
+ node-version: '20'
52
+ cache: 'npm'
53
+
54
+ - name: Install dependencies
55
+ run: npm ci
56
+
57
+ - name: Build project
58
+ run: npm run build
59
+
60
+ - name: Upload build artifacts
61
+ uses: actions/upload-artifact@v4
62
+ with:
63
+ name: dist
64
+ path: dist/
65
+
66
+ security-audit:
17
67
  runs-on: ubuntu-latest
18
68
  steps:
19
- - name: Notify by Email
20
- uses: dawidd6/action-send-mail@v2
69
+ - name: Checkout code
70
+ uses: actions/checkout@v4
71
+
72
+ - name: Setup Node.js
73
+ uses: actions/setup-node@v4
21
74
  with:
22
- server_address: ${{ secrets.EMAIL_HOST }}
23
- server_port: 465
24
- username: ${{ secrets.EMAIL_USERNAME }}
25
- password: ${{ secrets.EMAIL_PASSWORD }}
26
- subject: ${{ github.event.head_commit.message }} ${{ github.job }} job of ${{ github.repository }} has ${{ job.status }}
27
- body: ${{ github.job }} job in workflow ${{ github.workflow }} of ${{ github.repository }} has ${{ job.status }}
28
- to: mohammad.hammoud.lb@hotmail.com,steef12009@gmail.com
29
- from: Github Action
75
+ node-version: '20'
76
+ cache: 'npm'
77
+
78
+ - name: Install dependencies
79
+ run: npm ci
80
+
81
+ - name: Run security audit
82
+ run: npm audit --audit-level=moderate
83
+
package/.prettierrc ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "semi": false,
3
+ "singleQuote": true,
4
+ "trailingComma": "es5",
5
+ "tabWidth": 2,
6
+ "useTabs": true,
7
+ "printWidth": 80
8
+ }
package/ReleaseNotes.md CHANGED
@@ -1,5 +1,60 @@
1
1
  # Changes
2
2
 
3
+ ## Version 1.5.3
4
+
5
+ ### Logger Improvements - Critical Bug Fixes & Production Reliability
6
+
7
+ #### Critical Fixes
8
+
9
+ - **Fixed broken JSON.stringify**: Corrected production log serialization from `JSON.stringify(...args)` to `JSON.stringify(args)` - previously only stringified first argument
10
+ - **Fixed silent failures**: Logger now throws clear errors when `LOG_LEVEL` is invalid instead of silently failing with undefined logger
11
+ - **Fixed return type mismatch**: `getLogger()` now properly returns `PinoLogger` type with explicit error handling
12
+ - **Fixed buffer configuration**: Corrected inconsistent buffer settings to match tests (`minLength: 1024, sync: true`)
13
+ - **Fixed race condition**: Event listener now registered before calling `.end()` to prevent missed flush events on shutdown
14
+
15
+ #### Environment Variable Validation
16
+
17
+ - **Added Elasticsearch validation**: Validates required env vars (`ELASTICSEARCH_NODE`, `ELASTICSEARCH_USERNAME`, `ELASTICSEARCH_PASSWORD`, `SERVER_NICKNAME`) with clear error messages
18
+ - **Added integer parsing validation**: All numeric env vars now validated to prevent `NaN` values from breaking configuration
19
+ - **Validates positive numbers**: Ensures flush intervals, buffer sizes, retries, and timeouts are positive integers
20
+
21
+ #### Configurable Settings (New Environment Variables)
22
+
23
+ - `ES_FLUSH_INTERVAL_MS` - How often to send logs (default: 2000ms = 2 seconds)
24
+ - `ES_FLUSH_BYTES` - Buffer size before forcing flush (default: 102400 = 100KB)
25
+ - `ES_MAX_RETRIES` - Number of retry attempts on failure (default: 3)
26
+ - `ES_REQUEST_TIMEOUT_MS` - Request timeout before retry (default: 30000ms = 30 seconds)
27
+
28
+ #### Reliability Improvements
29
+
30
+ - **Error monitoring**: Elasticsearch transport errors now logged to console instead of silent failure
31
+ - `error` event: Connection errors with clear messaging
32
+ - `insertError` event: Document indexing failures
33
+ - **Graceful shutdown**: Properly flushes buffered logs before process exit
34
+ - Handles `SIGTERM` and `SIGINT` signals
35
+ - 5-second timeout prevents hanging forever
36
+ - Proper async/await handling to ensure flush completes
37
+ - **Auto-reconnection**: `sniffOnConnectionFault: true` enables automatic reconnection when Elasticsearch nodes fail
38
+ - **Retry logic**: Configurable retry attempts with timeout for failed requests
39
+
40
+ #### Type Safety
41
+
42
+ - **Removed `any` types**: All configurations now use proper TypeScript interfaces
43
+ - **Extended ElasticConfig interface**: Added proper types for `maxRetries`, `requestTimeout`, `sniffOnConnectionFault`, and buffer settings
44
+ - **Full compile-time validation**: Type system now validates all configuration options
45
+
46
+ #### Performance
47
+
48
+ - **Optimized flush settings**: Default 2-second interval with 100KB buffer balances real-time logs with reliability
49
+ - **Reduced network overhead**: Larger buffer prevents excessive network calls during high-traffic periods
50
+
51
+ #### Migration Notes
52
+
53
+ - All changes are backward compatible
54
+ - Default values ensure existing deployments work without changes
55
+ - Optional environment variables allow fine-tuning per environment
56
+ - No breaking changes to the Logger API
57
+
3
58
  ## Version 1.4.1
4
59
 
5
60
  - Json stringify the log message if it's being pushed to Kibana
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
15
15
  }) : function(o, v) {
16
16
  o["default"] = v;
17
17
  });
18
- var __importStar = (this && this.__importStar) || function (mod) {
19
- if (mod && mod.__esModule) return mod;
20
- var result = {};
21
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
22
- __setModuleDefault(result, mod);
23
- return result;
24
- };
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
25
35
  Object.defineProperty(exports, "__esModule", { value: true });
26
36
  const logger_1 = __importStar(require("../lib/logger"));
27
37
  const pino_1 = require("pino");
@@ -63,7 +63,7 @@ declare class Crypto {
63
63
  * crypto.generateKeys()
64
64
  * ```
65
65
  * */
66
- generateKeys: () => Record<'publicKey' | 'privateKey', number>;
66
+ generateKeys: () => Record<"publicKey" | "privateKey", number>;
67
67
  /**
68
68
  *
69
69
  * @param publicKey - The public key number
@@ -0,0 +1,27 @@
1
+ /**
2
+ * Elasticsearch transport for Pino with connection lifecycle resilience.
3
+ *
4
+ * Based on pino-elasticsearch with a fix for GitHub issue #140:
5
+ * When maxRetries are exceeded and Elasticsearch nodes are DEAD, the bulk helper
6
+ * destroys the splitter stream, causing logs to stop permanently until restart.
7
+ *
8
+ * This implementation overrides splitter.destroy to BOTH resurrect the connection
9
+ * pool AND reinitialize the bulk handler, so logging continues after ES recovers.
10
+ *
11
+ * @see https://github.com/pinojs/pino-elasticsearch/issues/140
12
+ * @see https://github.com/pinojs/pino-elasticsearch/issues/72
13
+ */
14
+ import type { ClientOptions } from '@elastic/elasticsearch';
15
+ export interface ElasticTransportOptions extends Pick<ClientOptions, 'node' | 'auth' | 'cloud' | 'caFingerprint' | 'Connection' | 'ConnectionPool' | 'maxRetries' | 'requestTimeout'> {
16
+ sniffOnConnectionFault?: boolean;
17
+ index?: string | ((logTime: string) => string);
18
+ flushBytes?: number;
19
+ 'flush-bytes'?: number;
20
+ flushInterval?: number;
21
+ 'flush-interval'?: number;
22
+ esVersion?: number;
23
+ 'es-version'?: number;
24
+ rejectUnauthorized?: boolean;
25
+ tls?: ClientOptions['tls'];
26
+ }
27
+ export declare const createElasticTransport: (opts?: ElasticTransportOptions) => NodeJS.ReadWriteStream;
@@ -0,0 +1,137 @@
1
+ "use strict";
2
+ /**
3
+ * Elasticsearch transport for Pino with connection lifecycle resilience.
4
+ *
5
+ * Based on pino-elasticsearch with a fix for GitHub issue #140:
6
+ * When maxRetries are exceeded and Elasticsearch nodes are DEAD, the bulk helper
7
+ * destroys the splitter stream, causing logs to stop permanently until restart.
8
+ *
9
+ * This implementation overrides splitter.destroy to BOTH resurrect the connection
10
+ * pool AND reinitialize the bulk handler, so logging continues after ES recovers.
11
+ *
12
+ * @see https://github.com/pinojs/pino-elasticsearch/issues/140
13
+ * @see https://github.com/pinojs/pino-elasticsearch/issues/72
14
+ */
15
+ Object.defineProperty(exports, "__esModule", { value: true });
16
+ exports.createElasticTransport = void 0;
17
+ // eslint-disable-next-line @typescript-eslint/no-require-imports
18
+ const split = require('split2');
19
+ const elasticsearch_1 = require("@elastic/elasticsearch");
20
+ function setDateTimeString(value) {
21
+ if (value !== null && typeof value === 'object' && 'time' in value) {
22
+ const t = value.time;
23
+ if ((typeof t === 'string' && t.length > 0) ||
24
+ (typeof t === 'number' && t >= 0)) {
25
+ return new Date(t).toISOString();
26
+ }
27
+ }
28
+ return new Date().toISOString();
29
+ }
30
+ function getIndexName(index, time) {
31
+ if (typeof index === 'function') {
32
+ return index(time);
33
+ }
34
+ return index.replace('%{DATE}', time.substring(0, 10));
35
+ }
36
+ function initializeBulkHandler(opts, client, splitter) {
37
+ var _a, _b, _c, _d, _e, _f, _g;
38
+ const esVersion = Number((_b = (_a = opts.esVersion) !== null && _a !== void 0 ? _a : opts['es-version']) !== null && _b !== void 0 ? _b : 7);
39
+ const index = (_c = opts.index) !== null && _c !== void 0 ? _c : 'pino';
40
+ const buildIndexName = typeof index === 'function' ? index : null;
41
+ const opType = esVersion >= 7 ? undefined : undefined;
42
+ // CRITICAL FIX (issue #140): When bulk helper destroys stream after retries exhausted,
43
+ // we must BOTH resurrect the pool AND reinitialize the bulk handler so logging continues.
44
+ // connectionPool.resurrect exists at runtime (elastic-transport) but may not be in types
45
+ const pool = client.connectionPool;
46
+ const splitterWithDestroy = splitter;
47
+ splitterWithDestroy.destroy = function () {
48
+ if (typeof pool.resurrect === 'function') {
49
+ pool.resurrect({ name: 'elasticsearch-js' });
50
+ }
51
+ // Reinitialize bulk handler - without this, logging stops permanently until restart
52
+ initializeBulkHandler(opts, client, splitter);
53
+ };
54
+ const indexName = (time = new Date().toISOString()) => buildIndexName ? buildIndexName(time) : getIndexName(index, time);
55
+ const bulkInsert = client.helpers.bulk({
56
+ datasource: splitter,
57
+ flushBytes: (_e = (_d = opts.flushBytes) !== null && _d !== void 0 ? _d : opts['flush-bytes']) !== null && _e !== void 0 ? _e : 1000,
58
+ flushInterval: (_g = (_f = opts.flushInterval) !== null && _f !== void 0 ? _f : opts['flush-interval']) !== null && _g !== void 0 ? _g : 3000,
59
+ refreshOnCompletion: indexName(),
60
+ onDocument(doc) {
61
+ var _a, _b;
62
+ const d = doc;
63
+ const date = (_b = (_a = d.time) !== null && _a !== void 0 ? _a : d['@timestamp']) !== null && _b !== void 0 ? _b : new Date().toISOString();
64
+ if (opType === 'create') {
65
+ d['@timestamp'] = date;
66
+ }
67
+ return {
68
+ index: {
69
+ _index: indexName(date),
70
+ op_type: opType,
71
+ },
72
+ };
73
+ },
74
+ onDrop(doc) {
75
+ const error = new Error('Dropped document');
76
+ error.document = doc;
77
+ splitter.emit('insertError', error);
78
+ },
79
+ });
80
+ bulkInsert.then((stats) => splitter.emit('insert', stats), (err) => splitter.emit('error', err));
81
+ }
82
+ const createElasticTransport = (opts = {}) => {
83
+ const splitter = split(function (line) {
84
+ let value;
85
+ try {
86
+ value = JSON.parse(line);
87
+ }
88
+ catch (error) {
89
+ this.emit('unknown', line, error);
90
+ return;
91
+ }
92
+ if (typeof value === 'boolean') {
93
+ this.emit('unknown', line, 'Boolean value ignored');
94
+ return;
95
+ }
96
+ if (value === null) {
97
+ this.emit('unknown', line, 'Null value ignored');
98
+ return;
99
+ }
100
+ if (typeof value !== 'object') {
101
+ value = { data: value, time: setDateTimeString(value) };
102
+ }
103
+ else {
104
+ const obj = value;
105
+ if (obj['@timestamp'] === undefined) {
106
+ ;
107
+ obj.time = setDateTimeString(obj);
108
+ }
109
+ }
110
+ return value;
111
+ }, { autoDestroy: true });
112
+ const clientOpts = {
113
+ node: opts.node,
114
+ auth: opts.auth,
115
+ cloud: opts.cloud,
116
+ tls: { rejectUnauthorized: opts.rejectUnauthorized, ...opts.tls },
117
+ maxRetries: opts.maxRetries,
118
+ requestTimeout: opts.requestTimeout,
119
+ sniffOnConnectionFault: opts.sniffOnConnectionFault,
120
+ };
121
+ if (opts.caFingerprint) {
122
+ clientOpts.caFingerprint = opts.caFingerprint;
123
+ }
124
+ if (opts.Connection) {
125
+ clientOpts.Connection = opts.Connection;
126
+ }
127
+ if (opts.ConnectionPool) {
128
+ clientOpts.ConnectionPool = opts.ConnectionPool;
129
+ }
130
+ const client = new elasticsearch_1.Client(clientOpts);
131
+ client.diagnostic.on('resurrect', () => {
132
+ initializeBulkHandler(opts, client, splitter);
133
+ });
134
+ initializeBulkHandler(opts, client, splitter);
135
+ return splitter;
136
+ };
137
+ exports.createElasticTransport = createElasticTransport;
@@ -1,5 +1,4 @@
1
- import { Options as ElasticConfig } from 'pino-elasticsearch';
2
- import { LOG_LEVEL, LogEvent } from '../types';
1
+ import { LOG_LEVEL, LogEvent, ElasticConfig } from '../types';
3
2
  /**
4
3
  * Checks if a given log level is valid.
5
4
  * @param level - The log level to check.