@nm-logger/logger 1.1.9 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +0 -18
- package/README.md +21 -223
- package/index.d.ts +13 -65
- package/index.js +1 -3
- package/package.json +1 -36
- package/src/DailyWatcher.js +3 -84
- package/src/LogWriter.js +3 -63
- package/src/Logger.js +12 -252
- package/src/Queue.js +1 -29
- package/src/S3Uploader.js +2 -47
- package/src/utils.js +6 -100
package/LICENSE
CHANGED
|
@@ -1,21 +1,3 @@
|
|
|
1
1
|
MIT License
|
|
2
2
|
|
|
3
3
|
Copyright (c) 2025
|
|
4
|
-
|
|
5
|
-
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
-
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
-
in the Software without restriction, including without limitation the rights
|
|
8
|
-
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
-
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
-
furnished to do so, subject to the following conditions:
|
|
11
|
-
|
|
12
|
-
The above copyright notice and this permission notice shall be included in all
|
|
13
|
-
copies or substantial portions of the Software.
|
|
14
|
-
|
|
15
|
-
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
-
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
-
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
-
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
-
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
-
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
-
SOFTWARE.
|
package/README.md
CHANGED
|
@@ -1,241 +1,39 @@
|
|
|
1
|
-
# @nm-logger/logger
|
|
1
|
+
# @nm-logger/logger v1.2.0
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
- Per-request logging (success, error, external API)
|
|
5
|
-
- Separate daily files:
|
|
6
|
-
- `daily_logs_success.json`
|
|
7
|
-
- `daily_logs_error.json`
|
|
8
|
-
- `daily_logs_external.json`
|
|
9
|
-
- S3 upload + queue + daily rotation
|
|
10
|
-
- Correlation IDs (with `X-Correlation-ID` header)
|
|
11
|
-
- External API logging (Axios)
|
|
12
|
-
- Sensitive field masking (password, token, otp, etc.)
|
|
3
|
+
Minimal JSON logger for Express:
|
|
13
4
|
|
|
14
|
-
|
|
5
|
+
- Logs every handled request as **success**
|
|
6
|
+
- Logs errors from Express error middleware as **error**
|
|
7
|
+
- Logs Axios requests as **external**
|
|
8
|
+
- Local daily files (under `baseDir`):
|
|
9
|
+
- `YYYY/MM/DD/daily_logs_success.json`
|
|
10
|
+
- `YYYY/MM/DD/daily_logs_error.json`
|
|
11
|
+
- `YYYY/MM/DD/daily_logs_external.json`
|
|
12
|
+
- S3: one JSON file per day per category, always appended (read + merge + put)
|
|
13
|
+
- Local previous-day folder is removed when date changes
|
|
14
|
+
- `employee_id` is taken from `req.user.employee_id` (falling back to `req.user.emp_code` or `req.user.id` when available)
|
|
15
15
|
|
|
16
|
-
##
|
|
17
|
-
|
|
18
|
-
After you publish the package to npm:
|
|
19
|
-
|
|
20
|
-
```bash
|
|
21
|
-
npm install @nm-logger/logger
|
|
22
|
-
```
|
|
23
|
-
|
|
24
|
-
---
|
|
25
|
-
|
|
26
|
-
## Log Format
|
|
27
|
-
|
|
28
|
-
Each log line in `daily_logs.json` is a JSON object:
|
|
29
|
-
|
|
30
|
-
```json
|
|
31
|
-
{
|
|
32
|
-
"url": "/api/v1/attendance/get",
|
|
33
|
-
"body": "{\"month\":\"2025-12\"}",
|
|
34
|
-
"params": "{\"params\":{},\"query\":{}}",
|
|
35
|
-
"type": "get",
|
|
36
|
-
"error": "",
|
|
37
|
-
"date": "2025-12-05 12:24:00",
|
|
38
|
-
"employee_id": "TAKK122",
|
|
39
|
-
"correlation_id": "cid-abcd1234-17f5d3c9a"
|
|
40
|
-
}
|
|
41
|
-
```
|
|
42
|
-
|
|
43
|
-
Fields:
|
|
44
|
-
|
|
45
|
-
- `url` – `req.originalUrl`
|
|
46
|
-
- `body` – stringified (and masked) `req.body`
|
|
47
|
-
- `params` – stringified (and masked) object `{ params: req.params, query: req.query }`
|
|
48
|
-
- `type` – last segment of the URL (e.g. `/api/v1/attendance/get` → `"get"`)
|
|
49
|
-
- `error` – error message if any
|
|
50
|
-
- `date` – `YYYY-MM-DD HH:mm:ss`
|
|
51
|
-
- `employee_id` – from argument or `req.user.employee_id / emp_code / id`
|
|
52
|
-
- `correlation_id` – unique per request chain (also added as `X-Correlation-ID` header)
|
|
53
|
-
|
|
54
|
-
---
|
|
55
|
-
|
|
56
|
-
## Basic Usage
|
|
57
|
-
|
|
58
|
-
### 1. Create the logger
|
|
16
|
+
## Basic usage
|
|
59
17
|
|
|
60
18
|
```js
|
|
61
19
|
const Logger = require("@nm-logger/logger");
|
|
20
|
+
const axios = require("axios");
|
|
62
21
|
|
|
63
22
|
const logger = new Logger(
|
|
64
23
|
{
|
|
65
|
-
accessKeyId: process.env.
|
|
66
|
-
secretAccessKey: process.env.
|
|
24
|
+
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
|
|
25
|
+
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
|
|
67
26
|
region: "ap-south-1",
|
|
68
|
-
bucket: "
|
|
27
|
+
bucket: "nmhive"
|
|
69
28
|
},
|
|
70
29
|
{
|
|
71
|
-
baseDir: "
|
|
72
|
-
|
|
73
|
-
maskFields: ["aadhaar", "panNumber"] // extra fields to mask
|
|
30
|
+
baseDir: "hiveLogs",
|
|
31
|
+
uploadIntervalMs: 60_000
|
|
74
32
|
}
|
|
75
33
|
);
|
|
76
|
-
```
|
|
77
|
-
|
|
78
|
-
### 2. Log every request + set correlation ID header
|
|
79
34
|
|
|
80
|
-
```js
|
|
81
35
|
app.use(logger.requestLoggerMiddleware());
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
### 3. Log errors via Express error middleware
|
|
85
|
-
|
|
86
|
-
```js
|
|
87
|
-
// your routes above...
|
|
88
|
-
|
|
89
|
-
app.use(logger.expressMiddleware()); // or logger.expressErrorMiddleware()
|
|
90
|
-
```
|
|
91
|
-
|
|
92
|
-
### 4. Manual logging in routes
|
|
93
|
-
|
|
94
|
-
```js
|
|
95
|
-
app.post("/api/v1/attendance/get", async (req, res) => {
|
|
96
|
-
try {
|
|
97
|
-
// ... your logic, external APIs etc ...
|
|
98
|
-
|
|
99
|
-
await logger.logRequest(req, req.user?.employee_id);
|
|
100
|
-
res.json({ success: true });
|
|
101
|
-
} catch (err) {
|
|
102
|
-
await logger.logError(err, req, req.user?.employee_id);
|
|
103
|
-
res.status(500).json({ error: err.message });
|
|
104
|
-
}
|
|
105
|
-
});
|
|
106
|
-
```
|
|
107
|
-
|
|
108
|
-
---
|
|
109
|
-
|
|
110
|
-
## External API logging with Axios
|
|
111
|
-
|
|
112
|
-
```js
|
|
113
|
-
const axios = require("axios");
|
|
114
|
-
|
|
115
|
-
// Attach once at startup
|
|
36
|
+
app.use("/api", routes);
|
|
37
|
+
app.use(logger.expressMiddleware());
|
|
116
38
|
logger.attachAxiosLogger(axios);
|
|
117
|
-
|
|
118
|
-
app.get("/api/v1/some-data", async (req, res) => {
|
|
119
|
-
try {
|
|
120
|
-
const response = await axios.get("https://api.example.com/data", {
|
|
121
|
-
headers: {
|
|
122
|
-
"X-Correlation-ID": req.correlationId, // propagated
|
|
123
|
-
"X-Employee-ID": req.user?.employee_id || "" // optional
|
|
124
|
-
},
|
|
125
|
-
params: {
|
|
126
|
-
id: 123
|
|
127
|
-
}
|
|
128
|
-
});
|
|
129
|
-
|
|
130
|
-
res.json(response.data);
|
|
131
|
-
} catch (err) {
|
|
132
|
-
await logger.logError(err, req, req.user?.employee_id);
|
|
133
|
-
res.status(500).json({ error: err.message });
|
|
134
|
-
}
|
|
135
|
-
});
|
|
136
|
-
```
|
|
137
|
-
|
|
138
|
-
This will produce external log lines like:
|
|
139
|
-
|
|
140
|
-
```json
|
|
141
|
-
{
|
|
142
|
-
"url": "https://api.example.com/data",
|
|
143
|
-
"body": "{}",
|
|
144
|
-
"params": "{\"id\":123}",
|
|
145
|
-
"type": "external_api",
|
|
146
|
-
"error": "",
|
|
147
|
-
"date": "2025-12-05 12:24:00",
|
|
148
|
-
"employee_id": "TAKK122",
|
|
149
|
-
"correlation_id": "cid-abcd1234-17f5d3c9a"
|
|
150
|
-
}
|
|
151
|
-
```
|
|
152
|
-
|
|
153
|
-
---
|
|
154
|
-
|
|
155
|
-
## Sensitive Data Masking
|
|
156
|
-
|
|
157
|
-
Built-in masked keys (case-insensitive, partial match):
|
|
158
|
-
|
|
159
|
-
- password, pass
|
|
160
|
-
- token, secret
|
|
161
|
-
- otp
|
|
162
|
-
- auth, authorization
|
|
163
|
-
- apiKey, api_key
|
|
164
|
-
- session
|
|
165
|
-
- ssn
|
|
166
|
-
|
|
167
|
-
Plus anything you pass in `maskFields` option.
|
|
168
|
-
|
|
169
|
-
Any object like:
|
|
170
|
-
|
|
171
|
-
```json
|
|
172
|
-
{
|
|
173
|
-
"password": "MyPass123",
|
|
174
|
-
"otp": "111222",
|
|
175
|
-
"aadhaar": "9999-8888-7777",
|
|
176
|
-
"email": "user@example.com"
|
|
177
|
-
}
|
|
178
|
-
```
|
|
179
|
-
|
|
180
|
-
will be logged as:
|
|
181
|
-
|
|
182
|
-
```json
|
|
183
|
-
{
|
|
184
|
-
"password": "*****",
|
|
185
|
-
"otp": "*****",
|
|
186
|
-
"aadhaar": "*****",
|
|
187
|
-
"email": "user@example.com"
|
|
188
|
-
}
|
|
189
|
-
```
|
|
190
|
-
|
|
191
|
-
---
|
|
192
|
-
|
|
193
|
-
## S3 Upload Behavior
|
|
194
|
-
|
|
195
|
-
- Logs are stored locally under:
|
|
196
|
-
- `logs/YYYY/MM/DD/daily_logs.json`
|
|
197
|
-
- A watcher runs every `watchIntervalMs` (default 60 seconds)
|
|
198
|
-
- When the date changes (e.g., from `2025-12-05` to `2025-12-06`),
|
|
199
|
-
- The logger uploads the **previous day's** log file to S3:
|
|
200
|
-
|
|
201
|
-
Example S3 key:
|
|
202
|
-
|
|
203
|
-
```txt
|
|
204
|
-
2025/12/05/daily_logs.json
|
|
205
|
-
```
|
|
206
|
-
|
|
207
|
-
So final S3 path:
|
|
208
|
-
|
|
209
|
-
```txt
|
|
210
|
-
s3://<bucket>/<year>/<month>/<day>/daily_logs.json
|
|
211
|
-
```
|
|
212
|
-
|
|
213
|
-
---
|
|
214
|
-
|
|
215
|
-
## TypeScript Usage
|
|
216
|
-
|
|
217
|
-
```ts
|
|
218
|
-
import Logger, { S3Config, LoggerOptions } from "@ve/logger";
|
|
219
|
-
|
|
220
|
-
const s3config: S3Config = {
|
|
221
|
-
accessKeyId: process.env.AWS_KEY!,
|
|
222
|
-
secretAccessKey: process.env.AWS_SECRET!,
|
|
223
|
-
region: "ap-south-1",
|
|
224
|
-
bucket: "your-log-bucket"
|
|
225
|
-
};
|
|
226
|
-
|
|
227
|
-
const options: LoggerOptions = {
|
|
228
|
-
baseDir: "logs",
|
|
229
|
-
watchIntervalMs: 60000,
|
|
230
|
-
maskFields: ["aadhaar", "pan"]
|
|
231
|
-
};
|
|
232
|
-
|
|
233
|
-
const logger = new Logger(s3config, options);
|
|
234
39
|
```
|
|
235
|
-
|
|
236
|
-
---
|
|
237
|
-
|
|
238
|
-
## License
|
|
239
|
-
|
|
240
|
-
MIT
|
|
241
|
-
|
package/index.d.ts
CHANGED
|
@@ -1,69 +1,17 @@
|
|
|
1
1
|
import * as express from "express";
|
|
2
2
|
|
|
3
|
-
export interface S3Config
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
3
|
+
/** AWS credentials and target bucket consumed by the internal S3 uploader. */
export interface S3Config{accessKeyId:string;secretAccessKey:string;region:string;bucket:string;}
|
|
4
|
+
/** Optional settings: local log root dir (default "logs"), extra key names to mask, S3 upload interval in ms. */
export interface LoggerOptions{baseDir?:string;maskFields?:string[];uploadIntervalMs?:number;}
|
|
5
|
+
/** Argument shape for Logger.logExternalApi(); all fields optional. Note: 1.1.9's `correlationId` field was removed in 1.2.0. */
export interface ExternalApiLogOptions{url?:string;method?:string;data?:any;params?:any;error?:string|null;employeeId?:string;}
|
|
6
|
+
|
|
7
|
+
/** Public API of @nm-logger/logger (exposed as a CommonJS export). */
declare class Logger{
|
|
8
|
+
/** Both arguments optional: missing S3 config yields an unconfigured uploader; missing options fall back to defaults (baseDir "logs"). */
constructor(s3config?:S3Config,options?:LoggerOptions);
|
|
9
|
+
/** Records an "error" entry; err.message (or String(err)) becomes the entry's error field. */
logError(err:any,req?:express.Request,employee_id?:string):Promise<void>;
|
|
10
|
+
/** Records a "success" entry for the given request. */
logRequest(req:express.Request,employee_id?:string):Promise<void>;
|
|
11
|
+
/** Records an "external" entry describing an outbound API call. */
logExternalApi(options:ExternalApiLogOptions):Promise<void>;
|
|
12
|
+
/** Express error middleware: logs the error (fire-and-forget), then always calls next(err). */
expressMiddleware(): (err:any,req:express.Request,res:express.Response,next:express.NextFunction)=>void;
|
|
13
|
+
/** Alias of expressMiddleware(), kept for compatibility with the 1.1.9 API. */
expressErrorMiddleware(): (err:any,req:express.Request,res:express.Response,next:express.NextFunction)=>void;
|
|
14
|
+
/** Per-request middleware: records each handled request once as "success", then calls next(). */
requestLoggerMiddleware(): (req:express.Request,res:express.Response,next:express.NextFunction)=>void;
|
|
15
|
+
/** Installs axios response interceptors that record every outbound call as an "external" entry. */
attachAxiosLogger(axiosInstance:any):void;
|
|
8
16
|
}
|
|
9
|
-
|
|
10
|
-
export interface LoggerOptions {
|
|
11
|
-
baseDir?: string;
|
|
12
|
-
maskFields?: string[];
|
|
13
|
-
uploadIntervalMs?: number;
|
|
14
|
-
}
|
|
15
|
-
|
|
16
|
-
export interface LogEntryShape {
|
|
17
|
-
url: string;
|
|
18
|
-
body: string;
|
|
19
|
-
params: string;
|
|
20
|
-
type: string;
|
|
21
|
-
method: string;
|
|
22
|
-
error: string;
|
|
23
|
-
employee_id: string;
|
|
24
|
-
correlation_id: string;
|
|
25
|
-
date: string;
|
|
26
|
-
category: string;
|
|
27
|
-
}
|
|
28
|
-
|
|
29
|
-
export interface ExternalApiLogOptions {
|
|
30
|
-
url?: string;
|
|
31
|
-
method?: string;
|
|
32
|
-
data?: any;
|
|
33
|
-
params?: any;
|
|
34
|
-
error?: string | null;
|
|
35
|
-
correlationId?: string;
|
|
36
|
-
employeeId?: string;
|
|
37
|
-
}
|
|
38
|
-
|
|
39
|
-
declare class Logger {
|
|
40
|
-
constructor(s3config?: S3Config, options?: LoggerOptions);
|
|
41
|
-
|
|
42
|
-
logError(err: any, req?: express.Request, employee_id?: string): Promise<void>;
|
|
43
|
-
logRequest(req: express.Request, employee_id?: string): Promise<void>;
|
|
44
|
-
logExternalApi(options: ExternalApiLogOptions): Promise<void>;
|
|
45
|
-
|
|
46
|
-
expressMiddleware(): (
|
|
47
|
-
err: any,
|
|
48
|
-
req: express.Request,
|
|
49
|
-
res: express.Response,
|
|
50
|
-
next: express.NextFunction
|
|
51
|
-
) => void;
|
|
52
|
-
|
|
53
|
-
expressErrorMiddleware(): (
|
|
54
|
-
err: any,
|
|
55
|
-
req: express.Request,
|
|
56
|
-
res: express.Response,
|
|
57
|
-
next: express.NextFunction
|
|
58
|
-
) => void;
|
|
59
|
-
|
|
60
|
-
requestLoggerMiddleware(): (
|
|
61
|
-
req: express.Request,
|
|
62
|
-
res: express.Response,
|
|
63
|
-
next: express.NextFunction
|
|
64
|
-
) => void;
|
|
65
|
-
|
|
66
|
-
attachAxiosLogger(axiosInstance: any): void;
|
|
67
|
-
}
|
|
68
|
-
|
|
69
17
|
export = Logger;
|
package/index.js
CHANGED
package/package.json
CHANGED
|
@@ -1,36 +1 @@
|
|
|
1
|
-
{
|
|
2
|
-
"name": "@nm-logger/logger",
|
|
3
|
-
"version": "1.1.9",
|
|
4
|
-
"description": "Express JSON logger with S3 upload, correlation IDs, and separate success/error/external daily logs.",
|
|
5
|
-
"main": "index.js",
|
|
6
|
-
"types": "index.d.ts",
|
|
7
|
-
"scripts": {
|
|
8
|
-
"test": "node -e \"console.log('no tests yet')\""
|
|
9
|
-
},
|
|
10
|
-
"keywords": [
|
|
11
|
-
"logger",
|
|
12
|
-
"logging",
|
|
13
|
-
"express",
|
|
14
|
-
"s3",
|
|
15
|
-
"aws",
|
|
16
|
-
"json-logger",
|
|
17
|
-
"daily-logs",
|
|
18
|
-
"correlation-id",
|
|
19
|
-
"axios"
|
|
20
|
-
],
|
|
21
|
-
"author": "nm-logger",
|
|
22
|
-
"license": "MIT",
|
|
23
|
-
"dependencies": {
|
|
24
|
-
"@aws-sdk/client-s3": "^3.600.0",
|
|
25
|
-
"fs-extra": "^11.1.1",
|
|
26
|
-
"@nm-logger/logger": "^1.1.9"
|
|
27
|
-
},
|
|
28
|
-
"peerDependencies": {
|
|
29
|
-
"express": ">=4.0.0"
|
|
30
|
-
},
|
|
31
|
-
"devDependencies": {
|
|
32
|
-
"@types/express": "^4.17.21",
|
|
33
|
-
"@types/node": "^22.0.0",
|
|
34
|
-
"typescript": "^5.6.0"
|
|
35
|
-
}
|
|
36
|
-
}
|
|
1
|
+
{"name":"@nm-logger/logger","version":"1.2.0","description":"Express JSON logger with daily success/error/external logs and S3 append uploads.","main":"index.js","types":"index.d.ts","license":"MIT","dependencies":{"@aws-sdk/client-s3":"^3.600.0","fs-extra":"^11.1.1"}}
|
package/src/DailyWatcher.js
CHANGED
|
@@ -1,84 +1,3 @@
|
|
|
1
|
-
const fs
|
|
2
|
-
const
|
|
3
|
-
const { getDatePath } =
|
|
4
|
-
|
|
5
|
-
const CATEGORY_FILE_MAP = {
|
|
6
|
-
success: "daily_logs_success.json",
|
|
7
|
-
error: "daily_logs_error.json",
|
|
8
|
-
external: "daily_logs_external.json"
|
|
9
|
-
};
|
|
10
|
-
|
|
11
|
-
class DailyWatcher {
|
|
12
|
-
constructor(baseDir, queue, s3Uploader, options = {}) {
|
|
13
|
-
this.baseDir = baseDir;
|
|
14
|
-
this.queue = queue;
|
|
15
|
-
this.s3Uploader = s3Uploader;
|
|
16
|
-
this.intervalMs = options.uploadIntervalMs || 60_000; // default 1 minute
|
|
17
|
-
|
|
18
|
-
console.log(
|
|
19
|
-
"⏱ [@nm-logger/logger] periodic S3 upload every",
|
|
20
|
-
this.intervalMs,
|
|
21
|
-
"ms"
|
|
22
|
-
);
|
|
23
|
-
|
|
24
|
-
this.start();
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
start() {
|
|
28
|
-
setInterval(() => {
|
|
29
|
-
const { Y, M, D } = getDatePath();
|
|
30
|
-
|
|
31
|
-
Object.entries(CATEGORY_FILE_MAP).forEach(([category, fileName]) => {
|
|
32
|
-
const file = path.join(
|
|
33
|
-
this.baseDir,
|
|
34
|
-
`${Y}/${M}/${D}`,
|
|
35
|
-
fileName
|
|
36
|
-
);
|
|
37
|
-
const key = `${Y}/${M}/${D}/${fileName}`;
|
|
38
|
-
|
|
39
|
-
if (!fs.existsSync(file)) {
|
|
40
|
-
return;
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
this.queue.add(async () => {
|
|
44
|
-
try {
|
|
45
|
-
const now = new Date();
|
|
46
|
-
const key = `${Y}/${M}/${D}/${fileName}`;
|
|
47
|
-
|
|
48
|
-
console.log(`📤 Uploading [${category}] logs to S3: ${key}`);
|
|
49
|
-
|
|
50
|
-
// Step 1 — Read local file
|
|
51
|
-
const localContent = JSON.parse(await fs.readFile(file, "utf8"));
|
|
52
|
-
|
|
53
|
-
// Step 2 — Try loading existing file from S3
|
|
54
|
-
let existing = { logs: [] };
|
|
55
|
-
try {
|
|
56
|
-
const s3Content = await this.s3Uploader.getObject(key);
|
|
57
|
-
if (s3Content) {
|
|
58
|
-
existing = JSON.parse(s3Content);
|
|
59
|
-
}
|
|
60
|
-
} catch (err) {
|
|
61
|
-
// File may not exist on first upload — ignore
|
|
62
|
-
}
|
|
63
|
-
|
|
64
|
-
// Step 3 — Append logs
|
|
65
|
-
const merged = {
|
|
66
|
-
logs: [...existing.logs, ...localContent.logs]
|
|
67
|
-
};
|
|
68
|
-
|
|
69
|
-
// Step 4 — Upload merged logs
|
|
70
|
-
await this.s3Uploader.putObject(key, JSON.stringify(merged, null, 2));
|
|
71
|
-
|
|
72
|
-
console.log("✅ Logs appended to S3 successfully");
|
|
73
|
-
|
|
74
|
-
} catch (err) {
|
|
75
|
-
console.error(`❌ Error uploading logs to S3:`, err);
|
|
76
|
-
}
|
|
77
|
-
});
|
|
78
|
-
|
|
79
|
-
});
|
|
80
|
-
}, this.intervalMs);
|
|
81
|
-
}
|
|
82
|
-
}
|
|
83
|
-
|
|
84
|
-
module.exports = DailyWatcher;
|
|
1
|
+
const fs=require("fs-extra");const path=require("path");const{getDatePath}=require("./utils");
|
|
2
|
+
// Category -> daily file name mapping. Duplicated in src/LogWriter.js — keep the two in sync.
const MAP={success:"daily_logs_success.json",error:"daily_logs_error.json",external:"daily_logs_external.json"};
|
|
3
|
+
// DailyWatcher: every `uploadIntervalMs` ms (default 60000) a tick (1) deletes the local folder
// for the previously tracked date once the date has rolled over, then (2) for each category file
// that exists today, queues a job that reads the local file, fetches the same-day S3 object,
// merges the two `logs` arrays (S3 entries first), and re-puts the merged JSON to S3.
// Malformed local/S3 JSON is tolerated (treated as empty); per-job errors are logged, not thrown.
//
// NOTE(review): cleanup() removes yesterday's local folder *before* any upload runs in the same
// tick, so entries written after the last pre-midnight upload are silently lost — confirm intended.
// NOTE(review): the local file is never truncated after a successful put, and each tick appends
// the *entire* local file to the S3 object again — previously uploaded entries are duplicated in
// S3 on every interval. The read-merge-put sequence is also not atomic, so multiple app instances
// writing the same key can drop each other's logs.
class DailyWatcher{constructor(baseDir,q,s3,opt={}){this.baseDir=baseDir;this.q=q;this.s3=s3;this.ms=opt.uploadIntervalMs||60000;this.currentDate=getDatePath();this.start();}async cleanup(){const{Y,M,D}=getDatePath();const p=this.currentDate;if(p.Y===Y&&p.M===M&&p.D===D)return;const dir=path.join(this.baseDir,`${p.Y}/${p.M}/${p.D}`);try{if(await fs.pathExists(dir))await fs.remove(dir);}catch(e){console.error("daily cleanup error",e);}this.currentDate={Y,M,D};}start(){setInterval(()=>{this.tick().catch(e=>console.error("daily tick error",e));},this.ms);}async tick(){await this.cleanup();const{Y,M,D}=getDatePath();for(const[cat,fn]of Object.entries(MAP)){const file=path.join(this.baseDir,`${Y}/${M}/${D}`,fn);const key=`${Y}/${M}/${D}/${fn}`;if(!(await fs.pathExists(file)))continue;this.q.add(async()=>{try{const txt=await fs.readFile(file,"utf8");let nw={logs:[]};if(txt.trim()){try{const p=JSON.parse(txt);if(Array.isArray(p.logs))nw.logs=p.logs;}catch(_){}}let ex={logs:[]};try{const s=await this.s3.getObject(key);if(s&&s.trim()){const p=JSON.parse(s);if(Array.isArray(p.logs))ex.logs=p.logs;}}catch(_){ }const merged={logs:[...ex.logs,...nw.logs]};await this.s3.putObject(key,JSON.stringify(merged,null,2));}catch(e){console.error("s3 upload error",e);}});}}}module.exports=DailyWatcher;
|
package/src/LogWriter.js
CHANGED
|
@@ -1,63 +1,3 @@
|
|
|
1
|
-
const fs
|
|
2
|
-
const
|
|
3
|
-
const {
|
|
4
|
-
|
|
5
|
-
const CATEGORY_FILE_MAP = {
|
|
6
|
-
success: "daily_logs_success.json",
|
|
7
|
-
error: "daily_logs_error.json",
|
|
8
|
-
external: "daily_logs_external.json"
|
|
9
|
-
};
|
|
10
|
-
|
|
11
|
-
class LogWriter {
|
|
12
|
-
constructor(baseDir = "logs") {
|
|
13
|
-
this.baseDir = baseDir;
|
|
14
|
-
}
|
|
15
|
-
|
|
16
|
-
async writeLog(log, category = "success") {
|
|
17
|
-
const filePath = this.getFilePath(category);
|
|
18
|
-
|
|
19
|
-
await fs.ensureDir(path.dirname(filePath));
|
|
20
|
-
|
|
21
|
-
let logsWrapper = { logs: [] };
|
|
22
|
-
|
|
23
|
-
if (await fs.pathExists(filePath)) {
|
|
24
|
-
try {
|
|
25
|
-
const content = await fs.readFile(filePath, "utf8");
|
|
26
|
-
if (content.trim()) {
|
|
27
|
-
const parsed = JSON.parse(content);
|
|
28
|
-
if (Array.isArray(parsed.logs)) {
|
|
29
|
-
logsWrapper.logs = parsed.logs;
|
|
30
|
-
}
|
|
31
|
-
}
|
|
32
|
-
} catch (e) {
|
|
33
|
-
logsWrapper = { logs: [] };
|
|
34
|
-
}
|
|
35
|
-
}
|
|
36
|
-
|
|
37
|
-
const enriched = {
|
|
38
|
-
url: log.url || "",
|
|
39
|
-
body: log.body || "",
|
|
40
|
-
params: log.params || "",
|
|
41
|
-
type: log.type || "",
|
|
42
|
-
method: log.method || "",
|
|
43
|
-
error: log.error || "",
|
|
44
|
-
employee_id: log.employee_id || "",
|
|
45
|
-
correlation_id: log.correlation_id || "",
|
|
46
|
-
date: log.date || formatDate(new Date()),
|
|
47
|
-
category: category || ""
|
|
48
|
-
};
|
|
49
|
-
|
|
50
|
-
logsWrapper.logs.push(enriched);
|
|
51
|
-
|
|
52
|
-
await fs.writeFile(filePath, JSON.stringify(logsWrapper, null, 2), "utf8");
|
|
53
|
-
return filePath;
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
getFilePath(category = "success") {
|
|
57
|
-
const { Y, M, D } = getDatePath();
|
|
58
|
-
const fileName = CATEGORY_FILE_MAP[category] || CATEGORY_FILE_MAP.success;
|
|
59
|
-
return path.join(this.baseDir, `${Y}/${M}/${D}/${fileName}`);
|
|
60
|
-
}
|
|
61
|
-
}
|
|
62
|
-
|
|
63
|
-
module.exports = LogWriter;
|
|
1
|
+
const fs=require("fs-extra");const path=require("path");const{getDatePath,formatDate}=require("./utils");
|
|
2
|
+
// Category -> daily file name mapping. Duplicated in src/DailyWatcher.js — keep the two in sync.
const MAP={success:"daily_logs_success.json",error:"daily_logs_error.json",external:"daily_logs_external.json"};
|
|
3
|
+
// LogWriter: appends one normalized entry to today's category file
// (<baseDir>/YYYY/MM/DD/daily_logs_<category>.json), stored as {"logs":[...]}.
// Unknown categories fall back to the "success" file. Missing/corrupt files are
// reset to an empty wrapper rather than raised.
//
// NOTE(review): each call does a whole-file read-parse-append-rewrite — O(file size)
// per entry and last-write-wins under concurrent writers; verify log volume makes
// this acceptable.
// NOTE(review): entries no longer carry the `correlation_id`/`category` fields that
// 1.1.9 wrote — confirm downstream consumers of these files were updated.
class LogWriter{constructor(baseDir="logs"){this.baseDir=baseDir;}async writeLog(log,cat="success"){const fp=this.getFilePath(cat);await fs.ensureDir(path.dirname(fp));let w={logs:[]};if(await fs.pathExists(fp)){try{const t=await fs.readFile(fp,"utf8");if(t.trim()){const p=JSON.parse(t);if(Array.isArray(p.logs))w.logs=p.logs;}}catch(_){w={logs:[]};}}const e={url:log.url||"",body:log.body||"",params:log.params||"",type:log.type||"",method:log.method||"",error:log.error||"",employee_id:log.employee_id||"",date:log.date||formatDate(new Date())};w.logs.push(e);await fs.writeFile(fp,JSON.stringify(w,null,2),"utf8");return fp;}getFilePath(cat="success"){const{Y,M,D}=getDatePath();const fn=MAP[cat]||MAP.success;return path.join(this.baseDir,`${Y}/${M}/${D}/${fn}`);}}module.exports=LogWriter;
|
package/src/Logger.js
CHANGED
|
@@ -1,252 +1,12 @@
|
|
|
1
|
-
const LogWriter = require("./
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
const
|
|
5
|
-
const {
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
this.baseDir = options.baseDir || "logs";
|
|
14
|
-
|
|
15
|
-
this.logWriter = new LogWriter(this.baseDir);
|
|
16
|
-
this.queue = new Queue();
|
|
17
|
-
this.s3Uploader = new S3Uploader(s3config);
|
|
18
|
-
|
|
19
|
-
this.extraMaskFields = (options.maskFields || []).map((f) =>
|
|
20
|
-
String(f).toLowerCase()
|
|
21
|
-
);
|
|
22
|
-
|
|
23
|
-
new DailyWatcher(this.baseDir, this.queue, this.s3Uploader, {
|
|
24
|
-
uploadIntervalMs: options.uploadIntervalMs
|
|
25
|
-
});
|
|
26
|
-
}
|
|
27
|
-
|
|
28
|
-
mask(value) {
|
|
29
|
-
return maskSensitive(value, this.extraMaskFields);
|
|
30
|
-
}
|
|
31
|
-
|
|
32
|
-
buildBaseLog(req, employee_id = "") {
|
|
33
|
-
const url = req?.originalUrl || "";
|
|
34
|
-
const bodySrc = req?.body || {};
|
|
35
|
-
const paramsObj = {
|
|
36
|
-
params: req?.params || {},
|
|
37
|
-
query: req?.query || {}
|
|
38
|
-
};
|
|
39
|
-
|
|
40
|
-
const maskedBody = this.mask(bodySrc);
|
|
41
|
-
const maskedParams = this.mask(paramsObj);
|
|
42
|
-
|
|
43
|
-
let correlationId =
|
|
44
|
-
req?.correlationId ||
|
|
45
|
-
(req?.headers &&
|
|
46
|
-
(req.headers["x-correlation-id"] || req.headers["X-Correlation-ID"])) ||
|
|
47
|
-
"";
|
|
48
|
-
|
|
49
|
-
const method = (req?.method || "").toUpperCase();
|
|
50
|
-
|
|
51
|
-
return {
|
|
52
|
-
url,
|
|
53
|
-
body: JSON.stringify(maskedBody || {}),
|
|
54
|
-
params: JSON.stringify(maskedParams || {}),
|
|
55
|
-
type: getApiType(url),
|
|
56
|
-
method,
|
|
57
|
-
error: "",
|
|
58
|
-
employee_id:
|
|
59
|
-
employee_id ||
|
|
60
|
-
(req &&
|
|
61
|
-
req.user &&
|
|
62
|
-
(req.user.employee_id || req.user.emp_code || req.user.id)) ||
|
|
63
|
-
"",
|
|
64
|
-
correlation_id: correlationId
|
|
65
|
-
};
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
async logError(err, req, employee_id = "") {
|
|
69
|
-
const base = this.buildBaseLog(req || {}, employee_id);
|
|
70
|
-
const logData = {
|
|
71
|
-
...base,
|
|
72
|
-
error: err && err.message ? err.message : String(err || "")
|
|
73
|
-
};
|
|
74
|
-
await this.logWriter.writeLog(logData, "error");
|
|
75
|
-
}
|
|
76
|
-
|
|
77
|
-
async logRequest(req, employee_id = "") {
|
|
78
|
-
const logData = this.buildBaseLog(req, employee_id);
|
|
79
|
-
await this.logWriter.writeLog(logData, "success");
|
|
80
|
-
}
|
|
81
|
-
|
|
82
|
-
async logExternalApi({
|
|
83
|
-
url,
|
|
84
|
-
method,
|
|
85
|
-
data,
|
|
86
|
-
params,
|
|
87
|
-
error,
|
|
88
|
-
correlationId,
|
|
89
|
-
employeeId
|
|
90
|
-
}) {
|
|
91
|
-
const maskedBody = this.mask(data || {});
|
|
92
|
-
const maskedParams = this.mask(params || {});
|
|
93
|
-
|
|
94
|
-
const logData = {
|
|
95
|
-
url: url || "",
|
|
96
|
-
body: JSON.stringify(maskedBody || {}),
|
|
97
|
-
params: JSON.stringify(maskedParams || {}),
|
|
98
|
-
type: "external_api",
|
|
99
|
-
method: (method || "").toUpperCase(),
|
|
100
|
-
error: error || "",
|
|
101
|
-
employee_id: employeeId || "",
|
|
102
|
-
correlation_id: correlationId || "",
|
|
103
|
-
date: undefined
|
|
104
|
-
};
|
|
105
|
-
|
|
106
|
-
await this.logWriter.writeLog(logData, "external");
|
|
107
|
-
}
|
|
108
|
-
|
|
109
|
-
expressMiddleware() {
|
|
110
|
-
return (err, req, res, next) => {
|
|
111
|
-
try {
|
|
112
|
-
let correlationId =
|
|
113
|
-
req.correlationId ||
|
|
114
|
-
(req.headers &&
|
|
115
|
-
(req.headers["x-correlation-id"] ||
|
|
116
|
-
req.headers["X-Correlation-ID"])) ||
|
|
117
|
-
"";
|
|
118
|
-
|
|
119
|
-
if (!correlationId) {
|
|
120
|
-
correlationId = generateCorrelationId();
|
|
121
|
-
req.correlationId = correlationId;
|
|
122
|
-
}
|
|
123
|
-
|
|
124
|
-
if (res && !res.headersSent) {
|
|
125
|
-
res.setHeader("X-Correlation-ID", correlationId);
|
|
126
|
-
}
|
|
127
|
-
|
|
128
|
-
this.logError(err, req).catch((e) =>
|
|
129
|
-
console.error("Logger expressMiddleware error:", e)
|
|
130
|
-
);
|
|
131
|
-
} catch (e) {
|
|
132
|
-
console.error("Logger expressMiddleware outer error:", e);
|
|
133
|
-
}
|
|
134
|
-
|
|
135
|
-
next(err);
|
|
136
|
-
};
|
|
137
|
-
}
|
|
138
|
-
|
|
139
|
-
expressErrorMiddleware() {
|
|
140
|
-
return this.expressMiddleware();
|
|
141
|
-
}
|
|
142
|
-
|
|
143
|
-
requestLoggerMiddleware() {
|
|
144
|
-
return (req, res, next) => {
|
|
145
|
-
try {
|
|
146
|
-
if (req.__nm_logger_logged) return next();
|
|
147
|
-
req.__nm_logger_logged = true;
|
|
148
|
-
|
|
149
|
-
// correlation ID (no-crash)
|
|
150
|
-
const correlationId =
|
|
151
|
-
req.headers["x-correlation-id"] ||
|
|
152
|
-
req.headers["X-Correlation-ID"] ||
|
|
153
|
-
req.correlationId ||
|
|
154
|
-
generateCorrelationId();
|
|
155
|
-
|
|
156
|
-
req.correlationId = correlationId;
|
|
157
|
-
res.setHeader("X-Correlation-ID", correlationId);
|
|
158
|
-
|
|
159
|
-
// safe logging
|
|
160
|
-
try {
|
|
161
|
-
this.logRequest(req).catch(err => {
|
|
162
|
-
console.error("🔥 Error inside logRequest:", err);
|
|
163
|
-
});
|
|
164
|
-
} catch (inner) {
|
|
165
|
-
console.error("🔥 UNEXPECTED error inside requestLoggerMiddleware:", inner);
|
|
166
|
-
}
|
|
167
|
-
|
|
168
|
-
} catch (outer) {
|
|
169
|
-
console.error("🔥 requestLoggerMiddleware FATAL error:", outer);
|
|
170
|
-
}
|
|
171
|
-
|
|
172
|
-
next();
|
|
173
|
-
};
|
|
174
|
-
}
|
|
175
|
-
|
|
176
|
-
attachAxiosLogger(axiosInstance) {
|
|
177
|
-
if (!axiosInstance || !axiosInstance.interceptors) {
|
|
178
|
-
console.warn(
|
|
179
|
-
"[@nm-logger/logger] attachAxiosLogger: provided axios instance is invalid"
|
|
180
|
-
);
|
|
181
|
-
return;
|
|
182
|
-
}
|
|
183
|
-
|
|
184
|
-
axiosInstance.interceptors.response.use(
|
|
185
|
-
(response) => {
|
|
186
|
-
try {
|
|
187
|
-
const cfg = response.config || {};
|
|
188
|
-
const headers = cfg.headers || {};
|
|
189
|
-
|
|
190
|
-
const correlationId =
|
|
191
|
-
headers["X-Correlation-ID"] ||
|
|
192
|
-
headers["x-correlation-id"] ||
|
|
193
|
-
"";
|
|
194
|
-
|
|
195
|
-
const employeeId =
|
|
196
|
-
headers["X-Employee-ID"] ||
|
|
197
|
-
headers["x-employee-id"] ||
|
|
198
|
-
"";
|
|
199
|
-
|
|
200
|
-
this.logExternalApi({
|
|
201
|
-
url: cfg.url,
|
|
202
|
-
method: cfg.method,
|
|
203
|
-
data: cfg.data,
|
|
204
|
-
params: cfg.params,
|
|
205
|
-
error: null,
|
|
206
|
-
correlationId,
|
|
207
|
-
employeeId
|
|
208
|
-
}).catch((e) =>
|
|
209
|
-
console.error("Logger axios success logExternalApi error:", e)
|
|
210
|
-
);
|
|
211
|
-
} catch (e) {
|
|
212
|
-
console.error("Logger axios response interceptor error:", e);
|
|
213
|
-
}
|
|
214
|
-
return response;
|
|
215
|
-
},
|
|
216
|
-
(error) => {
|
|
217
|
-
try {
|
|
218
|
-
const cfg = error.config || {};
|
|
219
|
-
const headers = cfg ? cfg.headers || {} : {};
|
|
220
|
-
|
|
221
|
-
const correlationId =
|
|
222
|
-
headers["X-Correlation-ID"] ||
|
|
223
|
-
headers["x-correlation-id"] ||
|
|
224
|
-
"";
|
|
225
|
-
|
|
226
|
-
const employeeId =
|
|
227
|
-
headers["X-Employee-ID"] ||
|
|
228
|
-
headers["x-employee-id"] ||
|
|
229
|
-
"";
|
|
230
|
-
|
|
231
|
-
this.logExternalApi({
|
|
232
|
-
url: cfg && cfg.url,
|
|
233
|
-
method: cfg && cfg.method,
|
|
234
|
-
data: cfg && cfg.data,
|
|
235
|
-
params: cfg && cfg.params,
|
|
236
|
-
error: error && error.message,
|
|
237
|
-
correlationId,
|
|
238
|
-
employeeId
|
|
239
|
-
}).catch((e) =>
|
|
240
|
-
console.error("Logger axios error logExternalApi error:", e)
|
|
241
|
-
);
|
|
242
|
-
} catch (e) {
|
|
243
|
-
console.error("Logger axios error interceptor outer error:", e);
|
|
244
|
-
}
|
|
245
|
-
|
|
246
|
-
return Promise.reject(error);
|
|
247
|
-
}
|
|
248
|
-
);
|
|
249
|
-
}
|
|
250
|
-
}
|
|
251
|
-
|
|
252
|
-
module.exports = Logger;
|
|
1
|
+
const LogWriter=require("./LogWriter");const Queue=require("./Queue");const S3Uploader=require("./S3Uploader");const DailyWatcher=require("./DailyWatcher");const{getApiType,maskSensitive}=require("./utils");
|
|
2
|
+
// Logger: wires together LogWriter (local daily files), Queue (serializes S3 jobs),
// S3Uploader, and DailyWatcher (periodic upload + previous-day cleanup).
// `opt.maskFields` are lowercased and used as extra keys to mask on top of the
// built-in sensitive-field list in utils.maskSensitive.
class Logger{constructor(s3cfg={},opt={}){this.baseDir=opt.baseDir||"logs";this.logWriter=new LogWriter(this.baseDir);this.queue=new Queue();this.s3=new S3Uploader(s3cfg);this.maskFields=(opt.maskFields||[]).map(x=>String(x).toLowerCase());new DailyWatcher(this.baseDir,this.queue,this.s3,{uploadIntervalMs:opt.uploadIntervalMs});}
|
|
3
|
+
// Masks sensitive keys in `v` using the built-in list plus this.maskFields.
mask(v){return maskSensitive(v,this.maskFields);}
|
|
4
|
+
// Builds the common log record from an Express request: masked + stringified body and
// {params, query}, uppercased HTTP method, `type` derived from the URL via getApiType,
// and employee_id from the explicit argument or req.user (employee_id || emp_code || id).
// Tolerates a missing/partial req (all fields default to empty).
// NOTE(review): unlike 1.1.9, no correlation_id field is emitted — confirm intended.
buildBaseLog(req,employee_id=""){const url=req&&req.originalUrl||"";const body=req&&req.body||{};const params={params:req&&req.params||{},query:req&&req.query||{}};const mBody=this.mask(body);const mParams=this.mask(params);const method=(req&&req.method||"").toUpperCase();const emp=employee_id||(req&&req.user&&(req.user.employee_id||req.user.emp_code||req.user.id))||"";return{url,body:JSON.stringify(mBody||{}),params:JSON.stringify(mParams||{}),type:getApiType(url),method,error:"",employee_id:emp};}
|
|
5
|
+
// Writes an "error" entry: the base request record with `error` set to err.message
// (or String(err) when there is no message). Tolerates a missing req.
async logError(err,req,employee_id=""){const base=this.buildBaseLog(req||{},employee_id);const d={...base,error:err&&err.message?err.message:String(err||"")};await this.logWriter.writeLog(d,"error");}
|
|
6
|
+
// Writes a "success" entry for the handled request.
async logRequest(req,employee_id=""){const d=this.buildBaseLog(req,employee_id);await this.logWriter.writeLog(d,"success");}
|
|
7
|
+
async logExternalApi({url,method,data,params,error,employeeId}){const mb=this.mask(data||{});const mp=this.mask(params||{});const d={url:url||"",body:JSON.stringify(mb||{}),params:JSON.stringify(mp||{}),type:"external_api",method:(method||"").toUpperCase(),error:error||"",employee_id:employeeId||""};await this.logWriter.writeLog(d,"external");}
|
|
8
|
+
expressMiddleware(){return(err,req,res,next)=>{try{this.logError(err,req).catch(e=>console.error("logger express error",e));}catch(e){console.error("logger express outer",e);}next(err);};}
|
|
9
|
+
expressErrorMiddleware(){return this.expressMiddleware();}
|
|
10
|
+
requestLoggerMiddleware(){return(req,res,next)=>{try{if(req.__nm_logger_logged)return next();req.__nm_logger_logged=true;this.logRequest(req).catch(e=>console.error("logger request error",e));}catch(e){console.error("logger request outer",e);}next();};}
|
|
11
|
+
attachAxiosLogger(ax){if(!ax||!ax.interceptors)return;ax.interceptors.response.use(r=>{try{const c=r.config||{};this.logExternalApi({url:c.url,method:c.method,data:c.data,params:c.params,error:null,employeeId:c.employee_id}).catch(e=>console.error("logger axios ok",e));}catch(e){console.error("logger axios ok outer",e);}return r;},err=>{try{const c=err.config||{};this.logExternalApi({url:c&&c.url,method:c&&c.method,data:c&&c.data,params:c&&c.params,error:err&&err.message,employeeId:c&&c.employee_id}).catch(e=>console.error("logger axios err",e));}catch(e){console.error("logger axios err outer",e);}return Promise.reject(err);});}}
|
|
12
|
+
module.exports=Logger;
|
package/src/Queue.js
CHANGED
|
@@ -1,29 +1 @@
|
|
|
1
|
-
class Queue {
|
|
2
|
-
constructor() {
|
|
3
|
-
this.jobs = [];
|
|
4
|
-
this.processing = false;
|
|
5
|
-
}
|
|
6
|
-
|
|
7
|
-
add(job) {
|
|
8
|
-
this.jobs.push(job);
|
|
9
|
-
this.run();
|
|
10
|
-
}
|
|
11
|
-
|
|
12
|
-
async run() {
|
|
13
|
-
if (this.processing) return;
|
|
14
|
-
this.processing = true;
|
|
15
|
-
|
|
16
|
-
while (this.jobs.length) {
|
|
17
|
-
const job = this.jobs.shift();
|
|
18
|
-
try {
|
|
19
|
-
await job();
|
|
20
|
-
} catch (err) {
|
|
21
|
-
console.error("[@nm-logger/logger] Queue job failed:", err);
|
|
22
|
-
}
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
this.processing = false;
|
|
26
|
-
}
|
|
27
|
-
}
|
|
28
|
-
|
|
29
|
-
module.exports = Queue;
|
|
1
|
+
/**
 * Minimal in-process FIFO job queue.
 *
 * Jobs are async functions executed strictly one at a time in insertion
 * order. A failed job is logged and skipped; it never stops the drain loop.
 */
class Queue {
  constructor() {
    this.jobs = [];
    this.processing = false;
  }

  /** Enqueue an async job and kick the drain loop. */
  add(job) {
    this.jobs.push(job);
    this.run();
  }

  /**
   * Drain pending jobs sequentially. Re-entrant calls while a drain is in
   * progress are no-ops; jobs added mid-drain are picked up by the loop.
   */
  async run() {
    if (this.processing) return;
    this.processing = true;
    while (this.jobs.length) {
      const job = this.jobs.shift();
      try {
        await job();
      } catch (e) {
        console.error("[@nm-logger/logger] queue job failed", e);
      }
    }
    this.processing = false;
  }
}
module.exports = Queue;
|
package/src/S3Uploader.js
CHANGED
|
@@ -1,47 +1,2 @@
|
|
|
1
|
-
const
|
|
2
|
-
const {
|
|
3
|
-
S3Client,
|
|
4
|
-
PutObjectCommand,
|
|
5
|
-
GetObjectCommand
|
|
6
|
-
} = require("@aws-sdk/client-s3");
|
|
7
|
-
const { streamToString } = require("./utils");
|
|
8
|
-
|
|
9
|
-
class S3Uploader {
|
|
10
|
-
constructor(config) {
|
|
11
|
-
this.client = new S3Client({
|
|
12
|
-
region: config.region,
|
|
13
|
-
credentials: {
|
|
14
|
-
accessKeyId: config.accessKeyId,
|
|
15
|
-
secretAccessKey: config.secretAccessKey
|
|
16
|
-
}
|
|
17
|
-
});
|
|
18
|
-
|
|
19
|
-
this.bucket = config.bucket;
|
|
20
|
-
}
|
|
21
|
-
|
|
22
|
-
async getObject(key) {
|
|
23
|
-
try {
|
|
24
|
-
const result = await this.client.send(
|
|
25
|
-
new GetObjectCommand({
|
|
26
|
-
Bucket: this.bucket,
|
|
27
|
-
Key: key
|
|
28
|
-
})
|
|
29
|
-
);
|
|
30
|
-
return await streamToString(result.Body);
|
|
31
|
-
} catch (err) {
|
|
32
|
-
return null; // file not found
|
|
33
|
-
}
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
async putObject(key, body) {
|
|
37
|
-
await this.client.send(
|
|
38
|
-
new PutObjectCommand({
|
|
39
|
-
Bucket: this.bucket,
|
|
40
|
-
Key: key,
|
|
41
|
-
Body: body
|
|
42
|
-
})
|
|
43
|
-
);
|
|
44
|
-
}
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
module.exports = S3Uploader;
|
|
1
|
+
const{S3Client,PutObjectCommand,GetObjectCommand}=require("@aws-sdk/client-s3");const{streamToString}=require("./utils");
|
|
2
|
+
/**
 * Thin wrapper around the AWS SDK v3 S3 client for string-oriented
 * get/put of log objects in a single configured bucket.
 */
class S3Uploader {
  /** @param {object} cfg - { region, accessKeyId, secretAccessKey, bucket } */
  constructor(cfg) {
    this.client = new S3Client({
      region: cfg.region,
      credentials: {
        accessKeyId: cfg.accessKeyId,
        secretAccessKey: cfg.secretAccessKey,
      },
    });
    this.bucket = cfg.bucket;
  }

  /**
   * Fetch an object's body as a UTF-8 string.
   * Returns null only when the key is missing (NoSuchKey / HTTP 404);
   * every other error is rethrown to the caller.
   */
  async getObject(key) {
    try {
      const result = await this.client.send(
        new GetObjectCommand({ Bucket: this.bucket, Key: key })
      );
      return await streamToString(result.Body);
    } catch (err) {
      const notFound =
        err &&
        (err.name === "NoSuchKey" ||
          (err.$metadata && err.$metadata.httpStatusCode === 404));
      if (notFound) return null;
      throw err;
    }
  }

  /** Upload `body` under `key` in the configured bucket. */
  async putObject(key, body) {
    await this.client.send(
      new PutObjectCommand({ Bucket: this.bucket, Key: key, Body: body })
    );
  }
}
module.exports = S3Uploader;
|
package/src/utils.js
CHANGED
|
@@ -1,100 +1,6 @@
|
|
|
1
|
-
exports.getDatePath = ()
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
};
|
|
8
|
-
|
|
9
|
-
exports.formatDate = (d) => {
|
|
10
|
-
const pad = (n) => String(n).padStart(2, "0");
|
|
11
|
-
return (
|
|
12
|
-
d.getFullYear() +
|
|
13
|
-
"-" +
|
|
14
|
-
pad(d.getMonth() + 1) +
|
|
15
|
-
"-" +
|
|
16
|
-
pad(d.getDate()) +
|
|
17
|
-
" " +
|
|
18
|
-
pad(d.getHours()) +
|
|
19
|
-
":" +
|
|
20
|
-
pad(d.getMinutes()) +
|
|
21
|
-
":" +
|
|
22
|
-
pad(d.getSeconds())
|
|
23
|
-
);
|
|
24
|
-
};
|
|
25
|
-
|
|
26
|
-
exports.getApiType = (url) => {
|
|
27
|
-
if (!url) return "";
|
|
28
|
-
const segments = url.split("/").filter(Boolean);
|
|
29
|
-
return segments[segments.length - 1] || "";
|
|
30
|
-
};
|
|
31
|
-
|
|
32
|
-
const DEFAULT_MASK_KEYS = [
|
|
33
|
-
"password",
|
|
34
|
-
"pass",
|
|
35
|
-
"token",
|
|
36
|
-
"secret",
|
|
37
|
-
"otp",
|
|
38
|
-
"auth",
|
|
39
|
-
"authorization",
|
|
40
|
-
"apikey",
|
|
41
|
-
"api_key",
|
|
42
|
-
"session",
|
|
43
|
-
"ssn"
|
|
44
|
-
];
|
|
45
|
-
|
|
46
|
-
exports.maskSensitive = (value, extraFields = []) => {
|
|
47
|
-
const allKeys = [
|
|
48
|
-
...DEFAULT_MASK_KEYS,
|
|
49
|
-
...(extraFields || [])
|
|
50
|
-
].map((k) => String(k).toLowerCase());
|
|
51
|
-
|
|
52
|
-
const shouldMaskKey = (key) => {
|
|
53
|
-
const lower = String(key).toLowerCase();
|
|
54
|
-
return allKeys.some((mk) => lower.includes(mk));
|
|
55
|
-
};
|
|
56
|
-
|
|
57
|
-
const maskAny = (val) => {
|
|
58
|
-
if (val && typeof val === "object") {
|
|
59
|
-
if (Array.isArray(val)) {
|
|
60
|
-
return val.map(maskAny);
|
|
61
|
-
}
|
|
62
|
-
const out = {};
|
|
63
|
-
for (const [k, v] of Object.entries(val)) {
|
|
64
|
-
if (shouldMaskKey(k)) {
|
|
65
|
-
out[k] = "*****";
|
|
66
|
-
} else {
|
|
67
|
-
out[k] = maskAny(v);
|
|
68
|
-
}
|
|
69
|
-
}
|
|
70
|
-
return out;
|
|
71
|
-
}
|
|
72
|
-
return val;
|
|
73
|
-
};
|
|
74
|
-
|
|
75
|
-
if (typeof value === "string") {
|
|
76
|
-
try {
|
|
77
|
-
const parsed = JSON.parse(value);
|
|
78
|
-
return maskAny(parsed);
|
|
79
|
-
} catch (_) {
|
|
80
|
-
return value;
|
|
81
|
-
}
|
|
82
|
-
}
|
|
83
|
-
|
|
84
|
-
return maskAny(value);
|
|
85
|
-
};
|
|
86
|
-
|
|
87
|
-
exports.generateCorrelationId = () => {
|
|
88
|
-
const rand = Math.random().toString(16).slice(2, 10);
|
|
89
|
-
const ts = Date.now().toString(16);
|
|
90
|
-
return `cid-${rand}-${ts}`;
|
|
91
|
-
};
|
|
92
|
-
|
|
93
|
-
exports.streamToString = async (stream) => {
|
|
94
|
-
return await new Promise((resolve, reject) => {
|
|
95
|
-
const chunks = [];
|
|
96
|
-
stream.on("data", (chunk) => chunks.push(chunk));
|
|
97
|
-
stream.on("error", reject);
|
|
98
|
-
stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
|
|
99
|
-
});
|
|
100
|
-
};
|
|
1
|
+
exports.getDatePath=function(){const d=new Date();return{Y:d.getFullYear(),M:String(d.getMonth()+1).padStart(2,"0"),D:String(d.getDate()).padStart(2,"0")};};
|
|
2
|
+
exports.formatDate=function(d){const p=n=>String(n).padStart(2,"0");return d.getFullYear()+"-"+p(d.getMonth()+1)+"-"+p(d.getDate())+" "+p(d.getHours())+":"+p(d.getMinutes())+":"+p(d.getSeconds());};
|
|
3
|
+
exports.getApiType=function(url){if(!url)return"";const s=url.split("/").filter(Boolean);return s[s.length-1]||"";};
|
|
4
|
+
const MASK_KEYS=["password","pass","token","secret","otp","auth","authorization","apikey","api_key","session","ssn"];
|
|
5
|
+
exports.maskSensitive=function(v,extra){const keys=[...MASK_KEYS,...(extra||[])].map(k=>String(k).toLowerCase());const isMask=k=>keys.some(m=>String(k).toLowerCase().includes(m));const maskAny=val=>{if(val&&typeof val==="object"){if(Array.isArray(val))return val.map(maskAny);const o={};for(const[k,x]of Object.entries(val))o[k]=isMask(k)?"*****":maskAny(x);return o;}return val;};if(typeof v==="string"){try{return maskAny(JSON.parse(v));}catch(_){return v;}}return maskAny(v);};
|
|
6
|
+
/**
 * Collect a readable stream's chunks and resolve with the whole body
 * decoded as a UTF-8 string; rejects on the stream's "error" event.
 */
exports.streamToString = async (stream) =>
  new Promise((resolve, reject) => {
    const chunks = [];
    stream.on("data", (chunk) => chunks.push(chunk));
    stream.on("error", reject);
    stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
  });
|