@nm-logger/logger 1.2.0 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +0 -2
- package/README.md +18 -16
- package/index.d.ts +0 -1
- package/package.json +1 -1
- package/src/DailyWatcher.js +2 -3
- package/src/LogWriter.js +2 -3
- package/src/Logger.js +5 -6
- package/src/Queue.js +1 -1
- package/src/S3Uploader.js +1 -1
- package/src/utils.js +2 -2
package/LICENSE
CHANGED
package/README.md
CHANGED
@@ -1,39 +1,41 @@
-# @nm-logger/logger v1.2.0
+# @nm-logger/logger v1.2.2
 
 Minimal JSON logger for Express:
 
-- Logs …
-- Logs …
-- Logs Axios …
-- Local daily files …
+- Logs handled requests as **success**
+- Logs Express errors as **error**
+- Logs Axios calls as **external**
+- Local daily files under configurable `baseDir`:
 - `YYYY/MM/DD/daily_logs_success.json`
 - `YYYY/MM/DD/daily_logs_error.json`
 - `YYYY/MM/DD/daily_logs_external.json`
-- S3: one JSON file per day per category, …
-- …
-- `employee_id` …
+- S3: one JSON file per day per category, append (read+merge+put)
+- Previous day's local folder is removed when the date changes
+- `employee_id` taken from `req.user.employee_id || req.user.emp_code || req.user.id`
+- `baseDir` can be absolute (e.g. `/home/VE/hiveLogs`)
 
-…
+Basic usage:
 
 ```js
 const Logger = require("@nm-logger/logger");
 const axios = require("axios");
 
-const …
+const nm_logger = new Logger(
   {
     accessKeyId: process.env.AWS_ACCESS_KEY_ID,
     secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-    region: "…
+    region: "us-east-1",
     bucket: "nmhive"
   },
   {
-    baseDir: "…
-    uploadIntervalMs: 60_000
+    baseDir: "/home/logger/logs",
+    uploadIntervalMs: 60_000,
+    maskFields: ["aadhaar", "pan"]
   }
 );
 
-app.use(…
+app.use(nm_logger.requestLoggerMiddleware());
 app.use("/api", routes);
-app.use(…
-…
+app.use(nm_logger.expressMiddleware());
+nm_logger.attachAxiosLogger(axios);
 ```
package/index.d.ts
CHANGED
@@ -10,7 +10,6 @@ declare class Logger{
   logRequest(req:express.Request,employee_id?:string):Promise<void>;
   logExternalApi(options:ExternalApiLogOptions):Promise<void>;
   expressMiddleware(): (err:any,req:express.Request,res:express.Response,next:express.NextFunction)=>void;
-  expressErrorMiddleware(): (err:any,req:express.Request,res:express.Response,next:express.NextFunction)=>void;
   requestLoggerMiddleware(): (req:express.Request,res:express.Response,next:express.NextFunction)=>void;
   attachAxiosLogger(axiosInstance:any):void;
 }
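The only typings change is the removal of the `expressErrorMiddleware()` declaration. Judging from the remaining declarations and the updated `Logger.js`, error logging now goes exclusively through `expressMiddleware()`, which already returns a four-argument Express error handler. A minimal migration sketch, with an illustrative app and route (not taken from the package):

```js
// Hypothetical migration sketch for v1.2.2: expressErrorMiddleware() is gone from
// the typings, so the error-logging handler is registered via expressMiddleware().
const express = require("express");
const Logger = require("@nm-logger/logger");

const app = express();
const logger = new Logger({ /* S3 config */ }, { baseDir: "./logs" });

app.use(logger.requestLoggerMiddleware());          // logs each handled request as "success"
app.use("/api", (req, res) => res.json({ ok: true }));

// v1.2.0 (per the old typings): app.use(logger.expressErrorMiddleware());
// v1.2.2: the same (err, req, res, next) handler comes from expressMiddleware()
app.use(logger.expressMiddleware());

app.listen(3000);
```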
package/package.json
CHANGED
@@ -1 +1 @@
-{"name":"@nm-logger/logger","version":"1.2.0",…
+{"name":"@nm-logger/logger","version":"1.2.2","description":"Express JSON logger with daily success/error/external logs, S3 append uploads, and configurable baseDir.","main":"index.js","types":"index.d.ts","license":"MIT","dependencies":{"@aws-sdk/client-s3":"^3.600.0","fs-extra":"^11.1.1"}}
package/src/DailyWatcher.js
CHANGED
@@ -1,3 +1,2 @@
-const fs=require("fs-extra");const path=require("path");const{getDatePath}=require("./utils");
-const MAP={…
-class DailyWatcher{constructor(baseDir,q,s3,opt={}){this.baseDir=baseDir;this.q=q;this.s3=s3;this.ms=opt.uploadIntervalMs||60000;this.currentDate=getDatePath();this.start();}async cleanup(){const{Y,M,D}=getDatePath();const p=this.currentDate;if(p.Y===Y&&p.M===M&&p.D===D)return;const dir=path.join(this.baseDir,`${p.Y}/${p.M}/${p.D}`);try{if(await fs.pathExists(dir))await fs.remove(dir);}catch(e){console.error("daily cleanup error",e);}this.currentDate={Y,M,D};}start(){setInterval(()=>{this.tick().catch(e=>console.error("daily tick error",e));},this.ms);}async tick(){await this.cleanup();const{Y,M,D}=getDatePath();for(const[cat,fn]of Object.entries(MAP)){const file=path.join(this.baseDir,`${Y}/${M}/${D}`,fn);const key=`${Y}/${M}/${D}/${fn}`;if(!(await fs.pathExists(file)))continue;this.q.add(async()=>{try{const txt=await fs.readFile(file,"utf8");let nw={logs:[]};if(txt.trim()){try{const p=JSON.parse(txt);if(Array.isArray(p.logs))nw.logs=p.logs;}catch(_){}}let ex={logs:[]};try{const s=await this.s3.getObject(key);if(s&&s.trim()){const p=JSON.parse(s);if(Array.isArray(p.logs))ex.logs=p.logs;}}catch(_){ }const merged={logs:[...ex.logs,...nw.logs]};await this.s3.putObject(key,JSON.stringify(merged,null,2));}catch(e){console.error("s3 upload error",e);}});}}}module.exports=DailyWatcher;
+const fs=require("fs-extra");const path=require("path");const{getDatePath}=require("./utils");const MAP={success:"daily_logs_success.json",error:"daily_logs_error.json",external:"daily_logs_external.json"};
+class DailyWatcher{constructor(baseDir,q,s3,opt={}){this.baseDir=baseDir;this.q=q;this.s3=s3;this.ms=opt.uploadIntervalMs||60000;this.currentDate=getDatePath();this.start();}async cleanup(){const{Y,M,D}=getDatePath();const p=this.currentDate;if(p.Y===Y&&p.M===M&&p.D===D)return;const dir=path.join(this.baseDir,`${p.Y}/${p.M}/${p.D}`);try{if(await fs.pathExists(dir))await fs.remove(dir);}catch(_){ }this.currentDate={Y,M,D};}start(){setInterval(()=>this.tick().catch(()=>{}),this.ms);}async tick(){await this.cleanup();const{Y,M,D}=getDatePath();for(const[cat,fn]of Object.entries(MAP)){const file=path.join(this.baseDir,`${Y}/${M}/${D}`,fn);const key=`${Y}/${M}/${D}/${fn}`;if(!(await fs.pathExists(file)))continue;this.q.add(async()=>{try{const localTxt=await fs.readFile(file,"utf8");let local={logs:[]};if(localTxt.trim())local=JSON.parse(localTxt);let existing={logs:[]};const s3Txt=await this.s3.getObject(key).catch(()=>null);if(s3Txt&&s3Txt.trim())existing=JSON.parse(s3Txt);const merged={logs:[...existing.logs,...local.logs]};await this.s3.putObject(key,JSON.stringify(merged,null,2));}catch(_){}});}}}module.exports=DailyWatcher;
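The rewritten watcher keeps the append strategy the README describes: read the local daily file, read the existing S3 object if there is one, concatenate the `logs` arrays, and put the merged document back. A standalone sketch of that merge step, where `s3` is a stand-in for `S3Uploader` (exposing `getObject(key)` returning a string or `null`, and `putObject(key, body)`), not part of the package's public API:

```js
// Sketch of the read+merge+put "append" that DailyWatcher performs per category in v1.2.2.
// fs-extra is a real dependency of the package; the `s3` argument is assumed here.
const fs = require("fs-extra");

async function appendDailyFileToS3(localFile, key, s3) {
  const localTxt = await fs.readFile(localFile, "utf8");
  let local = { logs: [] };
  if (localTxt.trim()) local = JSON.parse(localTxt);

  // A missing S3 key resolves to null (see S3Uploader.getObject), so the first
  // upload of the day simply merges onto an empty { logs: [] } document.
  let existing = { logs: [] };
  const s3Txt = await s3.getObject(key).catch(() => null);
  if (s3Txt && s3Txt.trim()) existing = JSON.parse(s3Txt);

  const merged = { logs: [...(existing.logs || []), ...(local.logs || [])] };
  await s3.putObject(key, JSON.stringify(merged, null, 2));
  return merged.logs.length;
}
```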
package/src/LogWriter.js
CHANGED
@@ -1,3 +1,2 @@
-const fs=require("fs-extra");const path=require("path");const{getDatePath,formatDate}=require("./utils");
-const MAP={…
-class LogWriter{constructor(baseDir="logs"){this.baseDir=baseDir;}async writeLog(log,cat="success"){const fp=this.getFilePath(cat);await fs.ensureDir(path.dirname(fp));let w={logs:[]};if(await fs.pathExists(fp)){try{const t=await fs.readFile(fp,"utf8");if(t.trim()){const p=JSON.parse(t);if(Array.isArray(p.logs))w.logs=p.logs;}}catch(_){w={logs:[]};}}const e={url:log.url||"",body:log.body||"",params:log.params||"",type:log.type||"",method:log.method||"",error:log.error||"",employee_id:log.employee_id||"",date:log.date||formatDate(new Date())};w.logs.push(e);await fs.writeFile(fp,JSON.stringify(w,null,2),"utf8");return fp;}getFilePath(cat="success"){const{Y,M,D}=getDatePath();const fn=MAP[cat]||MAP.success;return path.join(this.baseDir,`${Y}/${M}/${D}/${fn}`);}}module.exports=LogWriter;
+const fs=require("fs-extra");const path=require("path");const{getDatePath,formatDate}=require("./utils");const MAP={success:"daily_logs_success.json",error:"daily_logs_error.json",external:"daily_logs_external.json"};
+class LogWriter{constructor(baseDir){this.baseDir=baseDir;}getFilePath(cat="success"){const{Y,M,D}=getDatePath();const fn=MAP[cat]||MAP.success;return path.join(this.baseDir,`${Y}/${M}/${D}/${fn}`);}async writeLog(log,cat="success"){const fp=this.getFilePath(cat);await fs.ensureDir(path.dirname(fp));let w={logs:[]};if(await fs.pathExists(fp)){try{const t=await fs.readFile(fp,"utf8");if(t.trim()){const p=JSON.parse(t);if(Array.isArray(p.logs))w.logs=p.logs;}}catch(_){}}w.logs.push({url:log.url||"",body:log.body||"",params:log.params||"",type:log.type||"",method:log.method||"",error:log.error||"",employee_id:log.employee_id||"",date:log.date||formatDate(new Date())});await fs.writeFile(fp,JSON.stringify(w,null,2),"utf8");return fp;}}module.exports=LogWriter;
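`LogWriter` still produces one JSON document per day per category with the same fixed fields; the visible change is that the constructor no longer defaults to `"logs"` (the base directory is now resolved by `Logger` and passed in). A rough sketch of what a single `writeLog` call leaves on disk, using the internal module directly (the deep require path and all field values are illustrative, not from a real run):

```js
// Sketch: shape of the daily file written by LogWriter.writeLog() in v1.2.2.
const path = require("path");
const fs = require("fs-extra");
const LogWriter = require("@nm-logger/logger/src/LogWriter"); // internal module, path assumed

async function demo() {
  const writer = new LogWriter(path.resolve("./logs"));
  const file = await writer.writeLog(
    {
      url: "/api/users",
      body: JSON.stringify({ name: "demo" }),
      params: JSON.stringify({ params: {}, query: {} }),
      type: "users",
      method: "GET",
      employee_id: "E123",
    },
    "success"
  );
  // file is e.g. ./logs/2024/05/01/daily_logs_success.json and contains
  // { "logs": [ { "url": "/api/users", ..., "date": "2024-05-01 10:15:00" } ] }
  console.log(file, await fs.readJson(file));
}

demo().catch(console.error);
```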
package/src/Logger.js
CHANGED
@@ -1,12 +1,11 @@
-const LogWriter=require("./LogWriter");const Queue=require("./Queue");const S3Uploader=require("./S3Uploader");const DailyWatcher=require("./DailyWatcher");const{getApiType,maskSensitive}=require("./utils");
-class Logger{constructor(s3cfg={},opt={}){…
+const LogWriter=require("./LogWriter");const Queue=require("./Queue");const S3Uploader=require("./S3Uploader");const DailyWatcher=require("./DailyWatcher");const{getApiType,maskSensitive}=require("./utils");const fs=require("fs-extra");const path=require("path");
+class Logger{constructor(s3cfg={},opt={}){let dir="logs";try{if(opt.baseDir){const r=path.resolve(opt.baseDir);fs.ensureDirSync(r);dir=r;}}catch(e){console.error("[@nm-logger] Invalid baseDir, fallback to ./logs",e.message);}this.baseDir=dir;this.logWriter=new LogWriter(this.baseDir);this.queue=new Queue();this.s3=new S3Uploader(s3cfg);this.maskFields=(opt.maskFields||[]).map(x=>String(x).toLowerCase());new DailyWatcher(this.baseDir,this.queue,this.s3,{uploadIntervalMs:opt.uploadIntervalMs||60000});}
 mask(v){return maskSensitive(v,this.maskFields);}
 buildBaseLog(req,employee_id=""){const url=req&&req.originalUrl||"";const body=req&&req.body||{};const params={params:req&&req.params||{},query:req&&req.query||{}};const mBody=this.mask(body);const mParams=this.mask(params);const method=(req&&req.method||"").toUpperCase();const emp=employee_id||(req&&req.user&&(req.user.employee_id||req.user.emp_code||req.user.id))||"";return{url,body:JSON.stringify(mBody||{}),params:JSON.stringify(mParams||{}),type:getApiType(url),method,error:"",employee_id:emp};}
 async logError(err,req,employee_id=""){const base=this.buildBaseLog(req||{},employee_id);const d={...base,error:err&&err.message?err.message:String(err||"")};await this.logWriter.writeLog(d,"error");}
 async logRequest(req,employee_id=""){const d=this.buildBaseLog(req,employee_id);await this.logWriter.writeLog(d,"success");}
 async logExternalApi({url,method,data,params,error,employeeId}){const mb=this.mask(data||{});const mp=this.mask(params||{});const d={url:url||"",body:JSON.stringify(mb||{}),params:JSON.stringify(mp||{}),type:"external_api",method:(method||"").toUpperCase(),error:error||"",employee_id:employeeId||""};await this.logWriter.writeLog(d,"external");}
-expressMiddleware(){return(err,req,res,next)=>{try{this.logError(err,req).catch(…
-…
-…
-attachAxiosLogger(ax){if(!ax||!ax.interceptors)return;ax.interceptors.response.use(r=>{try{const c=r.config||{};this.logExternalApi({url:c.url,method:c.method,data:c.data,params:c.params,error:null,employeeId:c.employee_id}).catch(e=>console.error("logger axios ok",e));}catch(e){console.error("logger axios ok outer",e);}return r;},err=>{try{const c=err.config||{};this.logExternalApi({url:c&&c.url,method:c&&c.method,data:c&&c.data,params:c&&c.params,error:err&&err.message,employeeId:c&&c.employee_id}).catch(e=>console.error("logger axios err",e));}catch(e){console.error("logger axios err outer",e);}return Promise.reject(err);});}}
+expressMiddleware(){return(err,req,res,next)=>{try{this.logError(err,req).catch(()=>{});}catch(_){ }next(err);};}
+requestLoggerMiddleware(){return(req,res,next)=>{try{if(!req.__nm_logger_logged){req.__nm_logger_logged=true;this.logRequest(req).catch(()=>{});}}catch(_){ }next();};}
+attachAxiosLogger(ax){if(!ax||!ax.interceptors)return;ax.interceptors.response.use(r=>{try{const c=r.config||{};this.logExternalApi({url:c.url,method:c.method,data:c.data,params:c.params,error:null,employeeId:c.employee_id}).catch(()=>{});}catch(_){ }return r;},err=>{try{const c=err.config||{};this.logExternalApi({url:c&&c.url,method:c&&c.method,data:c&&c.data,params:c&&c.params,error:err&&err.message,employeeId:c&&c.employee_id}).catch(()=>{});}catch(_){ }return Promise.reject(err);});}}
 module.exports=Logger;
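The substantive change is in the constructor: `baseDir` is resolved and created up front with a fallback to `./logs`, `maskFields` is normalised to lowercase, and the noisy `console.error` calls in the middleware and Axios hooks give way to silent `.catch(() => {})`. A hedged sketch of how a request entry is assembled by `buildBaseLog`, using a hand-built `req` object rather than a real Express request (the region, bucket, and field values are placeholders; the constructor also starts the S3 upload timer, so a real app would keep running):

```js
// Sketch of employee_id resolution and masking inside buildBaseLog() in v1.2.2.
const Logger = require("@nm-logger/logger");

const logger = new Logger(
  { region: "us-east-1", bucket: "example-bucket" }, // credentials omitted in this sketch
  { baseDir: "./logs", maskFields: ["aadhaar"] }
);

const req = {
  originalUrl: "/api/payments/submit",
  method: "post",
  body: { amount: 100, aadhaar: "1234-5678-9012", password: "hunter2" },
  params: {},
  query: { retry: "1" },
  user: { emp_code: "E42" }, // no employee_id, so emp_code is picked up
};

const entry = logger.buildBaseLog(req);
// entry.employee_id === "E42"
// entry.method === "POST"
// entry.type === "submit"                       (last URL segment, per getApiType)
// JSON.parse(entry.body).aadhaar === "*****"    (custom maskFields entry)
// JSON.parse(entry.body).password === "*****"   (built-in masked key)
console.log(entry);
```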
package/src/Queue.js
CHANGED
@@ -1 +1 @@
-class Queue{constructor(){this.jobs=[];this.processing=false;}add(j){this.jobs.push(j);this.run();}async run(){if(this.processing)return;this.processing=true;while(this.jobs.length){const…
+class Queue{constructor(){this.jobs=[];this.processing=false;}add(j){this.jobs.push(j);this.run();}async run(){if(this.processing)return;this.processing=true;while(this.jobs.length){const job=this.jobs.shift();try{await job();}catch(_){}}this.processing=false;}}module.exports=Queue;
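The new line completes what was previously cut off: `run()` shifts each job, awaits it, and swallows errors so one failed upload cannot stall the rest. A tiny usage sketch, assuming the internal module is reachable via a deep require (the package has no `exports` map):

```js
// Sketch of Queue's in-order, error-swallowing behaviour in v1.2.2.
const Queue = require("@nm-logger/logger/src/Queue"); // internal path assumed

const q = new Queue();
q.add(async () => console.log("first upload"));
q.add(async () => { throw new Error("second upload fails"); }); // rejection is swallowed
q.add(async () => console.log("third upload still runs"));
// Jobs run one at a time in insertion order; a failure never stops the loop.
```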
package/src/S3Uploader.js
CHANGED
@@ -1,2 +1,2 @@
 const{S3Client,PutObjectCommand,GetObjectCommand}=require("@aws-sdk/client-s3");const{streamToString}=require("./utils");
-class S3Uploader{constructor(cfg){this.client=new S3Client({region:cfg.region,credentials:{accessKeyId:cfg.accessKeyId,secretAccessKey:cfg.secretAccessKey}});this.bucket=cfg.bucket;}async getObject(key){try{const r=await this.client.send(new GetObjectCommand({Bucket:this.bucket,Key:key}));return await streamToString(r.Body);}catch(e){if(e&&(…
+class S3Uploader{constructor(cfg){this.client=new S3Client({region:cfg.region,credentials:{accessKeyId:cfg.accessKeyId,secretAccessKey:cfg.secretAccessKey}});this.bucket=cfg.bucket;}async getObject(key){try{const r=await this.client.send(new GetObjectCommand({Bucket:this.bucket,Key:key}));return await streamToString(r.Body);}catch(e){if(e&&(e.name==="NoSuchKey"||(e.$metadata&&e.$metadata.httpStatusCode===404)))return null;throw e;}}async putObject(key,body){await this.client.send(new PutObjectCommand({Bucket:this.bucket,Key:key,Body:body}));}}module.exports=S3Uploader;
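The previously cut-off `catch` is now visible in full: a missing object (`NoSuchKey`, or an HTTP 404 in `$metadata`) makes `getObject` return `null` instead of throwing, which is what lets `DailyWatcher` treat the first upload of the day as an append onto an empty document. A hedged sketch of that contract (the bucket name and key are placeholders, and the internal require path is assumed):

```js
// Sketch of S3Uploader's "missing key -> null" contract in v1.2.2.
const S3Uploader = require("@nm-logger/logger/src/S3Uploader"); // internal path assumed

async function readOrEmpty(key) {
  const s3 = new S3Uploader({
    region: "us-east-1",
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    bucket: "example-bucket",
  });
  const body = await s3.getObject(key); // null when the object does not exist yet
  return body ? JSON.parse(body) : { logs: [] };
}

readOrEmpty("2024/05/01/daily_logs_success.json").then((doc) => {
  console.log(doc.logs.length, "entries already in S3");
});
```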
package/src/utils.js
CHANGED
@@ -1,6 +1,6 @@
 exports.getDatePath=function(){const d=new Date();return{Y:d.getFullYear(),M:String(d.getMonth()+1).padStart(2,"0"),D:String(d.getDate()).padStart(2,"0")};};
 exports.formatDate=function(d){const p=n=>String(n).padStart(2,"0");return d.getFullYear()+"-"+p(d.getMonth()+1)+"-"+p(d.getDate())+" "+p(d.getHours())+":"+p(d.getMinutes())+":"+p(d.getSeconds());};
 exports.getApiType=function(url){if(!url)return"";const s=url.split("/").filter(Boolean);return s[s.length-1]||"";};
-const …
-exports.maskSensitive=function(v,extra){const keys=[...…
+const MK=["password","pass","token","secret","otp","auth","authorization","apikey","api_key","session","ssn"];
+exports.maskSensitive=function(v,extra){const keys=[...MK,...(extra||[])].map(x=>String(x).toLowerCase());const isMask=k=>keys.some(m=>String(k).toLowerCase().includes(m));const maskAny=val=>{if(val&&typeof val==="object"){if(Array.isArray(val))return val.map(maskAny);const o={};for(const[k,x]of Object.entries(val))o[k]=isMask(k)?"*****":maskAny(x);return o;}return val;};if(typeof v==="string"){try{return maskAny(JSON.parse(v));}catch(_){return v;}}return maskAny(v);};
 exports.streamToString=async s=>await new Promise((res,rej)=>{const c=[];s.on("data",x=>c.push(x));s.on("error",rej);s.on("end",()=>res(Buffer.concat(c).toString("utf8")));});
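`maskSensitive` now ships with its full built-in key list and recurses through nested objects, arrays, and JSON strings, matching keys by lowercase substring (so `authorization` also hits `auth`). A quick behavioural sketch, with invented input values and the internal require path assumed:

```js
// Sketch of maskSensitive() in v1.2.2: substring match on lowercased keys,
// recursion into nested objects/arrays, and JSON strings parsed before masking.
const { maskSensitive } = require("@nm-logger/logger/src/utils"); // internal path assumed

const masked = maskSensitive(
  {
    username: "demo",
    password: "hunter2",            // built-in key -> "*****"
    profile: { api_key: "abc123" }, // nested built-in key -> "*****"
    documents: [{ pan: "ABCDE1234F" }],
  },
  ["pan"]                           // extra keys, as passed via Logger's maskFields
);
// masked.password === "*****"
// masked.profile.api_key === "*****"
// masked.documents[0].pan === "*****"
// masked.username === "demo"
console.log(masked);
```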