backlib 0.4.0 → 0.5.0-SNAPSHOT.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +6 -4
- package/dist/index.js +8 -4
- package/dist/index.js.map +1 -1
- package/dist/log-file-writer.d.ts +39 -0
- package/dist/log-file-writer.js +104 -0
- package/dist/log-file-writer.js.map +1 -0
- package/dist/log.d.ts +1 -40
- package/dist/log.js +2 -96
- package/dist/log.js.map +1 -1
- package/package.json +4 -6
- package/src/index.ts +18 -4
- package/src/log-file-writer.ts +159 -0
- package/src/log.ts +4 -160
- package/dist/fs.d.ts +0 -12
- package/dist/fs.js +0 -70
- package/dist/fs.js.map +0 -1
- package/src/fs.ts +0 -84
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,6 @@
- export
- export
- export
- export
+ export { newLeafTracer } from './decorator-leaf-tracer.js';
+ export { FileWriter } from './log-file-writer.js';
+ export type { FileNameProvider, FileWriterOptions, OnFileCompleted, RecordSerializer } from './log-file-writer.js';
+ export { BaseLog } from './log.js';
+ export type { LogOptions, LogWriter } from './log.js';
+ export { prompt } from './utils.js';
package/dist/index.js
CHANGED
@@ -1,5 +1,9 @@
-
- export
-
- export
+ // Export decorator constructs
+ export { newLeafTracer } from './decorator-leaf-tracer.js';
+ // Export log-file-writer constructs
+ export { FileWriter } from './log-file-writer.js';
+ // Export log constructs
+ export { BaseLog } from './log.js';
+ // Export utils constructs
+ export { prompt } from './utils.js';
  //# sourceMappingURL=index.js.map
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,8BAA8B;AAC9B,OAAO,EAAE,aAAa,EAAE,MAAM,4BAA4B,CAAC;AAC3D,oCAAoC;AACpC,OAAO,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AAElD,wBAAwB;AACxB,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEnC,0BAA0B;AAC1B,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAC"}
package/dist/log-file-writer.d.ts
ADDED
@@ -0,0 +1,39 @@
+ import { LogWriter } from './index.js';
+ export declare type OnFileCompleted = (file: string) => Promise<void>;
+ export declare type FileNameProvider = (rev: number) => string;
+ /** Record serializer to string, which will be appended to the file. If null, record will be skipped */
+ export declare type RecordSerializer<R> = (rec: R) => string | null;
+ export interface FileWriterOptions<R> {
+     /** Local directory in which the logs files will be saved */
+     dir: string;
+     /** maxCount of record before file is uploaded to destination */
+     maxCount: number;
+     /** max time (in seconds) before file is uploaded to destination (which ever comes first with maxCount) */
+     maxTime: number;
+     /** Optional fileName generator for the new log file name (MUST BE UNIQUE for this dir) */
+     fileNameProvider?: FileNameProvider;
+     recordSerializer?: RecordSerializer<R>;
+     onFileCompleted?: OnFileCompleted;
+ }
+ export declare class FileWriter<R> implements LogWriter<R> {
+     readonly name = "to-deprecate";
+     private dir;
+     private maxCount;
+     private maxTime;
+     private fileNameProvider;
+     private recordSerializer;
+     private onFileCompleted?;
+     private _init;
+     private _rev;
+     private count;
+     private nextUpload;
+     private lastUpload?;
+     private file?;
+     constructor(opts: FileWriterOptions<R>);
+     private init;
+     /** Update the revision file */
+     private rev;
+     /** IMPLEMENTATION of the FileWriter interface */
+     writeRec(rec: R): Promise<void>;
+     private endFile;
+ }
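For orientation, here is a minimal usage sketch of the new FileWriter based on the options declared above. The record shape, the dir/maxCount/maxTime values, and the onFileCompleted body are illustrative assumptions, not part of the package:

```ts
import { FileWriter } from 'backlib';

// Hypothetical record type; FileWriter<R> is generic over the record shape.
interface AuditRec { ts: number; msg: string }

const writer = new FileWriter<AuditRec>({
    dir: './logs/audit',   // local directory where log files accumulate
    maxCount: 1000,        // end the current file after 1000 records ...
    maxTime: 60,           // ... or after 60 seconds, whichever comes first
    // called when a file is completed (e.g., upload it, then archive or delete it)
    onFileCompleted: async (file) => {
        console.log('log file ready for processing:', file);
    },
});

await writer.writeRec({ ts: Date.now(), msg: 'user signed in' });
```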
package/dist/log-file-writer.js
ADDED
@@ -0,0 +1,104 @@
+ import { pathExists } from 'fs-aux';
+ import { appendFile, mkdir, rename } from 'fs/promises';
+ import * as Path from "path";
+ import { isString } from 'utils-min';
+ export class FileWriter {
+     constructor(opts) {
+         this.name = 'to-deprecate'; // TODO: to deprecate
+         this._init = false;
+         this._rev = 0;
+         this.count = 0;
+         this.nextUpload = null; // null means nothing scheduled
+         this.maxCount = opts.maxCount;
+         this.maxTime = opts.maxTime;
+         this.dir = opts.dir;
+         this.fileNameProvider = opts.fileNameProvider ?? defaultFileNameProvider;
+         this.onFileCompleted = opts.onFileCompleted;
+         this.recordSerializer = opts.recordSerializer ?? defaultSerializer;
+     }
+     async init() {
+         if (!this._init) {
+             await mkdir(this.dir, { recursive: true });
+             this.rev();
+             this._init = true;
+         }
+     }
+     /** Update the revision file */
+     rev() {
+         this.count = 0;
+         this._rev = this._rev + 1;
+         const fileName = this.fileNameProvider(this._rev);
+         this.file = Path.join(this.dir, fileName);
+     }
+     /** IMPLEMENTATION of the FileWriter interface */
+     async writeRec(rec) {
+         if (!this._init) {
+             await this.init();
+         }
+         const str = this.recordSerializer(rec);
+         if (str != null) {
+             const strWithNl = str + '\n'; // add the new line
+             await appendFile(this.file, strWithNl);
+         }
+         // add count
+         this.count = this.count + 1;
+         // if we are above the count, we upload
+         if (this.count > this.maxCount) {
+             console.log(`->> rev ${this.name} because count ${this.count} > maxCount ${this.maxCount}`);
+             await this.endFile();
+         }
+         // if still below the count, but do not have this.nextUpload, schedule one
+         else if (this.nextUpload === null) {
+             const maxTimeMs = this.maxTime * 1000; // in ms
+             const nextUpload = Date.now() + maxTimeMs;
+             this.nextUpload = nextUpload;
+             setTimeout(async () => {
+                 // perform only if this.nextUpload match the scheduled nextUpload (otherwise, was already processed and this schedule is outdated)
+                 if (this.nextUpload === nextUpload) {
+                     console.log(`->> rev ${this.name} because maxTimeMs ${maxTimeMs}`);
+                     await this.endFile();
+                 }
+             }, maxTimeMs);
+         }
+     }
+     async endFile() {
+         const file = this.file;
+         // we rev just before to make sure other logs will happen on new files
+         this.rev();
+         try {
+             const exists = await pathExists(file);
+             if (exists) {
+                 if (this.onFileCompleted) {
+                     try {
+                         console.log(`->> endFile processing ${this.name} `);
+                         await this.onFileCompleted(file);
+                     }
+                     catch (ex) {
+                         console.log(`LOG PROCESSING ERROR - processing file '${file}' caused the following error: ${ex}`);
+                     }
+                 }
+             }
+             else {
+                 console.log(`LOG PROCESSING REMOVE ERROR - cannot be processed file '${file}' does not exists anymore`);
+             }
+         }
+         // Note: note sure we need this global catch now.
+         catch (ex) {
+             console.log(`LOG PROCESSING - logger.processLogFile - cannot upload to big query ${file}, ${ex.message}`);
+             await rename(file, file + '.error');
+         }
+         this.count = 0;
+         this.lastUpload = Date.now();
+         this.nextUpload = null;
+     }
+ }
+ /** default serializer */
+ function defaultSerializer(rec) {
+     return isString(rec) ? rec : JSON.stringify(rec);
+ }
+ function defaultFileNameProvider(rev) {
+     const date = new Date().toISOString().replace(/[T:.]/g, "-").slice(0, -1);
+     const revStr = `${rev}`.padStart(5, '0');
+     return `log-file-${date}-${revStr}.log`;
+ }
+ //# sourceMappingURL=log-file-writer.js.map
package/dist/log-file-writer.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"log-file-writer.js","sourceRoot":"","sources":["../src/log-file-writer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AACpC,OAAO,EAAE,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACxD,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAC;AA0BrC,MAAM,OAAO,UAAU;IAkBrB,YAAY,IAA0B;QAjB7B,SAAI,GAAG,cAAc,CAAC,CAAC,qBAAqB;QAS7C,UAAK,GAAG,KAAK,CAAC;QACd,SAAI,GAAG,CAAC,CAAC;QACT,UAAK,GAAG,CAAC,CAAC;QACV,eAAU,GAAkB,IAAI,CAAC,CAAC,+BAA+B;QAMvE,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;QAC5B,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;QACpB,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,IAAI,uBAAuB,CAAC;QACzE,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAC;QAC5C,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,IAAI,iBAAiB,CAAC;IACrE,CAAC;IAEO,KAAK,CAAC,IAAI;QAChB,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACf,MAAM,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAC3C,IAAI,CAAC,GAAG,EAAE,CAAC;YACX,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;SACnB;IACH,CAAC;IAED,+BAA+B;IACvB,GAAG;QACT,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC;QAE1B,MAAM,QAAQ,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAElD,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAA;IAC3C,CAAC;IAGD,iDAAiD;IACjD,KAAK,CAAC,QAAQ,CAAC,GAAM;QAEnB,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACf,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;SACnB;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC;QACvC,IAAI,GAAG,IAAI,IAAI,EAAE;YACf,MAAM,SAAS,GAAG,GAAG,GAAG,IAAI,CAAC,CAAC,mBAAmB;YACjD,MAAM,UAAU,CAAC,IAAI,CAAC,IAAK,EAAE,SAAS,CAAC,CAAC;SACzC;QAED,YAAY;QACZ,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;QAE5B,uCAAuC;QACvC,IAAI,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE;YAC9B,OAAO,CAAC,GAAG,CAAC,WAAW,IAAI,CAAC,IAAI,kBAAkB,IAAI,CAAC,KAAK,eAAe,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YAC5F,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;SACtB;QACD,0EAA0E;aACrE,IAAI,IAAI,CAAC,UAAU,KAAK,IAAI,EAAE;YACjC,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC,QAAQ;YAE/C,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YAC1C,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;YAE7B,UAAU,CAAC,KAAK,IAAI,EAAE;gBACpB,kIAAkI;gBAClI,IAAI,IAAI,CAAC,UAAU,KAAK,UAAU,EAAE;oBAClC,OAAO,CAAC,GAAG,CAAC,WAAW,IAAI,CAAC,IAAI,sBAAsB,SAAS,EAAE,CAAC,CAAC;oBACnE,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;iBACtB;YACH,CAAC,EAAE,SAAS,CAAC,CAAC;SACf;IAEH,CAAC;IAEO,KAAK,CAAC,OAAO;QACnB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAK,CAAC;QACxB,sEAAsE;QACtE,IAAI,CAAC,GAAG,EAAE,CAAC;QAEX,IAAI;YACF,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,IAAI,CAAC,CAAC;YACtC,IAAI,MAAM,EAAE;gBACV,IAAI,IAAI,CAAC,eAAe,EAAE;oBACxB,IAAI;wBACF,OAAO,CAAC,GAAG,CAAC,0BAA0B,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC;wBACpD,MAAM,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;qBAClC;oBAAC,OAAO,EAAO,EAAE;wBAChB,OAAO,CAAC,GAAG,CAAC,2CAA2C,IAAI,iCAAiC,EAAE,EAAE,CAAC,CAAC;qBACnG;iBACF;aACF;iBAAM;gBACL,OAAO,CAAC,GAAG,CAAC,2DAA2D,IAAI,2BAA2B,CAAC,CAAC;aACzG;SAEF;QACD,kDAAkD;QAClD,OAAO,EAAO,EAAE;YACd,OAAO,CAAC,GAAG,CAAC,uEAAuE,IAAI,KAAK,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;YAC1G,MAAM,MAAM,CAAC,IAAI,EAAE,IAAI,GAAG,QAAQ,CAAC,CAAC;SACrC;QAED,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;QACf,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAC7B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;IACzB,CAAC;CAEF;AAED,yBAAyB;AAEzB,SAAS,iBAAiB,CAAI,GAAM;IAClC,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,uBAAuB,CAAC,GAAW;IAC1C,MAAM,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC1E,MAAM,MAAM,GAAG,GAAG,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC
,CAAC;IACzC,OAAO,YAAY,IAAI,IAAI,MAAM,MAAM,CAAC;AAC1C,CAAC"}
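Beyond the defaults above (JSON or raw-string records written one per line, and log-file-<date>-<rev>.log file names), both hooks can be swapped. A sketch with an assumed record shape and naming scheme (illustrative only):

```ts
import { FileWriter } from 'backlib';
import type { FileNameProvider, RecordSerializer } from 'backlib';

interface AppRec { level: string; msg: string }

// Returning null skips the record entirely (here: drop 'debug' records).
const serializer: RecordSerializer<AppRec> = (rec) =>
    rec.level === 'debug' ? null : JSON.stringify(rec);

// File names must be unique within the dir; lean on the rev counter for that.
const fileName: FileNameProvider = (rev) => `app-${String(rev).padStart(5, '0')}.log`;

const writer = new FileWriter<AppRec>({
    dir: './logs/app', maxCount: 500, maxTime: 30,
    recordSerializer: serializer,
    fileNameProvider: fileName,
});
```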
package/dist/log.d.ts
CHANGED
@@ -1,4 +1,4 @@
- interface LogOptions<R> {
+ export interface LogOptions<R> {
      writers: LogWriter<R>[];
  }
  /**
@@ -10,44 +10,5 @@ export declare class BaseLog<R> {
      log(rec: R): Promise<void>;
  }
  export interface LogWriter<R> {
-     readonly name: string;
      writeRec?(rec: R): Promise<void>;
  }
- /** processing file, if return true, then file will be assumed to be full processed, and will be deleted */
- export declare type FileProcessor = (file: string) => Promise<boolean>;
- /** Record serializer to string, which will be appended to the file. If null, record will be skipped */
- export declare type RecordSerializer<R> = (rec: R) => string | null;
- interface FileLogWriterOptions<R> {
-     /** name of the logWriter, will be used as prefix */
-     name: string;
-     /** Local directory in which the logs files will be saved */
-     dir: string;
-     /** maxCount of record before file is uploaded to destination */
-     maxCount: number;
-     /** max time (in ms) before file is uploaded to destination (which ever comes first with maxCount) */
-     maxTime: number;
-     fileProcessor?: FileProcessor;
-     recordSerializer?: RecordSerializer<R>;
- }
- export declare class FileLogWriter<R> implements LogWriter<R> {
-     readonly name: string;
-     private dir;
-     private maxCount;
-     private maxTime;
-     private fileProcessor?;
-     private recordSerializer;
-     private _init;
-     private _rev;
-     private count;
-     private nextUpload;
-     private lastUpload?;
-     private file?;
-     constructor(opts: FileLogWriterOptions<R>);
-     private init;
-     /** Update the revision file */
-     private rev;
-     /** IMPLEMENTATION of the FileWriter interface */
-     writeRec(rec: R): Promise<void>;
-     private endFile;
- }
- export {};
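Since LogWriter no longer requires a name and BaseLog simply fans records out to its writers, here is a small wiring sketch combining a custom writer with the new FileWriter. The record type and writer bodies are illustrative assumptions:

```ts
import { BaseLog, FileWriter, LogWriter } from 'backlib';

interface WebRec { path: string; status: number }

// Hypothetical console-backed writer; only the optional writeRec method is needed.
const consoleWriter: LogWriter<WebRec> = {
    async writeRec(rec) {
        console.log(`${rec.status} ${rec.path}`);
    },
};

const fileWriter = new FileWriter<WebRec>({ dir: './logs/web', maxCount: 1000, maxTime: 60 });

// BaseLog.log(rec) calls writeRec on each writer and logs (rather than rethrows) writer errors.
const webLog = new BaseLog<WebRec>({ writers: [consoleWriter, fileWriter] });
await webLog.log({ path: '/healthz', status: 200 });
```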
package/dist/log.js
CHANGED
@@ -1,7 +1,4 @@
-
- import { isString } from 'utils-min';
- import { glob, saferRemove } from './fs.js';
- const { pathExists, mkdirs, appendFile, rename } = (await import('fs-extra')).default;
+ // const { pathExists, mkdirs, appendFile, rename } = (await import('fs-extra')).default;
  /**
   * Base Log class that handle the base log management logic.
   */
@@ -19,101 +16,10 @@ export class BaseLog {
                  }
                  catch (ex) {
                      // here log console.log, no choise
-                     console.log(`
+                     console.log(`ERROR - BACKLIB - Log exception when writeRec on logWriter ${writer}. ${ex}`);
                  }
              }
          }
      }
  }
- export class FileLogWriter {
-     constructor(opts) {
-         this._init = false;
-         this._rev = 0;
-         this.count = 0;
-         this.nextUpload = null; // null means nothing scheduled
-         this.name = opts.name;
-         this.maxCount = opts.maxCount;
-         this.maxTime = opts.maxTime;
-         this.dir = opts.dir;
-         this.fileProcessor = opts.fileProcessor;
-         this.recordSerializer = opts.recordSerializer ?? defaultSerializer;
-     }
-     async init() {
-         if (!this._init) {
-             await mkdirs(this.dir);
-             // delete the logs dir if exit
-             const oldLogFiles = await glob(this.dir + `${this.name}*.log`);
-             await saferRemove(oldLogFiles);
-             console.log('Deleted old log files', oldLogFiles);
-             this.rev();
-             this._init = true;
-         }
-     }
-     /** Update the revision file */
-     rev() {
-         this.count = 0;
-         this._rev = this._rev + 1;
-         const suffix = `${this._rev}`.padStart(5, '0');
-         this.file = Path.join(this.dir, `${this.name}-${suffix}.log`);
-     }
-     /** IMPLEMENTATION of the FileWriter interface */
-     async writeRec(rec) {
-         if (!this._init) {
-             await this.init();
-         }
-         // TODO: Need to move this outside of the generic log implementation (we do this because bigquery expect info to be string, since it can be dynamic)
-         // NOTE: In fact, this whole file write and upload, should be part of a FileLogWriter, and we just treat it as above (perhaps in the BigQueryLogWriter extends FileLogWriter)
-         const str = this.recordSerializer(rec);
-         if (str != null) {
-             await appendFile(this.file, str);
-         }
-         // add count
-         this.count = this.count + 1;
-         // if we are above the count, we upload
-         if (this.count > this.maxCount) {
-             await this.endFile();
-         }
-         // if still below the count, but do not have this.nextUpload, schedule one
-         else if (this.nextUpload === null) {
-             const maxTimeMs = this.maxTime * 1000; // in ms
-             const nextUpload = Date.now() + maxTimeMs;
-             this.nextUpload = nextUpload;
-             setTimeout(async () => {
-                 // perform only if this.nextUpload match the scheduled nextUpload (otherwise, was already processed and this schedule is outdated)
-                 if (this.nextUpload === nextUpload) {
-                     await this.endFile();
-                 }
-             }, maxTimeMs);
-         }
-     }
-     async endFile() {
-         const file = this.file;
-         // we rev just before to make sure other logs will happen on new files
-         this.rev();
-         try {
-             const exists = await pathExists(file);
-             if (exists) {
-                 if (this.fileProcessor) {
-                     await this.fileProcessor(file);
-                 }
-                 await saferRemove(file);
-             }
-             else {
-                 console.log(`CODE ERROR - can't upload to big query ${file} does not exists`);
-             }
-         }
-         catch (ex) {
-             console.log(`ERROR - logger.processLogFile - cannot upload to big query ${file}, ${ex.message}`);
-             await rename(file, file + '.error');
-         }
-         this.count = 0;
-         this.lastUpload = Date.now();
-         this.nextUpload = null;
-     }
- }
- /** default serializer */
- function defaultSerializer(rec) {
-     return isString(rec) ? rec : JSON.stringify(rec);
- }
- //#endregion ---------- /FileLogWriter ----------
  //# sourceMappingURL=log.js.map
package/dist/log.js.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"log.js","sourceRoot":"","sources":["../src/log.ts"],"names":[],"mappings":"
+ {"version":3,"file":"log.js","sourceRoot":"","sources":["../src/log.ts"],"names":[],"mappings":"AACA,yFAAyF;AAQzF;;GAEG;AACH,MAAM,OAAO,OAAO;IAGnB,YAAY,IAAmB;QAFvB,eAAU,GAAmB,EAAE,CAAC;QAGvC,IAAI,CAAC,UAAU,GAAG,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC;IACrC,CAAC;IAED,KAAK,CAAC,GAAG,CAAC,GAAM;QAEf,GAAG;QACH,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC,UAAU,EAAE;YACrC,IAAI,MAAM,CAAC,QAAQ,EAAE;gBACpB,IAAI;oBACH,MAAM,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;iBAC3B;gBAAC,OAAO,EAAE,EAAE;oBACZ,kCAAkC;oBAClC,OAAO,CAAC,GAAG,CAAC,8DAA8D,MAAM,KAAK,EAAE,EAAE,CAAC,CAAC;iBAC3F;aACD;SACD;IAEF,CAAC;CAED"}
package/package.json
CHANGED
@@ -1,12 +1,11 @@
  {
    "name": "backlib",
    "type": "module",
-   "version": "0.
+   "version": "0.5.0-SNAPSHOT.1",
    "description": "Minimalist library for backend services",
    "main": "dist/index.js",
-   "typings": "dist/index.d.ts",
    "engines": {
-     "node": ">=
+     "node": ">=16.13"
    },
    "repository": {
      "type": "git",
@@ -19,12 +18,11 @@
    "author": "jeremy.chone@gmail.com",
    "license": "MIT",
    "dependencies": {
-     "
-     "fs-extra": "^10.0.0",
+     "fs-aux": "^0.1.0",
      "utils-min": "^0.2.0"
    },
    "devDependencies": {
-     "@types/
+     "@types/node": "^17.0.10",
      "rimraf": "^3.0.2",
      "typescript": "^4.5.5"
    },
package/src/index.ts
CHANGED
@@ -1,5 +1,19 @@
-
- export
-
- export
+ // Export decorator constructs
+ export { newLeafTracer } from './decorator-leaf-tracer.js';
+ // Export log-file-writer constructs
+ export { FileWriter } from './log-file-writer.js';
+ export type { FileNameProvider, FileWriterOptions, OnFileCompleted, RecordSerializer } from './log-file-writer.js';
+ // Export log constructs
+ export { BaseLog } from './log.js';
+ export type { LogOptions, LogWriter } from './log.js';
+ // Export utils constructs
+ export { prompt } from './utils.js';
+
+
+
+
+
+
+
+

package/src/log-file-writer.ts
ADDED
@@ -0,0 +1,159 @@
+ import { pathExists } from 'fs-aux';
+ import { appendFile, mkdir, rename } from 'fs/promises';
+ import * as Path from "path";
+ import { isString } from 'utils-min';
+ import { LogWriter } from './index.js';
+
+ export type OnFileCompleted = (file: string) => Promise<void>;
+ export type FileNameProvider = (rev: number) => string;
+
+ /** Record serializer to string, which will be appended to the file. If null, record will be skipped */
+ export type RecordSerializer<R> = (rec: R) => string | null;
+
+ export interface FileWriterOptions<R> {
+     /** Local directory in which the logs files will be saved */
+     dir: string;
+     /** maxCount of record before file is uploaded to destination */
+     maxCount: number;
+     /** max time (in seconds) before file is uploaded to destination (which ever comes first with maxCount) */
+     maxTime: number;
+
+     /** Optional fileName generator for the new log file name (MUST BE UNIQUE for this dir) */
+     fileNameProvider?: FileNameProvider,
+     /* Optional recordSerializer to file. By default, JSON.serializer() (new line json) */
+     recordSerializer?: RecordSerializer<R>;
+     /* Call when a log file is completed (i.e., new entries will go to another file) */
+     onFileCompleted?: OnFileCompleted;
+
+ }
+
+ export class FileWriter<R> implements LogWriter<R> {
+     readonly name = 'to-deprecate'; // TODO: to deprecate
+     private dir: string;
+     private maxCount: number;
+     private maxTime: number;
+
+     private fileNameProvider: FileNameProvider;
+     private recordSerializer: RecordSerializer<R>;
+     private onFileCompleted?: OnFileCompleted;
+
+     private _init = false;
+     private _rev = 0;
+     private count = 0;
+     private nextUpload: number | null = null; // null means nothing scheduled
+     private lastUpload?: number;
+
+     private file?: string;
+
+     constructor(opts: FileWriterOptions<R>) {
+         this.maxCount = opts.maxCount;
+         this.maxTime = opts.maxTime;
+         this.dir = opts.dir;
+         this.fileNameProvider = opts.fileNameProvider ?? defaultFileNameProvider;
+         this.onFileCompleted = opts.onFileCompleted;
+         this.recordSerializer = opts.recordSerializer ?? defaultSerializer;
+     }
+
+     private async init() {
+         if (!this._init) {
+             await mkdir(this.dir, { recursive: true });
+             this.rev();
+             this._init = true;
+         }
+     }
+
+     /** Update the revision file */
+     private rev() {
+         this.count = 0;
+         this._rev = this._rev + 1;
+
+         const fileName = this.fileNameProvider(this._rev);
+
+         this.file = Path.join(this.dir, fileName)
+     }
+
+
+     /** IMPLEMENTATION of the FileWriter interface */
+     async writeRec(rec: R) {
+
+         if (!this._init) {
+             await this.init();
+         }
+
+         const str = this.recordSerializer(rec);
+         if (str != null) {
+             const strWithNl = str + '\n'; // add the new line
+             await appendFile(this.file!, strWithNl);
+         }
+
+         // add count
+         this.count = this.count + 1;
+
+         // if we are above the count, we upload
+         if (this.count > this.maxCount) {
+             console.log(`->> rev ${this.name} because count ${this.count} > maxCount ${this.maxCount}`);
+             await this.endFile();
+         }
+         // if still below the count, but do not have this.nextUpload, schedule one
+         else if (this.nextUpload === null) {
+             const maxTimeMs = this.maxTime * 1000; // in ms
+
+             const nextUpload = Date.now() + maxTimeMs;
+             this.nextUpload = nextUpload;
+
+             setTimeout(async () => {
+                 // perform only if this.nextUpload match the scheduled nextUpload (otherwise, was already processed and this schedule is outdated)
+                 if (this.nextUpload === nextUpload) {
+                     console.log(`->> rev ${this.name} because maxTimeMs ${maxTimeMs}`);
+                     await this.endFile();
+                 }
+             }, maxTimeMs);
+         }
+
+     }
+
+     private async endFile() {
+         const file = this.file!;
+         // we rev just before to make sure other logs will happen on new files
+         this.rev();
+
+         try {
+             const exists = await pathExists(file);
+             if (exists) {
+                 if (this.onFileCompleted) {
+                     try {
+                         console.log(`->> endFile processing ${this.name} `);
+                         await this.onFileCompleted(file);
+                     } catch (ex: any) {
+                         console.log(`LOG PROCESSING ERROR - processing file '${file}' caused the following error: ${ex}`);
+                     }
+                 }
+             } else {
+                 console.log(`LOG PROCESSING REMOVE ERROR - cannot be processed file '${file}' does not exists anymore`);
+             }
+
+         }
+         // Note: note sure we need this global catch now.
+         catch (ex: any) {
+             console.log(`LOG PROCESSING - logger.processLogFile - cannot upload to big query ${file}, ${ex.message}`);
+             await rename(file, file + '.error');
+         }
+
+         this.count = 0;
+         this.lastUpload = Date.now();
+         this.nextUpload = null;
+     }
+
+ }
+
+ /** default serializer */
+
+ function defaultSerializer<R>(rec: R): string {
+     return isString(rec) ? rec : JSON.stringify(rec);
+ }
+
+ function defaultFileNameProvider(rev: number): string {
+     const date = new Date().toISOString().replace(/[T:.]/g, "-").slice(0, -1);
+     const revStr = `${rev}`.padStart(5, '0');
+     return `log-file-${date}-${revStr}.log`;
+ }
package/src/log.ts
CHANGED
@@ -1,11 +1,9 @@
-
-
- import { glob, saferRemove } from './fs.js';
- const { pathExists, mkdirs, appendFile, rename } = (await import('fs-extra')).default;
+
+ // const { pathExists, mkdirs, appendFile, rename } = (await import('fs-extra')).default;


  //#region ---------- BaseLog ----------
- interface LogOptions<R> {
+ export interface LogOptions<R> {
      writers: LogWriter<R>[]
  }

@@ -28,7 +26,7 @@ export class BaseLog<R> {
                  await writer.writeRec(rec);
              } catch (ex) {
                  // here log console.log, no choise
-                 console.log(`
+                 console.log(`ERROR - BACKLIB - Log exception when writeRec on logWriter ${writer}. ${ex}`);
              }
          }
      }
@@ -39,160 +37,6 @@ export class BaseLog<R> {
  //#endregion ---------- /BaseLog ----------

  export interface LogWriter<R> {
-     readonly name: string;
      writeRec?(rec: R): Promise<void>
  }

-
- //#region ---------- FileLogWriter ----------
- /** processing file, if return true, then file will be assumed to be full processed, and will be deleted */
- export type FileProcessor = (file: string) => Promise<boolean>;
-
- /** Record serializer to string, which will be appended to the file. If null, record will be skipped */
- export type RecordSerializer<R> = (rec: R) => string | null;
-
- interface FileLogWriterOptions<R> {
-     /** name of the logWriter, will be used as prefix */
-     name: string;
-     /** Local directory in which the logs files will be saved */
-     dir: string;
-     /** maxCount of record before file is uploaded to destination */
-     maxCount: number;
-     /** max time (in ms) before file is uploaded to destination (which ever comes first with maxCount) */
-     maxTime: number;
-
-     /* Mehod to process the file (.e.g., upload to bucket, bigquery, ...) */
-     fileProcessor?: FileProcessor;
-
-     /* Optional recordSerializer to file. By default, JSON.serializer() (new line json) */
-     recordSerializer?: RecordSerializer<R>;
- }
-
-
-
- export class FileLogWriter<R> implements LogWriter<R> {
-
-     readonly name: string;
-     private dir: string;
-     private maxCount: number;
-     private maxTime: number;
-     private fileProcessor?: FileProcessor;
-     private recordSerializer: RecordSerializer<R>;
-
-     private _init = false;
-     private _rev = 0;
-     private count = 0;
-     private nextUpload: number | null = null; // null means nothing scheduled
-     private lastUpload?: number;
-
-     private file?: string;
-
-
-     constructor(opts: FileLogWriterOptions<R>) {
-         this.name = opts.name;
-         this.maxCount = opts.maxCount;
-         this.maxTime = opts.maxTime;
-         this.dir = opts.dir;
-         this.fileProcessor = opts.fileProcessor;
-         this.recordSerializer = opts.recordSerializer ?? defaultSerializer;
-     }
-
-     private async init() {
-         if (!this._init) {
-             await mkdirs(this.dir);
-
-             // delete the logs dir if exit
-             const oldLogFiles = await glob(this.dir + `${this.name}*.log`);
-             await saferRemove(oldLogFiles);
-             console.log('Deleted old log files', oldLogFiles);
-
-             this.rev();
-
-             this._init = true;
-         }
-
-     }
-
-     /** Update the revision file */
-     private rev() {
-         this.count = 0;
-         this._rev = this._rev + 1;
-         const suffix = `${this._rev}`.padStart(5, '0');
-         this.file = Path.join(this.dir, `${this.name}-${suffix}.log`)
-     }
-
-
-     /** IMPLEMENTATION of the FileWriter interface */
-     async writeRec(rec: R) {
-
-         if (!this._init) {
-             await this.init();
-         }
-
-         // TODO: Need to move this outside of the generic log implementation (we do this because bigquery expect info to be string, since it can be dynamic)
-         // NOTE: In fact, this whole file write and upload, should be part of a FileLogWriter, and we just treat it as above (perhaps in the BigQueryLogWriter extends FileLogWriter)
-         const str = this.recordSerializer(rec);
-         if (str != null) {
-             await appendFile(this.file!, str);
-         }
-
-         // add count
-         this.count = this.count + 1;
-
-         // if we are above the count, we upload
-         if (this.count > this.maxCount) {
-             await this.endFile();
-         }
-         // if still below the count, but do not have this.nextUpload, schedule one
-         else if (this.nextUpload === null) {
-             const maxTimeMs = this.maxTime * 1000; // in ms
-
-             const nextUpload = Date.now() + maxTimeMs;
-             this.nextUpload = nextUpload;
-
-             setTimeout(async () => {
-                 // perform only if this.nextUpload match the scheduled nextUpload (otherwise, was already processed and this schedule is outdated)
-                 if (this.nextUpload === nextUpload) {
-                     await this.endFile();
-                 }
-             }, maxTimeMs);
-         }
-
-     }
-
-     private async endFile() {
-         const file = this.file!;
-         // we rev just before to make sure other logs will happen on new files
-         this.rev();
-
-         try {
-             const exists = await pathExists(file);
-             if (exists) {
-                 if (this.fileProcessor) {
-                     await this.fileProcessor(file);
-                 }
-                 await saferRemove(file);
-             } else {
-                 console.log(`CODE ERROR - can't upload to big query ${file} does not exists`);
-             }
-
-         } catch (ex: any) {
-             console.log(`ERROR - logger.processLogFile - cannot upload to big query ${file}, ${ex.message}`);
-             await rename(file, file + '.error');
-         }
-
-         this.count = 0;
-         this.lastUpload = Date.now();
-         this.nextUpload = null;
-     }
-
- }
-
- /** default serializer */
-
- function defaultSerializer<R>(rec: R): string {
-     return isString(rec) ? rec : JSON.stringify(rec);
- }
-
- //#endregion ---------- /FileLogWriter ----------
-
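For consumers of the removed FileLogWriter, a rough migration sketch to the new FileWriter (the record type and upload helper are hypothetical). The visible differences in this diff: the name prefix option is gone, file naming moves to fileNameProvider, and fileProcessor is replaced by onFileCompleted, which returns void and leaves deletion of the completed file to the callback rather than removing it automatically:

```ts
import { FileWriter } from 'backlib';

interface MyRec { msg: string }
declare function upload(file: string): Promise<void>; // hypothetical destination (e.g., a bucket)

// Before (0.4.0, removed):
// const writer = new FileLogWriter<MyRec>({
//     name: 'web', dir: './logs', maxCount: 1000, maxTime: 60,
//     fileProcessor: async (file) => { await upload(file); return true; },
// });

// After (0.5.0-SNAPSHOT.1):
const writer = new FileWriter<MyRec>({
    dir: './logs', maxCount: 1000, maxTime: 60,
    fileNameProvider: (rev) => `web-${String(rev).padStart(5, '0')}.log`,
    onFileCompleted: async (file) => { await upload(file); },
});
```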
package/dist/fs.d.ts
DELETED
@@ -1,12 +0,0 @@
- import { Options } from 'fast-glob';
- /**
-  * Simplified and sorted glob function (using fast-glob) for one or more pattern from current directory or a optional cwd one.
-  *
-  * Note 1: The result will be sorted by natural directory/subdir/filename order (as a would a recursive walk)
-  * Note 2: When `cwd` in options, it is added to the file path i.e. `pathJoin(cwd, path)`
-  *
-  * @returns always sorted result return Promise<string[]>
-  */
- export declare function glob(pattern: string | string[], cwdOrFastGlobOptions?: string | Options): Promise<string[]>;
- /** Remove one or more files. Resolved the number of names removed */
- export declare function saferRemove(names: string | string[], cwd?: string): Promise<string[]>;
package/dist/fs.js
DELETED
@@ -1,70 +0,0 @@
- import FastGlob from 'fast-glob';
- import { join as pathJoin, resolve as pathResolve } from 'path';
- import { asArray } from 'utils-min';
- const { pathExists, remove } = (await import('fs-extra')).default;
- /**
-  * Simplified and sorted glob function (using fast-glob) for one or more pattern from current directory or a optional cwd one.
-  *
-  * Note 1: The result will be sorted by natural directory/subdir/filename order (as a would a recursive walk)
-  * Note 2: When `cwd` in options, it is added to the file path i.e. `pathJoin(cwd, path)`
-  *
-  * @returns always sorted result return Promise<string[]>
-  */
- export async function glob(pattern, cwdOrFastGlobOptions) {
-     let opts = undefined;
-     if (cwdOrFastGlobOptions != null) {
-         opts = (typeof cwdOrFastGlobOptions === 'string') ? { cwd: cwdOrFastGlobOptions } : cwdOrFastGlobOptions;
-     }
-     const result = await FastGlob(pattern, opts);
-     const cwd = (opts) ? opts.cwd : undefined;
-     const list = result.map(path => {
-         return (cwd) ? pathJoin(cwd, path) : path;
-     });
-     return list.sort(globCompare);
- }
- /** Remove one or more files. Resolved the number of names removed */
- export async function saferRemove(names, cwd) {
-     const baseDir = (cwd) ? pathResolve(cwd) : pathResolve('./');
-     let removedNames = [];
-     for (const name of asArray(names)) {
-         const fullPath = pathJoin(baseDir, name);
-         if (!fullPath.startsWith(baseDir)) {
-             throw new Error(`Path to be removed does not look safe (nothing done): ${fullPath}\n\tCause: Does not belong to ${baseDir}`);
-         }
-         const exists = await pathExists(fullPath);
-         if (exists) {
-             await remove(fullPath);
-             removedNames.push(name);
-         }
-     }
-     return removedNames;
- }
- //#region ---------- Utils ----------
- function globCompare(a, b) {
-     const aPathIdxs = pathIndexes(a);
-     const bPathIdxs = pathIndexes(b);
-     const minIdx = Math.min(aPathIdxs.length, bPathIdxs.length) - 1;
-     const aMinPath = a.substring(0, aPathIdxs[minIdx]);
-     const bMinPath = b.substring(0, bPathIdxs[minIdx]);
-     // if the common path is the same, and the path depth is different, then, the shortest one come first;
-     if ((aMinPath === bMinPath) && (aPathIdxs.length !== bPathIdxs.length)) {
-         return (aPathIdxs.length < bPathIdxs.length) ? -1 : 1;
-     }
-     // otherwise, we do a normal compare
-     return (a < b) ? -1 : 1;
- }
- function pathIndexes(fullPath) {
-     const idxs = [];
-     const l = fullPath.length;
-     for (let i = 0; i < l; i++) {
-         if (fullPath[i] === '/') {
-             idxs.push(i);
-         }
-     }
-     return idxs;
- }
- // function asArray(names: string | string[]) {
- //     return (names instanceof Array) ? names : [names];
- // }
- //#endregion ---------- /Utils ----------
- //# sourceMappingURL=fs.js.map
package/dist/fs.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"fs.js","sourceRoot":"","sources":["../src/fs.ts"],"names":[],"mappings":"AAAA,OAAO,QAAqB,MAAM,WAAW,CAAC;AAC9C,OAAO,EAAE,IAAI,IAAI,QAAQ,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,MAAM,CAAC;AAChE,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,OAAO,CAAC;AAElE;;;;;;;EAOE;AACF,MAAM,CAAC,KAAK,UAAU,IAAI,CAAC,OAA0B,EAAE,oBAAuC;IAC7F,IAAI,IAAI,GAAwB,SAAS,CAAC;IAE1C,IAAI,oBAAoB,IAAI,IAAI,EAAE;QACjC,IAAI,GAAG,CAAC,OAAO,oBAAoB,KAAK,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC,oBAAoB,CAAC;KACzG;IAED,MAAM,MAAM,GAAG,MAAM,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;IAC7C,MAAM,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1C,MAAM,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QAC9B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IAC3C,CAAC,CAAC,CAAC;IACH,OAAO,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC/B,CAAC;AAED,qEAAqE;AACrE,MAAM,CAAC,KAAK,UAAU,WAAW,CAAC,KAAwB,EAAE,GAAY;IACvE,MAAM,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;IAC7D,IAAI,YAAY,GAAa,EAAE,CAAC;IAEhC,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;QAClC,MAAM,QAAQ,GAAG,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACzC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,yDAAyD,QAAQ,iCAAiC,OAAO,EAAE,CAAC,CAAC;SAC7H;QACD,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,CAAC;QAC1C,IAAI,MAAM,EAAE;YACX,MAAM,MAAM,CAAC,QAAQ,CAAC,CAAC;YACvB,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACxB;KACD;IACD,OAAO,YAAY,CAAC;AACrB,CAAC;AAGD,yCAAyC;AACzC,SAAS,WAAW,CAAC,CAAS,EAAE,CAAS;IACxC,MAAM,SAAS,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACjC,MAAM,SAAS,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACjC,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM,EAAE,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;IAChE,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;IACnD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;IAEnD,sGAAsG;IACtG,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,KAAK,SAAS,CAAC,MAAM,CAAC,EAAE;QACvE,OAAO,CAAC,SAAS,CAAC,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;KACtD;IAED,oCAAoC;IACpC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAGzB,CAAC;AAED,SAAS,WAAW,CAAC,QAAgB;IACpC,MAAM,IAAI,GAAa,EAAE,CAAC;IAE1B,MAAM,CAAC,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;QAC3B,IAAI,QAAQ,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACb;KACD;IAED,OAAO,IAAI,CAAC;AACb,CAAC;AAED,+CAA+C;AAC/C,sDAAsD;AACtD,IAAI;AACJ,yCAAyC"}
package/src/fs.ts
DELETED
@@ -1,84 +0,0 @@
- import FastGlob, { Options } from 'fast-glob';
- import { join as pathJoin, resolve as pathResolve } from 'path';
- import { asArray } from 'utils-min';
- const { pathExists, remove } = (await import('fs-extra')).default;
-
- /**
-  * Simplified and sorted glob function (using fast-glob) for one or more pattern from current directory or a optional cwd one.
-  *
-  * Note 1: The result will be sorted by natural directory/subdir/filename order (as a would a recursive walk)
-  * Note 2: When `cwd` in options, it is added to the file path i.e. `pathJoin(cwd, path)`
-  *
-  * @returns always sorted result return Promise<string[]>
-  */
- export async function glob(pattern: string | string[], cwdOrFastGlobOptions?: string | Options): Promise<string[]> {
-     let opts: Options | undefined = undefined;
-
-     if (cwdOrFastGlobOptions != null) {
-         opts = (typeof cwdOrFastGlobOptions === 'string') ? { cwd: cwdOrFastGlobOptions } : cwdOrFastGlobOptions;
-     }
-
-     const result = await FastGlob(pattern, opts);
-     const cwd = (opts) ? opts.cwd : undefined;
-     const list = result.map(path => {
-         return (cwd) ? pathJoin(cwd, path) : path;
-     });
-     return list.sort(globCompare);
- }
-
- /** Remove one or more files. Resolved the number of names removed */
- export async function saferRemove(names: string | string[], cwd?: string): Promise<string[]> {
-     const baseDir = (cwd) ? pathResolve(cwd) : pathResolve('./');
-     let removedNames: string[] = [];
-
-     for (const name of asArray(names)) {
-         const fullPath = pathJoin(baseDir, name);
-         if (!fullPath.startsWith(baseDir)) {
-             throw new Error(`Path to be removed does not look safe (nothing done): ${fullPath}\n\tCause: Does not belong to ${baseDir}`);
-         }
-         const exists = await pathExists(fullPath);
-         if (exists) {
-             await remove(fullPath);
-             removedNames.push(name);
-         }
-     }
-     return removedNames;
- }
-
-
- //#region ---------- Utils ----------
- function globCompare(a: string, b: string) {
-     const aPathIdxs = pathIndexes(a);
-     const bPathIdxs = pathIndexes(b);
-     const minIdx = Math.min(aPathIdxs.length, bPathIdxs.length) - 1;
-     const aMinPath = a.substring(0, aPathIdxs[minIdx]);
-     const bMinPath = b.substring(0, bPathIdxs[minIdx]);
-
-     // if the common path is the same, and the path depth is different, then, the shortest one come first;
-     if ((aMinPath === bMinPath) && (aPathIdxs.length !== bPathIdxs.length)) {
-         return (aPathIdxs.length < bPathIdxs.length) ? -1 : 1;
-     }
-
-     // otherwise, we do a normal compare
-     return (a < b) ? -1 : 1;
-
-
- }
-
- function pathIndexes(fullPath: string): number[] {
-     const idxs: number[] = [];
-
-     const l = fullPath.length;
-     for (let i = 0; i < l; i++) {
-         if (fullPath[i] === '/') {
-             idxs.push(i);
-         }
-     }
-
-     return idxs;
- }
-
- // function asArray(names: string | string[]) {
- //     return (names instanceof Array) ? names : [names];
- // }
- //#endregion ---------- /Utils ----------
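With fs.ts removed, the fs-extra and fast-glob based helpers (glob, saferRemove) are gone and have no direct replacement in backlib; internally the new log-file-writer relies on fs/promises and fs-aux instead. A brief sketch of the equivalent calls (paths and file names are illustrative assumptions):

```ts
import { pathExists } from 'fs-aux';                      // replaces fs-extra's pathExists
import { appendFile, mkdir, rename } from 'fs/promises';  // replace fs-extra's mkdirs/appendFile/rename

const dir = './logs';                      // hypothetical directory
const file = `${dir}/log-file-00001.log`;  // hypothetical file name

await mkdir(dir, { recursive: true });     // fs-extra mkdirs(dir) becomes recursive mkdir
await appendFile(file, 'one record\n');    // append one serialized record

if (await pathExists(file)) {              // existence check, now from fs-aux
    await rename(file, file + '.error');   // e.g., park a file that failed processing
}
```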