@ezs/basics 2.6.0 → 2.7.0
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +33 -0
- package/lib/file-merge.js +57 -0
- package/lib/index.js +2 -0
- package/lib/obj-namespaces.js +5 -9
- package/lib/tar-dump.js +2 -2
- package/lib/tar-extract.js +15 -3
- package/lib/txt-inflection.js +2 -3
- package/lib/txt-sentences.js +2 -3
- package/lib/url-fetch.js +6 -7
- package/lib/url-request.js +4 -4
- package/package.json +3 -9
package/README.md
CHANGED

@@ -20,6 +20,7 @@ npm install @ezs/basics
  - [CSVParse](#csvparse)
  - [CSVString](#csvstring)
  - [FILELoad](#fileload)
+ - [FILEMerge](#filemerge)
  - [FILESave](#filesave)
  - [INIString](#inistring)
  - [JSONParse](#jsonparse)

@@ -251,6 +252,37 @@ Output:

  Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)**

+ ### FILEMerge
+
+ Take `Object` or `Buffer` and throw only one document
+
+ ```json
+ [ fi1e1.csv, file2.csv ]
+ ```
+
+ Script:
+
+ ```ini
+ [use]
+ plugin = basics
+
+ [FILELoad]
+ [FILEMerge]
+ [replace]
+ path = contentOfFile1AndFile2
+ value = self()
+ ```
+
+ Output:
+
+ ```json
+ [
+ (...)
+ ]
+ ```
+
+ Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)**
+
  ### FILESave

  Take data, convert it to buffer and append it to file

@@ -591,6 +623,7 @@ It returns to the output stream

  - `path` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** Regex to select the files to extract (optional, default `"**\/*.json"`)
  - `json` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** Parse as JSON the content of each file (optional, default `true`)
+ - `text` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** The content of each file is converted to a string (otherwise it remains a buffer) (optional, default `true`)
  - `compress` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Enable gzip compression (optional, default `false`)

  Returns **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)<{id: [String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String), value: [String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)}>**
package/lib/file-merge.js
ADDED

@@ -0,0 +1,57 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.default = FILEMerge;
+ /**
+  * Take `Object` or `Buffer` and throw only one document
+  *
+  * ```json
+  * [ fi1e1.csv, file2.csv ]
+  * ```
+  *
+  * Script:
+  *
+  * ```ini
+  * [use]
+  * plugin = basics
+  *
+  * [FILELoad]
+  * [FILEMerge]
+  * [replace]
+  * path = contentOfFile1AndFile2
+  * value = self()
+  *
+  * ```
+  *
+  * Output:
+  *
+  * ```json
+  * [
+  * (...)
+  * ]
+  * ```
+  *
+  * @name FILEMerge
+  * @returns {Object}
+  */
+ function FILEMerge(data, feed) {
+   if (this.isFirst()) {
+     this.chunks = [];
+     this.length = 0;
+     this.isBuffer = Buffer.isBuffer(data);
+   }
+   if (this.isLast()) {
+     feed.write(this.isBuffer ? Buffer.concat(this.chunks, this.length) : this.chunks.join(''));
+     feed.close();
+     return;
+   }
+   if (this.isBuffer) {
+     this.length += data.length;
+   } else {
+     this.length = this.chunks.length;
+   }
+   this.chunks.push(data);
+   feed.end();
+ }
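For context, here is a minimal sketch (not part of the package) of how the new FILEMerge statement might be driven from Node.js, assuming the usual @ezs/core usage (ezs.use(...) plus stream.pipe(ezs('Statement'))); the input chunks are invented for illustration:

```js
// Hypothetical usage sketch: two text chunks go in, one merged document comes out.
const { Readable } = require('stream');
const ezs = require('@ezs/core');

ezs.use(require('@ezs/basics'));

Readable.from(['id;value\n1;a\n', '2;b\n'])   // e.g. the contents of file1.csv and file2.csv
    .pipe(ezs('FILEMerge'))                   // accumulates every chunk, emits a single document at the end
    .on('data', (doc) => console.log(doc));   // -> 'id;value\n1;a\n2;b\n'
```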
package/lib/index.js
CHANGED

@@ -37,6 +37,7 @@ var _tarDump = _interopRequireDefault(require("./tar-dump"));
  var _iniString = _interopRequireDefault(require("./ini-string"));
  var _fileSave = _interopRequireDefault(require("./file-save"));
  var _fileLoad = _interopRequireDefault(require("./file-load"));
+ var _fileMerge = _interopRequireDefault(require("./file-merge"));
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
  const funcs = {
    BUFObject: _bufObject.default,

@@ -72,6 +73,7 @@ const funcs = {
    INIString: _iniString.default,
    FILESave: _fileSave.default,
    FILELoad: _fileLoad.default,
+   FILEMerge: _fileMerge.default,
    // aliases
    bufferify: _bufObject.default.BUFObject,
    concat: _txtConcat.default.TXTConcat,
package/lib/obj-namespaces.js
CHANGED

@@ -4,11 +4,7 @@ Object.defineProperty(exports, "__esModule", {
    value: true
  });
  exports.default = OBJNamespaces;
- var _lodash =
- var _lodash2 = _interopRequireDefault(require("lodash.escaperegexp"));
- var _lodash3 = _interopRequireDefault(require("lodash.mapkeys"));
- var _lodash4 = _interopRequireDefault(require("lodash.mapvalues"));
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+ var _lodash = require("lodash");
  /**
   * Take `Object` and throw the same object, all keys parsed to replace namespaces with their prefixes
   *

@@ -78,13 +74,13 @@ function OBJNamespaces(data, feed) {
    if (this.isFirst()) {
      const prefixes = [].concat(this.getParam('prefix')).map(x => String(x).trim()).filter(Boolean);
      const namespaces = [].concat(this.getParam('namespace')).filter(Boolean).slice(0, prefixes.length).map(x => String(x).trim());
-     this.mapping = (0, _lodash.
-     this.expression = RegExp(Object.keys(this.mapping).map(
+     this.mapping = (0, _lodash.zipObject)(namespaces, prefixes);
+     this.expression = RegExp(Object.keys(this.mapping).map(_lodash.escapeRegExp).join('|'), 'g');
      this.references = [].concat(this.getParam('reference')).filter(Boolean).map(x => RegExp(String(x).trim(), 'g'));
    }
-   const result = (0,
+   const result = (0, _lodash.mapKeys)(data, (val, key) => String(key).replace(this.expression, matched => this.mapping[matched]));
    if (this.references.length > 0) {
-     const result1 = (0,
+     const result1 = (0, _lodash.mapValues)(result, (value, key) => {
        if (this.references.some(x => key.search(x) !== -1)) {
          return String(value).replace(this.expression, matched => this.mapping[matched]);
        }
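The change above drops the per-method lodash.escaperegexp / lodash.mapkeys / lodash.mapvalues packages in favour of the single lodash dependency. As a standalone illustration (the namespace data below is invented), this is the zipObject / escapeRegExp / mapKeys combination the statement now relies on:

```js
// Standalone sketch of the lodash calls OBJNamespaces now uses; example data is invented.
const { zipObject, escapeRegExp, mapKeys } = require('lodash');

const namespaces = ['http://purl.org/dc/terms/'];
const prefixes = ['dcterms:'];

const mapping = zipObject(namespaces, prefixes);      // { 'http://purl.org/dc/terms/': 'dcterms:' }
const expression = RegExp(Object.keys(mapping).map(escapeRegExp).join('|'), 'g');

const data = { 'http://purl.org/dc/terms/title': 'Moby Dick' };
const result = mapKeys(data, (val, key) => String(key).replace(expression, (m) => mapping[m]));

console.log(result);                                  // { 'dcterms:title': 'Moby Dick' }
```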
package/lib/tar-dump.js
CHANGED

@@ -9,7 +9,7 @@ var _util = require("util");
  var _fs = require("fs");
  var _tarStream = _interopRequireDefault(require("tar-stream"));
  var _zlib = require("zlib");
- var _lodash =
+ var _lodash = require("lodash");
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
  // Avoid importing from fs/promise to be compatible with node 12
  const readFilePromise = (0, _util.promisify)(_fs.readFile);

@@ -48,7 +48,7 @@ function TARDump(data, feed) {
      'processingMSTime': this.getCumulativeTimeMS()
    };
    const manifestArray = [metadata].concat(this.getParam('manifest', [])).filter(Boolean);
-   const manifest = (0, _lodash.
+   const manifest = (0, _lodash.merge)(...manifestArray);
    this.pack.entry({
      name: 'manifest.json'
    }, JSON.stringify(manifest, null, ' '));
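The same lodash consolidation applies here: the manifest is now built by spreading the manifest fragments into lodash's merge. A standalone sketch with invented fragments:

```js
// Standalone sketch of the lodash.merge call TARDump now uses; fragments are invented.
const { merge } = require('lodash');

const metadata = { createdDate: '2024-01-01', processingMSTime: 12 };
const userManifest = { source: 'example', createdDate: '2024-02-02' };

// merge(...array) deep-merges left to right, later fragments overriding earlier ones.
const manifestArray = [metadata, userManifest].filter(Boolean);
const manifest = merge(...manifestArray);

console.log(manifest);
// -> { createdDate: '2024-02-02', processingMSTime: 12, source: 'example' }
```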
package/lib/tar-extract.js
CHANGED

@@ -25,6 +25,7 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
   * @name TARExtract
   * @param {String} [path="**\/*.json"] Regex to select the files to extract
   * @param {String} [json=true] Parse as JSON the content of each file
+  * @param {Boolean} [text=true] The content of each file is converted to a string (otherwise it remains a buffer)
   * @param {Boolean} [compress=false] Enable gzip compression
   * @returns {{id: String, value: String}[]}
   */

@@ -34,7 +35,8 @@ function TARExtract(data, feed) {
    const {
      ezs
    } = this;
-   const
+   const text = this.getParam('text', true);
+   const json = text ? this.getParam('json', true) : false;
    const compress = this.getParam('compress', false);
    this.input = ezs.createStream(ezs.objectMode());
    this.output = ezs.createStream(ezs.objectMode());

@@ -43,11 +45,21 @@ function TARExtract(data, feed) {
    extract.on('entry', async (header, stream, next) => {
      if (_micromatch.default.isMatch(header.name, filesPatern)) {
        try {
-         const contentRaw = await (0, _getStream.default)(stream);
          if (json) {
-           const
+           const contentText = await (0, _getStream.default)(stream);
+           const contentJson = JSON.parse(contentText);
            return (0, _streamWrite.default)(this.output, contentJson, () => next());
          }
+         if (text) {
+           const contentText = await (0, _getStream.default)(stream);
+           return (0, _streamWrite.default)(this.output, {
+             id: header.name,
+             value: contentText
+           }, () => next());
+         }
+         const contentRaw = await (0, _getStream.default)(stream, {
+           encoding: 'buffer'
+         });
          return (0, _streamWrite.default)(this.output, {
            id: header.name,
            value: contentRaw
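With the new text parameter, TARExtract can hand back each entry as a raw Buffer instead of a string or parsed JSON. A minimal sketch (the archive name is invented), assuming the usual @ezs/core pipe API:

```js
// Hypothetical usage sketch: extract every entry of a tar archive and keep the
// content as a Buffer by disabling both json parsing and text conversion.
const { createReadStream } = require('fs');
const ezs = require('@ezs/core');

ezs.use(require('@ezs/basics'));

createReadStream('./archive.tar')
    .pipe(ezs('TARExtract', { path: '**/*', json: false, text: false }))
    .on('data', ({ id, value }) => {
        console.log(id, Buffer.isBuffer(value));   // -> entry name, true
    });
```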
package/lib/txt-inflection.js
CHANGED

@@ -4,9 +4,8 @@ Object.defineProperty(exports, "__esModule", {
    value: true
  });
  exports.default = void 0;
- var _lodash =
+ var _lodash = require("lodash");
  var _inflection = require("inflection");
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
  const transformer = transformations => str => str && typeof str === 'string' ? (0, _inflection.transform)(str, transformations) : str;
  const TXTInflection = (data, feed, ctx) => {
    if (ctx.isLast()) {

@@ -14,7 +13,7 @@ const TXTInflection = (data, feed, ctx) => {
    }
    const transformations = [].concat(ctx.getParam('transform', [])).filter(Boolean);
    const path = ctx.getParam('path', 'value');
-   const value = (0, _lodash.
+   const value = (0, _lodash.get)(data, path, '');
    const process = transformer(transformations);
    const result = Array.isArray(value) ? value.map(item => process(item)) : process(value);
    feed.write({
package/lib/txt-sentences.js
CHANGED

@@ -4,8 +4,7 @@ Object.defineProperty(exports, "__esModule", {
    value: true
  });
  exports.default = void 0;
- var _lodash =
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+ var _lodash = require("lodash");
  const UPPER_LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';
  const SENTENCE_INIT = ' ';
  const SENTENCE_ENDING = '.?!';

@@ -46,7 +45,7 @@ const TXTSentences = (data, feed, ctx) => {
      return feed.close();
    }
    const path = ctx.getParam('path', 'value');
-   const value = (0, _lodash.
+   const value = (0, _lodash.get)(data, path);
    const str = Array.isArray(value) ? value.map(item => typeof item === 'string' ? item : '').join(' ') : value;
    const sentences = str ? segmentSentences(str) : [];
    feed.write({
package/lib/url-fetch.js
CHANGED

@@ -5,8 +5,7 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.default = URLFetch;
  var _debug = _interopRequireDefault(require("debug"));
- var _lodash =
- var _lodash2 = _interopRequireDefault(require("lodash.set"));
+ var _lodash = require("lodash");
  var _nodeAbortController = _interopRequireDefault(require("node-abort-controller"));
  var _parseHeaders = _interopRequireDefault(require("parse-headers"));
  var _asyncRetry = _interopRequireDefault(require("async-retry"));

@@ -43,7 +42,7 @@ async function URLFetch(data, feed) {
    const mimetype = String(this.getParam('mimetype', 'application/json'));
    const controller = new _nodeAbortController.default();
    const key = Array.isArray(path) ? path.shift() : path;
-   const body = (0, _lodash.
+   const body = (0, _lodash.get)(data, key);
    const parameters = {
      timeout,
      headers,

@@ -53,9 +52,9 @@ async function URLFetch(data, feed) {
      retries
    };
    if (body) {
-     (0,
-     (0,
-     (0,
+     (0, _lodash.set)(parameters, 'method', 'POST');
+     (0, _lodash.set)(parameters, 'body', Buffer.isBuffer(body) ? body : JSON.stringify(body));
+     (0, _lodash.set)(parameters, 'headers.content-type', mimetype);
    }
    try {
      const response = await (0, _asyncRetry.default)((0, _request.default)(url, parameters), options);

@@ -67,7 +66,7 @@ async function URLFetch(data, feed) {
    } : {
      input: data
    };
-   (0,
+   (0, _lodash.set)(result, target, value);
    return feed.send(result);
  }
  return feed.send(value);
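URLFetch (and URLRequest below) now builds its request options with lodash's set, where a dotted path such as 'headers.content-type' creates or extends the nested object. A standalone sketch with invented values:

```js
// Standalone sketch of the lodash.set calls URLFetch now uses; values are invented.
const { set } = require('lodash');

const parameters = { timeout: 1000, headers: { accept: 'application/json' } };
const body = { query: 'ezs' };

set(parameters, 'method', 'POST');
set(parameters, 'body', Buffer.isBuffer(body) ? body : JSON.stringify(body));
set(parameters, 'headers.content-type', 'application/json');   // dotted path -> nested property

console.log(parameters);
// { timeout: 1000,
//   headers: { accept: 'application/json', 'content-type': 'application/json' },
//   method: 'POST',
//   body: '{"query":"ezs"}' }
```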
package/lib/url-request.js
CHANGED

@@ -4,7 +4,7 @@ Object.defineProperty(exports, "__esModule", {
    value: true
  });
  exports.default = URLRequest;
- var _lodash =
+ var _lodash = require("lodash");
  var _debug = _interopRequireDefault(require("debug"));
  var _url = require("url");
  var _nodeAbortController = _interopRequireDefault(require("node-abort-controller"));

@@ -93,11 +93,11 @@ async function URLRequest(data, feed) {
    } : {
      url: data
    };
-   (0, _lodash.
-   inserts.forEach(i => (0, _lodash.
+   (0, _lodash.set)(result, target, value);
+   inserts.forEach(i => (0, _lodash.set)(result, i, response.headers.get(i)));
    return feed.send(result);
  }
- inserts.forEach(i => (0, _lodash.
+ inserts.forEach(i => (0, _lodash.set)(value, i, response.headers.get(i)));
  return feed.send(value);
  } catch (e) {
    onError(e);
package/package.json
CHANGED

@@ -1,7 +1,7 @@
  {
    "name": "@ezs/basics",
    "description": "Basics statements for EZS",
-   "version": "2.6.0",
+   "version": "2.7.0",
    "author": "Nicolas Thouvenin <nthouvenin@gmail.com>",
    "bugs": "https://github.com/Inist-CNRS/ezs/issues",
    "dependencies": {

@@ -17,13 +17,7 @@
      "get-stream": "6.0.1",
      "higher-path": "1.0.0",
      "inflection": "2.0.1",
-     "lodash
-     "lodash.get": "4.4.2",
-     "lodash.mapkeys": "4.6.0",
-     "lodash.mapvalues": "4.6.0",
-     "lodash.merge": "4.6.2",
-     "lodash.set": "4.3.2",
-     "lodash.zipobject": "4.1.3",
+     "lodash": "4.17.21",
      "make-dir": "4.0.0",
      "micromatch": "4.0.4",
      "node-abort-controller": "1.1.0",

@@ -42,7 +36,7 @@
    "directories": {
      "test": "test"
    },
-   "gitHead": "
+   "gitHead": "0cbe93a845e6d4fe89501d3d7b4d2bd0f77f0297",
    "homepage": "https://github.com/Inist-CNRS/ezs/tree/master/packages/basics#readme",
    "keywords": [
      "ezs"