@ezs/basics 1.18.0 → 1.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -3,6 +3,39 @@
3
3
  All notable changes to this project will be documented in this file.
4
4
  See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
5
5
 
6
+ # [1.21.0](https://github.com/Inist-CNRS/ezs/compare/@ezs/basics@1.20.0...@ezs/basics@1.21.0) (2022-07-11)
7
+
8
+
9
+ ### Features
10
+
11
+ * 🎸 add target option to URLRequest ([3861c5e](https://github.com/Inist-CNRS/ezs/commit/3861c5e10b4dcfd060b49fa9b23f2bcf98d8c942))
12
+
13
+
14
+
15
+
16
+
17
+ # [1.20.0](https://github.com/Inist-CNRS/ezs/compare/@ezs/basics@1.19.0...@ezs/basics@1.20.0) (2022-06-24)
18
+
19
+
20
+ ### Features
21
+
22
+ * 🎸 add [URLPager] ([59dcbdc](https://github.com/Inist-CNRS/ezs/commit/59dcbdca0c33eaf2ebf5a195de153cec802d1bc1))
23
+
24
+
25
+
26
+
27
+
28
+ # [1.19.0](https://github.com/Inist-CNRS/ezs/compare/@ezs/basics@1.18.0...@ezs/basics@1.19.0) (2022-06-22)
29
+
30
+
31
+ ### Features
32
+
33
+ * 🎸 file save gz ([33df6cd](https://github.com/Inist-CNRS/ezs/commit/33df6cdc40af50c10511cca1449c5edb3a6878f2))
34
+
35
+
36
+
37
+
38
+
6
39
  # [1.18.0](https://github.com/Inist-CNRS/ezs/compare/@ezs/basics@1.17.1...@ezs/basics@1.18.0) (2022-06-21)
7
40
 
8
41
 
package/README.md CHANGED
@@ -32,7 +32,9 @@ npm install @ezs/basics
32
32
  - [TXTZip](#txtzip)
33
33
  - [URLConnect](#urlconnect)
34
34
  - [URLFetch](#urlfetch)
35
+ - [URLPagination](#urlpagination)
35
36
  - [URLParse](#urlparse)
37
+ - [URLRequest](#urlrequest)
36
38
  - [URLStream](#urlstream)
37
39
  - [URLString](#urlstring)
38
40
  - [XMLConvert](#xmlconvert)
@@ -217,13 +219,14 @@ identifier = toto
217
219
  Output:
218
220
 
219
221
  ```json
220
- ["/tmp/truc.json"]
222
+ [{ filename: "/tmp/toto", size: XXX, ... }]
221
223
  ```
222
224
 
223
225
  #### Parameters
224
226
 
225
227
  - `location` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** Directory location (optional, default `TMPDIR`)
226
- - `identifier` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** File name
228
+ - `identifier` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** File name
229
+ - `compress` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Enable gzip compression (optional, default `false`)
227
230
 
228
231
  Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)**
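The new `compress` option gzips the stream before it reaches the disk and appends `.gz` to the saved file name (see the `lib/file-save.js` changes further down). A hedged usage sketch follows, assuming the usual `@ezs/core` pipeline API (`ezs.use(...)`, `ezs('Statement', params)`) and the `from` stream helper; the identifier and data are illustrative only:

```js
// Sketch only: assumes the standard @ezs/core API; 'dump.json' is a made-up identifier.
const from = require('from');
const ezs = require('@ezs/core');
const Basics = require('@ezs/basics');

ezs.use(Basics);

from(['{"a":1}\n', '{"a":2}\n'])
    .pipe(ezs('FILESave', { identifier: 'dump.json', compress: true }))
    .on('data', (out) => {
        // Expected shape per the docs above: { filename: '/tmp/dump.json.gz', size: ..., ... }
        console.log(out.filename, out.size);
    });
```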
229
232
 
@@ -609,6 +612,68 @@ Or if no target is specified, the output will be the returned content of URL.
609
612
 
610
613
  Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)**
611
614
 
615
+ ### URLPagination
616
+
617
+ Take an `Object` and multiply it into one object per page
618
+
619
+ Input:
620
+
621
+ ```json
622
+ [{"q": "a"}]
623
+ ```
624
+
625
+ Script:
626
+
627
+ ```ini
628
+ [URLRequest]
629
+ url = https://api.search.net
630
+
631
+ [URLPagination]
632
+ total = get('total')
633
+ ```
634
+
635
+ Output:
636
+
637
+ ```json
638
+ [
639
+ {
640
+ "q": "a",
641
+ "total": 22,
642
+ "offset": 0,
643
+ "pageNumber": 1,
644
+ "totalPages": 3,
645
+ "maxPages": 1000,
646
+ "limit": 10
647
+ },
648
+ {
649
+ "q": "a",
650
+ "total": 22,
651
+ "offset": 10,
652
+ "pageNumber": 2,
653
+ "totalPages": 3,
654
+ "maxPages": 1000,
655
+ "limit": 10
656
+ },
657
+ {
658
+ "q": "a",
659
+ "total": 22,
660
+ "offset": 20,
661
+ "pageNumber": 3,
662
+ "totalPages": 3,
663
+ "maxPages": 1000,
664
+ "limit": 10
665
+ }
666
+ ]
667
+ ```
668
+
669
+ #### Parameters
670
+
671
+ - `total` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** total to use for the pagination (optional, default `0`)
672
+ - `limit` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** number of items per page (optional, default `10`)
673
+ - `maxPages` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** maximum number of pages to generate (optional, default `1000`)
674
+
675
+ Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)**
676
+
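The page objects above are a pure function of `total`, `limit` and `maxPages`. A standalone sketch of that expansion (not the package code), reproducing the example's numbers (total 22, limit 10, hence 3 pages):

```js
// Standalone illustration of the expansion documented above; not the package implementation.
function paginate(data, total = 0, limit = 10, maxPages = 1000) {
    const totalPages = Math.min(Math.ceil(total / limit), maxPages);
    const pages = [];
    for (let pageNumber = 1; pageNumber <= totalPages; pageNumber += 1) {
        pages.push({ ...data, offset: (pageNumber - 1) * limit, pageNumber, totalPages, maxPages, limit });
    }
    return pages;
}

// paginate({ q: 'a', total: 22 }, 22) yields three objects with offset 0, 10 and 20,
// matching the JSON output shown above.
```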
612
677
  ### URLParse
613
678
 
614
679
  Take a URL `String`, parse it and return an `Object`.
@@ -636,6 +701,45 @@ See:
636
701
 
637
702
  Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)**
638
703
 
704
+ ### URLRequest
705
+
706
+ Take an `Object`, use it as the URL query parameters, and throw the result of the request
707
+
708
+ Input:
709
+
710
+ ```json
711
+ [{"q": "a"}]
712
+ ```
713
+
714
+ Script:
715
+
716
+ ```ini
717
+ [URLRequest]
718
+ url = https://api.search.net
719
+ ```
720
+
721
+ Output:
722
+
723
+ ```json
724
+ [
725
+ {
726
+ "result": "a"
727
+ }
728
+ ]
729
+ ```
730
+
731
+ #### Parameters
732
+
733
+ - `url` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** URL to fetch
734
+ - `json` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** parse result as json (optional, default `true`)
735
+ - `target` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** choose the key to set
736
+ - `timeout` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** Timeout in milliseconds (optional, default `1000`)
737
+ - `noerror` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Ignore all errors, the target field will remain undefined (optional, default `false`)
738
+ - `retries` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** The maximum amount of times to retry the connection (optional, default `5`)
739
+ - `insert` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** name of a response header to copy into the result
740
+
741
+ Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)**
742
+
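As the `url-request` implementation added further down shows, the incoming object is serialized into the query string of `url` with `URLSearchParams`; without `target` the parsed response replaces the item, and with `target` it is attached to a copy of the input. A small sketch of that URL construction, using the placeholder endpoint from the example:

```js
// Illustration of how the input object becomes query parameters (placeholder endpoint).
const { URL, URLSearchParams } = require('url');

const data = { q: 'a' };
const requestUrl = new URL('https://api.search.net');
requestUrl.search = new URLSearchParams(data);

console.log(requestUrl.href); // -> https://api.search.net/?q=a
// With target = result, the statement would emit { q: 'a', result: <parsed response> };
// without target, it emits the parsed response itself.
```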
639
743
  ### URLStream
640
744
 
641
745
  Take `String` as URL, throw each chunk from the result or
package/lib/file-save.js CHANGED
@@ -7,6 +7,8 @@ exports.default = FILESave;
7
7
 
8
8
  var _fs = require("fs");
9
9
 
10
+ var _zlib = require("zlib");
11
+
10
12
  var _path = _interopRequireDefault(require("path"));
11
13
 
12
14
  var _os = require("os");
@@ -45,36 +47,47 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
45
47
  * Output:
46
48
  *
47
49
  * ```json
48
- * ["/tmp/truc.json"]
50
+ * [{ filename: "/tmp/toto", size: XXX, ... }]
49
51
  * ```
50
52
  *
51
53
  * @name FILESave
52
54
  * @param {String} [location=TMPDIR] Directory location
53
- * @param {Number} [identifier] File name
55
+ * @param {String} [identifier] File name
56
+ * @param {Boolean} [compress=false] Enable gzip compression
54
57
  * @returns {Object}
55
58
  */
56
59
  async function FILESave(data, feed) {
57
60
  if (!this.handle) {
58
61
  const identifier = String(this.getParam('identifier'));
59
62
  const location = this.getParam('location', (0, _os.tmpdir)());
63
+ const compress = this.getParam('compress', false);
60
64
 
61
65
  if (!_pathExists.default.sync(location)) {
62
66
  _makeDir.default.sync(location);
63
67
  }
64
68
 
65
- this.filename = _path.default.resolve(location, identifier);
66
- this.handle = (0, _fs.createWriteStream)(this.filename);
69
+ if (compress) {
70
+ this.filename = _path.default.resolve(location, `${identifier}.gz`);
71
+ this.handle = (0, _zlib.createGzip)();
72
+ this.handleEnd = this.handle.pipe((0, _fs.createWriteStream)(this.filename));
73
+ } else {
74
+ this.filename = _path.default.resolve(location, identifier);
75
+ this.handle = (0, _fs.createWriteStream)(this.filename);
76
+ this.handleEnd = this.handle;
77
+ }
67
78
  }
68
79
 
69
80
  if (this.isLast()) {
70
- this.handle.close();
71
- return (0, _fs.lstat)(this.filename, (err, stat) => {
72
- feed.write({
73
- filename: this.filename,
74
- ...stat
81
+ this.handleEnd.on('close', () => {
82
+ (0, _fs.lstat)(this.filename, (err, stat) => {
83
+ feed.write({
84
+ filename: this.filename,
85
+ ...stat
86
+ });
87
+ return feed.close();
75
88
  });
76
- return feed.close();
77
89
  });
90
+ return this.handle.end();
78
91
  }
79
92
 
80
93
  (0, _streamWrite.default)(this.handle, Buffer.from(String(data)), () => feed.end());
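The rewritten ending above no longer stats the file right after closing the handle: with `compress=true` the handle is a `zlib` transform, so the bytes only reach the disk once the downstream file stream has flushed. Waiting for that stream's `close` event before calling `lstat` is what guarantees a final `size`. A reduced Node sketch of the ordering, with a hypothetical path:

```js
// Reduced sketch of the close-before-stat ordering used above ('/tmp/example.gz' is hypothetical).
const { createWriteStream, lstat } = require('fs');
const { createGzip } = require('zlib');

const gzip = createGzip();
const file = gzip.pipe(createWriteStream('/tmp/example.gz'));

file.on('close', () => {
    // Only now is the compressed output fully written, so the size is final.
    lstat('/tmp/example.gz', (err, stat) => {
        if (!err) console.log('size on disk:', stat.size);
    });
});

gzip.write('some data');
gzip.end(); // flushes the gzip stream, then the file stream finishes and closes
```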
package/lib/index.js CHANGED
@@ -41,6 +41,10 @@ var _urlFetch = _interopRequireDefault(require("./url-fetch"));
41
41
 
42
42
  var _urlParse = _interopRequireDefault(require("./url-parse"));
43
43
 
44
+ var _urlRequest = _interopRequireDefault(require("./url-request"));
45
+
46
+ var _urlPagination = _interopRequireDefault(require("./url-pagination"));
47
+
44
48
  var _urlString = _interopRequireDefault(require("./url-string"));
45
49
 
46
50
  var _urlStream = _interopRequireDefault(require("./url-stream"));
@@ -75,6 +79,8 @@ const funcs = {
75
79
  JSONParse: _jsonParse.default,
76
80
  JSONString: _jsonString.default,
77
81
  URLFetch: _urlFetch.default,
82
+ URLPagination: _urlPagination.default,
83
+ URLRequest: _urlRequest.default,
78
84
  URLParse: _urlParse.default,
79
85
  URLString: _urlString.default,
80
86
  URLStream: _urlStream.default,
package/lib/url-fetch.js CHANGED
@@ -44,7 +44,7 @@ async function URLFetch(data, feed) {
44
44
 
45
45
  const url = this.getParam('url');
46
46
  const path = this.getParam('path');
47
- const target = this.getParam('target');
47
+ const target = [].concat(this.getParam('target')).filter(Boolean).shift();
48
48
  const json = Boolean(this.getParam('json', false));
49
49
  const retries = Number(this.getParam('retries', 5));
50
50
  const noerror = Boolean(this.getParam('noerror', false));
@@ -74,8 +74,10 @@ async function URLFetch(data, feed) {
74
74
  const func = json ? 'json' : 'text';
75
75
  const value = await response[func]();
76
76
 
77
- if (target && typeof target === 'string' && typeof data === 'object') {
78
- const result = { ...data
77
+ if (target) {
78
+ const result = typeof data === 'object' ? { ...data
79
+ } : {
80
+ input: data
79
81
  };
80
82
  (0, _lodash2.default)(result, target, value);
81
83
  return feed.send(result);
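Two things change in URLFetch here: `target` is normalized from a possibly repeated parameter to a single value, and the old guard requiring the input to be an object is dropped, so a plain string input now gets wrapped instead of skipping the `target` branch. Roughly, for a string input and a hypothetical `target` of `page`:

```js
// Illustration of the new target behaviour (not package code; 'page' and the URL are made up).
const data = 'https://example.org/doc'; // a plain string chunk
const target = 'page';
const value = '<html>…</html>';         // whatever the fetch returned

// New behaviour: non-object inputs are wrapped so the fetched value still lands under target.
const result = typeof data === 'object' ? { ...data } : { input: data };
result[target] = value; // the package uses lodash.set here, which also accepts dotted paths

console.log(result); // { input: 'https://example.org/doc', page: '<html>…</html>' }
```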
@@ -0,0 +1,179 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.default = URLPager;
7
+
8
+ var _debug = _interopRequireDefault(require("debug"));
9
+
10
+ var _url = require("url");
11
+
12
+ var _nodeAbortController = _interopRequireDefault(require("node-abort-controller"));
13
+
14
+ var _lodash = _interopRequireDefault(require("lodash.get"));
15
+
16
+ var _parseHeaders = _interopRequireDefault(require("parse-headers"));
17
+
18
+ var _asyncRetry = _interopRequireDefault(require("async-retry"));
19
+
20
+ var _request = _interopRequireDefault(require("./request"));
21
+
22
+ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
23
+
24
+ /**
25
+ * Take an `Object` as URL parameters, fetch the number of results at `path`, and emit one object per page
26
+ *
27
+ *
28
+ * Input:
29
+ *
30
+ * ```json
31
+ * [{"q": "a"}]
32
+ * ```
33
+ *
34
+ * Script:
35
+ *
36
+ * ```ini
37
+ * [URLPager]
38
+ * url = https://api.search.net
39
+ * path = total
40
+ * ```
41
+ *
42
+ * Output:
43
+ *
44
+ * ```json
45
+ * [
46
+ * {
47
+ * "q": "a",
48
+ * "total": 22,
49
+ * "offset": 0,
50
+ * "pageNumber": 1,
51
+ * "totalPages": 3,
52
+ * "maxPages": 1000,
53
+ * "limit": 10
54
+ * },
55
+ * {
56
+ * "q": "a",
57
+ * "total": 22,
58
+ * "offset": 10,
59
+ * "pageNumber": 2,
60
+ * "totalPages": 3,
61
+ * "maxPages": 1000,
62
+ * "limit": 10
63
+ * },
64
+ * {
65
+ * "q": "a",
66
+ * "total": 22,
67
+ * "offset": 20,
68
+ * "pageNumber": 3,
69
+ * "totalPages": 3,
70
+ * "maxPages": 1000,
71
+ * "limit": 10
72
+ * }
73
+ * ]
74
+ * ```
75
+ *
76
+ * #### Example with URLs
77
+ *
78
+ * Input:
79
+ *
80
+ * ```json
81
+ * [
82
+ * "https://httpbin.org/get?a=a",
83
+ * "https://httpbin.org/get?a=b",
84
+ * "https://httpbin.org/get?a=c"
85
+ * ]
86
+ * ```
87
+ *
88
+ * Script:
89
+ *
90
+ * ```ini
91
+ * [URLPager]
92
+ * path = .args
93
+ * ```
94
+ *
95
+ * Output:
96
+ *
97
+ * ```json
98
+ * [{"a": "a"}, {"a": "b"}, {"a": "c" }]
99
+ * ```
100
+ *
101
+ * @name URLPager
102
+ * @param {String} [url] URL to fetch (by default input string is taken)
103
+ * @param {String} [path=total] choose the path to find the total number of results
104
+ * @param {Number} [timeout=1000] Timeout in milliseconds
105
+ * @param {Boolean} [noerror=false] Ignore all errors, the target field will remain undefined
106
+ * @param {Number} [retries=5] The maximum amount of times to retry the connection
107
+ * @returns {Object}
108
+ */
109
+ async function URLPager(data, feed) {
110
+ if (this.isLast()) {
111
+ return feed.close();
112
+ }
113
+
114
+ const url = this.getParam('url');
115
+ const path = this.getParam('path', 'total');
116
+ const limit = Number(this.getParam('limit', 10));
117
+ const maxPages = Number(this.getParam('maxPages', 1000));
118
+ const retries = Number(this.getParam('retries', 5));
119
+ const noerror = Boolean(this.getParam('noerror', false));
120
+ const timeout = Number(this.getParam('timeout')) || 1000;
121
+ const headers = (0, _parseHeaders.default)([].concat(this.getParam('header')).filter(Boolean).join('\n'));
122
+ const cURL = new _url.URL(url || data);
123
+ const controller = new _nodeAbortController.default();
124
+ const parameters = {
125
+ timeout,
126
+ headers,
127
+ signal: controller.signal
128
+ };
129
+ const options = {
130
+ retries
131
+ };
132
+ cURL.search = new _url.URLSearchParams(data);
133
+
134
+ const onError = e => {
135
+ controller.abort();
136
+
137
+ if (noerror) {
138
+ (0, _debug.default)('ezs')(`Ignore item #${this.getIndex()} [URLPager] <${e}>`);
139
+ return feed.send(data);
140
+ }
141
+
142
+ (0, _debug.default)('ezs')(`Break item #${this.getIndex()} [URLPager] <${e}>`);
143
+ return feed.send(e);
144
+ };
145
+
146
+ try {
147
+ const response = await (0, _asyncRetry.default)((0, _request.default)(cURL.href, parameters), options);
148
+ const json = await response.json();
149
+ const total = (0, _lodash.default)(json, path);
150
+
151
+ if (total === 0) {
152
+ return onError(new Error('No result.'));
153
+ }
154
+
155
+ if (total === undefined) {
156
+ return onError(new Error('Unexpected response.'));
157
+ }
158
+
159
+ let totalPages = Math.ceil(json.total / limit);
160
+
161
+ if (totalPages > maxPages) {
162
+ totalPages = maxPages;
163
+ }
164
+
165
+ for (let pageNumber = 1; pageNumber <= totalPages; pageNumber += 1) {
166
+ feed.write({ ...data,
167
+ offset: (pageNumber - 1) * limit,
168
+ pageNumber,
169
+ totalPages,
170
+ maxPages,
171
+ limit
172
+ });
173
+ }
174
+
175
+ feed.end();
176
+ } catch (e) {
177
+ onError(e);
178
+ }
179
+ }
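Put together, the statement above performs one request to read the result count at `path` and then emits one enriched copy of the input per page; each of those objects is typically fed to URLRequest, which turns the whole object (including `offset` and `limit`) into query parameters. A hedged end-to-end sketch, with the same assumptions about the `@ezs/core` API as in the FILESave example earlier and the placeholder endpoint from the docs:

```js
// Sketch only: assumes the standard @ezs/core pipeline API; api.search.net is a placeholder.
const from = require('from');
const ezs = require('@ezs/core');
const Basics = require('@ezs/basics');

ezs.use(Basics);

from([{ q: 'a' }])
    // one request to learn the total, then one object per page (offset, pageNumber, limit, ...)
    .pipe(ezs('URLPager', { url: 'https://api.search.net', path: 'total' }))
    // one request per page; the page fields travel as query parameters
    .pipe(ezs('URLRequest', { url: 'https://api.search.net', target: 'result' }))
    .on('data', (page) => console.log(page.pageNumber, page.result));
```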
@@ -0,0 +1,103 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.default = URLPagination;
7
+
8
+ /**
9
+ * Take an `Object` and multiply it into one object per page
10
+ *
11
+ *
12
+ * Input:
13
+ *
14
+ * ```json
15
+ * [{"q": "a"}]
16
+ * ```
17
+ *
18
+ * Script:
19
+ *
20
+ * ```ini
21
+ * [URLRequest]
22
+ * url = https://api.search.net
23
+ *
24
+ * [URLPagination]
25
+ * total = get('total')
26
+ * ```
27
+ *
28
+ * Output:
29
+ *
30
+ * ```json
31
+ * [
32
+ * {
33
+ * "q": "a",
34
+ * "total": 22,
35
+ * "offset": 0,
36
+ * "pageNumber": 1,
37
+ * "totalPages": 3,
38
+ * "maxPages": 1000,
39
+ * "limit": 10
40
+ * },
41
+ * {
42
+ * "q": "a",
43
+ * "total": 22,
44
+ * "offset": 10,
45
+ * "pageNumber": 2,
46
+ * "totalPages": 3,
47
+ * "maxPages": 1000,
48
+ * "limit": 10
49
+ * },
50
+ * {
51
+ * "q": "a",
52
+ * "total": 22,
53
+ * "offset": 20,
54
+ * "pageNumber": 3,
55
+ * "totalPages": 3,
56
+ * "maxPages": 1000,
57
+ * "limit": 10
58
+ * }
59
+ * ]
60
+ * ```
61
+ *
62
+ *
63
+ * @name URLPagination
64
+ * @param {Number} [total=0] total to use for the pagination
65
+ * @param {Number} [limit=10] number of items per page
66
+ * @param {Number} [maxPages=1000] maximum number of pages to generate
67
+ * @returns {Object}
68
+ */
69
+ function URLPagination(data, feed) {
70
+ if (this.isLast()) {
71
+ return feed.close();
72
+ }
73
+
74
+ const total = Number(this.getParam('total', 0));
75
+ const limit = Number(this.getParam('limit', 10));
76
+ const maxPages = Number(this.getParam('maxPages', 1000));
77
+
78
+ if (total === 0) {
79
+ return feed.send(new Error('No result.'));
80
+ }
81
+
82
+ if (Number.isNaN(total)) {
83
+ return feed.send(new Error('Unexpected response.'));
84
+ }
85
+
86
+ let totalPages = Math.ceil(total / limit);
87
+
88
+ if (totalPages > maxPages) {
89
+ totalPages = maxPages;
90
+ }
91
+
92
+ for (let pageNumber = 1; pageNumber <= totalPages; pageNumber += 1) {
93
+ feed.write({ ...data,
94
+ offset: (pageNumber - 1) * limit,
95
+ pageNumber,
96
+ totalPages,
97
+ maxPages,
98
+ limit
99
+ });
100
+ }
101
+
102
+ feed.end();
103
+ }
@@ -0,0 +1,121 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.default = URLRequest;
7
+
8
+ var _lodash = _interopRequireDefault(require("lodash.set"));
9
+
10
+ var _debug = _interopRequireDefault(require("debug"));
11
+
12
+ var _url = require("url");
13
+
14
+ var _nodeAbortController = _interopRequireDefault(require("node-abort-controller"));
15
+
16
+ var _parseHeaders = _interopRequireDefault(require("parse-headers"));
17
+
18
+ var _asyncRetry = _interopRequireDefault(require("async-retry"));
19
+
20
+ var _request = _interopRequireDefault(require("./request"));
21
+
22
+ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
23
+
24
+ /**
25
+ * Take an `Object`, use it as the URL query parameters, and throw the result of the request
26
+ *
27
+ *
28
+ * Input:
29
+ *
30
+ * ```json
31
+ * [{"q": "a"}]
32
+ * ```
33
+ *
34
+ * Script:
35
+ *
36
+ * ```ini
37
+ * [URLRequest]
38
+ * url = https://api.search.net
39
+ * ```
40
+ *
41
+ * Output:
42
+ *
43
+ * ```json
44
+ * [
45
+ * {
46
+ * "result": "a"
47
+ * }
48
+ * ]
49
+ * ```
50
+ *
51
+ * @name URLRequest
52
+ * @param {String} [url] URL to fetch
53
+ * @param {Boolean} [json=true] parse result as json
54
+ * @param {String} [target] choose the key to set
55
+ * @param {Number} [timeout=1000] Timeout in milliseconds
56
+ * @param {Boolean} [noerror=false] Ignore all errors, the target field will remain undefined
57
+ * @param {Number} [retries=5] The maximum amount of times to retry the connection
58
+ * @param {String} [insert] name of a response header to copy into the result
59
+ * @returns {Object}
60
+ */
61
+ async function URLRequest(data, feed) {
62
+ if (this.isLast()) {
63
+ return feed.close();
64
+ }
65
+
66
+ const url = this.getParam('url');
67
+ const json = Boolean(this.getParam('json', true));
68
+ const target = [].concat(this.getParam('target')).filter(Boolean).shift();
69
+ const retries = Number(this.getParam('retries', 5));
70
+ const noerror = Boolean(this.getParam('noerror', false));
71
+ const timeout = Number(this.getParam('timeout')) || 1000;
72
+ const headers = (0, _parseHeaders.default)([].concat(this.getParam('header')).filter(Boolean).join('\n'));
73
+ const inserts = [].concat(this.getParam('insert')).filter(Boolean);
74
+ const cURL = new _url.URL(url || data);
75
+ const controller = new _nodeAbortController.default();
76
+ const parameters = {
77
+ timeout,
78
+ headers,
79
+ signal: controller.signal
80
+ };
81
+ const options = {
82
+ retries
83
+ };
84
+
85
+ if (url) {
86
+ cURL.search = new _url.URLSearchParams(data);
87
+ }
88
+
89
+ const onError = e => {
90
+ controller.abort();
91
+
92
+ if (noerror) {
93
+ (0, _debug.default)('ezs')(`Ignore item #${this.getIndex()} [URLRequest] <${e}>`);
94
+ return feed.send(data);
95
+ }
96
+
97
+ (0, _debug.default)('ezs')(`Break item #${this.getIndex()} [URLRequest] <${e}>`);
98
+ return feed.send(e);
99
+ };
100
+
101
+ try {
102
+ const response = await (0, _asyncRetry.default)((0, _request.default)(cURL.href, parameters), options);
103
+ const func = json ? 'json' : 'text';
104
+ const value = await response[func]();
105
+
106
+ if (target) {
107
+ const result = typeof data === 'object' ? { ...data
108
+ } : {
109
+ url: data
110
+ };
111
+ (0, _lodash.default)(result, target, value);
112
+ inserts.forEach(i => (0, _lodash.default)(result, i, response.headers.get(i)));
113
+ return feed.send(result);
114
+ }
115
+
116
+ inserts.forEach(i => (0, _lodash.default)(value, i, response.headers.get(i)));
117
+ return feed.send(value);
118
+ } catch (e) {
119
+ onError(e);
120
+ }
121
+ }
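The `insert` parameter above copies response header values into what is emitted, one `lodash.set` call per requested header. A tiny illustration of those semantics with a made-up header name:

```js
// Illustration only: mimics what inserts.forEach(...) above does ('x-total-count' is made up).
const set = require('lodash.set');

const result = { q: 'a', result: { total: 22 } };   // what the statement is about to emit
const headers = new Map([['x-total-count', '22']]); // stand-in for response.headers

['x-total-count'].forEach((name) => set(result, name, headers.get(name)));

console.log(result); // { q: 'a', result: { total: 22 }, 'x-total-count': '22' }
```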
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@ezs/basics",
3
3
  "description": "Basics statements for EZS",
4
- "version": "1.18.0",
4
+ "version": "1.21.0",
5
5
  "author": "Nicolas Thouvenin <nthouvenin@gmail.com>",
6
6
  "bugs": "https://github.com/Inist-CNRS/ezs/issues",
7
7
  "dependencies": {
@@ -36,7 +36,7 @@
36
36
  "directories": {
37
37
  "test": "test"
38
38
  },
39
- "gitHead": "346fbfb891990dda7dd2d445e1f32ab2276c7020",
39
+ "gitHead": "b6d40a5733f24e3f138d999f5b7b53e42d3d812a",
40
40
  "homepage": "https://github.com/Inist-CNRS/ezs/tree/master/packages/basics#readme",
41
41
  "keywords": [
42
42
  "ezs"