@naturalcycles/nodejs-lib 12.58.0 → 12.62.0

Files changed (87)
  1. package/dist/got/getGot.js +98 -28
  2. package/dist/got/got.model.d.ts +6 -0
  3. package/dist/index.d.ts +37 -36
  4. package/dist/index.js +36 -129
  5. package/dist/log/log.util.d.ts +4 -0
  6. package/dist/log/log.util.js +11 -0
  7. package/dist/stream/ndjson/ndjsonMap.d.ts +2 -2
  8. package/dist/stream/ndjson/ndjsonMap.js +4 -3
  9. package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +2 -2
  10. package/dist/stream/ndjson/transformJsonParse.js +3 -3
  11. package/dist/stream/ndjson/transformToNDJson.js +2 -2
  12. package/dist/stream/pipeline/pipeline.d.ts +25 -3
  13. package/dist/stream/pipeline/pipeline.js +76 -9
  14. package/dist/stream/readable/readableCreate.d.ts +8 -0
  15. package/dist/stream/readable/readableCreate.js +9 -1
  16. package/dist/stream/readable/readableForEach.d.ts +2 -2
  17. package/dist/stream/readable/readableFromArray.d.ts +2 -2
  18. package/dist/stream/readable/readableFromArray.js +17 -13
  19. package/dist/stream/readable/readableMap.d.ts +2 -2
  20. package/dist/stream/readable/readableMap.js +22 -17
  21. package/dist/stream/sizeStack.d.ts +9 -0
  22. package/dist/stream/sizeStack.js +48 -0
  23. package/dist/stream/stream.util.d.ts +4 -0
  24. package/dist/stream/stream.util.js +24 -0
  25. package/dist/stream/transform/transformBuffer.js +1 -1
  26. package/dist/stream/transform/transformFilter.d.ts +3 -4
  27. package/dist/stream/transform/transformFilter.js +5 -20
  28. package/dist/stream/transform/transformLimit.d.ts +32 -1
  29. package/dist/stream/transform/transformLimit.js +33 -16
  30. package/dist/stream/transform/transformLogProgress.d.ts +20 -0
  31. package/dist/stream/transform/transformLogProgress.js +36 -18
  32. package/dist/stream/transform/transformMap.d.ts +4 -15
  33. package/dist/stream/transform/transformMap.js +54 -71
  34. package/dist/stream/transform/transformMapSimple.js +1 -1
  35. package/dist/stream/transform/transformMapSync.d.ts +5 -3
  36. package/dist/stream/transform/transformMapSync.js +28 -22
  37. package/dist/stream/transform/transformNoOp.js +1 -1
  38. package/dist/stream/transform/transformTap.js +3 -3
  39. package/dist/stream/transform/transformToArray.js +1 -1
  40. package/dist/stream/transform/transformToString.js +2 -2
  41. package/dist/stream/transform/worker/transformMultiThreaded.js +1 -1
  42. package/dist/stream/transform/worker/workerClassProxy.js +1 -0
  43. package/dist/stream/writable/writableFork.js +1 -1
  44. package/dist/stream/writable/writableLimit.d.ts +9 -0
  45. package/dist/stream/writable/writableLimit.js +29 -0
  46. package/dist/stream/writable/writablePushToArray.js +1 -1
  47. package/dist/stream/writable/writableVoid.d.ts +8 -1
  48. package/dist/stream/writable/writableVoid.js +6 -2
  49. package/dist/util/zip.util.d.ts +15 -7
  50. package/dist/util/zip.util.js +27 -22
  51. package/package.json +1 -1
  52. package/src/got/getGot.ts +120 -31
  53. package/src/got/got.model.ts +8 -0
  54. package/src/index.ts +36 -194
  55. package/src/log/log.util.ts +9 -0
  56. package/src/stream/ndjson/ndjsonMap.ts +7 -5
  57. package/src/stream/ndjson/ndjsonStreamForEach.ts +2 -2
  58. package/src/stream/ndjson/transformJsonParse.ts +3 -3
  59. package/src/stream/ndjson/transformToNDJson.ts +2 -2
  60. package/src/stream/pipeline/pipeline.ts +102 -9
  61. package/src/stream/readable/readableCreate.ts +9 -1
  62. package/src/stream/readable/readableForEach.ts +2 -2
  63. package/src/stream/readable/readableFromArray.ts +18 -21
  64. package/src/stream/readable/readableMap.ts +24 -21
  65. package/src/stream/sizeStack.ts +56 -0
  66. package/src/stream/stream.util.ts +29 -0
  67. package/src/stream/transform/transformBuffer.ts +1 -1
  68. package/src/stream/transform/transformFilter.ts +6 -20
  69. package/src/stream/transform/transformLimit.ts +66 -20
  70. package/src/stream/transform/transformLogProgress.ts +72 -23
  71. package/src/stream/transform/transformMap.ts +75 -101
  72. package/src/stream/transform/transformMapSimple.ts +1 -1
  73. package/src/stream/transform/transformMapSync.ts +40 -26
  74. package/src/stream/transform/transformNoOp.ts +1 -1
  75. package/src/stream/transform/transformTap.ts +3 -3
  76. package/src/stream/transform/transformToArray.ts +1 -1
  77. package/src/stream/transform/transformToString.ts +2 -2
  78. package/src/stream/transform/worker/transformMultiThreaded.ts +1 -1
  79. package/src/stream/transform/worker/workerClassProxy.js +1 -0
  80. package/src/stream/writable/writableFork.ts +1 -1
  81. package/src/stream/writable/writableLimit.ts +28 -0
  82. package/src/stream/writable/writablePushToArray.ts +1 -1
  83. package/src/stream/writable/writableVoid.ts +14 -2
  84. package/src/util/zip.util.ts +26 -20
  85. package/dist/stream/transform/legacy/transformMap.d.ts +0 -17
  86. package/dist/stream/transform/legacy/transformMap.js +0 -94
  87. package/src/stream/transform/legacy/transformMap.ts +0 -133
package/dist/stream/ndjson/ndjsonMap.js
@@ -18,19 +18,20 @@ async function ndjsonMap(mapper, opt) {
      });
      const transformUnzip = inputFilePath.endsWith('.gz') ? [(0, zlib_1.createUnzip)()] : [];
      const transformZip = outputFilePath.endsWith('.gz') ? [(0, zlib_1.createGzip)()] : [];
+     const readable = (0, fs_1.createReadStream)(inputFilePath);
      await (0, __1._pipeline)([
-         (0, fs_1.createReadStream)(inputFilePath),
+         readable,
          ...transformUnzip,
          (0, __1.transformSplit)(),
          (0, __1.transformJsonParse)(),
-         (0, __1.transformLimit)(limitInput),
+         (0, __1.transformLimit)({ limit: limitInput, sourceReadable: readable }),
          (0, __1.transformLogProgress)({ metric: 'read', ...opt }),
          (0, __1.transformMap)(mapper, {
              flattenArrayOutput: true,
              errorMode: js_lib_1.ErrorMode.SUPPRESS,
              ...opt,
          }),
-         (0, __1.transformLimit)(limitOutput),
+         (0, __1.transformLimit)({ limit: limitOutput, sourceReadable: readable }),
          (0, __1.transformLogProgress)({ metric: 'saved', logEvery: logEveryOutput }),
          (0, __1.transformToNDJson)(),
          ...transformZip,
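
The key change above: the source Readable is captured in a variable and handed to both transformLimit calls, which is what lets a reached limit actually destroy the file stream instead of merely ending the downstream. A minimal sketch of the same wiring in user code (file path and limit are invented for illustration):

import { createReadStream } from 'fs'
import { _pipeline, transformSplit, transformJsonParse, transformLimit, writableVoid } from '@naturalcycles/nodejs-lib'

const readable = createReadStream('input.ndjson') // hypothetical input file

await _pipeline(
  [
    readable,
    transformSplit(),
    transformJsonParse(),
    // sourceReadable lets transformLimit destroy the file stream once the limit is reached
    transformLimit({ limit: 100, sourceReadable: readable }),
    writableVoid(),
  ],
  { allowClose: true }, // accept the resulting ERR_STREAM_PREMATURE_CLOSE as a graceful close
)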
package/dist/stream/ndjson/ndjsonStreamForEach.d.ts
@@ -1,4 +1,4 @@
- import { AsyncMapper } from '@naturalcycles/js-lib';
+ import { AbortableAsyncMapper } from '@naturalcycles/js-lib';
  import { TransformLogProgressOptions, TransformMapOptions } from '../..';
  export interface NDJSONStreamForEachOptions<IN = any> extends TransformMapOptions<IN, void>, TransformLogProgressOptions<IN> {
      inputFilePath: string;
@@ -6,4 +6,4 @@ export interface NDJSONStreamForEachOptions<IN = any> extends TransformMapOption
  /**
   * Convenience function to `forEach` through an ndjson file.
   */
- export declare function ndjsonStreamForEach<T>(mapper: AsyncMapper<T, void>, opt: NDJSONStreamForEachOptions<T>): Promise<void>;
+ export declare function ndjsonStreamForEach<T>(mapper: AbortableAsyncMapper<T, void>, opt: NDJSONStreamForEachOptions<T>): Promise<void>;
package/dist/stream/ndjson/transformJsonParse.js
@@ -19,19 +19,19 @@ const stream_1 = require("stream");
  function transformJsonParse(opt = {}) {
      const { strict = true, reviver } = opt;
      return new stream_1.Transform({
-         objectMode: false,
+         writableObjectMode: false,
          readableObjectMode: true,
-         transform(chunk, _encoding, cb) {
+         transform(chunk, _, cb) {
              try {
                  const data = JSON.parse(chunk, reviver);
                  cb(null, data);
              }
              catch (err) {
-                 // console.error(err)
                  if (strict) {
                      cb(err); // emit error
                  }
                  else {
+                     console.error(err);
                      cb(); // emit no error, but no result neither
                  }
              }
package/dist/stream/ndjson/transformToNDJson.js
@@ -9,9 +9,9 @@ const js_lib_1 = require("@naturalcycles/js-lib");
  function transformToNDJson(opt = {}) {
      const { strict = true, separator = '\n', sortObjects = false, useFlatstr = false } = opt;
      return new stream_1.Transform({
-         objectMode: true,
+         writableObjectMode: true,
          readableObjectMode: false,
-         transform(chunk, _encoding, cb) {
+         transform(chunk, _, cb) {
              try {
                  if (sortObjects) {
                      chunk = (0, js_lib_1._sortObjectDeep)(chunk);
package/dist/stream/pipeline/pipeline.d.ts
@@ -1,6 +1,28 @@
  /// <reference types="node" />
- import { pipeline } from 'stream';
+ import { Readable, Transform } from 'stream';
+ import { DeferredPromise } from '@naturalcycles/js-lib';
+ declare type AnyStream = NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream;
+ export interface PipelineOptions {
+     /**
+      * Set to true to allow ERR_STREAM_PREMATURE_CLOSE.
+      * Required to support graceful close when using transformLimit
+      */
+     allowClose?: boolean;
+ }
  /**
-  * Promisified stream.pipeline()
+  * Promisified `stream.pipeline`.
+  *
+  * Supports opt.allowClose, which allows transformLimit to work (to actually stop source Readable)
+  * without throwing an error (ERR_STREAM_PREMATURE_CLOSE).
   */
- export declare let _pipeline: typeof pipeline.__promisify__;
+ export declare function _pipeline(streams: AnyStream[], opt?: PipelineOptions): Promise<void>;
+ /**
+  * Convenience function to make _pipeline collect all items at the end of the stream (should be Transform, not Writeable!)
+  * and return.
+  */
+ export declare function _pipelineToArray<T>(streams: AnyStream[], opt?: PipelineOptions): Promise<T[]>;
+ export declare class AbortableTransform extends Transform {
+     sourceReadable?: Readable;
+     streamDone?: DeferredPromise;
+ }
+ export {};
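
Based on these declarations, a hedged usage sketch of the two new helpers (input values invented for illustration):

import { Readable } from 'stream'
import { _pipeline, _pipelineToArray, transformMapSync, writableVoid } from '@naturalcycles/nodejs-lib'

// _pipeline: promisified pipeline over an array of streams
await _pipeline([Readable.from([1, 2, 3]), writableVoid()])

// _pipelineToArray: appends an array-collecting writable and returns the items
const doubled = await _pipelineToArray<number>([
  Readable.from([1, 2, 3]),
  transformMapSync<number, number>(n => n * 2), // last passed item should be a Transform, not a Writable
])
// doubled: [2, 4, 6]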
package/dist/stream/pipeline/pipeline.js
@@ -1,15 +1,82 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports._pipeline = void 0;
+ exports.AbortableTransform = exports._pipelineToArray = exports._pipeline = void 0;
  const stream_1 = require("stream");
- const util_1 = require("util");
+ const js_lib_1 = require("@naturalcycles/js-lib");
+ const index_1 = require("../../index");
  /**
-  * Promisified stream.pipeline()
+  * Promisified `stream.pipeline`.
+  *
+  * Supports opt.allowClose, which allows transformLimit to work (to actually stop source Readable)
+  * without throwing an error (ERR_STREAM_PREMATURE_CLOSE).
   */
- exports._pipeline = (0, util_1.promisify)(stream_1.pipeline);
- // Workaround https://github.com/nodejs/node/issues/40191
- // todo: remove it when fix is released in 16.x and in AppEngine 16.x
- if (process.version >= 'v16.10') {
-     const { pipeline } = require('stream/promises');
-     exports._pipeline = ((streams) => pipeline(...streams));
+ async function _pipeline(streams, opt = {}) {
+     const first = streams[0];
+     const rest = streams.slice(1);
+     if (opt.allowClose) {
+         // Do the magic of making the pipeline "abortable"
+         //
+         // How does it work:
+         // It finds `sourceReadable` (basically, it's just first item in the passed array of streams)
+         // Finds last "writable" (last item), patches the `_final` method of it to detect when the whole pipeline is "done",
+         // sets the `streamDone` DeferredPromise that resolves when the pipeline is done.
+         // Scans through all passed items, finds those that are capable of "closing" the stream
+         // (currently its `transformLimit` or `transformMap`)
+         // Patches them by attaching `sourceReadable` and `streamDone`.
+         // These items (transformLimit and transformMap), when they need to "close the stream" - call `pipelineClose`.
+         // `pipelineClose` is the result of 2 sleepless nights of googling and experimentation:)
+         // It does:
+         // 1. Stops the "downstream" by doing `this.push(null)`.
+         // 2. Pauses the `sourceReadable` by calling sourceReadable.unpipe()
+         // 3. Waits for `streamDone` to ensure that downstream chunks are fully processed (e.g written to disk).
+         // 4. Calls `sourceReadable.destroy()`, which emits ERR_STREAM_PREMATURE_CLOSE
+         // 5. _pipeline (this function) catches that specific error and suppresses it (because it's expected and
+         // inevitable in this flow). Know a better way to close the stream? Tell me!
+         const streamDone = (0, js_lib_1.pDefer)();
+         const sourceReadable = first;
+         const last = (0, js_lib_1._last)(streams);
+         const lastFinal = last._final?.bind(last) || ((cb) => cb());
+         last._final = cb => {
+             lastFinal(() => {
+                 cb();
+                 streamDone.resolve();
+             });
+         };
+         rest.forEach(s => {
+             // console.log(s)
+             if (s instanceof AbortableTransform || s.constructor.name === 'DestroyableTransform') {
+                 // console.log(`found ${s.constructor.name}, setting props`)
+                 ;
+                 s.sourceReadable = sourceReadable;
+                 s.streamDone = streamDone;
+             }
+         });
+     }
+     return new Promise((resolve, reject) => {
+         (0, stream_1.pipeline)(first, ...rest, (err) => {
+             if (err) {
+                 if (opt.allowClose && err?.code === 'ERR_STREAM_PREMATURE_CLOSE') {
+                     console.log('_pipeline closed (as expected)');
+                     return resolve();
+                 }
+                 // console.log(`_pipeline error`, err)
+                 return reject(err);
+             }
+             resolve();
+         });
+     });
  }
+ exports._pipeline = _pipeline;
+ /**
+  * Convenience function to make _pipeline collect all items at the end of the stream (should be Transform, not Writeable!)
+  * and return.
+  */
+ async function _pipelineToArray(streams, opt = {}) {
+     const a = [];
+     await _pipeline([...streams, (0, index_1.writablePushToArray)(a)], opt);
+     return a;
+ }
+ exports._pipelineToArray = _pipelineToArray;
+ class AbortableTransform extends stream_1.Transform {
+ }
+ exports.AbortableTransform = AbortableTransform;
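
Step two of that sequence, patching the last stream's `_final`, is what makes `streamDone` resolve only after the final writable has flushed. Isolated from the library code, the trick looks roughly like this (a standalone sketch, not the library's exact code):

import { Writable } from 'stream'
import { pDefer } from '@naturalcycles/js-lib'

const streamDone = pDefer()
const last = new Writable({ objectMode: true, write(_chunk, _enc, cb) { cb() } })

// keep the original _final (if any), then resolve streamDone once it has run
const lastFinal = last._final?.bind(last) || ((cb: (err?: Error | null) => void) => cb())
last._final = cb => {
  lastFinal(() => {
    cb() // let the pipeline finish normally
    streamDone.resolve() // now it's safe to destroy the source Readable
  })
}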
package/dist/stream/readable/readableCreate.d.ts
@@ -6,6 +6,14 @@ import { ReadableTyped } from '../stream.model';
   * Push `null` to it to complete (similar to RxJS `.complete()`).
   *
   * Difference from Readable.from() is that this readable is not "finished" yet and allows pushing more to it.
+  *
+  * Caution!
+  * The implementation of this Readable is not fully compliant,
+  * e.g the read() method doesn't return anything, so, it will hand the Node process (or cause it to process.exit(0))
+  * if read() will be called AFTER everything was pushed and Readable is closed (by pushing `null`).
+  * Beware of it when e.g doing unit testing! Jest prefers to hang (not exit-0).
+  *
+  * @deprecated because of the caution above
   */
  export declare function readableCreate<T>(items?: Iterable<T>, opt?: ReadableOptions): ReadableTyped<T>;
  /**
package/dist/stream/readable/readableCreate.js
@@ -7,12 +7,20 @@ const stream_1 = require("stream");
   * Push `null` to it to complete (similar to RxJS `.complete()`).
   *
   * Difference from Readable.from() is that this readable is not "finished" yet and allows pushing more to it.
+  *
+  * Caution!
+  * The implementation of this Readable is not fully compliant,
+  * e.g the read() method doesn't return anything, so, it will hand the Node process (or cause it to process.exit(0))
+  * if read() will be called AFTER everything was pushed and Readable is closed (by pushing `null`).
+  * Beware of it when e.g doing unit testing! Jest prefers to hang (not exit-0).
+  *
+  * @deprecated because of the caution above
   */
  function readableCreate(items = [], opt) {
      const readable = new stream_1.Readable({
          objectMode: true,
          ...opt,
-         read() { },
+         read() { }, // Caution, if this is called and Readable has not finished yet (null wasn't pushed) - it'll hang the process!
      });
      for (const item of items) {
          readable.push(item);
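
Given that caution, the one safe pattern is to always complete the readable by pushing `null`; a short sketch:

import { readableCreate } from '@naturalcycles/nodejs-lib'

const readable = readableCreate<number>([1, 2]) // seeded with initial items
readable.push(3) // unlike Readable.from(), more items can be pushed later
readable.push(null) // must be called to complete, otherwise consumers (e.g. Jest) hang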
package/dist/stream/readable/readableForEach.d.ts
@@ -1,4 +1,4 @@
- import { AsyncMapper, Mapper } from '@naturalcycles/js-lib';
+ import { Mapper, AbortableAsyncMapper } from '@naturalcycles/js-lib';
  import { ReadableTyped } from '../../index';
  import { TransformMapOptions } from '../transform/transformMap';
  /**
@@ -7,7 +7,7 @@ import { TransformMapOptions } from '../transform/transformMap';
   *
   * @experimental
   */
- export declare function readableForEach<T>(readable: ReadableTyped<T>, mapper: AsyncMapper<T, void>, opt?: TransformMapOptions<T, void>): Promise<void>;
+ export declare function readableForEach<T>(readable: ReadableTyped<T>, mapper: AbortableAsyncMapper<T, void>, opt?: TransformMapOptions<T, void>): Promise<void>;
  /**
   * Convenience function to do `.forEach` over a Readable.
   * Typed! (unlike default Readable).
package/dist/stream/readable/readableFromArray.d.ts
@@ -1,6 +1,6 @@
  /// <reference types="node" />
  import { ReadableOptions } from 'stream';
- import { AsyncMapper } from '@naturalcycles/js-lib';
+ import { AbortableAsyncMapper } from '@naturalcycles/js-lib';
  import { ReadableTyped } from '../stream.model';
  /**
   * Create Readable from Array.
@@ -8,4 +8,4 @@ import { ReadableTyped } from '../stream.model';
   *
   * For simple cases use Readable.from(...) (Node.js 12+)
   */
- export declare function readableFromArray<IN, OUT>(items: IN[], mapper?: AsyncMapper<IN, OUT>, opt?: ReadableOptions): ReadableTyped<OUT>;
+ export declare function readableFromArray<IN, OUT>(items: IN[], mapper?: AbortableAsyncMapper<IN, OUT>, opt?: ReadableOptions): ReadableTyped<OUT>;
package/dist/stream/readable/readableFromArray.js
@@ -10,21 +10,25 @@ const js_lib_1 = require("@naturalcycles/js-lib");
   * For simple cases use Readable.from(...) (Node.js 12+)
   */
  function readableFromArray(items, mapper = js_lib_1._passthroughMapper, opt) {
-     const readable = new stream_1.Readable({
+     let i = -1;
+     return new stream_1.Readable({
          objectMode: true,
          ...opt,
-         read() { },
+         async read() {
+             i++;
+             if (i < items.length) {
+                 try {
+                     this.push(await mapper(items[i], i));
+                 }
+                 catch (err) {
+                     console.error(err);
+                     this.destroy(err);
+                 }
+             }
+             else {
+                 this.push(null); // end
+             }
+         },
      });
-     void (0, js_lib_1.pMap)(items, async (item, index) => {
-         readable.push(await mapper(item, index));
-     }, { concurrency: 1 })
-         .then(() => {
-         readable.push(null); // done
-     })
-         .catch(err => {
-         console.error(err);
-         readable.push(err);
-     });
-     return readable;
  }
  exports.readableFromArray = readableFromArray;
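
The rewrite replaces the eager pMap push loop with a pull-based read(): each item is mapped only when the consumer asks for the next chunk, so backpressure is respected, and a mapper error now destroys the stream instead of pushing the error object as if it were data. The call signature is unchanged; a small sketch:

import { readableFromArray, _pipelineToArray } from '@naturalcycles/nodejs-lib'

const readable = readableFromArray([1, 2, 3], async n => n * 10)
const out = await _pipelineToArray<number>([readable])
// out: [10, 20, 30]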
package/dist/stream/readable/readableMap.d.ts
@@ -1,3 +1,3 @@
- import { AsyncMapper } from '@naturalcycles/js-lib';
+ import { AbortableAsyncMapper } from '@naturalcycles/js-lib';
  import { ReadableTyped } from '../stream.model';
- export declare function readableMap<IN, OUT>(readable: ReadableTyped<IN>, mapper: AsyncMapper<IN, OUT>): ReadableTyped<OUT>;
+ export declare function readableMap<IN, OUT>(readable: ReadableTyped<IN>, mapper: AbortableAsyncMapper<IN, OUT>): ReadableTyped<OUT>;
package/dist/stream/readable/readableMap.js
@@ -1,24 +1,29 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.readableMap = void 0;
- const index_1 = require("../../index");
+ const stream_1 = require("stream");
+ const js_lib_1 = require("@naturalcycles/js-lib");
  function readableMap(readable, mapper) {
-     const out = (0, index_1.readableCreate)();
-     void (async () => {
-         try {
-             let index = 0;
-             for await (const item of readable) {
-                 const v = await mapper(item, index++);
-                 out.push(v);
+     let i = -1;
+     // todo: check if we need to handle errors somehow specifically
+     return readable.pipe(new stream_1.Transform({
+         objectMode: true,
+         async transform(chunk, _enc, cb) {
+             try {
+                 const r = await mapper(chunk, ++i);
+                 if (r === js_lib_1.SKIP) {
+                     cb();
+                 }
+                 else {
+                     // _assert(r !== END, `readableMap END not supported`)
+                     cb(null, r);
+                 }
              }
-             // We're done
-             out.push(null);
-         }
-         catch (err) {
-             console.error(err);
-             out.emit('error', err);
-         }
-     })();
-     return out;
+             catch (err) {
+                 console.error(err);
+                 cb(err);
+             }
+         },
+     }));
  }
  exports.readableMap = readableMap;
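
As the new code shows, readableMap now understands the SKIP symbol from @naturalcycles/js-lib: returning SKIP from the mapper drops the item without emitting anything. Sketch:

import { Readable } from 'stream'
import { SKIP } from '@naturalcycles/js-lib'
import { readableMap } from '@naturalcycles/nodejs-lib'

const evens = readableMap(Readable.from([1, 2, 3, 4]), async n => {
  if (n % 2) return SKIP // odd items emit nothing
  return n
})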
package/dist/stream/sizeStack.d.ts
@@ -0,0 +1,9 @@
+ import { CommonLogger, NumberStack } from '@naturalcycles/js-lib';
+ export declare class SizeStack extends NumberStack {
+     name: string;
+     constructor(name: string, size: number);
+     total: number;
+     push(item: any): this;
+     getStats(): string;
+     static countItem(item: any, logger: CommonLogger, sizes?: SizeStack, sizesZipped?: SizeStack): Promise<void>;
+ }
package/dist/stream/sizeStack.js
@@ -0,0 +1,48 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.SizeStack = void 0;
+ const js_lib_1 = require("@naturalcycles/js-lib");
+ const colors_1 = require("../colors");
+ const zip_util_1 = require("../util/zip.util");
+ class SizeStack extends js_lib_1.NumberStack {
+     constructor(name, size) {
+         super(size);
+         this.name = name;
+         this.total = 0;
+     }
+     push(item) {
+         this.total += item;
+         return super.push(item);
+     }
+     getStats() {
+         // const pcs = this.percentiles([50, 90])
+         return [
+             ' ' + this.name,
+             'avg',
+             (0, colors_1.yellow)((0, js_lib_1._hb)(this.avg())),
+             // 'p50',
+             // yellow(_hb(pcs[50])),
+             // 'p90',
+             // yellow(_hb(pcs[90])),
+             'total',
+             (0, colors_1.yellow)((0, js_lib_1._hb)(this.total)),
+         ].join(' ');
+     }
+     static async countItem(item, logger, sizes, sizesZipped) {
+         if (!sizes)
+             return;
+         // try-catch, because we don't want to fail the pipeline on logProgress
+         try {
+             const buf = Buffer.from(JSON.stringify(item));
+             sizes.push(buf.byteLength);
+             if (sizesZipped) {
+                 const { byteLength } = await (0, zip_util_1.gzipBuffer)(buf);
+                 sizesZipped.push(byteLength);
+             }
+         }
+         catch (err) {
+             logger.warn(`transformLogProgress failed to JSON.stringify the chunk: ${err.message}`);
+         }
+     }
+ }
+ exports.SizeStack = SizeStack;
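
A sketch of how these stacks fit together (assuming SizeStack is re-exported from the package root like the other stream utilities; the sample item is invented):

import { SizeStack } from '@naturalcycles/nodejs-lib'

const sizes = new SizeStack('size', 1000) // rolling window over the last 1000 item sizes
const sizesZipped = new SizeStack('sizeZipped', 1000)

await SizeStack.countItem({ id: 'abc', n: 42 }, console, sizes, sizesZipped)
console.log(sizes.getStats()) // e.g. ' size avg <bytes> total <bytes>'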
package/dist/stream/stream.util.d.ts
@@ -0,0 +1,4 @@
+ /// <reference types="node" />
+ import { Readable } from 'stream';
+ import { CommonLogger } from '@naturalcycles/js-lib';
+ export declare function pipelineClose(name: string, readableDownstream: Readable, sourceReadable: Readable | undefined, streamDone: Promise<void> | undefined, logger: CommonLogger): void;
package/dist/stream/stream.util.js
@@ -0,0 +1,24 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.pipelineClose = void 0;
+ function pipelineClose(name, readableDownstream, sourceReadable, streamDone, logger) {
+     readableDownstream.push(null); // this closes the stream, so downstream Readable will receive `end` and won't write anything
+     if (!sourceReadable) {
+         logger.warn(`${name} sourceReadable is not provided, readable stream will not be stopped`);
+     }
+     else {
+         logger.log(`${name} is calling readable.unpipe() to pause the stream`);
+         sourceReadable.unpipe(); // it is expected to pause the stream
+         if (!streamDone) {
+             logger.log(`${name} streamDone is not provided, will do readable.destroy right away`);
+             sourceReadable.destroy();
+         }
+         else {
+             void streamDone.then(() => {
+                 logger.log(`${name} streamDone, calling readable.destroy()`);
+                 sourceReadable.destroy(); // this throws ERR_STREAM_PREMATURE_CLOSE
+             });
+         }
+     }
+ }
+ exports.pipelineClose = pipelineClose;
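
pipelineClose is the reusable half of the close sequence documented in pipeline.js: end the downstream, unpipe the source, wait for streamDone, then destroy. A custom transform could invoke it the same way transformLimit does below (a sketch; TransformTakeOne is a hypothetical example class, and it assumes AbortableTransform and pipelineClose are exported from the package root):

import { TransformCallback } from 'stream'
import { AbortableTransform, pipelineClose } from '@naturalcycles/nodejs-lib'

class TransformTakeOne extends AbortableTransform {
  constructor() {
    super({ objectMode: true })
  }

  override _transform(chunk: any, _enc: BufferEncoding, cb: TransformCallback): void {
    this.push(chunk) // emit the one chunk we allow through
    // ends downstream, unpipes the source, and destroys it after streamDone resolves
    pipelineClose('TransformTakeOne', this, this.sourceReadable, this.streamDone, console)
    cb()
  }
}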
package/dist/stream/transform/transformBuffer.js
@@ -13,7 +13,7 @@ function transformBuffer(opt) {
      return new stream_1.Transform({
          objectMode: true,
          ...opt,
-         transform(chunk, _encoding, cb) {
+         transform(chunk, _, cb) {
              buf.push(chunk);
              if (buf.length >= batchSize) {
                  cb(null, buf);
package/dist/stream/transform/transformFilter.d.ts
@@ -1,11 +1,10 @@
  import { AsyncPredicate, Predicate } from '@naturalcycles/js-lib';
  import { TransformOptions, TransformTyped } from '../stream.model';
+ import { TransformMapOptions } from './transformMap';
  /**
-  * Note, that currently it's NOT concurrent! (concurrency = 1)
-  * So, it's recommended to use transformMap instead, that is both concurrent and has
-  * filtering feature by default.
+  * Just a convenience wrapper around `transformMap` that has built-in predicate filtering support.
   */
- export declare function transformFilter<IN = any>(predicate: AsyncPredicate<IN>, opt?: TransformOptions): TransformTyped<IN, IN>;
+ export declare function transformFilter<IN = any>(predicate: AsyncPredicate<IN>, opt?: TransformMapOptions): TransformTyped<IN, IN>;
  /**
   * Sync version of `transformFilter`
   */
package/dist/stream/transform/transformFilter.js
@@ -2,29 +2,14 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.transformFilterSync = exports.transformFilter = void 0;
  const stream_1 = require("stream");
+ const transformMap_1 = require("./transformMap");
  /**
-  * Note, that currently it's NOT concurrent! (concurrency = 1)
-  * So, it's recommended to use transformMap instead, that is both concurrent and has
-  * filtering feature by default.
+  * Just a convenience wrapper around `transformMap` that has built-in predicate filtering support.
   */
  function transformFilter(predicate, opt = {}) {
-     let index = 0;
-     return new stream_1.Transform({
-         objectMode: true,
+     return (0, transformMap_1.transformMap)(v => v, {
+         predicate,
          ...opt,
-         async transform(chunk, _encoding, cb) {
-             try {
-                 if (await predicate(chunk, index++)) {
-                     cb(null, chunk); // pass through
-                 }
-                 else {
-                     cb(); // signal that we've finished processing, but emit no output here
-                 }
-             }
-             catch (err) {
-                 cb(err);
-             }
-         },
      });
  }
  exports.transformFilter = transformFilter;
@@ -36,7 +21,7 @@ function transformFilterSync(predicate, opt = {}) {
      return new stream_1.Transform({
          objectMode: true,
          ...opt,
-         async transform(chunk, _encoding, cb) {
+         transform(chunk, _, cb) {
              try {
                  if (predicate(chunk, index++)) {
                      cb(null, chunk); // pass through
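
Because transformFilter is now just transformMap with a predicate, it accepts TransformMapOptions and can run async predicates concurrently. Sketch:

import { Readable } from 'stream'
import { transformFilter, _pipelineToArray } from '@naturalcycles/nodejs-lib'

const out = await _pipelineToArray<number>([
  Readable.from([1, 2, 3, 4]),
  transformFilter<number>(async n => n % 2 === 0, { concurrency: 4 }), // options pass through to transformMap
])
// out keeps only the even numbers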
package/dist/stream/transform/transformLimit.d.ts
@@ -1,9 +1,40 @@
+ /// <reference types="node" />
+ import { Readable } from 'stream';
  import { CommonLogger } from '@naturalcycles/js-lib';
+ import { AbortableTransform } from '../../index';
  import { TransformOptions, TransformTyped } from '../stream.model';
  export interface TransformLimitOptions extends TransformOptions {
+     /**
+      * Nullish value (e.g 0 or undefined) would mean "no limit"
+      */
+     limit?: number;
+     /**
+      * If provided (recommended!) - it will call readable.destroy() on limit.
+      * Without it - it will only stop the downstream consumers, but won't stop
+      * the Readable ("source" of the stream).
+      * It is almost always crucial to stop the Source too, so, please provide the Readable here!
+      */
+     sourceReadable?: Readable;
+     /**
+      * Please provide it (a Promise that resolves when the Stream is done, e.g finished consuming things)
+      * to be able to wait for Consumers before calling `readable.destroy`.
+      * Has no effect if `readable` is not provided.
+      */
+     streamDone?: Promise<void>;
      logger?: CommonLogger;
+     /**
+      * Set to true to enable additional debug messages, e.g it'll log
+      * when readable still emits values after the limit is reached.
+      */
+     debug?: boolean;
+ }
+ /**
+  * Class only exists to be able to do `instanceof TransformLimit`
+  * and to set sourceReadable+streamDone to it in `_pipeline`.
+  */
+ export declare class TransformLimit extends AbortableTransform {
  }
  /**
   * 0 or falsy value means "no limit"
   */
- export declare function transformLimit<IN>(limit?: number, opt?: TransformLimitOptions): TransformTyped<IN, IN>;
+ export declare function transformLimit<IN>(opt?: TransformLimitOptions): TransformTyped<IN, IN>;
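
The breaking signature change means callers migrate from a positional limit to the options object; passing sourceReadable (or running inside `_pipeline` with `allowClose: true`, which injects it) is what makes the limit actually stop the source:

import { createReadStream } from 'fs'
import { transformLimit } from '@naturalcycles/nodejs-lib'

const sourceReadable = createReadStream('big.ndjson') // hypothetical source

// before (12.58.0): transformLimit(100)
// after (12.62.0):
const limit100 = transformLimit({ limit: 100, sourceReadable, debug: true })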
package/dist/stream/transform/transformLimit.js
@@ -1,29 +1,46 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.transformLimit = void 0;
- const stream_1 = require("stream");
+ exports.transformLimit = exports.TransformLimit = void 0;
+ const index_1 = require("../../index");
+ const stream_util_1 = require("../stream.util");
+ /**
+  * Class only exists to be able to do `instanceof TransformLimit`
+  * and to set sourceReadable+streamDone to it in `_pipeline`.
+  */
+ class TransformLimit extends index_1.AbortableTransform {
+ }
+ exports.TransformLimit = TransformLimit;
  /**
   * 0 or falsy value means "no limit"
   */
- function transformLimit(limit, opt = {}) {
-     const { logger = console } = opt;
-     let index = 0;
+ function transformLimit(opt = {}) {
+     const { logger = console, limit, debug } = opt;
+     if (!limit) {
+         // No limit - returning pass-through transform
+         return (0, index_1.transformNoOp)();
+     }
+     let i = 0; // so we start first chunk with 1
      let ended = false;
-     return new stream_1.Transform({
+     return new TransformLimit({
          objectMode: true,
          ...opt,
-         transform(chunk, _encoding, cb) {
-             index++;
-             if (!ended) {
-                 cb(null, chunk); // pass through the item
+         transform(chunk, _, cb) {
+             i++;
+             if (i === limit) {
+                 ended = true;
+                 logger.log(`transformLimit of ${limit} reached`);
+                 this.push(chunk);
+                 (0, stream_util_1.pipelineClose)('transformLimit', this, opt.sourceReadable || this.sourceReadable, opt.streamDone || this.streamDone, logger);
+                 cb(); // after pause
              }
-             else {
-                 cb(null); // pass-through empty
+             else if (!ended) {
+                 cb(null, chunk);
              }
-             if (limit && index === limit) {
-                 ended = true;
-                 logger.log(`transformLimit: limit of ${limit} reached`);
-                 // this.emit('end') // this makes it "halt" on Node 14 lts
+             else {
+                 if (debug)
+                     logger.log(`transformLimit.transform after limit`, i);
+                 // If we ever HANG (don't call cb) - Node will do process.exit(0) to us
+                 cb(); // ended, don't emit anything
              }
          },
      });