@naturalcycles/nodejs-lib 12.59.0 → 12.60.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/dist/stream/ndjson/transformJsonParse.js +3 -3
  2. package/dist/stream/ndjson/transformToNDJson.js +2 -2
  3. package/dist/stream/sizeStack.d.ts +9 -0
  4. package/dist/stream/sizeStack.js +48 -0
  5. package/dist/stream/transform/transformBuffer.js +1 -1
  6. package/dist/stream/transform/transformFilter.d.ts +3 -4
  7. package/dist/stream/transform/transformFilter.js +5 -20
  8. package/dist/stream/transform/transformLogProgress.d.ts +20 -0
  9. package/dist/stream/transform/transformLogProgress.js +36 -18
  10. package/dist/stream/transform/transformMap.d.ts +2 -4
  11. package/dist/stream/transform/transformMap.js +6 -11
  12. package/dist/stream/transform/transformMapSimple.js +1 -1
  13. package/dist/stream/transform/transformMapSync.d.ts +5 -3
  14. package/dist/stream/transform/transformMapSync.js +28 -22
  15. package/dist/stream/transform/transformNoOp.js +1 -1
  16. package/dist/stream/transform/transformTap.js +3 -3
  17. package/dist/stream/transform/transformToArray.js +1 -1
  18. package/dist/stream/transform/transformToString.js +2 -2
  19. package/dist/stream/transform/worker/transformMultiThreaded.js +1 -1
  20. package/dist/stream/writable/writableFork.js +1 -1
  21. package/dist/stream/writable/writablePushToArray.js +1 -1
  22. package/dist/stream/writable/writableVoid.js +1 -1
  23. package/dist/util/zip.util.d.ts +10 -2
  24. package/dist/util/zip.util.js +10 -3
  25. package/package.json +1 -1
  26. package/src/stream/ndjson/transformJsonParse.ts +3 -3
  27. package/src/stream/ndjson/transformToNDJson.ts +2 -2
  28. package/src/stream/sizeStack.ts +56 -0
  29. package/src/stream/transform/transformBuffer.ts +1 -1
  30. package/src/stream/transform/transformFilter.ts +6 -20
  31. package/src/stream/transform/transformLogProgress.ts +72 -23
  32. package/src/stream/transform/transformMap.ts +7 -14
  33. package/src/stream/transform/transformMapSimple.ts +1 -1
  34. package/src/stream/transform/transformMapSync.ts +40 -26
  35. package/src/stream/transform/transformNoOp.ts +1 -1
  36. package/src/stream/transform/transformTap.ts +3 -3
  37. package/src/stream/transform/transformToArray.ts +1 -1
  38. package/src/stream/transform/transformToString.ts +2 -2
  39. package/src/stream/transform/worker/transformMultiThreaded.ts +1 -1
  40. package/src/stream/writable/writableFork.ts +1 -1
  41. package/src/stream/writable/writablePushToArray.ts +1 -1
  42. package/src/stream/writable/writableVoid.ts +1 -1
  43. package/src/util/zip.util.ts +11 -3
package/dist/stream/ndjson/transformJsonParse.js CHANGED
@@ -19,19 +19,19 @@ const stream_1 = require("stream");
 function transformJsonParse(opt = {}) {
     const { strict = true, reviver } = opt;
     return new stream_1.Transform({
-        objectMode: false,
+        writableObjectMode: false,
         readableObjectMode: true,
-        transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            try {
                const data = JSON.parse(chunk, reviver);
                cb(null, data);
            }
            catch (err) {
-                // console.error(err)
                if (strict) {
                    cb(err); // emit error
                }
                else {
+                    console.error(err);
                    cb(); // emit no error, but no result neither
                }
            }
package/dist/stream/ndjson/transformToNDJson.js CHANGED
@@ -9,9 +9,9 @@ const js_lib_1 = require("@naturalcycles/js-lib");
 function transformToNDJson(opt = {}) {
     const { strict = true, separator = '\n', sortObjects = false, useFlatstr = false } = opt;
     return new stream_1.Transform({
-        objectMode: true,
+        writableObjectMode: true,
         readableObjectMode: false,
-        transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            try {
                if (sortObjects) {
                    chunk = (0, js_lib_1._sortObjectDeep)(chunk);
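Note on the two NDJSON transforms above: they now declare `writableObjectMode` / `readableObjectMode` separately instead of a blanket `objectMode`, matching their asymmetric modes (text in / objects out for parsing, objects in / text out for serializing). A minimal serialization sketch, assuming `transformToNDJson` is re-exported from the package root as in earlier 12.x releases:

import { createWriteStream } from 'node:fs'
import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { transformToNDJson } from '@naturalcycles/nodejs-lib'

// Objects go in (writableObjectMode: true), NDJSON text comes out
// (readableObjectMode: false), ready to be piped to a file or a gzip stream.
await pipeline(
  Readable.from([{ id: 1 }, { id: 2 }]),
  transformToNDJson({ sortObjects: true }),
  createWriteStream('out.ndjson'),
)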
package/dist/stream/sizeStack.d.ts ADDED
@@ -0,0 +1,9 @@
+import { CommonLogger, NumberStack } from '@naturalcycles/js-lib';
+export declare class SizeStack extends NumberStack {
+    name: string;
+    constructor(name: string, size: number);
+    total: number;
+    push(item: any): this;
+    getStats(): string;
+    static countItem(item: any, logger: CommonLogger, sizes?: SizeStack, sizesZipped?: SizeStack): Promise<void>;
+}
package/dist/stream/sizeStack.js ADDED
@@ -0,0 +1,48 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SizeStack = void 0;
+const js_lib_1 = require("@naturalcycles/js-lib");
+const colors_1 = require("../colors");
+const zip_util_1 = require("../util/zip.util");
+class SizeStack extends js_lib_1.NumberStack {
+    constructor(name, size) {
+        super(size);
+        this.name = name;
+        this.total = 0;
+    }
+    push(item) {
+        this.total += item;
+        return super.push(item);
+    }
+    getStats() {
+        const pcs = this.percentiles([50, 90]);
+        return [
+            this.name,
+            'avg',
+            (0, colors_1.yellow)((0, js_lib_1._hb)(this.avg())),
+            'p50',
+            (0, colors_1.yellow)((0, js_lib_1._hb)(pcs[50])),
+            'p90',
+            (0, colors_1.yellow)((0, js_lib_1._hb)(pcs[90])),
+            'total',
+            (0, colors_1.yellow)((0, js_lib_1._hb)(this.total)),
+        ].join(' ');
+    }
+    static async countItem(item, logger, sizes, sizesZipped) {
+        if (!sizes)
+            return;
+        // try-catch, because we don't want to fail the pipeline on logProgress
+        try {
+            const buf = Buffer.from(JSON.stringify(item));
+            sizes.push(buf.byteLength);
+            if (sizesZipped) {
+                const { byteLength } = await (0, zip_util_1.gzipBuffer)(buf);
+                sizesZipped.push(byteLength);
+            }
+        }
+        catch (err) {
+            logger.warn(`transformLogProgress failed to JSON.stringify the chunk: ${err.message}`);
+        }
+    }
+}
+exports.SizeStack = SizeStack;
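The new `SizeStack` is a small helper (a `NumberStack` of byte sizes) that powers the experimental `logSizes` feature of `transformLogProgress` further below. A rough standalone sketch, assuming `SizeStack` ends up reachable through the package's exports (it may instead stay internal to `transformLogProgress`):

import { SizeStack } from '@naturalcycles/nodejs-lib'

// Keep the last 1000 JSON-stringified item sizes and report stats.
const sizes = new SizeStack('json', 1000)

for (const item of [{ a: 1 }, { a: 2, b: 'some longer string' }]) {
  // countItem JSON.stringifies the item and pushes its byteLength
  await SizeStack.countItem(item, console, sizes)
}

console.log(sizes.getStats()) // e.g. "json avg … p50 … p90 … total …"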
package/dist/stream/transform/transformBuffer.js CHANGED
@@ -13,7 +13,7 @@ function transformBuffer(opt) {
     return new stream_1.Transform({
         objectMode: true,
         ...opt,
-        transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            buf.push(chunk);
            if (buf.length >= batchSize) {
                cb(null, buf);
package/dist/stream/transform/transformFilter.d.ts CHANGED
@@ -1,11 +1,10 @@
 import { AsyncPredicate, Predicate } from '@naturalcycles/js-lib';
 import { TransformOptions, TransformTyped } from '../stream.model';
+import { TransformMapOptions } from './transformMap';
 /**
- * Note, that currently it's NOT concurrent! (concurrency = 1)
- * So, it's recommended to use transformMap instead, that is both concurrent and has
- * filtering feature by default.
+ * Just a convenience wrapper around `transformMap` that has built-in predicate filtering support.
  */
-export declare function transformFilter<IN = any>(predicate: AsyncPredicate<IN>, opt?: TransformOptions): TransformTyped<IN, IN>;
+export declare function transformFilter<IN = any>(predicate: AsyncPredicate<IN>, opt?: TransformMapOptions): TransformTyped<IN, IN>;
 /**
  * Sync version of `transformFilter`
  */
package/dist/stream/transform/transformFilter.js CHANGED
@@ -2,29 +2,14 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.transformFilterSync = exports.transformFilter = void 0;
 const stream_1 = require("stream");
+const transformMap_1 = require("./transformMap");
 /**
- * Note, that currently it's NOT concurrent! (concurrency = 1)
- * So, it's recommended to use transformMap instead, that is both concurrent and has
- * filtering feature by default.
+ * Just a convenience wrapper around `transformMap` that has built-in predicate filtering support.
  */
 function transformFilter(predicate, opt = {}) {
-    let index = 0;
-    return new stream_1.Transform({
-        objectMode: true,
+    return (0, transformMap_1.transformMap)(v => v, {
+        predicate,
         ...opt,
-        async transform(chunk, _encoding, cb) {
-            try {
-                if (await predicate(chunk, index++)) {
-                    cb(null, chunk); // pass through
-                }
-                else {
-                    cb(); // signal that we've finished processing, but emit no output here
-                }
-            }
-            catch (err) {
-                cb(err);
-            }
-        },
     });
 }
 exports.transformFilter = transformFilter;
@@ -36,7 +21,7 @@ function transformFilterSync(predicate, opt = {}) {
     return new stream_1.Transform({
         objectMode: true,
         ...opt,
-        async transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            try {
                if (predicate(chunk, index++)) {
                    cb(null, chunk); // pass through
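`transformFilter` is no longer its own `Transform` implementation: as the hunks above show, it now delegates to `transformMap(v => v, { predicate, ...opt })`, so it accepts `TransformMapOptions` (concurrency, errorMode, etc.) rather than plain `TransformOptions`. A small usage sketch, assuming the root re-exports used below:

import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { transformFilter, writablePushToArray } from '@naturalcycles/nodejs-lib'

const even: number[] = []

// The (possibly async) predicate decides which chunks pass through.
await pipeline(
  Readable.from([1, 2, 3, 4, 5]),
  transformFilter<number>(async n => n % 2 === 0),
  writablePushToArray(even),
)
// even: [2, 4]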
package/dist/stream/transform/transformLogProgress.d.ts CHANGED
@@ -84,6 +84,26 @@ export interface TransformLogProgressOptions<IN = any> extends TransformOptions
      * Defaults to 1.
      */
     batchSize?: number;
+    /**
+     * Experimental logging of item (shunk) sizes, when json-stringified.
+     *
+     * Defaults to false.
+     *
+     * @experimental
+     */
+    logSizes?: boolean;
+    /**
+     * How many last item sizes to keep in a buffer, to calculate stats (p50, p90, avg, etc).
+     * Defaults to 100_000.
+     * Cannot be Infinity.
+     */
+    logSizesBuffer?: number;
+    /**
+     * Works in addition to `logSizes`. Adds "zipped sizes".
+     *
+     * @experimental
+     */
+    logZippedSizes?: boolean;
 }
 /**
  * Pass-through transform that optionally logs progress.
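The new options above are wired into the progress logger in the next file. A hedged usage sketch, assuming these stream helpers are re-exported from the package root:

import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { transformLogProgress, writableVoid } from '@naturalcycles/nodejs-lib'

const items = Array.from({ length: 5000 }, (_, i) => ({ id: i, payload: 'x'.repeat(100) }))

await pipeline(
  Readable.from(items),
  transformLogProgress({
    logEvery: 1000, // print a progress row every 1000 items
    logSizes: true, // experimental: track JSON.stringify'ed byte size of each item
    logSizesBuffer: 10_000, // keep the last 10_000 sizes for avg/p50/p90 stats
    logZippedSizes: true, // additionally track the gzipped size of each item
  }),
  writableVoid(),
)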
package/dist/stream/transform/transformLogProgress.js CHANGED
@@ -7,6 +7,7 @@ const js_lib_1 = require("@naturalcycles/js-lib");
 const time_lib_1 = require("@naturalcycles/time-lib");
 const colors_1 = require("../../colors");
 const colors_2 = require("../../colors/colors");
+const sizeStack_1 = require("../sizeStack");
 const inspectOpt = {
     colors: colors_2.hasColors,
     breakLength: 300,
@@ -15,7 +16,7 @@ const inspectOpt = {
  * Pass-through transform that optionally logs progress.
  */
 function transformLogProgress(opt = {}) {
-    const { metric = 'progress', heapTotal: logHeapTotal = false, heapUsed: logHeapUsed = false, rss: logRss = true, peakRSS: logPeakRSS = true, logRPS = true, logEvery = 1000, batchSize = 1, extra, logger = console, } = opt;
+    const { metric = 'progress', heapTotal: logHeapTotal = false, heapUsed: logHeapUsed = false, rss: logRss = true, peakRSS: logPeakRSS = true, logRPS = true, logEvery = 1000, logSizes = false, logSizesBuffer = 100000, logZippedSizes = false, batchSize = 1, extra, logger = console, } = opt;
     const logProgress = opt.logProgress !== false && logEvery !== 0; // true by default
     const logEvery10 = logEvery * 10;
     const started = Date.now();
@@ -24,13 +25,19 @@ function transformLogProgress(opt = {}) {
     let processedLastSecond = 0;
     let progress = 0;
     let peakRSS = 0;
+    const sizes = logSizes ? new sizeStack_1.SizeStack('json', logSizesBuffer) : undefined;
+    const sizesZipped = logZippedSizes ? new sizeStack_1.SizeStack('json.gz', logSizesBuffer) : undefined;
     logStats(); // initial
     return new stream_1.Transform({
         objectMode: true,
         ...opt,
-        transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            progress++;
            processedLastSecond++;
+            if (sizes) {
+                // Check it, cause gzipping might be delayed here..
+                void sizeStack_1.SizeStack.countItem(chunk, logger, sizes, sizesZipped);
+            }
            if (logProgress && progress % logEvery === 0) {
                logStats(chunk, false, progress % logEvery10 === 0);
            }
@@ -54,23 +61,34 @@ function transformLogProgress(opt = {}) {
        const rps10 = Math.round(sma.push(lastRPS));
        if (mem.rss > peakRSS)
            peakRSS = mem.rss;
-        logger.log((0, util_1.inspect)({
+        const o = {
            [final ? `${metric}_final` : metric]: batchedProgress,
-            ...(extra ? extra(chunk, progress) : {}),
-            ...(logHeapUsed ? { heapUsed: (0, js_lib_1._mb)(mem.heapUsed) } : {}),
-            ...(logHeapTotal ? { heapTotal: (0, js_lib_1._mb)(mem.heapTotal) } : {}),
-            ...(logRss ? { rss: (0, js_lib_1._mb)(mem.rss) } : {}),
-            ...(logPeakRSS ? { peakRSS: (0, js_lib_1._mb)(peakRSS) } : {}),
-            ...(opt.rssMinusHeap ? { rssMinusHeap: (0, js_lib_1._mb)(mem.rss - mem.heapTotal) } : {}),
-            ...(opt.external ? { external: (0, js_lib_1._mb)(mem.external) } : {}),
-            ...(opt.arrayBuffers ? { arrayBuffers: (0, js_lib_1._mb)(mem.arrayBuffers || 0) } : {}),
-            ...(logRPS
-                ? {
-                    rps10,
-                    rpsTotal,
-                }
-                : {}),
-        }, inspectOpt));
+        };
+        if (extra)
+            Object.assign(o, extra(chunk, progress));
+        if (logHeapUsed)
+            o.heapUsed = (0, js_lib_1._mb)(mem.heapUsed);
+        if (logHeapTotal)
+            o.heapTotal = (0, js_lib_1._mb)(mem.heapTotal);
+        if (logRss)
+            o.rss = (0, js_lib_1._mb)(mem.rss);
+        if (logPeakRSS)
+            o.peakRSS = (0, js_lib_1._mb)(peakRSS);
+        if (opt.rssMinusHeap)
+            o.rssMinusHeap = (0, js_lib_1._mb)(mem.rss - mem.heapTotal);
+        if (opt.external)
+            o.external = (0, js_lib_1._mb)(mem.external);
+        if (opt.arrayBuffers)
+            o.arrayBuffers = (0, js_lib_1._mb)(mem.arrayBuffers || 0);
+        if (logRPS)
+            Object.assign(o, { rps10, rpsTotal });
+        logger.log((0, util_1.inspect)(o, inspectOpt));
+        if (sizes?.items.length) {
+            logger.log(sizes.getStats());
+            if (sizesZipped?.items.length) {
+                logger.log(sizesZipped.getStats());
+            }
+        }
        if (tenx) {
            let perHour = Math.round((batchedProgress * 1000 * 60 * 60) / (now - started)) || 0;
            if (perHour > 900) {
package/dist/stream/transform/transformMap.d.ts CHANGED
@@ -11,9 +11,8 @@ export interface TransformMapOptions<IN = any, OUT = IN>
      * Predicate to filter outgoing results (after mapper).
      * Allows to not emit all results.
      *
-     * Set to `r => r` (passthrough predicate) to pass ANY value (including undefined/null)
-     *
-     * @default to filter out undefined/null values, but pass anything else
+     * Defaults to "pass everything" (including null, undefined, etc).
+     * Simpler way to exclude certain cases is to return SKIP symbol from the mapper.
      */
     predicate?: AsyncPredicate<OUT>;
     /**
@@ -39,7 +38,6 @@ export interface TransformMapOptions<IN = any, OUT = IN>
     metric?: string;
     logger?: CommonLogger;
 }
-export declare function notNullishPredicate(item: any): boolean;
 /**
  * Like pMap, but for streams.
  * Inspired by `through2`.
package/dist/stream/transform/transformMap.js CHANGED
@@ -1,14 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.transformMap = exports.notNullishPredicate = void 0;
+exports.transformMap = void 0;
 const js_lib_1 = require("@naturalcycles/js-lib");
 const through2Concurrent = require("through2-concurrent");
 const colors_1 = require("../../colors");
 const stream_util_1 = require("../stream.util");
-function notNullishPredicate(item) {
-    return item !== undefined && item !== null;
-}
-exports.notNullishPredicate = notNullishPredicate;
 // doesn't work, cause here we don't construct our Transform instance ourselves
 // export class TransformMap extends AbortableTransform {}
 /**
@@ -24,7 +20,8 @@ exports.notNullishPredicate = void 0;
  * If an Array is returned by `mapper` - it will be flattened and multiple results will be emitted from it. Tested by Array.isArray().
  */
 function transformMap(mapper, opt = {}) {
-    const { concurrency = 16, predicate = notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput, onError, metric = 'stream', logger = console, } = opt;
+    const { concurrency = 16, predicate, // we now default to "no predicate" (meaning pass-everything)
+    errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput, onError, metric = 'stream', logger = console, } = opt;
     let index = -1;
     let isSettled = false;
     let errors = 0;
@@ -44,24 +41,22 @@ function transformMap(mapper, opt = {}) {
            }
        },
    }, async function transformMapFn(chunk, _, cb) {
-        index++;
-        // console.log({chunk, _encoding})
        // Stop processing if isSettled (either THROW_IMMEDIATELY was fired or END received)
        if (isSettled)
            return cb();
+        const currentIndex = ++index;
        try {
-            const currentIndex = index; // because we need to pass it to 2 functions - mapper and predicate. Refers to INPUT index (since it may return multiple outputs)
            const res = await mapper(chunk, currentIndex);
            const passedResults = await (0, js_lib_1.pFilter)(flattenArrayOutput && Array.isArray(res) ? res : [res], async (r) => {
                if (r === js_lib_1.END) {
                    isSettled = true; // will be checked later
                    return false;
                }
-                return r !== js_lib_1.SKIP && (await predicate(r, currentIndex));
+                return r !== js_lib_1.SKIP && (!predicate || (await predicate(r, currentIndex)));
            });
            passedResults.forEach(r => this.push(r));
            if (isSettled) {
-                logger.log(`transformMap END received at index ${index}`);
+                logger.log(`transformMap END received at index ${currentIndex}`);
                (0, stream_util_1.pipelineClose)('transformMap', this, this.sourceReadable, this.streamDone, logger);
            }
            cb(); // done processing
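Behavior change worth calling out in the hunks above: `transformMap` no longer filters out `null`/`undefined` mapper results by default (`notNullishPredicate` is gone and `predicate` now defaults to "pass everything"); items are skipped explicitly by returning the `SKIP` symbol from the mapper, or the stream is settled early with `END`. A sketch of the new idiom, assuming the root re-exports used below:

import { SKIP } from '@naturalcycles/js-lib'
import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { transformMap, writablePushToArray } from '@naturalcycles/nodejs-lib'

const out: number[] = []

await pipeline(
  Readable.from([1, 2, 3, 4]),
  // Async mapper: odd numbers are skipped explicitly via SKIP.
  transformMap(async (n: number) => (n % 2 ? SKIP : n * 10), { concurrency: 2 }),
  writablePushToArray(out),
)
// out: [20, 40] (output order is not guaranteed when concurrency > 1)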
package/dist/stream/transform/transformMapSimple.js CHANGED
@@ -17,7 +17,7 @@ function transformMapSimple(mapper, opt = {}) {
     const { errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, logger = console } = opt;
     return new stream_1.Transform({
         objectMode: true,
-        transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            try {
                cb(null, mapper(chunk, ++index));
            }
package/dist/stream/transform/transformMapSync.d.ts CHANGED
@@ -1,4 +1,5 @@
 import { CommonLogger, ErrorMode, Mapper, Predicate } from '@naturalcycles/js-lib';
+import { AbortableTransform } from '../pipeline/pipeline';
 import { TransformTyped } from '../stream.model';
 export interface TransformMapSyncOptions<IN = any, OUT = IN> {
     /**
@@ -14,9 +15,8 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
      * Predicate to filter outgoing results (after mapper).
      * Allows to not emit all results.
      *
-     * @default to filter out undefined/null values, but pass anything else
-     *
-     * Set to `r => r` (passthrough predicate) to pass ANY value (including undefined/null)
+     * Defaults to "pass everything".
+     * Simpler way to skip individual entries is to return SKIP symbol.
      */
     predicate?: Predicate<OUT>;
     /**
@@ -36,6 +36,8 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
     metric?: string;
     logger?: CommonLogger;
 }
+export declare class TransformMapSync extends AbortableTransform {
+}
 /**
  * Sync (not async) version of transformMap.
  * Supposedly faster, for cases when async is not needed.
package/dist/stream/transform/transformMapSync.js CHANGED
@@ -1,42 +1,48 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.transformMapSync = void 0;
-const stream_1 = require("stream");
+exports.transformMapSync = exports.TransformMapSync = void 0;
 const js_lib_1 = require("@naturalcycles/js-lib");
 const colors_1 = require("../../colors");
-const transformMap_1 = require("./transformMap");
+const pipeline_1 = require("../pipeline/pipeline");
+const stream_util_1 = require("../stream.util");
+class TransformMapSync extends pipeline_1.AbortableTransform {
+}
+exports.TransformMapSync = TransformMapSync;
 /**
  * Sync (not async) version of transformMap.
  * Supposedly faster, for cases when async is not needed.
  */
 function transformMapSync(mapper, opt = {}) {
     let index = -1;
-    const { predicate = transformMap_1.notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput = false, onError, metric = 'stream', objectMode = true, logger = console, } = opt;
-    let isRejected = false;
+    const { predicate, // defaults to "no predicate" (pass everything)
+    errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput = false, onError, metric = 'stream', objectMode = true, logger = console, } = opt;
+    let isSettled = false;
     let errors = 0;
     const collectedErrors = []; // only used if errorMode == THROW_AGGREGATED
-    return new stream_1.Transform({
+    return new TransformMapSync({
         objectMode,
         ...opt,
-        transform(chunk, _encoding, cb) {
-            // Stop processing if THROW_IMMEDIATELY mode is used
-            if (isRejected && errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY) {
+        transform(chunk, _, cb) {
+            // Stop processing if isSettled
+            if (isSettled)
                return cb();
-            }
+            const currentIndex = ++index;
            try {
-                if (!predicate(chunk, ++index)) {
-                    cb(); // signal that we've finished processing, but emit no output here
-                    return;
-                }
                // map and pass through
-                const v = mapper(chunk, index);
-                if (flattenArrayOutput && Array.isArray(v)) {
-                    // Pass each item individually
-                    v.forEach(item => this.push(item));
-                }
-                else {
-                    cb(null, v);
+                const v = mapper(chunk, currentIndex);
+                const passedResults = (flattenArrayOutput && Array.isArray(v) ? v : [v]).filter(r => {
+                    if (r === js_lib_1.END) {
+                        isSettled = true; // will be checked later
+                        return false;
+                    }
+                    return r !== js_lib_1.SKIP && (!predicate || predicate(r, currentIndex));
+                });
+                passedResults.forEach(r => this.push(r));
+                if (isSettled) {
+                    logger.log(`transformMapSync END received at index ${currentIndex}`);
+                    (0, stream_util_1.pipelineClose)('transformMapSync', this, this.sourceReadable, this.streamDone, logger);
                }
+                cb(); // done processing
            }
            catch (err) {
                logger.error(err);
@@ -49,7 +55,7 @@ function transformMapSync(mapper, opt = {}) {
            catch { }
        }
        if (errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY) {
-            isRejected = true;
+            isSettled = true;
            // Emit error immediately
            return cb(err);
        }
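With the hunks above, `transformMapSync` catches up with its async sibling: it now extends `AbortableTransform`, tracks `isSettled` instead of `isRejected`, and honors the `SKIP`/`END` symbols (`END` closes the upstream via `pipelineClose`, which depends on `sourceReadable` being wired up by the library's own pipeline helper). A sync sketch using only `SKIP`, assuming root re-exports:

import { SKIP } from '@naturalcycles/js-lib'
import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { transformMapSync, writablePushToArray } from '@naturalcycles/nodejs-lib'

const doubled: number[] = []

await pipeline(
  Readable.from([1, 2, 3, 4]),
  // Sync mapper: no await, no concurrency; SKIP drops an item explicitly,
  // since nullish results are no longer filtered out by default.
  transformMapSync((n: number) => (n > 2 ? SKIP : n * 2)),
  writablePushToArray(doubled),
)
// doubled: [2, 4]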
package/dist/stream/transform/transformNoOp.js CHANGED
@@ -10,7 +10,7 @@ const stream_1 = require("stream");
 function transformNoOp() {
     return new stream_1.Transform({
         objectMode: true,
-        transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            cb(null, chunk);
        },
    });
package/dist/stream/transform/transformTap.js CHANGED
@@ -10,14 +10,14 @@ const stream_1 = require("stream");
  */
 function transformTap(fn, opt = {}) {
     const { logger = console } = opt;
-    let index = 0;
+    let index = -1;
     return new stream_1.Transform({
         objectMode: true,
         ...opt,
-        async transform(chunk, _encoding, cb) {
+        async transform(chunk, _, cb) {
            // console.log('tap', chunk)
            try {
-                await fn(chunk, index++);
+                await fn(chunk, ++index);
            }
            catch (err) {
                logger.error(err);
package/dist/stream/transform/transformToArray.js CHANGED
@@ -10,7 +10,7 @@ function transformToArray(opt = {}) {
     return new stream_1.Transform({
         objectMode: true,
         ...opt,
-        transform(chunk, _encoding, cb) {
+        transform(chunk, _, cb) {
            res.push(chunk);
            // callback to signal that we processed input, but not emitting any output
            cb();
package/dist/stream/transform/transformToString.js CHANGED
@@ -15,8 +15,8 @@ function transformToString() {
     return new stream_1.Transform({
         objectMode: false,
         readableObjectMode: true,
-        transform(chunk, _encoding, cb) {
-            // console.log(`enc: ${_encoding}`, chunk.toString())
+        transform(chunk, _, cb) {
+            // console.log(`enc: ${_}`, chunk.toString())
            cb(null, chunk.toString());
        },
    });
package/dist/stream/transform/worker/transformMultiThreaded.js CHANGED
@@ -71,7 +71,7 @@ function transformMultiThreaded(opt) {
            cb(err);
        }
    },
-    }, async function transformMapFn(chunk, _encoding, cb) {
+    }, async function transformMapFn(chunk, _, cb) {
        // Freezing the index, because it may change due to concurrency
        const currentIndex = ++index;
        // Create the unresolved promise (to avait)
package/dist/stream/writable/writableFork.js CHANGED
@@ -24,7 +24,7 @@ function writableFork(chains, opt) {
     return new stream_1.Writable({
         objectMode: true,
         ...opt,
-        write(chunk, _encoding, cb) {
+        write(chunk, _, cb) {
            // Push/fork to all sub-streams
            // No backpressure is ensured here, it'll push regardless of the
            readables.forEach(readable => readable.push(chunk));
package/dist/stream/writable/writablePushToArray.js CHANGED
@@ -9,7 +9,7 @@ function writablePushToArray(arr, opt = {}) {
     return new stream_1.Writable({
         objectMode: true,
         ...opt,
-        write(chunk, _encoding, cb) {
+        write(chunk, _, cb) {
            arr.push(chunk);
            // callback to signal that we processed input, but not emitting any output
            cb();
package/dist/stream/writable/writableVoid.js CHANGED
@@ -11,7 +11,7 @@ function writableVoid(opt = {}) {
     return new stream_1.Writable({
         objectMode: true,
         ...opt,
-        write(chunk, _encoding, cb) {
+        write(chunk, _, cb) {
            cb();
        },
        final(cb) {
package/dist/util/zip.util.d.ts CHANGED
@@ -2,17 +2,25 @@
 import { ZlibOptions } from 'zlib';
 /**
  * zipBuffer uses `deflate`.
- * It's 9 bytes shorter than gzip.
+ * It's 9 bytes shorter than `gzip`.
  */
 export declare function zipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
 /**
  * gzipBuffer uses `gzip`
- * It's 9 bytes longer than deflate.
+ * It's 9 bytes longer than `deflate`.
  */
 export declare function gzipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
 export declare function unzipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
 export declare function gunzipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
+/**
+ * zipString uses `deflate`.
+ * It's 9 bytes shorter than `gzip`.
+ */
 export declare function zipString(s: string, options?: ZlibOptions): Promise<Buffer>;
+/**
+ * gzipString uses `gzip`.
+ * It's 9 bytes longer than `deflate`.
+ */
 export declare function gzipString(s: string, options?: ZlibOptions): Promise<Buffer>;
 export declare function unzipToString(buf: Buffer, options?: ZlibOptions): Promise<string>;
 export declare function gunzipToString(buf: Buffer, options?: ZlibOptions): Promise<string>;
package/dist/util/zip.util.js CHANGED
@@ -10,7 +10,7 @@ const gunzip = (0, util_1.promisify)(zlib.gunzip.bind(zlib));
 // string > zip
 /**
  * zipBuffer uses `deflate`.
- * It's 9 bytes shorter than gzip.
+ * It's 9 bytes shorter than `gzip`.
  */
 async function zipBuffer(buf, options = {}) {
     return await deflate(buf, options);
@@ -18,7 +18,7 @@ async function zipBuffer(buf, options = {}) {
 exports.zipBuffer = zipBuffer;
 /**
  * gzipBuffer uses `gzip`
- * It's 9 bytes longer than deflate.
+ * It's 9 bytes longer than `deflate`.
  */
 async function gzipBuffer(buf, options = {}) {
     return await gzip(buf, options);
@@ -33,11 +33,18 @@ async function gunzipBuffer(buf, options = {}) {
     return await gunzip(buf, options);
 }
 exports.gunzipBuffer = gunzipBuffer;
-// convenience
+/**
+ * zipString uses `deflate`.
+ * It's 9 bytes shorter than `gzip`.
+ */
 async function zipString(s, options) {
     return await zipBuffer(Buffer.from(s), options);
 }
 exports.zipString = zipString;
+/**
+ * gzipString uses `gzip`.
+ * It's 9 bytes longer than `deflate`.
+ */
 async function gzipString(s, options) {
     return await gzipBuffer(Buffer.from(s), options);
 }
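The `// convenience` comment above was replaced by proper JSDoc for the string variants. A round-trip sketch of the four string helpers (declarations shown in zip.util.d.ts earlier), assuming they are re-exported from the package root as in prior 12.x releases:

import { gzipString, gunzipToString, zipString, unzipToString } from '@naturalcycles/nodejs-lib'

const zipped = await gzipString('{"hello":"world"}') // Buffer in gzip format
const restored = await gunzipToString(zipped) // '{"hello":"world"}'

// deflate-based variant; per the doc comments above, 9 bytes shorter than gzip
const deflated = await zipString('{"hello":"world"}')
const original = await unzipToString(deflated)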
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@naturalcycles/nodejs-lib",
-  "version": "12.59.0",
+  "version": "12.60.0",
   "scripts": {
     "prepare": "husky install",
     "docs-serve": "vuepress dev docs",