@naturalcycles/nodejs-lib 12.56.1 → 12.60.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/dist/index.d.ts +19 -18
  2. package/dist/index.js +19 -92
  3. package/dist/log/log.util.d.ts +4 -0
  4. package/dist/log/log.util.js +11 -0
  5. package/dist/stream/ndjson/ndjsonMap.d.ts +2 -2
  6. package/dist/stream/ndjson/ndjsonMap.js +4 -3
  7. package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +2 -2
  8. package/dist/stream/ndjson/transformJsonParse.js +3 -3
  9. package/dist/stream/ndjson/transformToNDJson.js +2 -2
  10. package/dist/stream/pipeline/pipeline.d.ts +25 -3
  11. package/dist/stream/pipeline/pipeline.js +76 -9
  12. package/dist/stream/readable/readableCreate.d.ts +8 -0
  13. package/dist/stream/readable/readableCreate.js +9 -1
  14. package/dist/stream/readable/readableForEach.d.ts +2 -2
  15. package/dist/stream/readable/readableFromArray.d.ts +2 -2
  16. package/dist/stream/readable/readableFromArray.js +17 -13
  17. package/dist/stream/readable/readableMap.d.ts +2 -2
  18. package/dist/stream/readable/readableMap.js +22 -17
  19. package/dist/stream/sizeStack.d.ts +9 -0
  20. package/dist/stream/sizeStack.js +48 -0
  21. package/dist/stream/stream.util.d.ts +4 -0
  22. package/dist/stream/stream.util.js +24 -0
  23. package/dist/stream/transform/transformBuffer.js +1 -1
  24. package/dist/stream/transform/transformFilter.d.ts +3 -4
  25. package/dist/stream/transform/transformFilter.js +5 -20
  26. package/dist/stream/transform/transformLimit.d.ts +36 -1
  27. package/dist/stream/transform/transformLimit.js +33 -15
  28. package/dist/stream/transform/transformLogProgress.d.ts +22 -1
  29. package/dist/stream/transform/transformLogProgress.js +38 -20
  30. package/dist/stream/transform/transformMap.d.ts +4 -10
  31. package/dist/stream/transform/transformMap.js +52 -64
  32. package/dist/stream/transform/transformMapSimple.d.ts +2 -1
  33. package/dist/stream/transform/transformMapSimple.js +3 -3
  34. package/dist/stream/transform/transformMapSync.d.ts +7 -4
  35. package/dist/stream/transform/transformMapSync.js +30 -24
  36. package/dist/stream/transform/transformNoOp.js +1 -1
  37. package/dist/stream/transform/transformTap.d.ts +5 -2
  38. package/dist/stream/transform/transformTap.js +5 -4
  39. package/dist/stream/transform/transformToArray.js +1 -1
  40. package/dist/stream/transform/transformToString.js +2 -2
  41. package/dist/stream/transform/worker/transformMultiThreaded.js +1 -1
  42. package/dist/stream/transform/worker/workerClassProxy.js +1 -0
  43. package/dist/stream/writable/writableFork.d.ts +2 -0
  44. package/dist/stream/writable/writableFork.js +3 -1
  45. package/dist/stream/writable/writableLimit.d.ts +9 -0
  46. package/dist/stream/writable/writableLimit.js +29 -0
  47. package/dist/stream/writable/writablePushToArray.js +1 -1
  48. package/dist/stream/writable/writableVoid.d.ts +8 -1
  49. package/dist/stream/writable/writableVoid.js +6 -2
  50. package/dist/util/zip.util.d.ts +10 -2
  51. package/dist/util/zip.util.js +10 -3
  52. package/package.json +1 -1
  53. package/src/index.ts +17 -156
  54. package/src/log/log.util.ts +9 -0
  55. package/src/stream/ndjson/ndjsonMap.ts +7 -5
  56. package/src/stream/ndjson/ndjsonStreamForEach.ts +2 -2
  57. package/src/stream/ndjson/transformJsonParse.ts +3 -3
  58. package/src/stream/ndjson/transformToNDJson.ts +2 -2
  59. package/src/stream/pipeline/pipeline.ts +102 -9
  60. package/src/stream/readable/readableCreate.ts +9 -1
  61. package/src/stream/readable/readableForEach.ts +2 -2
  62. package/src/stream/readable/readableFromArray.ts +18 -21
  63. package/src/stream/readable/readableMap.ts +24 -21
  64. package/src/stream/sizeStack.ts +56 -0
  65. package/src/stream/stream.util.ts +29 -0
  66. package/src/stream/transform/transformBuffer.ts +1 -1
  67. package/src/stream/transform/transformFilter.ts +6 -20
  68. package/src/stream/transform/transformLimit.ts +71 -19
  69. package/src/stream/transform/transformLogProgress.ts +78 -26
  70. package/src/stream/transform/transformMap.ts +74 -94
  71. package/src/stream/transform/transformMapSimple.ts +6 -4
  72. package/src/stream/transform/transformMapSync.ts +45 -28
  73. package/src/stream/transform/transformNoOp.ts +1 -1
  74. package/src/stream/transform/transformTap.ts +11 -6
  75. package/src/stream/transform/transformToArray.ts +1 -1
  76. package/src/stream/transform/transformToString.ts +2 -2
  77. package/src/stream/transform/worker/transformMultiThreaded.ts +1 -1
  78. package/src/stream/transform/worker/workerClassProxy.js +1 -0
  79. package/src/stream/writable/writableFork.ts +3 -1
  80. package/src/stream/writable/writableLimit.ts +28 -0
  81. package/src/stream/writable/writablePushToArray.ts +1 -1
  82. package/src/stream/writable/writableVoid.ts +14 -2
  83. package/src/util/zip.util.ts +11 -3
  84. package/dist/stream/transform/legacy/transformMap.d.ts +0 -17
  85. package/dist/stream/transform/legacy/transformMap.js +0 -94
  86. package/src/stream/transform/legacy/transformMap.ts +0 -133
@@ -14,15 +14,15 @@ const js_lib_1 = require("@naturalcycles/js-lib");
14
14
  */
15
15
  function transformMapSimple(mapper, opt = {}) {
16
16
  let index = -1;
17
- const { errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY } = opt;
17
+ const { errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, logger = console } = opt;
18
18
  return new stream_1.Transform({
19
19
  objectMode: true,
20
- transform(chunk, _encoding, cb) {
20
+ transform(chunk, _, cb) {
21
21
  try {
22
22
  cb(null, mapper(chunk, ++index));
23
23
  }
24
24
  catch (err) {
25
- console.error(err);
25
+ logger.error(err);
26
26
  if (errorMode === js_lib_1.ErrorMode.SUPPRESS) {
27
27
  cb(); // suppress the error
28
28
  }
@@ -1,4 +1,5 @@
1
- import { ErrorMode, Mapper, Predicate } from '@naturalcycles/js-lib';
1
+ import { CommonLogger, ErrorMode, Mapper, Predicate } from '@naturalcycles/js-lib';
2
+ import { AbortableTransform } from '../pipeline/pipeline';
2
3
  import { TransformTyped } from '../stream.model';
3
4
  export interface TransformMapSyncOptions<IN = any, OUT = IN> {
4
5
  /**
@@ -14,9 +15,8 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
14
15
  * Predicate to filter outgoing results (after mapper).
15
16
  * Allows to not emit all results.
16
17
  *
17
- * @default to filter out undefined/null values, but pass anything else
18
- *
19
- * Set to `r => r` (passthrough predicate) to pass ANY value (including undefined/null)
18
+ * Defaults to "pass everything".
19
+ * Simpler way to skip individual entries is to return SKIP symbol.
20
20
  */
21
21
  predicate?: Predicate<OUT>;
22
22
  /**
@@ -34,6 +34,9 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
34
34
  * @default `stream`
35
35
  */
36
36
  metric?: string;
37
+ logger?: CommonLogger;
38
+ }
39
+ export declare class TransformMapSync extends AbortableTransform {
37
40
  }
38
41
  /**
39
42
  * Sync (not async) version of transformMap.
@@ -1,45 +1,51 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.transformMapSync = void 0;
4
- const stream_1 = require("stream");
3
+ exports.transformMapSync = exports.TransformMapSync = void 0;
5
4
  const js_lib_1 = require("@naturalcycles/js-lib");
6
5
  const colors_1 = require("../../colors");
7
- const transformMap_1 = require("./transformMap");
6
+ const pipeline_1 = require("../pipeline/pipeline");
7
+ const stream_util_1 = require("../stream.util");
8
+ class TransformMapSync extends pipeline_1.AbortableTransform {
9
+ }
10
+ exports.TransformMapSync = TransformMapSync;
8
11
  /**
9
12
  * Sync (not async) version of transformMap.
10
13
  * Supposedly faster, for cases when async is not needed.
11
14
  */
12
15
  function transformMapSync(mapper, opt = {}) {
13
16
  let index = -1;
14
- const { predicate = transformMap_1.notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput = false, onError, metric = 'stream', objectMode = true, } = opt;
15
- let isRejected = false;
17
+ const { predicate, // defaults to "no predicate" (pass everything)
18
+ errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput = false, onError, metric = 'stream', objectMode = true, logger = console, } = opt;
19
+ let isSettled = false;
16
20
  let errors = 0;
17
21
  const collectedErrors = []; // only used if errorMode == THROW_AGGREGATED
18
- return new stream_1.Transform({
22
+ return new TransformMapSync({
19
23
  objectMode,
20
24
  ...opt,
21
- transform(chunk, _encoding, cb) {
22
- // Stop processing if THROW_IMMEDIATELY mode is used
23
- if (isRejected && errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY) {
25
+ transform(chunk, _, cb) {
26
+ // Stop processing if isSettled
27
+ if (isSettled)
24
28
  return cb();
25
- }
29
+ const currentIndex = ++index;
26
30
  try {
27
- if (!predicate(chunk, ++index)) {
28
- cb(); // signal that we've finished processing, but emit no output here
29
- return;
30
- }
31
31
  // map and pass through
32
- const v = mapper(chunk, index);
33
- if (flattenArrayOutput && Array.isArray(v)) {
34
- // Pass each item individually
35
- v.forEach(item => this.push(item));
36
- }
37
- else {
38
- cb(null, v);
32
+ const v = mapper(chunk, currentIndex);
33
+ const passedResults = (flattenArrayOutput && Array.isArray(v) ? v : [v]).filter(r => {
34
+ if (r === js_lib_1.END) {
35
+ isSettled = true; // will be checked later
36
+ return false;
37
+ }
38
+ return r !== js_lib_1.SKIP && (!predicate || predicate(r, currentIndex));
39
+ });
40
+ passedResults.forEach(r => this.push(r));
41
+ if (isSettled) {
42
+ logger.log(`transformMapSync END received at index ${currentIndex}`);
43
+ (0, stream_util_1.pipelineClose)('transformMapSync', this, this.sourceReadable, this.streamDone, logger);
39
44
  }
45
+ cb(); // done processing
40
46
  }
41
47
  catch (err) {
42
- console.error(err);
48
+ logger.error(err);
43
49
  errors++;
44
50
  logErrorStats();
45
51
  if (onError) {
@@ -49,7 +55,7 @@ function transformMapSync(mapper, opt = {}) {
49
55
  catch { }
50
56
  }
51
57
  if (errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY) {
52
- isRejected = true;
58
+ isSettled = true;
53
59
  // Emit error immediately
54
60
  return cb(err);
55
61
  }
@@ -75,7 +81,7 @@ function transformMapSync(mapper, opt = {}) {
75
81
  function logErrorStats(final = false) {
76
82
  if (!errors)
77
83
  return;
78
- console.log(`${metric} ${final ? 'final ' : ''}errors: ${(0, colors_1.yellow)(errors)}`);
84
+ logger.log(`${metric} ${final ? 'final ' : ''}errors: ${(0, colors_1.yellow)(errors)}`);
79
85
  }
80
86
  }
81
87
  exports.transformMapSync = transformMapSync;
@@ -10,7 +10,7 @@ const stream_1 = require("stream");
10
10
  function transformNoOp() {
11
11
  return new stream_1.Transform({
12
12
  objectMode: true,
13
- transform(chunk, _encoding, cb) {
13
+ transform(chunk, _, cb) {
14
14
  cb(null, chunk);
15
15
  },
16
16
  });
@@ -1,9 +1,12 @@
1
- import { AsyncMapper } from '@naturalcycles/js-lib';
1
+ import { AsyncMapper, CommonLogger } from '@naturalcycles/js-lib';
2
2
  import { TransformOptions, TransformTyped } from '../stream.model';
3
+ export interface TransformTapOptions extends TransformOptions {
4
+ logger?: CommonLogger;
5
+ }
3
6
  /**
4
7
  * Similar to RxJS `tap` - allows to run a function for each stream item, without affecting the result.
5
8
  * Item is passed through to the output.
6
9
  *
7
10
  * Can also act as a counter, since `index` is passed to `fn`
8
11
  */
9
- export declare function transformTap<IN>(fn: AsyncMapper<IN, any>, opt?: TransformOptions): TransformTyped<IN, IN>;
12
+ export declare function transformTap<IN>(fn: AsyncMapper<IN, any>, opt?: TransformTapOptions): TransformTyped<IN, IN>;
@@ -9,17 +9,18 @@ const stream_1 = require("stream");
9
9
  * Can also act as a counter, since `index` is passed to `fn`
10
10
  */
11
11
  function transformTap(fn, opt = {}) {
12
- let index = 0;
12
+ const { logger = console } = opt;
13
+ let index = -1;
13
14
  return new stream_1.Transform({
14
15
  objectMode: true,
15
16
  ...opt,
16
- async transform(chunk, _encoding, cb) {
17
+ async transform(chunk, _, cb) {
17
18
  // console.log('tap', chunk)
18
19
  try {
19
- await fn(chunk, index++);
20
+ await fn(chunk, ++index);
20
21
  }
21
22
  catch (err) {
22
- console.error(err);
23
+ logger.error(err);
23
24
  // suppressed error
24
25
  }
25
26
  cb(null, chunk); // pass through the item
@@ -10,7 +10,7 @@ function transformToArray(opt = {}) {
10
10
  return new stream_1.Transform({
11
11
  objectMode: true,
12
12
  ...opt,
13
- transform(chunk, _encoding, cb) {
13
+ transform(chunk, _, cb) {
14
14
  res.push(chunk);
15
15
  // callback to signal that we processed input, but not emitting any output
16
16
  cb();
@@ -15,8 +15,8 @@ function transformToString() {
15
15
  return new stream_1.Transform({
16
16
  objectMode: false,
17
17
  readableObjectMode: true,
18
- transform(chunk, _encoding, cb) {
19
- // console.log(`enc: ${_encoding}`, chunk.toString())
18
+ transform(chunk, _, cb) {
19
+ // console.log(`enc: ${_}`, chunk.toString())
20
20
  cb(null, chunk.toString());
21
21
  },
22
22
  });
@@ -71,7 +71,7 @@ function transformMultiThreaded(opt) {
71
71
  cb(err);
72
72
  }
73
73
  },
74
- }, async function transformMapFn(chunk, _encoding, cb) {
74
+ }, async function transformMapFn(chunk, _, cb) {
75
75
  // Freezing the index, because it may change due to concurrency
76
76
  const currentIndex = ++index;
77
77
  // Create the unresolved promise (to avait)
@@ -10,6 +10,7 @@ if (!workerFile) {
10
10
  // console.log(`worker#${workerIndex} created`)
11
11
 
12
12
  try {
13
+ // require('esbuild-register') // alternative
13
14
  require('ts-node/register/transpile-only')
14
15
  require('tsconfig-paths/register')
15
16
  } catch {} // require if exists
@@ -5,5 +5,7 @@ import { TransformOptions, WritableTyped } from '../stream.model';
5
5
  * Currently does NOT (!) maintain backpressure.
6
6
  * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
7
7
  * Will wait until all forked pipelines are completed before completing the stream.
8
+ *
9
+ * @experimental
8
10
  */
9
11
  export declare function writableFork<T>(chains: NodeJS.WritableStream[][], opt?: TransformOptions): WritableTyped<T>;
@@ -8,6 +8,8 @@ const __1 = require("../..");
8
8
  * Currently does NOT (!) maintain backpressure.
9
9
  * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
10
10
  * Will wait until all forked pipelines are completed before completing the stream.
11
+ *
12
+ * @experimental
11
13
  */
12
14
  function writableFork(chains, opt) {
13
15
  const readables = [];
@@ -22,7 +24,7 @@ function writableFork(chains, opt) {
22
24
  return new stream_1.Writable({
23
25
  objectMode: true,
24
26
  ...opt,
25
- write(chunk, _encoding, cb) {
27
+ write(chunk, _, cb) {
26
28
  // Push/fork to all sub-streams
27
29
  // No backpressure is ensured here, it'll push regardless of the
28
30
  readables.forEach(readable => readable.push(chunk));
@@ -0,0 +1,9 @@
1
+ /// <reference types="node" />
2
+ import { Readable } from 'stream';
3
+ import { WritableTyped } from '../stream.model';
4
+ /**
5
+ * Allows to stop the Readable stream after the pipeline has processed X number of rows.
6
+ * It counts OUTPUT rows (not input), because this Writable is always at the end of the Pipeline.
7
+ * It ensures that everything has been processed before issuing a STOP on the readable.
8
+ */
9
+ export declare function writableLimit<T>(readable: Readable, limit: number): WritableTyped<T>;
@@ -0,0 +1,29 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.writableLimit = void 0;
4
+ const stream_1 = require("stream");
5
+ /**
6
+ * Allows to stop the Readable stream after the pipeline has processed X number of rows.
7
+ * It counts OUTPUT rows (not input), because this Writable is always at the end of the Pipeline.
8
+ * It ensures that everything has been processed before issuing a STOP on the readable.
9
+ */
10
+ function writableLimit(readable, limit) {
11
+ let i = 0;
12
+ return new stream_1.Writable({
13
+ objectMode: true,
14
+ write(chunk, _, cb) {
15
+ if (limit === 0)
16
+ return cb(); // no limit, just passthrough
17
+ i++;
18
+ if (i === limit) {
19
+ console.log(`writableLimit of ${limit} reached`);
20
+ readable.destroy();
21
+ cb(); // do we need it?
22
+ }
23
+ else {
24
+ cb(); // passthrough
25
+ }
26
+ },
27
+ });
28
+ }
29
+ exports.writableLimit = writableLimit;
@@ -9,7 +9,7 @@ function writablePushToArray(arr, opt = {}) {
9
9
  return new stream_1.Writable({
10
10
  objectMode: true,
11
11
  ...opt,
12
- write(chunk, _encoding, cb) {
12
+ write(chunk, _, cb) {
13
13
  arr.push(chunk);
14
14
  // callback to signal that we processed input, but not emitting any output
15
15
  cb();
@@ -1,9 +1,16 @@
1
1
  /// <reference types="node" />
2
2
  import { Writable } from 'stream';
3
+ import { DeferredPromise } from '@naturalcycles/js-lib';
3
4
  import { TransformOptions } from '../stream.model';
5
+ export interface WritableVoidOptions extends TransformOptions {
6
+ /**
7
+ * If set - it will be Resolved when the Stream is done (after final.cb)
8
+ */
9
+ streamDone?: DeferredPromise;
10
+ }
4
11
  /**
5
12
  * Use as a "null-terminator" of stream.pipeline.
6
13
  * It consumes the stream as quickly as possible without doing anything.
7
14
  * Put it in the end of your pipeline in case it ends with Transform that needs a consumer.
8
15
  */
9
- export declare function writableVoid(opt?: TransformOptions): Writable;
16
+ export declare function writableVoid(opt?: WritableVoidOptions): Writable;
@@ -7,13 +7,17 @@ const stream_1 = require("stream");
7
7
  * It consumes the stream as quickly as possible without doing anything.
8
8
  * Put it in the end of your pipeline in case it ends with Transform that needs a consumer.
9
9
  */
10
- function writableVoid(opt) {
10
+ function writableVoid(opt = {}) {
11
11
  return new stream_1.Writable({
12
12
  objectMode: true,
13
13
  ...opt,
14
- write(chunk, _encoding, cb) {
14
+ write(chunk, _, cb) {
15
15
  cb();
16
16
  },
17
+ final(cb) {
18
+ cb();
19
+ opt.streamDone?.resolve();
20
+ },
17
21
  });
18
22
  }
19
23
  exports.writableVoid = writableVoid;
@@ -2,17 +2,25 @@
2
2
  import { ZlibOptions } from 'zlib';
3
3
  /**
4
4
  * zipBuffer uses `deflate`.
5
- * It's 9 bytes shorter than gzip.
5
+ * It's 9 bytes shorter than `gzip`.
6
6
  */
7
7
  export declare function zipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
8
8
  /**
9
9
  * gzipBuffer uses `gzip`
10
- * It's 9 bytes longer than deflate.
10
+ * It's 9 bytes longer than `deflate`.
11
11
  */
12
12
  export declare function gzipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
13
13
  export declare function unzipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
14
14
  export declare function gunzipBuffer(buf: Buffer, options?: ZlibOptions): Promise<Buffer>;
15
+ /**
16
+ * zipString uses `deflate`.
17
+ * It's 9 bytes shorter than `gzip`.
18
+ */
15
19
  export declare function zipString(s: string, options?: ZlibOptions): Promise<Buffer>;
20
+ /**
21
+ * gzipString uses `gzip`.
22
+ * It's 9 bytes longer than `deflate`.
23
+ */
16
24
  export declare function gzipString(s: string, options?: ZlibOptions): Promise<Buffer>;
17
25
  export declare function unzipToString(buf: Buffer, options?: ZlibOptions): Promise<string>;
18
26
  export declare function gunzipToString(buf: Buffer, options?: ZlibOptions): Promise<string>;
@@ -10,7 +10,7 @@ const gunzip = (0, util_1.promisify)(zlib.gunzip.bind(zlib));
10
10
  // string > zip
11
11
  /**
12
12
  * zipBuffer uses `deflate`.
13
- * It's 9 bytes shorter than gzip.
13
+ * It's 9 bytes shorter than `gzip`.
14
14
  */
15
15
  async function zipBuffer(buf, options = {}) {
16
16
  return await deflate(buf, options);
@@ -18,7 +18,7 @@ async function zipBuffer(buf, options = {}) {
18
18
  exports.zipBuffer = zipBuffer;
19
19
  /**
20
20
  * gzipBuffer uses `gzip`
21
- * It's 9 bytes longer than deflate.
21
+ * It's 9 bytes longer than `deflate`.
22
22
  */
23
23
  async function gzipBuffer(buf, options = {}) {
24
24
  return await gzip(buf, options);
@@ -33,11 +33,18 @@ async function gunzipBuffer(buf, options = {}) {
33
33
  return await gunzip(buf, options);
34
34
  }
35
35
  exports.gunzipBuffer = gunzipBuffer;
36
- // convenience
36
+ /**
37
+ * zipString uses `deflate`.
38
+ * It's 9 bytes shorter than `gzip`.
39
+ */
37
40
  async function zipString(s, options) {
38
41
  return await zipBuffer(Buffer.from(s), options);
39
42
  }
40
43
  exports.zipString = zipString;
44
+ /**
45
+ * gzipString uses `gzip`.
46
+ * It's 9 bytes longer than `deflate`.
47
+ */
41
48
  async function gzipString(s, options) {
42
49
  return await gzipBuffer(Buffer.from(s), options);
43
50
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@naturalcycles/nodejs-lib",
3
- "version": "12.56.1",
3
+ "version": "12.60.0",
4
4
  "scripts": {
5
5
  "prepare": "husky install",
6
6
  "docs-serve": "vuepress dev docs",