@naturalcycles/nodejs-lib 12.56.0 → 12.59.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +19 -18
- package/dist/index.js +19 -92
- package/dist/log/log.util.d.ts +4 -0
- package/dist/log/log.util.js +11 -0
- package/dist/stream/ndjson/ndjsonMap.d.ts +2 -2
- package/dist/stream/ndjson/ndjsonMap.js +4 -3
- package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +2 -2
- package/dist/stream/pipeline/pipeline.d.ts +25 -3
- package/dist/stream/pipeline/pipeline.js +76 -9
- package/dist/stream/readable/readableCreate.d.ts +8 -0
- package/dist/stream/readable/readableCreate.js +9 -1
- package/dist/stream/readable/readableForEach.d.ts +2 -2
- package/dist/stream/readable/readableFromArray.d.ts +2 -2
- package/dist/stream/readable/readableFromArray.js +17 -13
- package/dist/stream/readable/readableMap.d.ts +2 -2
- package/dist/stream/readable/readableMap.js +22 -17
- package/dist/stream/stream.util.d.ts +4 -0
- package/dist/stream/stream.util.js +24 -0
- package/dist/stream/transform/transformLimit.d.ts +36 -1
- package/dist/stream/transform/transformLimit.js +33 -15
- package/dist/stream/transform/transformLogProgress.d.ts +2 -1
- package/dist/stream/transform/transformLogProgress.js +4 -4
- package/dist/stream/transform/transformMap.d.ts +2 -6
- package/dist/stream/transform/transformMap.js +51 -53
- package/dist/stream/transform/transformMapSimple.d.ts +2 -1
- package/dist/stream/transform/transformMapSimple.js +2 -2
- package/dist/stream/transform/transformMapSync.d.ts +2 -1
- package/dist/stream/transform/transformMapSync.js +3 -3
- package/dist/stream/transform/transformTap.d.ts +5 -2
- package/dist/stream/transform/transformTap.js +2 -1
- package/dist/stream/transform/worker/workerClassProxy.js +1 -0
- package/dist/stream/writable/writableFork.d.ts +2 -0
- package/dist/stream/writable/writableFork.js +2 -0
- package/dist/stream/writable/writableLimit.d.ts +9 -0
- package/dist/stream/writable/writableLimit.js +29 -0
- package/dist/stream/writable/writableVoid.d.ts +8 -1
- package/dist/stream/writable/writableVoid.js +5 -1
- package/package.json +1 -1
- package/src/index.ts +17 -156
- package/src/log/log.util.ts +9 -0
- package/src/stream/ndjson/ndjsonMap.ts +7 -5
- package/src/stream/ndjson/ndjsonStreamForEach.ts +2 -2
- package/src/stream/pipeline/pipeline.ts +102 -9
- package/src/stream/readable/readableCreate.ts +9 -1
- package/src/stream/readable/readableForEach.ts +2 -2
- package/src/stream/readable/readableFromArray.ts +18 -21
- package/src/stream/readable/readableMap.ts +24 -21
- package/src/stream/stream.util.ts +29 -0
- package/src/stream/transform/transformLimit.ts +71 -19
- package/src/stream/transform/transformLogProgress.ts +7 -4
- package/src/stream/transform/transformMap.ts +74 -82
- package/src/stream/transform/transformMapSimple.ts +5 -3
- package/src/stream/transform/transformMapSync.ts +6 -3
- package/src/stream/transform/transformTap.ts +8 -3
- package/src/stream/transform/worker/workerClassProxy.js +1 -0
- package/src/stream/writable/writableFork.ts +2 -0
- package/src/stream/writable/writableLimit.ts +28 -0
- package/src/stream/writable/writableVoid.ts +13 -1
- package/dist/stream/transform/legacy/transformMap.d.ts +0 -17
- package/dist/stream/transform/legacy/transformMap.js +0 -94
- package/src/stream/transform/legacy/transformMap.ts +0 -133

package/dist/stream/readable/readableMap.js

@@ -1,24 +1,29 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.readableMap = void 0;
-const
+const stream_1 = require("stream");
+const js_lib_1 = require("@naturalcycles/js-lib");
 function readableMap(readable, mapper) {
-
-
-
-
-
-
-
+    let i = -1;
+    // todo: check if we need to handle errors somehow specifically
+    return readable.pipe(new stream_1.Transform({
+        objectMode: true,
+        async transform(chunk, _enc, cb) {
+            try {
+                const r = await mapper(chunk, ++i);
+                if (r === js_lib_1.SKIP) {
+                    cb();
+                }
+                else {
+                    // _assert(r !== END, `readableMap END not supported`)
+                    cb(null, r);
+                }
             }
-
-
-
-
-
-
-}
-})();
-return out;
+            catch (err) {
+                console.error(err);
+                cb(err);
+            }
+        },
+    }));
 }
 exports.readableMap = readableMap;
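
For context (illustrative, not part of the diff): a minimal usage sketch of the new SKIP handling in readableMap, assuming readableMap is re-exported from the package root like the other stream helpers; typing details are approximate.

```ts
import { Readable } from 'stream'
import { SKIP } from '@naturalcycles/js-lib'
import { readableMap } from '@naturalcycles/nodejs-lib'

async function demo(): Promise<void> {
  // Returning SKIP now drops the chunk entirely, instead of emitting a mapped value
  const odd = readableMap(Readable.from([1, 2, 3, 4, 5]), async (n: number) => {
    return n % 2 === 1 ? n * 10 : SKIP
  })

  for await (const x of odd) {
    console.log(x) // 10, 30, 50
  }
}
```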

package/dist/stream/stream.util.d.ts

@@ -0,0 +1,4 @@
+/// <reference types="node" />
+import { Readable } from 'stream';
+import { CommonLogger } from '@naturalcycles/js-lib';
+export declare function pipelineClose(name: string, readableDownstream: Readable, sourceReadable: Readable | undefined, streamDone: Promise<void> | undefined, logger: CommonLogger): void;

package/dist/stream/stream.util.js

@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.pipelineClose = void 0;
+function pipelineClose(name, readableDownstream, sourceReadable, streamDone, logger) {
+    readableDownstream.push(null); // this closes the stream, so downstream Readable will receive `end` and won't write anything
+    if (!sourceReadable) {
+        logger.warn(`${name} sourceReadable is not provided, readable stream will not be stopped`);
+    }
+    else {
+        logger.log(`${name} is calling readable.unpipe() to pause the stream`);
+        sourceReadable.unpipe(); // it is expected to pause the stream
+        if (!streamDone) {
+            logger.log(`${name} streamDone is not provided, will do readable.destroy right away`);
+            sourceReadable.destroy();
+        }
+        else {
+            void streamDone.then(() => {
+                logger.log(`${name} streamDone, calling readable.destroy()`);
+                sourceReadable.destroy(); // this throws ERR_STREAM_PREMATURE_CLOSE
+            });
+        }
+    }
+}
+exports.pipelineClose = pipelineClose;
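
For orientation (illustrative, not part of the diff): the call pattern pipelineClose enables. A transform that decides to stop pushes null downstream, unpipes the source, and destroys it once streamDone resolves. The takeOne transform below is hypothetical, and the root re-export of pipelineClose is assumed.

```ts
import { Readable, Transform } from 'stream'
import { pipelineClose } from '@naturalcycles/nodejs-lib'

// Hypothetical transform: lets the first chunk through, then shuts the pipeline down.
// sourceReadable/streamDone are normally wired by the caller (or by `_pipeline` for AbortableTransform).
export function takeOne(sourceReadable: Readable, streamDone?: Promise<void>): Transform {
  return new Transform({
    objectMode: true,
    transform(chunk, _enc, cb) {
      this.push(chunk)
      // push(null) ends the downstream, unpipe() pauses the source,
      // destroy() runs right away or after streamDone resolves
      pipelineClose('takeOne', this, sourceReadable, streamDone, console)
      cb()
    },
  })
}
```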

package/dist/stream/transform/transformLimit.d.ts

@@ -1,5 +1,40 @@
+/// <reference types="node" />
+import { Readable } from 'stream';
+import { CommonLogger } from '@naturalcycles/js-lib';
+import { AbortableTransform } from '../../index';
 import { TransformOptions, TransformTyped } from '../stream.model';
+export interface TransformLimitOptions extends TransformOptions {
+    /**
+     * Nullish value (e.g 0 or undefined) would mean "no limit"
+     */
+    limit?: number;
+    /**
+     * If provided (recommended!) - it will call readable.destroy() on limit.
+     * Without it - it will only stop the downstream consumers, but won't stop
+     * the Readable ("source" of the stream).
+     * It is almost always crucial to stop the Source too, so, please provide the Readable here!
+     */
+    sourceReadable?: Readable;
+    /**
+     * Please provide it (a Promise that resolves when the Stream is done, e.g finished consuming things)
+     * to be able to wait for Consumers before calling `readable.destroy`.
+     * Has no effect if `readable` is not provided.
+     */
+    streamDone?: Promise<void>;
+    logger?: CommonLogger;
+    /**
+     * Set to true to enable additional debug messages, e.g it'll log
+     * when readable still emits values after the limit is reached.
+     */
+    debug?: boolean;
+}
+/**
+ * Class only exists to be able to do `instanceof TransformLimit`
+ * and to set sourceReadable+streamDone to it in `_pipeline`.
+ */
+export declare class TransformLimit extends AbortableTransform {
+}
 /**
  * 0 or falsy value means "no limit"
  */
-export declare function transformLimit<IN>(
+export declare function transformLimit<IN>(opt?: TransformLimitOptions): TransformTyped<IN, IN>;

package/dist/stream/transform/transformLimit.js

@@ -1,28 +1,46 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.transformLimit = void 0;
-const
+exports.transformLimit = exports.TransformLimit = void 0;
+const index_1 = require("../../index");
+const stream_util_1 = require("../stream.util");
+/**
+ * Class only exists to be able to do `instanceof TransformLimit`
+ * and to set sourceReadable+streamDone to it in `_pipeline`.
+ */
+class TransformLimit extends index_1.AbortableTransform {
+}
+exports.TransformLimit = TransformLimit;
 /**
  * 0 or falsy value means "no limit"
  */
-function transformLimit(
-
+function transformLimit(opt = {}) {
+    const { logger = console, limit, debug } = opt;
+    if (!limit) {
+        // No limit - returning pass-through transform
+        return (0, index_1.transformNoOp)();
+    }
+    let i = 0; // so we start first chunk with 1
     let ended = false;
-return new
+    return new TransformLimit({
         objectMode: true,
         ...opt,
-transform(chunk,
-
-if (
-
+        transform(chunk, _, cb) {
+            i++;
+            if (i === limit) {
+                ended = true;
+                logger.log(`transformLimit of ${limit} reached`);
+                this.push(chunk);
+                (0, stream_util_1.pipelineClose)('transformLimit', this, opt.sourceReadable || this.sourceReadable, opt.streamDone || this.streamDone, logger);
+                cb(); // after pause
            }
-else {
-cb(null);
+            else if (!ended) {
+                cb(null, chunk);
            }
-
-
-
-//
+            else {
+                if (debug)
+                    logger.log(`transformLimit.transform after limit`, i);
+                // If we ever HANG (don't call cb) - Node will do process.exit(0) to us
+                cb(); // ended, don't emit anything
            }
        },
    });
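
Putting the transformLimit pieces together, a hedged usage sketch: pDefer() from @naturalcycles/js-lib is assumed to create the DeferredPromise that writableVoid resolves in its final() callback, and _pipeline is assumed to be this package's pipeline wrapper. The limit closes the downstream immediately; the source Readable is only destroyed once streamDone resolves.

```ts
import { Readable } from 'stream'
import { pDefer } from '@naturalcycles/js-lib'
import { _pipeline, transformLimit, writableVoid } from '@naturalcycles/nodejs-lib'

async function processFirst100(source: Readable): Promise<void> {
  // Resolved by writableVoid's final(), i.e. when all consumers have finished
  const streamDone = pDefer()

  await _pipeline([
    source,
    // At chunk 100: push it, close the downstream, unpipe the source,
    // and destroy the source after streamDone resolves
    transformLimit({ limit: 100, sourceReadable: source, streamDone }),
    writableVoid({ streamDone }),
  ])
}
```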

package/dist/stream/transform/transformLogProgress.d.ts

@@ -1,4 +1,4 @@
-import { AnyObject } from '@naturalcycles/js-lib';
+import { AnyObject, CommonLogger } from '@naturalcycles/js-lib';
 import { TransformOptions, TransformTyped } from '../stream.model';
 export interface TransformLogProgressOptions<IN = any> extends TransformOptions {
     /**
@@ -69,6 +69,7 @@ export interface TransformLogProgressOptions<IN = any> extends TransformOptions
      * @default 1000
      */
     logEvery?: number;
+    logger?: CommonLogger;
     /**
      * Function to return extra properties to the "progress object".
      *

package/dist/stream/transform/transformLogProgress.js

@@ -15,7 +15,7 @@ const inspectOpt = {
  * Pass-through transform that optionally logs progress.
  */
 function transformLogProgress(opt = {}) {
-    const { metric = 'progress', heapTotal: logHeapTotal = false, heapUsed: logHeapUsed = false, rss: logRss = true, peakRSS: logPeakRSS = true, logRPS = true, logEvery = 1000, batchSize = 1, extra, } = opt;
+    const { metric = 'progress', heapTotal: logHeapTotal = false, heapUsed: logHeapUsed = false, rss: logRss = true, peakRSS: logPeakRSS = true, logRPS = true, logEvery = 1000, batchSize = 1, extra, logger = console, } = opt;
     const logProgress = opt.logProgress !== false && logEvery !== 0; // true by default
     const logEvery10 = logEvery * 10;
     const started = Date.now();
@@ -54,7 +54,7 @@ function transformLogProgress(opt = {}) {
         const rps10 = Math.round(sma.push(lastRPS));
         if (mem.rss > peakRSS)
             peakRSS = mem.rss;
-
+        logger.log((0, util_1.inspect)({
             [final ? `${metric}_final` : metric]: batchedProgress,
             ...(extra ? extra(chunk, progress) : {}),
             ...(logHeapUsed ? { heapUsed: (0, js_lib_1._mb)(mem.heapUsed) } : {}),
@@ -76,10 +76,10 @@ function transformLogProgress(opt = {}) {
             if (perHour > 900) {
                 perHour = Math.round(perHour / 1000) + 'K';
             }
-
+            logger.log(`${(0, colors_1.dimGrey)((0, time_lib_1.dayjs)().toPretty())} ${(0, colors_1.white)(metric)} took ${(0, colors_1.yellow)((0, js_lib_1._since)(started))} so far to process ${(0, colors_1.yellow)(batchedProgress)} rows, ~${(0, colors_1.yellow)(perHour)}/hour`);
         }
         else if (final) {
-
+            logger.log(`${(0, colors_1.boldWhite)(metric)} took ${(0, colors_1.yellow)((0, js_lib_1._since)(started))} to process ${(0, colors_1.yellow)(batchedProgress)} rows with total RPS of ${(0, colors_1.yellow)(rpsTotal)}`);
         }
     }
 }

package/dist/stream/transform/transformMap.d.ts

@@ -1,4 +1,4 @@
-import {
+import { AbortableAsyncMapper, AsyncPredicate, CommonLogger, ErrorMode } from '@naturalcycles/js-lib';
 import { TransformTyped } from '../stream.model';
 export interface TransformMapOptions<IN = any, OUT = IN> {
     /**
@@ -37,10 +37,6 @@ export interface TransformMapOptions<IN = any, OUT = IN> {
      * @default `stream`
      */
     metric?: string;
-    /**
-     * If defined - called BEFORE `final()` callback is called.
-     */
-    beforeFinal?: () => any;
     logger?: CommonLogger;
 }
 export declare function notNullishPredicate(item: any): boolean;
@@ -56,4 +52,4 @@ export declare function notNullishPredicate(item: any): boolean;
  *
  * If an Array is returned by `mapper` - it will be flattened and multiple results will be emitted from it. Tested by Array.isArray().
  */
-export declare function transformMap<IN = any, OUT = IN>(mapper:
+export declare function transformMap<IN = any, OUT = IN>(mapper: AbortableAsyncMapper<IN, OUT>, opt?: TransformMapOptions<IN, OUT>): TransformTyped<IN, OUT>;

package/dist/stream/transform/transformMap.js

@@ -1,13 +1,16 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.transformMap = exports.notNullishPredicate = void 0;
-const stream_1 = require("stream");
 const js_lib_1 = require("@naturalcycles/js-lib");
+const through2Concurrent = require("through2-concurrent");
 const colors_1 = require("../../colors");
+const stream_util_1 = require("../stream.util");
 function notNullishPredicate(item) {
     return item !== undefined && item !== null;
 }
 exports.notNullishPredicate = notNullishPredicate;
+// doesn't work, cause here we don't construct our Transform instance ourselves
+// export class TransformMap extends AbortableTransform {}
 /**
  * Like pMap, but for streams.
  * Inspired by `through2`.
@@ -21,24 +24,16 @@ exports.notNullishPredicate = notNullishPredicate;
  * If an Array is returned by `mapper` - it will be flattened and multiple results will be emitted from it. Tested by Array.isArray().
  */
 function transformMap(mapper, opt = {}) {
-    const { concurrency = 16, predicate = notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput, onError,
+    const { concurrency = 16, predicate = notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput, onError, metric = 'stream', logger = console, } = opt;
     let index = -1;
-    let
+    let isSettled = false;
     let errors = 0;
     const collectedErrors = []; // only used if errorMode == THROW_AGGREGATED
-
-concurrency,
-resolveOn: 'start',
-// debug: true,
-});
-return new stream_1.Transform({
-objectMode: true,
+    return through2Concurrent.obj({
+        maxConcurrency: concurrency,
         async final(cb) {
-            // console.log('transformMap final'
-
-            await q.onIdle();
-            logErrorStats(logger, true);
-            await beforeFinal?.(); // call beforeFinal if defined
+            // console.log('transformMap final')
+            logErrorStats(true);
             if (collectedErrors.length) {
                 // emit Aggregated error
                 cb(new js_lib_1.AggregatedError(collectedErrors));
@@ -48,48 +43,51 @@ function transformMap(mapper, opt = {}) {
                 cb();
             }
         },
-
-
-
-
-
-
-
-//
-await
-
-
-
-
-passedResults.forEach(r => this.push(r));
-}
-catch (err) {
-logger.error(err);
-errors++;
-logErrorStats(logger);
-if (onError) {
-try {
-onError(err, chunk);
-}
-catch { }
-}
-if (errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY) {
-isRejected = true;
-// Emit error immediately
-// return cb(err as Error)
-return this.emit('error', err);
-}
-if (errorMode === js_lib_1.ErrorMode.THROW_AGGREGATED) {
-collectedErrors.push(err);
-}
+    }, async function transformMapFn(chunk, _, cb) {
+        index++;
+        // console.log({chunk, _encoding})
+        // Stop processing if isSettled (either THROW_IMMEDIATELY was fired or END received)
+        if (isSettled)
+            return cb();
+        try {
+            const currentIndex = index; // because we need to pass it to 2 functions - mapper and predicate. Refers to INPUT index (since it may return multiple outputs)
+            const res = await mapper(chunk, currentIndex);
+            const passedResults = await (0, js_lib_1.pFilter)(flattenArrayOutput && Array.isArray(res) ? res : [res], async (r) => {
+                if (r === js_lib_1.END) {
+                    isSettled = true; // will be checked later
+                    return false;
                }
+                return r !== js_lib_1.SKIP && (await predicate(r, currentIndex));
            });
-
-
+            passedResults.forEach(r => this.push(r));
+            if (isSettled) {
+                logger.log(`transformMap END received at index ${index}`);
+                (0, stream_util_1.pipelineClose)('transformMap', this, this.sourceReadable, this.streamDone, logger);
+            }
+            cb(); // done processing
+        }
+        catch (err) {
+            logger.error(err);
+            errors++;
+            logErrorStats();
+            if (onError) {
+                try {
+                    onError(err, chunk);
+                }
+                catch { }
+            }
+            if (errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY) {
+                isSettled = true;
+                return cb(err); // Emit error immediately
+            }
+            if (errorMode === js_lib_1.ErrorMode.THROW_AGGREGATED) {
+                collectedErrors.push(err);
+            }
+            // Tell input stream that we're done processing, but emit nothing to output - not error nor result
             cb();
-}
+        }
     });
-function logErrorStats(
+    function logErrorStats(final = false) {
         if (!errors)
             return;
         logger.log(`${metric} ${final ? 'final ' : ''}errors: ${(0, colors_1.yellow)(errors)}`);
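
A sketch of the reworked transformMap control flow using the SKIP/END tokens referenced in the hunk above (wiring is illustrative; _pipeline and the root exports are assumed): SKIP drops a row, END settles the transform and triggers pipelineClose.

```ts
import { Readable } from 'stream'
import { END, SKIP } from '@naturalcycles/js-lib'
import { _pipeline, transformMap, writableVoid } from '@naturalcycles/nodejs-lib'

async function demo(): Promise<void> {
  await _pipeline([
    Readable.from([1, 2, 3, 4, 5]),
    transformMap<number, number>(async n => {
      if (n === 2) return SKIP // filtered out, nothing emitted for this row
      if (n === 4) return END // marks the transform as settled and closes the pipeline
      return n * 10
    }),
    writableVoid(),
  ])
}
```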

package/dist/stream/transform/transformMapSimple.d.ts

@@ -1,4 +1,4 @@
-import { ErrorMode, Mapper } from '@naturalcycles/js-lib';
+import { CommonLogger, ErrorMode, Mapper } from '@naturalcycles/js-lib';
 import { TransformTyped } from '../stream.model';
 export interface TransformMapSimpleOptions {
     /**
@@ -7,6 +7,7 @@ export interface TransformMapSimpleOptions {
      * @default ErrorMode.THROW_IMMEDIATELY
      */
     errorMode?: ErrorMode.THROW_IMMEDIATELY | ErrorMode.SUPPRESS;
+    logger?: CommonLogger;
 }
 /**
  * Simplest version of `transformMap`.

package/dist/stream/transform/transformMapSimple.js

@@ -14,7 +14,7 @@ const js_lib_1 = require("@naturalcycles/js-lib");
  */
 function transformMapSimple(mapper, opt = {}) {
     let index = -1;
-    const { errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY } = opt;
+    const { errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, logger = console } = opt;
     return new stream_1.Transform({
         objectMode: true,
         transform(chunk, _encoding, cb) {
@@ -22,7 +22,7 @@ function transformMapSimple(mapper, opt = {}) {
                 cb(null, mapper(chunk, ++index));
             }
             catch (err) {
-
+                logger.error(err);
                 if (errorMode === js_lib_1.ErrorMode.SUPPRESS) {
                     cb(); // suppress the error
                 }

package/dist/stream/transform/transformMapSync.d.ts

@@ -1,4 +1,4 @@
-import { ErrorMode, Mapper, Predicate } from '@naturalcycles/js-lib';
+import { CommonLogger, ErrorMode, Mapper, Predicate } from '@naturalcycles/js-lib';
 import { TransformTyped } from '../stream.model';
 export interface TransformMapSyncOptions<IN = any, OUT = IN> {
     /**
@@ -34,6 +34,7 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
      * @default `stream`
      */
     metric?: string;
+    logger?: CommonLogger;
 }
 /**
  * Sync (not async) version of transformMap.

package/dist/stream/transform/transformMapSync.js

@@ -11,7 +11,7 @@ const transformMap_1 = require("./transformMap");
  */
 function transformMapSync(mapper, opt = {}) {
     let index = -1;
-    const { predicate = transformMap_1.notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput = false, onError, metric = 'stream', objectMode = true, } = opt;
+    const { predicate = transformMap_1.notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput = false, onError, metric = 'stream', objectMode = true, logger = console, } = opt;
     let isRejected = false;
     let errors = 0;
     const collectedErrors = []; // only used if errorMode == THROW_AGGREGATED
@@ -39,7 +39,7 @@ function transformMapSync(mapper, opt = {}) {
                }
            }
            catch (err) {
-
+                logger.error(err);
                errors++;
                logErrorStats();
                if (onError) {
@@ -75,7 +75,7 @@ function transformMapSync(mapper, opt = {}) {
     function logErrorStats(final = false) {
         if (!errors)
             return;
-
+        logger.log(`${metric} ${final ? 'final ' : ''}errors: ${(0, colors_1.yellow)(errors)}`);
     }
 }
 exports.transformMapSync = transformMapSync;

package/dist/stream/transform/transformTap.d.ts

@@ -1,9 +1,12 @@
-import { AsyncMapper } from '@naturalcycles/js-lib';
+import { AsyncMapper, CommonLogger } from '@naturalcycles/js-lib';
 import { TransformOptions, TransformTyped } from '../stream.model';
+export interface TransformTapOptions extends TransformOptions {
+    logger?: CommonLogger;
+}
 /**
  * Similar to RxJS `tap` - allows to run a function for each stream item, without affecting the result.
  * Item is passed through to the output.
  *
  * Can also act as a counter, since `index` is passed to `fn`
  */
-export declare function transformTap<IN>(fn: AsyncMapper<IN, any>, opt?:
+export declare function transformTap<IN>(fn: AsyncMapper<IN, any>, opt?: TransformTapOptions): TransformTyped<IN, IN>;

package/dist/stream/transform/transformTap.js

@@ -9,6 +9,7 @@ const stream_1 = require("stream");
  * Can also act as a counter, since `index` is passed to `fn`
  */
 function transformTap(fn, opt = {}) {
+    const { logger = console } = opt;
     let index = 0;
     return new stream_1.Transform({
         objectMode: true,
@@ -19,7 +20,7 @@ function transformTap(fn, opt = {}) {
                 await fn(chunk, index++);
             }
             catch (err) {
-
+                logger.error(err);
                 // suppressed error
             }
             cb(null, chunk); // pass through the item

package/dist/stream/writable/writableFork.d.ts

@@ -5,5 +5,7 @@ import { TransformOptions, WritableTyped } from '../stream.model';
  * Currently does NOT (!) maintain backpressure.
  * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
  * Will wait until all forked pipelines are completed before completing the stream.
+ *
+ * @experimental
  */
 export declare function writableFork<T>(chains: NodeJS.WritableStream[][], opt?: TransformOptions): WritableTyped<T>;

package/dist/stream/writable/writableFork.js

@@ -8,6 +8,8 @@ const __1 = require("../..");
  * Currently does NOT (!) maintain backpressure.
  * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
  * Will wait until all forked pipelines are completed before completing the stream.
+ *
+ * @experimental
  */
 function writableFork(chains, opt) {
     const readables = [];

package/dist/stream/writable/writableLimit.d.ts

@@ -0,0 +1,9 @@
+/// <reference types="node" />
+import { Readable } from 'stream';
+import { WritableTyped } from '../stream.model';
+/**
+ * Allows to stop the Readable stream after the pipeline has processed X number of rows.
+ * It counts OUTPUT rows (not input), because this Writable is always at the end of the Pipeline.
+ * It ensures that everything has been processed before issuing a STOP on the readable.
+ */
+export declare function writableLimit<T>(readable: Readable, limit: number): WritableTyped<T>;

package/dist/stream/writable/writableLimit.js

@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.writableLimit = void 0;
+const stream_1 = require("stream");
+/**
+ * Allows to stop the Readable stream after the pipeline has processed X number of rows.
+ * It counts OUTPUT rows (not input), because this Writable is always at the end of the Pipeline.
+ * It ensures that everything has been processed before issuing a STOP on the readable.
+ */
+function writableLimit(readable, limit) {
+    let i = 0;
+    return new stream_1.Writable({
+        objectMode: true,
+        write(chunk, _, cb) {
+            if (limit === 0)
+                return cb(); // no limit, just passthrough
+            i++;
+            if (i === limit) {
+                console.log(`writableLimit of ${limit} reached`);
+                readable.destroy();
+                cb(); // do we need it?
+            }
+            else {
+                cb(); // passthrough
+            }
+        },
+    });
+}
+exports.writableLimit = writableLimit;
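
And a short sketch of the simpler writableLimit variant (hedged; assumes the root re-export and _pipeline): it sits at the end of the pipeline, counts output rows, and destroys the given Readable when the limit is reached.

```ts
import { Readable } from 'stream'
import { _pipeline, writableLimit } from '@naturalcycles/nodejs-lib'

async function takeRows(source: Readable): Promise<void> {
  // Destroys `source` once 1000 rows have reached the end of the pipeline
  await _pipeline([source, writableLimit(source, 1000)])
}
```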

package/dist/stream/writable/writableVoid.d.ts

@@ -1,9 +1,16 @@
 /// <reference types="node" />
 import { Writable } from 'stream';
+import { DeferredPromise } from '@naturalcycles/js-lib';
 import { TransformOptions } from '../stream.model';
+export interface WritableVoidOptions extends TransformOptions {
+    /**
+     * If set - it will be Resolved when the Stream is done (after final.cb)
+     */
+    streamDone?: DeferredPromise;
+}
 /**
  * Use as a "null-terminator" of stream.pipeline.
  * It consumes the stream as quickly as possible without doing anything.
  * Put it in the end of your pipeline in case it ends with Transform that needs a consumer.
  */
-export declare function writableVoid(opt?:
+export declare function writableVoid(opt?: WritableVoidOptions): Writable;

package/dist/stream/writable/writableVoid.js

@@ -7,13 +7,17 @@ const stream_1 = require("stream");
  * It consumes the stream as quickly as possible without doing anything.
  * Put it in the end of your pipeline in case it ends with Transform that needs a consumer.
  */
-function writableVoid(opt) {
+function writableVoid(opt = {}) {
     return new stream_1.Writable({
         objectMode: true,
         ...opt,
         write(chunk, _encoding, cb) {
             cb();
         },
+        final(cb) {
+            cb();
+            opt.streamDone?.resolve();
+        },
     });
 }
 exports.writableVoid = writableVoid;