@naturalcycles/nodejs-lib 12.58.0 → 12.59.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/index.d.ts +19 -18
  2. package/dist/index.js +19 -92
  3. package/dist/log/log.util.d.ts +4 -0
  4. package/dist/log/log.util.js +11 -0
  5. package/dist/stream/ndjson/ndjsonMap.d.ts +2 -2
  6. package/dist/stream/ndjson/ndjsonMap.js +4 -3
  7. package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +2 -2
  8. package/dist/stream/pipeline/pipeline.d.ts +25 -3
  9. package/dist/stream/pipeline/pipeline.js +76 -9
  10. package/dist/stream/readable/readableCreate.d.ts +8 -0
  11. package/dist/stream/readable/readableCreate.js +9 -1
  12. package/dist/stream/readable/readableForEach.d.ts +2 -2
  13. package/dist/stream/readable/readableFromArray.d.ts +2 -2
  14. package/dist/stream/readable/readableFromArray.js +17 -13
  15. package/dist/stream/readable/readableMap.d.ts +2 -2
  16. package/dist/stream/readable/readableMap.js +22 -17
  17. package/dist/stream/stream.util.d.ts +4 -0
  18. package/dist/stream/stream.util.js +24 -0
  19. package/dist/stream/transform/transformLimit.d.ts +32 -1
  20. package/dist/stream/transform/transformLimit.js +33 -16
  21. package/dist/stream/transform/transformMap.d.ts +2 -11
  22. package/dist/stream/transform/transformMap.js +55 -67
  23. package/dist/stream/transform/worker/workerClassProxy.js +1 -0
  24. package/dist/stream/writable/writableLimit.d.ts +9 -0
  25. package/dist/stream/writable/writableLimit.js +29 -0
  26. package/dist/stream/writable/writableVoid.d.ts +8 -1
  27. package/dist/stream/writable/writableVoid.js +5 -1
  28. package/package.json +1 -1
  29. package/src/index.ts +17 -156
  30. package/src/log/log.util.ts +9 -0
  31. package/src/stream/ndjson/ndjsonMap.ts +7 -5
  32. package/src/stream/ndjson/ndjsonStreamForEach.ts +2 -2
  33. package/src/stream/pipeline/pipeline.ts +102 -9
  34. package/src/stream/readable/readableCreate.ts +9 -1
  35. package/src/stream/readable/readableForEach.ts +2 -2
  36. package/src/stream/readable/readableFromArray.ts +18 -21
  37. package/src/stream/readable/readableMap.ts +24 -21
  38. package/src/stream/stream.util.ts +29 -0
  39. package/src/stream/transform/transformLimit.ts +66 -20
  40. package/src/stream/transform/transformMap.ts +74 -93
  41. package/src/stream/transform/worker/workerClassProxy.js +1 -0
  42. package/src/stream/writable/writableLimit.ts +28 -0
  43. package/src/stream/writable/writableVoid.ts +13 -1
  44. package/dist/stream/transform/legacy/transformMap.d.ts +0 -17
  45. package/dist/stream/transform/legacy/transformMap.js +0 -94
  46. package/src/stream/transform/legacy/transformMap.ts +0 -133
package/src/stream/transform/transformLimit.ts
@@ -1,38 +1,84 @@
-import { Transform } from 'stream'
+import { Readable } from 'stream'
 import { CommonLogger } from '@naturalcycles/js-lib'
+import { AbortableTransform, transformNoOp } from '../../index'
 import { TransformOptions, TransformTyped } from '../stream.model'
+import { pipelineClose } from '../stream.util'
 
 export interface TransformLimitOptions extends TransformOptions {
+  /**
+   * Nullish value (e.g 0 or undefined) would mean "no limit"
+   */
+  limit?: number
+
+  /**
+   * If provided (recommended!) - it will call readable.destroy() on limit.
+   * Without it - it will only stop the downstream consumers, but won't stop
+   * the Readable ("source" of the stream).
+   * It is almost always crucial to stop the Source too, so, please provide the Readable here!
+   */
+  sourceReadable?: Readable
+
+  /**
+   * Please provide it (a Promise that resolves when the Stream is done, e.g finished consuming things)
+   * to be able to wait for Consumers before calling `readable.destroy`.
+   * Has no effect if `readable` is not provided.
+   */
+  streamDone?: Promise<void>
+
   logger?: CommonLogger
+
+  /**
+   * Set to true to enable additional debug messages, e.g it'll log
+   * when readable still emits values after the limit is reached.
+   */
+  debug?: boolean
 }
 
+/**
+ * Class only exists to be able to do `instanceof TransformLimit`
+ * and to set sourceReadable+streamDone to it in `_pipeline`.
+ */
+export class TransformLimit extends AbortableTransform {}
+
 /**
  * 0 or falsy value means "no limit"
  */
-export function transformLimit<IN>(
-  limit?: number,
-  opt: TransformLimitOptions = {},
-): TransformTyped<IN, IN> {
-  const { logger = console } = opt
-  let index = 0
-  let ended = false
+export function transformLimit<IN>(opt: TransformLimitOptions = {}): TransformTyped<IN, IN> {
+  const { logger = console, limit, debug } = opt
 
-  return new Transform({
+  if (!limit) {
+    // No limit - returning pass-through transform
+    return transformNoOp()
+  }
+
+  let i = 0 // so we start first chunk with 1
+  let ended = false
+  return new TransformLimit({
     objectMode: true,
     ...opt,
-    transform(this: Transform, chunk: IN, _encoding, cb) {
-      index++
-
-      if (!ended) {
-        cb(null, chunk) // pass through the item
-      } else {
-        cb(null) // pass-through empty
-      }
+    transform(this: TransformLimit, chunk, _, cb) {
+      i++
 
-      if (limit && index === limit) {
+      if (i === limit) {
         ended = true
-        logger.log(`transformLimit: limit of ${limit} reached`)
-        // this.emit('end') // this makes it "halt" on Node 14 lts
+        logger.log(`transformLimit of ${limit} reached`)
+        this.push(chunk)
+
+        pipelineClose(
+          'transformLimit',
+          this,
+          opt.sourceReadable || this.sourceReadable,
+          opt.streamDone || this.streamDone,
+          logger,
+        )
+
+        cb() // after pause
+      } else if (!ended) {
+        cb(null, chunk)
+      } else {
+        if (debug) logger.log(`transformLimit.transform after limit`, i)
+        // If we ever HANG (don't call cb) - Node will do process.exit(0) to us
+        cb() // ended, don't emit anything
      }
    },
  })
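For illustration, a minimal usage sketch of the new options-object signature (not part of the published diff). It assumes `transformLimit`, `writableVoid` and the `_pipeline` wrapper referenced in the comments above are exported from the package root, and that `pDefer()` from @naturalcycles/js-lib produces the `DeferredPromise` that `writableVoid` resolves. Per the `TransformLimit` class comment, `_pipeline` can set `sourceReadable`/`streamDone` itself; the explicit wiring below is the manual variant.

import { Readable } from 'stream'
import { pDefer } from '@naturalcycles/js-lib' // assumed export
import { _pipeline, transformLimit, writableVoid } from '@naturalcycles/nodejs-lib' // assumed exports

async function run(): Promise<void> {
  // An effectively infinite source - only the limit stops it
  const source = Readable.from(numbers())

  // Resolved by writableVoid's final(); transformLimit waits for it
  // before calling source.destroy(), so in-flight rows are not lost
  const streamDone = pDefer<void>()

  await _pipeline([
    source,
    transformLimit<number>({ limit: 100, sourceReadable: source, streamDone }),
    writableVoid({ streamDone }),
  ])
}

function* numbers(): Generator<number> {
  for (let n = 1; ; n++) yield n
}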
package/src/stream/transform/transformMap.ts
@@ -1,16 +1,18 @@
-import { Transform } from 'stream'
 import {
+  AbortableAsyncMapper,
   AggregatedError,
-  AsyncMapper,
   AsyncPredicate,
   CommonLogger,
+  END,
   ErrorMode,
   pFilter,
-  PQueue,
+  SKIP,
 } from '@naturalcycles/js-lib'
+import through2Concurrent = require('through2-concurrent')
 import { yellow } from '../../colors'
+import { AbortableTransform } from '../pipeline/pipeline'
 import { TransformTyped } from '../stream.model'
-import { transformMapLegacy } from './legacy/transformMap'
+import { pipelineClose } from '../stream.util'
 
 export interface TransformMapOptions<IN = any, OUT = IN> {
   /**
@@ -55,11 +57,6 @@ export interface TransformMapOptions<IN = any, OUT = IN> {
    */
   metric?: string
 
-  /**
-   * If defined - called BEFORE `final()` callback is called.
-   */
-  beforeFinal?: () => any
-
   logger?: CommonLogger
 }
 
@@ -67,10 +64,8 @@ export function notNullishPredicate(item: any): boolean {
   return item !== undefined && item !== null
 }
 
-/**
- * Temporary export legacy transformMap, to debug 503 errors
- */
-export const transformMap = transformMapLegacy
+// doesn't work, cause here we don't construct our Transform instance ourselves
+// export class TransformMap extends AbortableTransform {}
 
 /**
  * Like pMap, but for streams.
@@ -84,8 +79,8 @@ export const transformMap = transformMapLegacy
  *
  * If an Array is returned by `mapper` - it will be flattened and multiple results will be emitted from it. Tested by Array.isArray().
  */
-export function transformMapNew<IN = any, OUT = IN>(
-  mapper: AsyncMapper<IN, OUT>,
+export function transformMap<IN = any, OUT = IN>(
+  mapper: AbortableAsyncMapper<IN, OUT>,
   opt: TransformMapOptions<IN, OUT> = {},
 ): TransformTyped<IN, OUT> {
   const {
@@ -94,103 +89,89 @@ export function transformMapNew<IN = any, OUT = IN>(
     errorMode = ErrorMode.THROW_IMMEDIATELY,
     flattenArrayOutput,
     onError,
-    beforeFinal,
     metric = 'stream',
     logger = console,
   } = opt
 
   let index = -1
-  let isRejected = false
+  let isSettled = false
   let errors = 0
   const collectedErrors: Error[] = [] // only used if errorMode == THROW_AGGREGATED
 
-  const q = new PQueue({
-    concurrency,
-    resolveOn: 'start',
-    // debug: true,
-  })
+  return through2Concurrent.obj(
+    {
+      maxConcurrency: concurrency,
+      async final(cb) {
+        // console.log('transformMap final')
 
-  return new Transform({
-    objectMode: true,
+        logErrorStats(true)
 
-    async final(cb) {
-      // console.log('transformMap final', {index}, q.inFlight, q.queueSize)
-
-      // wait for the current inFlight jobs to complete and push their results
-      await q.onIdle()
+        if (collectedErrors.length) {
+          // emit Aggregated error
+          cb(new AggregatedError(collectedErrors))
+        } else {
+          // emit no error
+          cb()
+        }
+      },
+    },
+    async function transformMapFn(this: AbortableTransform, chunk: IN, _, cb) {
+      index++
+      // console.log({chunk, _encoding})
+
+      // Stop processing if isSettled (either THROW_IMMEDIATELY was fired or END received)
+      if (isSettled) return cb()
+
+      try {
+        const currentIndex = index // because we need to pass it to 2 functions - mapper and predicate. Refers to INPUT index (since it may return multiple outputs)
+        const res = await mapper(chunk, currentIndex)
+        const passedResults = await pFilter(
+          flattenArrayOutput && Array.isArray(res) ? res : [res],
+          async r => {
+            if (r === END) {
+              isSettled = true // will be checked later
+              return false
+            }
+            return r !== SKIP && (await predicate(r, currentIndex))
+          },
+        )
+
+        passedResults.forEach(r => this.push(r))
+
+        if (isSettled) {
+          logger.log(`transformMap END received at index ${index}`)
+          pipelineClose('transformMap', this, this.sourceReadable, this.streamDone, logger)
+        }
 
-      logErrorStats(logger, true)
+        cb() // done processing
+      } catch (err) {
+        logger.error(err)
+        errors++
+        logErrorStats()
 
-      await beforeFinal?.() // call beforeFinal if defined
+        if (onError) {
+          try {
+            onError(err, chunk)
+          } catch {}
+        }
 
-      if (collectedErrors.length) {
-        // emit Aggregated error
-        // For the same reason, magically, let's not call `cb`, but emit an error event instead
-        // this.emit('error', new AggregatedError(collectedErrors))
-        cb(new AggregatedError(collectedErrors))
-      } else {
-        // emit no error
-        // It is truly a mistery, but calling cb() here was causing ERR_MULTIPLE_CALLBACK ?!
-        // Commenting it out seems to work ?!
-        // ?!
-        // cb()
-      }
-    },
+        if (errorMode === ErrorMode.THROW_IMMEDIATELY) {
+          isSettled = true
+          return cb(err) // Emit error immediately
+        }
 
-    async transform(this: Transform, chunk: IN, _encoding, cb) {
-      index++
-      // console.log('transform', {index})
-
-      // Stop processing if THROW_IMMEDIATELY mode is used
-      if (isRejected && errorMode === ErrorMode.THROW_IMMEDIATELY) return cb()
-
-      // It resolves when it is successfully STARTED execution.
-      // If it's queued instead - it'll wait and resolve only upon START.
-      await q.push(async () => {
-        try {
-          const currentIndex = index // because we need to pass it to 2 functions - mapper and predicate. Refers to INPUT index (since it may return multiple outputs)
-          const res = await mapper(chunk, currentIndex)
-          const passedResults = await pFilter(
-            flattenArrayOutput && Array.isArray(res) ? res : [res],
-            async r => await predicate(r, currentIndex),
-          )
-
-          passedResults.forEach(r => this.push(r))
-        } catch (err) {
-          logger.error(err)
-
-          errors++
-
-          logErrorStats(logger)
-
-          if (onError) {
-            try {
-              onError(err, chunk)
-            } catch {}
-          }
-
-          if (errorMode === ErrorMode.THROW_IMMEDIATELY) {
-            isRejected = true
-            // Emit error immediately
-            // return cb(err as Error)
-            return this.emit('error', err as Error)
-          }
-
-          if (errorMode === ErrorMode.THROW_AGGREGATED) {
-            collectedErrors.push(err as Error)
-          }
+        if (errorMode === ErrorMode.THROW_AGGREGATED) {
+          collectedErrors.push(err as Error)
         }
-      })
 
-      // Resolved, which means it STARTED processing
-      // This means we can take more load
-      cb()
+        // Tell input stream that we're done processing, but emit nothing to output - not error nor result
+        cb()
+      }
     },
-  })
+  )
 
-  function logErrorStats(logger: CommonLogger, final = false): void {
+  function logErrorStats(final = false): void {
     if (!errors) return
-
     logger.log(`${metric} ${final ? 'final ' : ''}errors: ${yellow(errors)}`)
   }
 }
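A sketch of the new `AbortableAsyncMapper` contract (not part of the published diff, under the same assumed package-root exports as above): returning `SKIP` drops a single output while the stream keeps flowing, and returning `END` settles the stream, after which `pipelineClose` stops the source.

import { Readable } from 'stream'
import { END, ErrorMode, SKIP } from '@naturalcycles/js-lib'
import { _pipeline, transformMap, writableVoid } from '@naturalcycles/nodejs-lib' // assumed exports

async function run(): Promise<void> {
  await _pipeline([
    Readable.from([1, 2, 3, 4, 5, 6]),
    transformMap<number, number>(
      async n => {
        if (n % 2 === 0) return SKIP // drop this row, keep the stream going
        if (n > 4) return END // settle the stream; the source is closed via pipelineClose
        return n * 10
      },
      // Collect mapper errors and throw a single AggregatedError from final(),
      // instead of failing on the first one
      { concurrency: 8, errorMode: ErrorMode.THROW_AGGREGATED },
    ),
    writableVoid(),
  ])
}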
package/src/stream/transform/worker/workerClassProxy.js
@@ -10,6 +10,7 @@ if (!workerFile) {
   // console.log(`worker#${workerIndex} created`)
 
   try {
+    // require('esbuild-register') // alternative
     require('ts-node/register/transpile-only')
     require('tsconfig-paths/register')
   } catch {} // require if exists
package/src/stream/writable/writableLimit.ts
@@ -0,0 +1,28 @@
+import { Readable, Writable } from 'stream'
+import { WritableTyped } from '../stream.model'
+
+/**
+ * Allows to stop the Readable stream after the pipeline has processed X number of rows.
+ * It counts OUTPUT rows (not input), because this Writable is always at the end of the Pipeline.
+ * It ensures that everything has been processed before issuing a STOP on the readable.
+ */
+export function writableLimit<T>(readable: Readable, limit: number): WritableTyped<T> {
+  let i = 0
+
+  return new Writable({
+    objectMode: true,
+    write(chunk, _, cb) {
+      if (limit === 0) return cb() // no limit, just passthrough
+
+      i++
+
+      if (i === limit) {
+        console.log(`writableLimit of ${limit} reached`)
+        readable.destroy()
+        cb() // do we need it?
+      } else {
+        cb() // passthrough
+      }
+    },
+  })
+}
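Unlike `transformLimit`, `writableLimit` takes the source `Readable` directly and sits at the very end of the pipeline, so the limit counts rows that survived every upstream transform. A minimal sketch (not part of the published diff), again under the assumed package-root exports:

import { Readable } from 'stream'
import { SKIP } from '@naturalcycles/js-lib'
import { _pipeline, transformMap, writableLimit } from '@naturalcycles/nodejs-lib' // assumed exports

interface Row {
  id: number
}

async function run(): Promise<void> {
  const source = Readable.from(rows())

  await _pipeline([
    source,
    // Upstream filtering may drop rows, so counting happens AFTER it
    transformMap<Row, Row>(async row => (row.id % 2 ? row : SKIP)),
    // Destroys `source` once 1000 rows have reached the end of the pipeline
    writableLimit<Row>(source, 1000),
  ])
}

function* rows(): Generator<Row> {
  for (let id = 0; ; id++) yield { id }
}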
package/src/stream/writable/writableVoid.ts
@@ -1,17 +1,29 @@
 import { Writable } from 'stream'
+import { DeferredPromise } from '@naturalcycles/js-lib'
 import { TransformOptions } from '../stream.model'
 
+export interface WritableVoidOptions extends TransformOptions {
+  /**
+   * If set - it will be Resolved when the Stream is done (after final.cb)
+   */
+  streamDone?: DeferredPromise
+}
+
 /**
  * Use as a "null-terminator" of stream.pipeline.
  * It consumes the stream as quickly as possible without doing anything.
  * Put it in the end of your pipeline in case it ends with Transform that needs a consumer.
 */
-export function writableVoid(opt?: TransformOptions): Writable {
+export function writableVoid(opt: WritableVoidOptions = {}): Writable {
   return new Writable({
     objectMode: true,
     ...opt,
     write(chunk, _encoding, cb) {
       cb()
     },
+    final(cb) {
+      cb()
+      opt.streamDone?.resolve()
+    },
   })
 }
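Finally, a small sketch of the `streamDone` hook in isolation (not part of the published diff): the deferred resolves inside `final()`, right after its `cb()`, which is the signal `transformLimit` is documented to wait on before destroying the source. `pDefer()` is assumed to be the js-lib factory for `DeferredPromise`.

import { Readable } from 'stream'
import { pipeline } from 'stream/promises'
import { pDefer } from '@naturalcycles/js-lib' // assumed export
import { writableVoid } from '@naturalcycles/nodejs-lib' // assumed export

async function run(): Promise<void> {
  const streamDone = pDefer<void>()

  const done = pipeline(Readable.from(['a', 'b', 'c']), writableVoid({ streamDone }))

  await streamDone // resolves in final(), after the last write() has been acked
  await done // the pipeline's own promise settles independently
}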
package/dist/stream/transform/legacy/transformMap.d.ts
@@ -1,17 +0,0 @@
-import { AsyncMapper } from '@naturalcycles/js-lib';
-import { TransformTyped } from '../../stream.model';
-import { TransformMapOptions } from '../transformMap';
-export declare function notNullishPredicate(item: any): boolean;
-/**
- * Like pMap, but for streams.
- * Inspired by `through2`.
- * Main feature is concurrency control (implemented via `through2-concurrent`) and convenient options.
- * Using this allows native stream .pipe() to work and use backpressure.
- *
- * Only works in objectMode (due to through2Concurrent).
- *
- * Concurrency defaults to 16.
- *
- * If an Array is returned by `mapper` - it will be flattened and multiple results will be emitted from it. Tested by Array.isArray().
- */
-export declare function transformMapLegacy<IN = any, OUT = IN>(mapper: AsyncMapper<IN, OUT>, opt?: TransformMapOptions<IN, OUT>): TransformTyped<IN, OUT>;
package/dist/stream/transform/legacy/transformMap.js
@@ -1,94 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.transformMapLegacy = exports.notNullishPredicate = void 0;
-const js_lib_1 = require("@naturalcycles/js-lib");
-const through2Concurrent = require("through2-concurrent");
-const colors_1 = require("../../../colors");
-function notNullishPredicate(item) {
-    return item !== undefined && item !== null;
-}
-exports.notNullishPredicate = notNullishPredicate;
-/**
- * Like pMap, but for streams.
- * Inspired by `through2`.
- * Main feature is concurrency control (implemented via `through2-concurrent`) and convenient options.
- * Using this allows native stream .pipe() to work and use backpressure.
- *
- * Only works in objectMode (due to through2Concurrent).
- *
- * Concurrency defaults to 16.
- *
- * If an Array is returned by `mapper` - it will be flattened and multiple results will be emitted from it. Tested by Array.isArray().
- */
-function transformMapLegacy(mapper, opt = {}) {
-    const { concurrency = 16, predicate = notNullishPredicate, errorMode = js_lib_1.ErrorMode.THROW_IMMEDIATELY, flattenArrayOutput, onError, beforeFinal, metric = 'stream', logger = console, } = opt;
-    let index = -1;
-    let isRejected = false;
-    let errors = 0;
-    const collectedErrors = []; // only used if errorMode == THROW_AGGREGATED
-    return through2Concurrent.obj({
-        maxConcurrency: concurrency,
-        // autoDestroy: true,
-        async final(cb) {
-            // console.log('transformMap final')
-            logErrorStats(logger, true);
-            await beforeFinal?.(); // call beforeFinal if defined
-            if (collectedErrors.length) {
-                // emit Aggregated error
-                cb(new js_lib_1.AggregatedError(collectedErrors));
-            }
-            else {
-                // emit no error
-                cb();
-            }
-        },
-    }, async function transformMapFn(chunk, _encoding, cb) {
-        index++;
-        // console.log({chunk, _encoding})
-        // Stop processing if THROW_IMMEDIATELY mode is used
-        if (isRejected && errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY)
-            return cb();
-        try {
-            const currentIndex = index; // because we need to pass it to 2 functions - mapper and predicate. Refers to INPUT index (since it may return multiple outputs)
-            const res = await mapper(chunk, currentIndex);
-            const passedResults = await (0, js_lib_1.pFilter)(flattenArrayOutput && Array.isArray(res) ? res : [res], async (r) => await predicate(r, currentIndex));
-            if (passedResults.length === 0) {
-                cb(); // 0 results
-            }
-            else {
-                passedResults.forEach(r => {
-                    this.push(r);
-                    // cb(null, r)
-                });
-                cb(); // done processing
-            }
-        }
-        catch (err) {
-            logger.error(err);
-            errors++;
-            logErrorStats(logger);
-            if (onError) {
-                try {
-                    onError(err, chunk);
-                }
-                catch { }
-            }
-            if (errorMode === js_lib_1.ErrorMode.THROW_IMMEDIATELY) {
-                isRejected = true;
-                // Emit error immediately
-                return cb(err);
-            }
-            if (errorMode === js_lib_1.ErrorMode.THROW_AGGREGATED) {
-                collectedErrors.push(err);
-            }
-            // Tell input stream that we're done processing, but emit nothing to output - not error nor result
-            cb();
-        }
-    });
-    function logErrorStats(logger, final = false) {
-        if (!errors)
-            return;
-        logger.log(`${metric} ${final ? 'final ' : ''}errors: ${(0, colors_1.yellow)(errors)}`);
-    }
-}
-exports.transformMapLegacy = transformMapLegacy;
package/src/stream/transform/legacy/transformMap.ts
@@ -1,133 +0,0 @@
-import { Transform } from 'stream'
-import {
-  AggregatedError,
-  AsyncMapper,
-  CommonLogger,
-  ErrorMode,
-  pFilter,
-} from '@naturalcycles/js-lib'
-import through2Concurrent = require('through2-concurrent')
-import { yellow } from '../../../colors'
-import { TransformTyped } from '../../stream.model'
-import { TransformMapOptions } from '../transformMap'
-
-export function notNullishPredicate(item: any): boolean {
-  return item !== undefined && item !== null
-}
-
-/**
- * Like pMap, but for streams.
- * Inspired by `through2`.
- * Main feature is concurrency control (implemented via `through2-concurrent`) and convenient options.
- * Using this allows native stream .pipe() to work and use backpressure.
- *
- * Only works in objectMode (due to through2Concurrent).
- *
- * Concurrency defaults to 16.
- *
- * If an Array is returned by `mapper` - it will be flattened and multiple results will be emitted from it. Tested by Array.isArray().
- */
-export function transformMapLegacy<IN = any, OUT = IN>(
-  mapper: AsyncMapper<IN, OUT>,
-  opt: TransformMapOptions<IN, OUT> = {},
-): TransformTyped<IN, OUT> {
-  const {
-    concurrency = 16,
-    predicate = notNullishPredicate,
-    errorMode = ErrorMode.THROW_IMMEDIATELY,
-    flattenArrayOutput,
-    onError,
-    beforeFinal,
-    metric = 'stream',
-    logger = console,
-  } = opt
-
-  let index = -1
-  let isRejected = false
-  let errors = 0
-  const collectedErrors: Error[] = [] // only used if errorMode == THROW_AGGREGATED
-
-  return through2Concurrent.obj(
-    {
-      maxConcurrency: concurrency,
-      // autoDestroy: true,
-      async final(cb) {
-        // console.log('transformMap final')
-
-        logErrorStats(logger, true)
-
-        await beforeFinal?.() // call beforeFinal if defined
-
-        if (collectedErrors.length) {
-          // emit Aggregated error
-          cb(new AggregatedError(collectedErrors))
-        } else {
-          // emit no error
-          cb()
-        }
-      },
-    },
-    async function transformMapFn(
-      this: Transform,
-      chunk: IN,
-      _encoding: any,
-      cb: (...args: any[]) => any,
-    ) {
-      index++
-      // console.log({chunk, _encoding})
-
-      // Stop processing if THROW_IMMEDIATELY mode is used
-      if (isRejected && errorMode === ErrorMode.THROW_IMMEDIATELY) return cb()
-
-      try {
-        const currentIndex = index // because we need to pass it to 2 functions - mapper and predicate. Refers to INPUT index (since it may return multiple outputs)
-        const res = await mapper(chunk, currentIndex)
-        const passedResults = await pFilter(
-          flattenArrayOutput && Array.isArray(res) ? res : [res],
-          async r => await predicate(r, currentIndex),
-        )
-
-        if (passedResults.length === 0) {
-          cb() // 0 results
-        } else {
-          passedResults.forEach(r => {
-            this.push(r)
-            // cb(null, r)
-          })
-          cb() // done processing
-        }
-      } catch (err) {
-        logger.error(err)
-
-        errors++
-
-        logErrorStats(logger)
-
-        if (onError) {
-          try {
-            onError(err, chunk)
-          } catch {}
-        }
-
-        if (errorMode === ErrorMode.THROW_IMMEDIATELY) {
-          isRejected = true
-          // Emit error immediately
-          return cb(err)
-        }
-
-        if (errorMode === ErrorMode.THROW_AGGREGATED) {
-          collectedErrors.push(err as Error)
-        }
-
-        // Tell input stream that we're done processing, but emit nothing to output - not error nor result
-        cb()
-      }
-    },
-  )
-
-  function logErrorStats(logger: CommonLogger, final = false): void {
-    if (!errors) return
-
-    logger.log(`${metric} ${final ? 'final ' : ''}errors: ${yellow(errors)}`)
-  }
-}