@naturalcycles/nodejs-lib 15.21.0 → 15.23.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/exec2/exec2.js +1 -0
  2. package/dist/stream/index.d.ts +2 -2
  3. package/dist/stream/index.js +2 -2
  4. package/dist/stream/ndjson/ndjsonMap.d.ts +1 -1
  5. package/dist/stream/ndjson/ndjsonMap.js +13 -15
  6. package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +2 -2
  7. package/dist/stream/ndjson/ndjsonStreamForEach.js +9 -15
  8. package/dist/stream/ndjson/transformJsonParse.js +0 -1
  9. package/dist/stream/pipeline.d.ts +79 -0
  10. package/dist/stream/pipeline.js +220 -0
  11. package/dist/stream/readable/readableCombined.d.ts +30 -0
  12. package/dist/stream/readable/readableCombined.js +77 -0
  13. package/dist/stream/stream.util.d.ts +1 -3
  14. package/dist/stream/stream.util.js +1 -20
  15. package/dist/stream/transform/transformChunk.d.ts +5 -8
  16. package/dist/stream/transform/transformChunk.js +4 -2
  17. package/dist/stream/transform/transformFlatten.d.ts +1 -0
  18. package/dist/stream/transform/transformFlatten.js +15 -4
  19. package/dist/stream/transform/transformLimit.d.ts +3 -26
  20. package/dist/stream/transform/transformLimit.js +14 -23
  21. package/dist/stream/transform/transformMap.d.ts +15 -2
  22. package/dist/stream/transform/transformMap.js +25 -19
  23. package/dist/stream/transform/transformMapSync.d.ts +5 -3
  24. package/dist/stream/transform/transformMapSync.js +7 -8
  25. package/dist/stream/transform/transformTee.js +4 -2
  26. package/dist/stream/writable/writableForEach.d.ts +2 -1
  27. package/dist/stream/writable/writableFork.js +2 -2
  28. package/package.json +1 -1
  29. package/src/exec2/exec2.ts +1 -0
  30. package/src/stream/index.ts +2 -2
  31. package/src/stream/ndjson/ndjsonMap.ts +12 -22
  32. package/src/stream/ndjson/ndjsonStreamForEach.ts +8 -15
  33. package/src/stream/ndjson/transformJsonParse.ts +0 -1
  34. package/src/stream/pipeline.ts +301 -0
  35. package/src/stream/readable/readableCombined.ts +87 -0
  36. package/src/stream/stream.util.ts +1 -29
  37. package/src/stream/transform/transformChunk.ts +8 -11
  38. package/src/stream/transform/transformFlatten.ts +16 -4
  39. package/src/stream/transform/transformLimit.ts +20 -51
  40. package/src/stream/transform/transformMap.ts +45 -21
  41. package/src/stream/transform/transformMapSync.ts +14 -8
  42. package/src/stream/transform/transformTee.ts +5 -2
  43. package/src/stream/writable/writableForEach.ts +2 -2
  44. package/src/stream/writable/writableFork.ts +2 -2
  45. package/dist/stream/pipeline/pipeline.d.ts +0 -36
  46. package/dist/stream/pipeline/pipeline.js +0 -82
  47. package/dist/stream/readable/readableForEach.d.ts +0 -19
  48. package/dist/stream/readable/readableForEach.js +0 -30
  49. package/src/stream/pipeline/pipeline.ts +0 -114
  50. package/src/stream/readable/readableForEach.ts +0 -42
@@ -1,12 +1,13 @@
1
- import { _hc } from '@naturalcycles/js-lib'
1
+ import { _hc, type AbortableSignal } from '@naturalcycles/js-lib'
2
2
  import { _since } from '@naturalcycles/js-lib/datetime/time.util.js'
3
- import { _anyToError, ErrorMode } from '@naturalcycles/js-lib/error'
3
+ import { _anyToError, _assert, ErrorMode } from '@naturalcycles/js-lib/error'
4
4
  import type { CommonLogger } from '@naturalcycles/js-lib/log'
5
5
  import { _stringify } from '@naturalcycles/js-lib/string/stringify.js'
6
6
  import {
7
7
  type AbortableAsyncMapper,
8
8
  type AsyncPredicate,
9
9
  END,
10
+ type PositiveInteger,
10
11
  type Promisable,
11
12
  SKIP,
12
13
  type StringMap,
@@ -14,9 +15,8 @@ import {
14
15
  } from '@naturalcycles/js-lib/types'
15
16
  import through2Concurrent from 'through2-concurrent'
16
17
  import { yellow } from '../../colors/colors.js'
17
- import type { AbortableTransform } from '../pipeline/pipeline.js'
18
18
  import type { TransformTyped } from '../stream.model.js'
19
- import { pipelineClose } from '../stream.util.js'
19
+ import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js'
20
20
 
21
21
  export interface TransformMapOptions<IN = any, OUT = IN> {
22
22
  /**
@@ -38,7 +38,16 @@ export interface TransformMapOptions<IN = any, OUT = IN> {
38
38
  * UPD: changed back from 32 to 16, "to be on a safe side", as 32 sometimes
39
39
  * causes "Datastore timeout errors".
40
40
  */
41
- concurrency?: number
41
+ concurrency?: PositiveInteger
42
+
43
+ /**
44
+ * Defaults to 64 items.
45
+ * (objectMode default is 16, but we increased it)
46
+ *
47
+ * Affects both readable and writable highWaterMark (buffer).
48
+ * So, 64 means a total buffer of 128 (64 input and 64 output buffer).
49
+ */
50
+ highWaterMark?: PositiveInteger
42
51
 
43
52
  /**
44
53
  * @default THROW_IMMEDIATELY
@@ -71,6 +80,11 @@ export interface TransformMapOptions<IN = any, OUT = IN> {
71
80
  metric?: string
72
81
 
73
82
  logger?: CommonLogger
83
+
84
+ /**
85
+ * Allows to abort (gracefully stop) the stream from inside the Transform.
86
+ */
87
+ signal?: AbortableSignal
74
88
  }
75
89
 
76
90
  export interface TransformMapStats {
@@ -123,24 +137,29 @@ export function transformMap<IN = any, OUT = IN>(
123
137
  ): TransformTyped<IN, OUT> {
124
138
  const {
125
139
  concurrency = 16,
140
+ highWaterMark = 64,
126
141
  predicate, // we now default to "no predicate" (meaning pass-everything)
127
142
  errorMode = ErrorMode.THROW_IMMEDIATELY,
128
143
  onError,
129
144
  onDone,
130
145
  metric = 'stream',
131
146
  logger = console,
147
+ signal,
132
148
  } = opt
133
149
 
134
150
  const started = Date.now() as UnixTimestampMillis
135
151
  let index = -1
136
152
  let countOut = 0
137
153
  let isSettled = false
154
+ let ok = true
138
155
  let errors = 0
139
156
  const collectedErrors: Error[] = [] // only used if errorMode == THROW_AGGREGATED
140
157
 
141
158
  return through2Concurrent.obj(
142
159
  {
143
160
  maxConcurrency: concurrency,
161
+ readableHighWaterMark: highWaterMark,
162
+ writableHighWaterMark: highWaterMark,
144
163
  async final(cb) {
145
164
  // console.log('transformMap final')
146
165
 
@@ -172,7 +191,7 @@ export function transformMap<IN = any, OUT = IN>(
172
191
 
173
192
  try {
174
193
  await onDone?.({
175
- ok: true,
194
+ ok,
176
195
  collectedErrors,
177
196
  countErrors: errors,
178
197
  countIn: index + 1,
@@ -187,7 +206,7 @@ export function transformMap<IN = any, OUT = IN>(
187
206
  }
188
207
  },
189
208
  },
190
- async function transformMapFn(this: AbortableTransform, chunk: IN, _, cb) {
209
+ async function transformMapFn(chunk: IN, _, cb) {
191
210
  // Stop processing if isSettled (either THROW_IMMEDIATELY was fired or END received)
192
211
  if (isSettled) return cb()
193
212
 
@@ -201,7 +220,8 @@ export function transformMap<IN = any, OUT = IN>(
201
220
  if (res === END) {
202
221
  isSettled = true
203
222
  logger.log(`transformMap END received at index ${currentIndex}`)
204
- pipelineClose('transformMap', this, this.sourceReadable, this.streamDone, logger)
223
+ _assert(signal, 'signal is required when using END')
224
+ signal.abort(new Error(PIPELINE_GRACEFUL_ABORT))
205
225
  return cb()
206
226
  }
207
227
 
@@ -230,19 +250,23 @@ export function transformMap<IN = any, OUT = IN>(
230
250
 
231
251
  if (errorMode === ErrorMode.THROW_IMMEDIATELY) {
232
252
  isSettled = true
233
-
234
- try {
235
- await onDone?.({
236
- ok: false,
237
- collectedErrors,
238
- countErrors: errors,
239
- countIn: index + 1,
240
- countOut,
241
- started,
242
- })
243
- } catch (err) {
244
- logger.error(err)
245
- }
253
+ ok = false
254
+
255
+ // Tests show that onDone is still called at `final` (second time),
256
+ // so, we no longer call it here
257
+
258
+ // try {
259
+ // await onDone?.({
260
+ // ok: false,
261
+ // collectedErrors,
262
+ // countErrors: errors,
263
+ // countIn: index + 1,
264
+ // countOut,
265
+ // started,
266
+ // })
267
+ // } catch (err) {
268
+ // logger.error(err)
269
+ // }
246
270
 
247
271
  return cb(err) // Emit error immediately
248
272
  }
@@ -1,11 +1,12 @@
1
- import { _anyToError, ErrorMode } from '@naturalcycles/js-lib/error'
1
+ import { Transform } from 'node:stream'
2
+ import type { AbortableSignal } from '@naturalcycles/js-lib'
3
+ import { _anyToError, _assert, ErrorMode } from '@naturalcycles/js-lib/error'
2
4
  import type { CommonLogger } from '@naturalcycles/js-lib/log'
3
5
  import type { IndexedMapper, Predicate, UnixTimestampMillis } from '@naturalcycles/js-lib/types'
4
6
  import { END, SKIP } from '@naturalcycles/js-lib/types'
5
7
  import { yellow } from '../../colors/colors.js'
6
- import { AbortableTransform } from '../pipeline/pipeline.js'
7
8
  import type { TransformTyped } from '../stream.model.js'
8
- import { pipelineClose } from '../stream.util.js'
9
+ import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js'
9
10
  import type { TransformMapStats } from './transformMap.js'
10
11
 
11
12
  export interface TransformMapSyncOptions<IN = any, OUT = IN> {
@@ -54,9 +55,12 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
54
55
  metric?: string
55
56
 
56
57
  logger?: CommonLogger
57
- }
58
58
 
59
- export class TransformMapSync extends AbortableTransform {}
59
+ /**
60
+ * Allows to abort (gracefully stop) the stream from inside the Transform.
61
+ */
62
+ signal?: AbortableSignal
63
+ }
60
64
 
61
65
  /**
62
66
  * Sync (not async) version of transformMap.
@@ -74,6 +78,7 @@ export function transformMapSync<IN = any, OUT = IN>(
74
78
  metric = 'stream',
75
79
  objectMode = true,
76
80
  logger = console,
81
+ signal,
77
82
  } = opt
78
83
 
79
84
  const started = Date.now() as UnixTimestampMillis
@@ -83,10 +88,10 @@ export function transformMapSync<IN = any, OUT = IN>(
83
88
  let errors = 0
84
89
  const collectedErrors: Error[] = [] // only used if errorMode == THROW_AGGREGATED
85
90
 
86
- return new TransformMapSync({
91
+ return new Transform({
87
92
  objectMode,
88
93
  ...opt,
89
- transform(this: AbortableTransform, chunk: IN, _, cb) {
94
+ transform(chunk: IN, _, cb) {
90
95
  // Stop processing if isSettled
91
96
  if (isSettled) return cb()
92
97
 
@@ -99,7 +104,8 @@ export function transformMapSync<IN = any, OUT = IN>(
99
104
  if (v === END) {
100
105
  isSettled = true // will be checked later
101
106
  logger.log(`transformMapSync END received at index ${currentIndex}`)
102
- pipelineClose('transformMapSync', this, this.sourceReadable, this.streamDone, logger)
107
+ _assert(signal, 'signal is required when using END')
108
+ signal.abort(new Error(PIPELINE_GRACEFUL_ABORT))
103
109
  return cb()
104
110
  }
105
111
 
@@ -1,5 +1,5 @@
1
1
  import { Transform } from 'node:stream'
2
- import { _pipeline } from '../pipeline/pipeline.js'
2
+ import { pipeline } from 'node:stream/promises'
3
3
  import { readableCreate } from '../readable/readableCreate.js'
4
4
  import type { TransformTyped } from '../stream.model.js'
5
5
 
@@ -17,11 +17,14 @@ type AnyStream = NodeJS.WritableStream | NodeJS.ReadWriteStream
17
17
  export function transformTee<T>(streams: AnyStream[]): TransformTyped<T, T> {
18
18
  const readable = readableCreate<T>()
19
19
 
20
- const secondPipelinePromise = _pipeline([readable, ...streams])
20
+ const secondPipelinePromise = pipeline([readable, ...streams])
21
21
 
22
22
  return new Transform({
23
23
  objectMode: true,
24
24
  transform(chunk: T, _, cb) {
25
+ // todo: it's possible to start respecting backpressure,
26
+ // if we start to listen to the boolean output of .push()
27
+
25
28
  // pass to the "secondary" pipeline
26
29
  readable.push(chunk)
27
30
 
@@ -2,7 +2,7 @@ import type { AsyncIndexedMapper, IndexedMapper } from '@naturalcycles/js-lib/ty
2
2
  import { _passNothingPredicate } from '@naturalcycles/js-lib/types'
3
3
  import type { WritableTyped } from '../stream.model.js'
4
4
  import { transformMap, type TransformMapOptions } from '../transform/transformMap.js'
5
- import { transformMapSync } from '../transform/transformMapSync.js'
5
+ import { transformMapSync, type TransformMapSyncOptions } from '../transform/transformMapSync.js'
6
6
 
7
7
  /**
8
8
  * Just an alias to transformMap that declares OUT as void.
@@ -19,7 +19,7 @@ export function writableForEach<IN = any>(
19
19
  */
20
20
  export function writableForEachSync<IN = any>(
21
21
  mapper: IndexedMapper<IN, void>,
22
- opt: TransformMapOptions<IN, void> = {},
22
+ opt: TransformMapSyncOptions<IN, void> = {},
23
23
  ): WritableTyped<IN> {
24
24
  return transformMapSync<IN, void>(mapper, { ...opt, predicate: _passNothingPredicate })
25
25
  }
@@ -1,5 +1,5 @@
1
1
  import { Writable } from 'node:stream'
2
- import { _pipeline } from '../pipeline/pipeline.js'
2
+ import { pipeline } from 'node:stream/promises'
3
3
  import { readableCreate } from '../readable/readableCreate.js'
4
4
  import type { ReadableTyped, TransformOptions, WritableTyped } from '../stream.model.js'
5
5
 
@@ -22,7 +22,7 @@ export function writableFork<T>(
22
22
  const readable = readableCreate<T>()
23
23
  readables.push(readable)
24
24
 
25
- return await _pipeline([readable, ...chain])
25
+ return await pipeline([readable, ...chain])
26
26
  }),
27
27
  ).catch(err => {
28
28
  console.error(err) // ensure the error is logged
@@ -1,36 +0,0 @@
1
- import type { Readable } from 'node:stream';
2
- import { Transform } from 'node:stream';
3
- import type { DeferredPromise } from '@naturalcycles/js-lib/promise';
4
- type AnyStream = NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream;
5
- export interface PipelineOptions {
6
- /**
7
- * Set to true to allow ERR_STREAM_PREMATURE_CLOSE.
8
- * Required to support graceful close when using transformLimit
9
- */
10
- allowClose?: boolean;
11
- /**
12
- * Set to true to allow graceful abort (via AbortSignal).
13
- * "Graceful" means it'll swallow the AbortError and let the pipeline resolve normally.
14
- *
15
- * Default is false - AbortError will be thrown.
16
- */
17
- allowGracefulAbort?: boolean;
18
- signal?: AbortSignal;
19
- }
20
- /**
21
- * Promisified `stream.pipeline`.
22
- *
23
- * Supports opt.allowClose, which allows transformLimit to work (to actually stop source Readable)
24
- * without throwing an error (ERR_STREAM_PREMATURE_CLOSE).
25
- */
26
- export declare function _pipeline(streams: AnyStream[], opt?: PipelineOptions): Promise<void>;
27
- /**
28
- * Convenience function to make _pipeline collect all items at the end of the stream (should be Transform, not Writeable!)
29
- * and return.
30
- */
31
- export declare function _pipelineToArray<T>(streams: AnyStream[], opt?: PipelineOptions): Promise<T[]>;
32
- export declare class AbortableTransform extends Transform {
33
- sourceReadable?: Readable;
34
- streamDone?: DeferredPromise;
35
- }
36
- export {};
@@ -1,82 +0,0 @@
1
- import { Transform } from 'node:stream';
2
- import { pipeline } from 'node:stream/promises';
3
- import { _last } from '@naturalcycles/js-lib/array/array.util.js';
4
- import { pDefer } from '@naturalcycles/js-lib/promise/pDefer.js';
5
- import { writablePushToArray } from '../writable/writablePushToArray.js';
6
- /**
7
- * Promisified `stream.pipeline`.
8
- *
9
- * Supports opt.allowClose, which allows transformLimit to work (to actually stop source Readable)
10
- * without throwing an error (ERR_STREAM_PREMATURE_CLOSE).
11
- */
12
- export async function _pipeline(streams, opt = {}) {
13
- const first = streams[0];
14
- const rest = streams.slice(1);
15
- if (opt.allowClose) {
16
- // Do the magic of making the pipeline "abortable"
17
- //
18
- // How does it work:
19
- // It finds `sourceReadable` (basically, it's just first item in the passed array of streams)
20
- // Finds last "writable" (last item), patches the `_final` method of it to detect when the whole pipeline is "done",
21
- // sets the `streamDone` DeferredPromise that resolves when the pipeline is done.
22
- // Scans through all passed items, finds those that are capable of "closing" the stream
23
- // (currently its `transformLimit` or `transformMap`)
24
- // Patches them by attaching `sourceReadable` and `streamDone`.
25
- // These items (transformLimit and transformMap), when they need to "close the stream" - call `pipelineClose`.
26
- // `pipelineClose` is the result of 2 sleepless nights of googling and experimentation:)
27
- // It does:
28
- // 1. Stops the "downstream" by doing `this.push(null)`.
29
- // 2. Pauses the `sourceReadable` by calling sourceReadable.unpipe()
30
- // 3. Waits for `streamDone` to ensure that downstream chunks are fully processed (e.g written to disk).
31
- // 4. Calls `sourceReadable.destroy()`, which emits ERR_STREAM_PREMATURE_CLOSE
32
- // 5. _pipeline (this function) catches that specific error and suppresses it (because it's expected and
33
- // inevitable in this flow). Know a better way to close the stream? Tell me!
34
- const streamDone = pDefer();
35
- const sourceReadable = first;
36
- const last = _last(streams);
37
- const lastFinal = last._final?.bind(last) || ((cb) => cb());
38
- last._final = cb => {
39
- lastFinal(() => {
40
- cb();
41
- streamDone.resolve();
42
- });
43
- };
44
- rest.forEach(s => {
45
- // console.log(s)
46
- if (s instanceof AbortableTransform || s.constructor.name === 'AbortableTransform') {
47
- // console.log(`found ${s.constructor.name}, setting props`)
48
- ;
49
- s.sourceReadable = sourceReadable;
50
- s.streamDone = streamDone;
51
- }
52
- });
53
- }
54
- try {
55
- return await pipeline([first, ...rest], opt);
56
- }
57
- catch (err) {
58
- if (opt.allowClose && err?.code === 'ERR_STREAM_PREMATURE_CLOSE') {
59
- console.log('_pipeline closed (as expected)');
60
- return;
61
- }
62
- if (opt.allowGracefulAbort && err?.name === 'AbortError') {
63
- console.log('_pipeline closed via AbortSignal (as expected)');
64
- return;
65
- }
66
- // console.log(`_pipeline error`, err)
67
- throw err;
68
- }
69
- }
70
- /**
71
- * Convenience function to make _pipeline collect all items at the end of the stream (should be Transform, not Writeable!)
72
- * and return.
73
- */
74
- export async function _pipelineToArray(streams, opt = {}) {
75
- const a = [];
76
- await _pipeline([...streams, writablePushToArray(a)], opt);
77
- return a;
78
- }
79
- export class AbortableTransform extends Transform {
80
- sourceReadable;
81
- streamDone;
82
- }
@@ -1,19 +0,0 @@
1
- import type { AbortableAsyncMapper, IndexedMapper } from '@naturalcycles/js-lib/types';
2
- import type { ReadableTyped } from '../stream.model.js';
3
- import type { TransformMapOptions } from '../transform/transformMap.js';
4
- /**
5
- * Convenience function to do `.forEach` over a Readable.
6
- * Typed! (unlike default Readable).
7
- *
8
- * Try native readable.forEach() instead!
9
- *
10
- * @experimental
11
- */
12
- export declare function readableForEach<T>(readable: ReadableTyped<T>, mapper: AbortableAsyncMapper<T, void>, opt?: TransformMapOptions<T, void>): Promise<void>;
13
- /**
14
- * Convenience function to do `.forEach` over a Readable.
15
- * Typed! (unlike default Readable).
16
- *
17
- * @experimental
18
- */
19
- export declare function readableForEachSync<T>(readable: ReadableTyped<T>, mapper: IndexedMapper<T, void>): Promise<void>;
@@ -1,30 +0,0 @@
1
- import { _passNothingPredicate } from '@naturalcycles/js-lib/types';
2
- import { _pipeline } from '../pipeline/pipeline.js';
3
- import { transformMap } from '../transform/transformMap.js';
4
- /**
5
- * Convenience function to do `.forEach` over a Readable.
6
- * Typed! (unlike default Readable).
7
- *
8
- * Try native readable.forEach() instead!
9
- *
10
- * @experimental
11
- */
12
- export async function readableForEach(readable, mapper, opt = {}) {
13
- await _pipeline([
14
- readable,
15
- transformMap(mapper, { ...opt, predicate: _passNothingPredicate }),
16
- ]);
17
- }
18
- /**
19
- * Convenience function to do `.forEach` over a Readable.
20
- * Typed! (unlike default Readable).
21
- *
22
- * @experimental
23
- */
24
- export async function readableForEachSync(readable, mapper) {
25
- // async iteration
26
- let index = 0;
27
- for await (const item of readable) {
28
- mapper(item, index++);
29
- }
30
- }
@@ -1,114 +0,0 @@
1
- import type { Readable, Writable } from 'node:stream'
2
- import { Transform } from 'node:stream'
3
- import { pipeline } from 'node:stream/promises'
4
- import { _last } from '@naturalcycles/js-lib/array/array.util.js'
5
- import type { DeferredPromise } from '@naturalcycles/js-lib/promise'
6
- import { pDefer } from '@naturalcycles/js-lib/promise/pDefer.js'
7
- import type { AnyFunction } from '@naturalcycles/js-lib/types'
8
- import { writablePushToArray } from '../writable/writablePushToArray.js'
9
-
10
- type AnyStream = NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream
11
-
12
- export interface PipelineOptions {
13
- /**
14
- * Set to true to allow ERR_STREAM_PREMATURE_CLOSE.
15
- * Required to support graceful close when using transformLimit
16
- */
17
- allowClose?: boolean
18
-
19
- /**
20
- * Set to true to allow graceful abort (via AbortSignal).
21
- * "Graceful" means it'll swallow the AbortError and let the pipeline resolve normally.
22
- *
23
- * Default is false - AbortError will be thrown.
24
- */
25
- allowGracefulAbort?: boolean
26
-
27
- signal?: AbortSignal
28
- }
29
-
30
- /**
31
- * Promisified `stream.pipeline`.
32
- *
33
- * Supports opt.allowClose, which allows transformLimit to work (to actually stop source Readable)
34
- * without throwing an error (ERR_STREAM_PREMATURE_CLOSE).
35
- */
36
- export async function _pipeline(streams: AnyStream[], opt: PipelineOptions = {}): Promise<void> {
37
- const first = streams[0] as any
38
- const rest = streams.slice(1)
39
-
40
- if (opt.allowClose) {
41
- // Do the magic of making the pipeline "abortable"
42
- //
43
- // How does it work:
44
- // It finds `sourceReadable` (basically, it's just first item in the passed array of streams)
45
- // Finds last "writable" (last item), patches the `_final` method of it to detect when the whole pipeline is "done",
46
- // sets the `streamDone` DeferredPromise that resolves when the pipeline is done.
47
- // Scans through all passed items, finds those that are capable of "closing" the stream
48
- // (currently its `transformLimit` or `transformMap`)
49
- // Patches them by attaching `sourceReadable` and `streamDone`.
50
- // These items (transformLimit and transformMap), when they need to "close the stream" - call `pipelineClose`.
51
- // `pipelineClose` is the result of 2 sleepless nights of googling and experimentation:)
52
- // It does:
53
- // 1. Stops the "downstream" by doing `this.push(null)`.
54
- // 2. Pauses the `sourceReadable` by calling sourceReadable.unpipe()
55
- // 3. Waits for `streamDone` to ensure that downstream chunks are fully processed (e.g written to disk).
56
- // 4. Calls `sourceReadable.destroy()`, which emits ERR_STREAM_PREMATURE_CLOSE
57
- // 5. _pipeline (this function) catches that specific error and suppresses it (because it's expected and
58
- // inevitable in this flow). Know a better way to close the stream? Tell me!
59
- const streamDone = pDefer()
60
- const sourceReadable = first as Readable
61
- const last = _last(streams) as Writable
62
- const lastFinal = last._final?.bind(last) || ((cb: AnyFunction) => cb())
63
- last._final = cb => {
64
- lastFinal(() => {
65
- cb()
66
- streamDone.resolve()
67
- })
68
- }
69
-
70
- rest.forEach(s => {
71
- // console.log(s)
72
- if (s instanceof AbortableTransform || s.constructor.name === 'AbortableTransform') {
73
- // console.log(`found ${s.constructor.name}, setting props`)
74
- ;(s as AbortableTransform).sourceReadable = sourceReadable
75
- ;(s as AbortableTransform).streamDone = streamDone
76
- }
77
- })
78
- }
79
-
80
- try {
81
- return await pipeline([first, ...rest], opt)
82
- } catch (err) {
83
- if (opt.allowClose && (err as any)?.code === 'ERR_STREAM_PREMATURE_CLOSE') {
84
- console.log('_pipeline closed (as expected)')
85
- return
86
- }
87
-
88
- if (opt.allowGracefulAbort && (err as any)?.name === 'AbortError') {
89
- console.log('_pipeline closed via AbortSignal (as expected)')
90
- return
91
- }
92
-
93
- // console.log(`_pipeline error`, err)
94
- throw err
95
- }
96
- }
97
-
98
- /**
99
- * Convenience function to make _pipeline collect all items at the end of the stream (should be Transform, not Writeable!)
100
- * and return.
101
- */
102
- export async function _pipelineToArray<T>(
103
- streams: AnyStream[],
104
- opt: PipelineOptions = {},
105
- ): Promise<T[]> {
106
- const a: T[] = []
107
- await _pipeline([...streams, writablePushToArray(a)], opt)
108
- return a
109
- }
110
-
111
- export class AbortableTransform extends Transform {
112
- sourceReadable?: Readable
113
- streamDone?: DeferredPromise
114
- }
@@ -1,42 +0,0 @@
1
- import type { AbortableAsyncMapper, IndexedMapper } from '@naturalcycles/js-lib/types'
2
- import { _passNothingPredicate } from '@naturalcycles/js-lib/types'
3
- import { _pipeline } from '../pipeline/pipeline.js'
4
- import type { ReadableTyped } from '../stream.model.js'
5
- import type { TransformMapOptions } from '../transform/transformMap.js'
6
- import { transformMap } from '../transform/transformMap.js'
7
-
8
- /**
9
- * Convenience function to do `.forEach` over a Readable.
10
- * Typed! (unlike default Readable).
11
- *
12
- * Try native readable.forEach() instead!
13
- *
14
- * @experimental
15
- */
16
- export async function readableForEach<T>(
17
- readable: ReadableTyped<T>,
18
- mapper: AbortableAsyncMapper<T, void>,
19
- opt: TransformMapOptions<T, void> = {},
20
- ): Promise<void> {
21
- await _pipeline([
22
- readable,
23
- transformMap<T, void>(mapper, { ...opt, predicate: _passNothingPredicate }),
24
- ])
25
- }
26
-
27
- /**
28
- * Convenience function to do `.forEach` over a Readable.
29
- * Typed! (unlike default Readable).
30
- *
31
- * @experimental
32
- */
33
- export async function readableForEachSync<T>(
34
- readable: ReadableTyped<T>,
35
- mapper: IndexedMapper<T, void>,
36
- ): Promise<void> {
37
- // async iteration
38
- let index = 0
39
- for await (const item of readable) {
40
- mapper(item, index++)
41
- }
42
- }