@naturalcycles/nodejs-lib 15.22.0 → 15.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/dist/exec2/exec2.js +1 -0
  2. package/dist/stream/index.d.ts +1 -2
  3. package/dist/stream/index.js +1 -2
  4. package/dist/stream/ndjson/ndjsonMap.d.ts +1 -1
  5. package/dist/stream/ndjson/ndjsonMap.js +13 -15
  6. package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +2 -2
  7. package/dist/stream/ndjson/ndjsonStreamForEach.js +9 -15
  8. package/dist/stream/pipeline.d.ts +93 -0
  9. package/dist/stream/pipeline.js +262 -0
  10. package/dist/stream/stream.util.d.ts +1 -3
  11. package/dist/stream/stream.util.js +1 -20
  12. package/dist/stream/transform/transformChunk.d.ts +5 -8
  13. package/dist/stream/transform/transformChunk.js +4 -2
  14. package/dist/stream/transform/transformFlatten.d.ts +1 -0
  15. package/dist/stream/transform/transformFlatten.js +15 -4
  16. package/dist/stream/transform/transformLimit.d.ts +3 -26
  17. package/dist/stream/transform/transformLimit.js +14 -23
  18. package/dist/stream/transform/transformMap.d.ts +5 -0
  19. package/dist/stream/transform/transformMap.js +22 -18
  20. package/dist/stream/transform/transformMapSync.d.ts +5 -3
  21. package/dist/stream/transform/transformMapSync.js +7 -8
  22. package/dist/stream/transform/transformSplit.js +2 -1
  23. package/dist/stream/transform/transformTee.js +4 -2
  24. package/dist/stream/writable/writableForEach.d.ts +2 -1
  25. package/dist/stream/writable/writableFork.js +2 -2
  26. package/package.json +1 -1
  27. package/src/exec2/exec2.ts +1 -0
  28. package/src/stream/index.ts +1 -2
  29. package/src/stream/ndjson/ndjsonMap.ts +12 -22
  30. package/src/stream/ndjson/ndjsonStreamForEach.ts +8 -15
  31. package/src/stream/pipeline.ts +351 -0
  32. package/src/stream/stream.util.ts +1 -29
  33. package/src/stream/transform/transformChunk.ts +8 -11
  34. package/src/stream/transform/transformFlatten.ts +16 -4
  35. package/src/stream/transform/transformLimit.ts +20 -51
  36. package/src/stream/transform/transformMap.ts +31 -20
  37. package/src/stream/transform/transformMapSync.ts +14 -8
  38. package/src/stream/transform/transformSplit.ts +2 -1
  39. package/src/stream/transform/transformTee.ts +5 -2
  40. package/src/stream/writable/writableForEach.ts +2 -2
  41. package/src/stream/writable/writableFork.ts +2 -2
  42. package/dist/stream/pipeline/pipeline.d.ts +0 -36
  43. package/dist/stream/pipeline/pipeline.js +0 -82
  44. package/dist/stream/readable/readableForEach.d.ts +0 -19
  45. package/dist/stream/readable/readableForEach.js +0 -30
  46. package/src/stream/pipeline/pipeline.ts +0 -114
  47. package/src/stream/readable/readableForEach.ts +0 -42
package/dist/exec2/exec2.js
@@ -250,6 +250,7 @@ class Exec2 {
    console.log([
      ' ',
      dimGrey(envString),
+     // todo: only before first space
      white(_substringAfterLast(cmd, '/')),
      ...(opt.args || []),
    ]
package/dist/stream/index.d.ts
@@ -5,11 +5,10 @@ export * from './ndjson/ndjsonMap.js';
  export * from './ndjson/ndjsonStreamForEach.js';
  export * from './ndjson/transformJsonParse.js';
  export * from './ndjson/transformToNDJson.js';
- export * from './pipeline/pipeline.js';
+ export * from './pipeline.js';
  export * from './progressLogger.js';
  export * from './readable/readableCombined.js';
  export * from './readable/readableCreate.js';
- export * from './readable/readableForEach.js';
  export * from './readable/readableFromArray.js';
  export * from './readable/readableToArray.js';
  export * from './stream.model.js';
package/dist/stream/index.js
@@ -5,11 +5,10 @@ export * from './ndjson/ndjsonMap.js';
  export * from './ndjson/ndjsonStreamForEach.js';
  export * from './ndjson/transformJsonParse.js';
  export * from './ndjson/transformToNDJson.js';
- export * from './pipeline/pipeline.js';
+ export * from './pipeline.js';
  export * from './progressLogger.js';
  export * from './readable/readableCombined.js';
  export * from './readable/readableCreate.js';
- export * from './readable/readableForEach.js';
  export * from './readable/readableFromArray.js';
  export * from './readable/readableToArray.js';
  export * from './stream.model.js';
package/dist/stream/ndjson/ndjsonMap.d.ts
@@ -1,5 +1,5 @@
  import type { AbortableAsyncMapper } from '@naturalcycles/js-lib/types';
- import { type TransformLogProgressOptions, type TransformMapOptions } from '../index.js';
+ import type { TransformLogProgressOptions, TransformMapOptions } from '../index.js';
  export interface NDJSONMapOptions<IN = any, OUT = IN> extends TransformMapOptions<IN, OUT>, TransformLogProgressOptions<IN> {
    inputFilePath: string;
    outputFilePath: string;
package/dist/stream/ndjson/ndjsonMap.js
@@ -1,6 +1,5 @@
  import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
- import { createReadStreamAsNDJSON, createWriteStreamAsNDJSON, transformFlatten, } from '../index.js';
- import { _pipeline, transformLimit, transformLogProgress, transformMap } from '../index.js';
+ import { Pipeline } from '../pipeline.js';
  /**
   * Unzips input file automatically, if it ends with `.gz`.
   * Zips output file automatically, if it ends with `.gz`.
@@ -11,17 +10,16 @@ export async function ndjsonMap(mapper, opt) {
      inputFilePath,
      outputFilePath,
    });
-   const readable = createReadStreamAsNDJSON(inputFilePath).take(limitInput || Number.POSITIVE_INFINITY);
-   await _pipeline([
-     readable,
-     transformLogProgress({ metric: 'read', ...opt }),
-     transformMap(mapper, {
-       errorMode: ErrorMode.SUPPRESS,
-       ...opt,
-     }),
-     transformFlatten(),
-     transformLimit({ limit: limitOutput, sourceReadable: readable }),
-     transformLogProgress({ metric: 'saved', logEvery: logEveryOutput }),
-     ...createWriteStreamAsNDJSON(outputFilePath),
-   ]);
+   await Pipeline.fromNDJsonFile(inputFilePath)
+     .limitSource(limitInput)
+     .logProgress({ metric: 'read', ...opt })
+     .map(mapper, {
+       errorMode: ErrorMode.SUPPRESS,
+       ...opt,
+     })
+     .flattenIfNeeded()
+     // .typeCastAs<OUT>()
+     .limit(limitOutput)
+     .logProgress({ metric: 'saved', logEvery: logEveryOutput })
+     .toNDJsonFile(outputFilePath);
  }
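
For callers, ndjsonMap keeps its public signature - only the internals moved to the new fluent Pipeline API. A minimal usage sketch (file paths and row shape are hypothetical; assumes the '@naturalcycles/nodejs-lib/stream' subpath export):

  import { ndjsonMap } from '@naturalcycles/nodejs-lib/stream'

  // Maps each row of the (optionally gzipped) NDJSON input file and writes the
  // mapped rows out. Per-row errors are suppressed by default (ErrorMode.SUPPRESS),
  // as the implementation above shows.
  await ndjsonMap(
    async (row: { id: string }) => ({ ...row, processed: true }),
    {
      inputFilePath: './input.ndjson.gz',
      outputFilePath: './output.ndjson.gz',
    },
  )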
package/dist/stream/ndjson/ndjsonStreamForEach.d.ts
@@ -1,6 +1,6 @@
  import type { AbortableAsyncMapper } from '@naturalcycles/js-lib/types';
- import { type TransformLogProgressOptions } from '../transform/transformLogProgress.js';
- import { type TransformMapOptions } from '../transform/transformMap.js';
+ import type { TransformLogProgressOptions } from '../transform/transformLogProgress.js';
+ import type { TransformMapOptions } from '../transform/transformMap.js';
  export interface NDJSONStreamForEachOptions<IN = any> extends TransformMapOptions<IN, void>, TransformLogProgressOptions<IN> {
    inputFilePath: string;
  }
package/dist/stream/ndjson/ndjsonStreamForEach.js
@@ -1,21 +1,15 @@
  import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
- import { _pipeline } from '../pipeline/pipeline.js';
- import { transformLogProgress, } from '../transform/transformLogProgress.js';
- import { transformMap } from '../transform/transformMap.js';
- import { writableVoid } from '../writable/writableVoid.js';
- import { createReadStreamAsNDJSON } from './createReadStreamAsNDJSON.js';
+ import { Pipeline } from '../pipeline.js';
  /**
   * Convenience function to `forEach` through an ndjson file.
   */
  export async function ndjsonStreamForEach(mapper, opt) {
-   await _pipeline([
-     createReadStreamAsNDJSON(opt.inputFilePath),
-     transformMap(mapper, {
-       errorMode: ErrorMode.THROW_AGGREGATED,
-       ...opt,
-       predicate: () => true, // to log progress properly
-     }),
-     transformLogProgress(opt),
-     writableVoid(),
-   ]);
+   await Pipeline.fromNDJsonFile(opt.inputFilePath)
+     .map(mapper, {
+       errorMode: ErrorMode.THROW_AGGREGATED,
+       ...opt,
+       predicate: () => true, // to log progress properly
+     })
+     .logProgress(opt)
+     .run();
  }
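
ndjsonStreamForEach is likewise unchanged for callers. A usage sketch (hypothetical path and handler, same assumed import path as above):

  import { ndjsonStreamForEach } from '@naturalcycles/nodejs-lib/stream'

  // Iterates the file row by row; row errors are aggregated and thrown at the
  // end (ErrorMode.THROW_AGGREGATED), per the implementation above.
  await ndjsonStreamForEach(
    async (row: { id: string }) => {
      console.log(row.id)
    },
    { inputFilePath: './input.ndjson.gz' },
  )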
package/dist/stream/pipeline.d.ts
@@ -0,0 +1,93 @@
+ import { type Transform } from 'node:stream';
+ import type { ReadableStream as WebReadableStream } from 'node:stream/web';
+ import { type ZlibOptions } from 'node:zlib';
+ import type { AbortableAsyncMapper, AsyncIndexedMapper, AsyncPredicate, END, IndexedMapper, NonNegativeInteger, PositiveInteger, Predicate, SKIP } from '@naturalcycles/js-lib/types';
+ import type { ReadableTyped, TransformOptions, TransformTyped, WritableTyped } from './stream.model.js';
+ import { type TransformLogProgressOptions } from './transform/transformLogProgress.js';
+ import { type TransformMapOptions } from './transform/transformMap.js';
+ import { type TransformMapSimpleOptions } from './transform/transformMapSimple.js';
+ import { type TransformMapSyncOptions } from './transform/transformMapSync.js';
+ import { type TransformOffsetOptions } from './transform/transformOffset.js';
+ import { type TransformTapOptions } from './transform/transformTap.js';
+ import { type TransformThrottleOptions } from './transform/transformThrottle.js';
+ export declare class Pipeline<T> {
+   private readonly source;
+   private transforms;
+   private destination?;
+   private readableLimit?;
+   private objectMode;
+   private abortableSignal;
+   private constructor();
+   static from<T>(source: ReadableTyped<T>): Pipeline<T>;
+   static fromWeb<T>(webReadableStream: WebReadableStream<T>): Pipeline<T>;
+   /**
+    * Technically same as `fromIterable` (since Array is Iterable),
+    * but named a bit friendlier.
+    */
+   static fromArray<T>(input: T[]): Pipeline<T>;
+   static fromIterable<T>(input: Iterable<T> | AsyncIterable<T>): Pipeline<T>;
+   static fromFile(sourceFilePath: string): Pipeline<Uint8Array>;
+   static fromNDJsonFile<T>(sourceFilePath: string): Pipeline<T>;
+   /**
+    * Limits the source Readable by using `.take(limit)` on it.
+    * This is THE preferred way of limiting the source.
+    */
+   limitSource(limit: NonNegativeInteger | undefined): this;
+   /**
+    * If possible - STRONGLY PREFER applying `.take(limit)` on the source Readable,
+    * as it's a clean graceful way of limiting the Readable. Example:
+    *
+    * Pipeline.from(myReadable.take(10))
+    *
+    * or
+    *
+    * Pipeline
+    *   .from(myReadable)
+    *   .limitSource(10)
+    *
+    * If applying `take` on Readable is not possible - use this method at your own risk.
+    * Why warning?
+    * The limit works by aborting the stream, and then catching the error - certainly
+    * less clean than `.take()` on the source.
+    */
+   limit(limit: NonNegativeInteger | undefined): this;
+   chunk(chunkSize: PositiveInteger, opt?: TransformOptions): Pipeline<T[]>;
+   flatten<TO>(this: Pipeline<readonly TO[]>): Pipeline<TO>;
+   flattenIfNeeded(): Pipeline<T extends readonly (infer TO)[] ? TO : T>;
+   logProgress(opt?: TransformLogProgressOptions): this;
+   map<TO>(mapper: AbortableAsyncMapper<T, TO | typeof SKIP | typeof END>, opt?: TransformMapOptions<T, TO>): Pipeline<TO>;
+   mapSync<TO>(mapper: IndexedMapper<T, TO | typeof SKIP | typeof END>, opt?: TransformMapSyncOptions): Pipeline<TO>;
+   mapSimple<TO>(mapper: IndexedMapper<T, TO>, opt?: TransformMapSimpleOptions): Pipeline<TO>;
+   filter(predicate: AsyncPredicate<T>, opt?: TransformMapOptions): this;
+   filterSync(predicate: Predicate<T>, opt?: TransformOptions): this;
+   offset(opt: TransformOffsetOptions): this;
+   tap(fn: AsyncIndexedMapper<T, any>, opt?: TransformTapOptions): this;
+   throttle(opt: TransformThrottleOptions): this;
+   transform<TO>(transform: TransformTyped<T, TO>): Pipeline<TO>;
+   /**
+    * Helper method to add multiple transforms at once.
+    * Not type safe! Prefer using singular `transform()` multiple times for type safety.
+    */
+   transformMany<TO>(transforms: Transform[]): Pipeline<TO>;
+   /**
+    * Utility method just to conveniently type-cast the current Pipeline type.
+    * No runtime effect.
+    */
+   typeCastAs<TO>(): Pipeline<TO>;
+   setObjectMode(objectMode: boolean): this;
+   /**
+    * Transform the stream of Objects into a stream of JSON lines.
+    * Technically, it goes into objectMode=false, so it's a binary stream at the end.
+    */
+   toNDJson(): Pipeline<Uint8Array>;
+   parseNDJson<TO = unknown>(this: Pipeline<Uint8Array>): Pipeline<TO>;
+   gzip(this: Pipeline<Uint8Array>, opt?: ZlibOptions): Pipeline<Uint8Array>;
+   gunzip(this: Pipeline<Uint8Array>, opt?: ZlibOptions): Pipeline<Uint8Array>;
+   toArray(opt?: TransformOptions): Promise<T[]>;
+   toFile(outputFilePath: string): Promise<void>;
+   toNDJsonFile(outputFilePath: string): Promise<void>;
+   to(destination: WritableTyped<T>): Promise<void>;
+   forEach(fn: AsyncIndexedMapper<T, void>, opt?: TransformMapOptions<T, void>): Promise<void>;
+   forEachSync(fn: IndexedMapper<T, void>, opt?: TransformMapSyncOptions<T, void>): Promise<void>;
+   run(): Promise<void>;
+ }
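
The declarations above define a fluent builder: intermediate methods push a Transform and return the Pipeline, while terminal methods (toArray, toFile, toNDJsonFile, to, forEach, forEachSync, run) execute it. A sketch based only on these signatures (the Row type and file paths are hypothetical):

  import { Pipeline } from '@naturalcycles/nodejs-lib/stream'

  interface Row { id: string; v: number }

  // Read gzipped NDJSON, filter and map it, write it back out
  await Pipeline.fromNDJsonFile<Row>('./in.ndjson.gz')
    .filter(async row => row.v > 0)
    .map(async row => ({ ...row, v: row.v * 2 }))
    .logProgress({ metric: 'rows' })
    .toNDJsonFile('./out.ndjson.gz')

  // In-memory pipelines work the same way
  const doubled = await Pipeline.fromArray([1, 2, 3])
    .mapSimple(n => n * 2)
    .toArray() // [2, 4, 6]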
package/dist/stream/pipeline.js
@@ -0,0 +1,262 @@
+ import { Readable } from 'node:stream';
+ import { pipeline } from 'node:stream/promises';
+ import { createUnzip } from 'node:zlib';
+ import { createGzip } from 'node:zlib';
+ import { createAbortableSignal } from '@naturalcycles/js-lib';
+ import { fs2 } from '../fs/fs2.js';
+ import { createReadStreamAsNDJSON } from './ndjson/createReadStreamAsNDJSON.js';
+ import { transformJsonParse } from './ndjson/transformJsonParse.js';
+ import { transformToNDJson } from './ndjson/transformToNDJson.js';
+ import { PIPELINE_GRACEFUL_ABORT } from './stream.util.js';
+ import { transformChunk } from './transform/transformChunk.js';
+ import { transformFilterSync } from './transform/transformFilter.js';
+ import { transformFlatten, transformFlattenIfNeeded } from './transform/transformFlatten.js';
+ import { transformLimit } from './transform/transformLimit.js';
+ import { transformLogProgress, } from './transform/transformLogProgress.js';
+ import { transformMap } from './transform/transformMap.js';
+ import { transformMapSimple, } from './transform/transformMapSimple.js';
+ import { transformMapSync } from './transform/transformMapSync.js';
+ import { transformOffset } from './transform/transformOffset.js';
+ import { transformSplitOnNewline } from './transform/transformSplit.js';
+ import { transformTap } from './transform/transformTap.js';
+ import { transformThrottle } from './transform/transformThrottle.js';
+ import { writablePushToArray } from './writable/writablePushToArray.js';
+ import { writableVoid } from './writable/writableVoid.js';
+ export class Pipeline {
+   // biome-ignore lint/correctness/noUnusedPrivateClassMembers: ok
+   source;
+   transforms = [];
+   destination;
+   readableLimit;
+   // biome-ignore lint/correctness/noUnusedPrivateClassMembers: ok
+   objectMode;
+   abortableSignal = createAbortableSignal();
+   constructor(source, objectMode = true) {
+     this.source = source;
+     this.objectMode = objectMode;
+   }
+   static from(source) {
+     return new Pipeline(source);
+   }
+   static fromWeb(webReadableStream) {
+     return new Pipeline(Readable.fromWeb(webReadableStream));
+   }
+   /**
+    * Technically same as `fromIterable` (since Array is Iterable),
+    * but named a bit friendlier.
+    */
+   static fromArray(input) {
+     return new Pipeline(Readable.from(input));
+   }
+   static fromIterable(input) {
+     return new Pipeline(Readable.from(input));
+   }
+   static fromFile(sourceFilePath) {
+     return new Pipeline(fs2.createReadStream(sourceFilePath), false);
+   }
+   static fromNDJsonFile(sourceFilePath) {
+     return new Pipeline(createReadStreamAsNDJSON(sourceFilePath));
+   }
+   /**
+    * Limits the source Readable by using `.take(limit)` on it.
+    * This is THE preferred way of limiting the source.
+    */
+   limitSource(limit) {
+     this.readableLimit = limit;
+     return this;
+   }
+   /**
+    * If possible - STRONGLY PREFER applying `.take(limit)` on the source Readable,
+    * as it's a clean graceful way of limiting the Readable. Example:
+    *
+    * Pipeline.from(myReadable.take(10))
+    *
+    * or
+    *
+    * Pipeline
+    *   .from(myReadable)
+    *   .limitSource(10)
+    *
+    * If applying `take` on Readable is not possible - use this method at your own risk.
+    * Why warning?
+    * The limit works by aborting the stream, and then catching the error - certainly
+    * less clean than `.take()` on the source.
+    */
+   limit(limit) {
+     this.transforms.push(transformLimit({
+       limit,
+       signal: this.abortableSignal,
+     }));
+     return this;
+   }
+   chunk(chunkSize, opt) {
+     this.transforms.push(transformChunk(chunkSize, opt));
+     return this;
+   }
+   flatten() {
+     this.transforms.push(transformFlatten());
+     return this;
+   }
+   flattenIfNeeded() {
+     this.transforms.push(transformFlattenIfNeeded());
+     return this;
+   }
+   // TransformLogProgressOptions intentionally doesn't have <T> passed, as it's inconvenient in many cases
+   logProgress(opt) {
+     this.transforms.push(transformLogProgress(opt));
+     return this;
+   }
+   map(mapper, opt) {
+     this.transforms.push(transformMap(mapper, {
+       ...opt,
+       signal: this.abortableSignal,
+     }));
+     return this;
+   }
+   mapSync(mapper, opt) {
+     this.transforms.push(transformMapSync(mapper, {
+       ...opt,
+       signal: this.abortableSignal,
+     }));
+     return this;
+   }
+   mapSimple(mapper, opt) {
+     this.transforms.push(transformMapSimple(mapper, opt));
+     return this;
+   }
+   filter(predicate, opt) {
+     this.transforms.push(transformMap(v => v, {
+       predicate,
+       ...opt,
+       signal: this.abortableSignal,
+     }));
+     return this;
+   }
+   filterSync(predicate, opt) {
+     this.transforms.push(transformFilterSync(predicate, opt));
+     return this;
+   }
+   offset(opt) {
+     this.transforms.push(transformOffset(opt));
+     return this;
+   }
+   tap(fn, opt) {
+     this.transforms.push(transformTap(fn, opt));
+     return this;
+   }
+   throttle(opt) {
+     this.transforms.push(transformThrottle(opt));
+     return this;
+   }
+   // todo: tee/fork
+   transform(transform) {
+     this.transforms.push(transform);
+     return this;
+   }
+   /**
+    * Helper method to add multiple transforms at once.
+    * Not type safe! Prefer using singular `transform()` multiple times for type safety.
+    */
+   transformMany(transforms) {
+     this.transforms.push(...transforms);
+     return this;
+   }
+   /**
+    * Utility method just to conveniently type-cast the current Pipeline type.
+    * No runtime effect.
+    */
+   typeCastAs() {
+     return this;
+   }
+   setObjectMode(objectMode) {
+     this.objectMode = objectMode;
+     return this;
+   }
+   /**
+    * Transform the stream of Objects into a stream of JSON lines.
+    * Technically, it goes into objectMode=false, so it's a binary stream at the end.
+    */
+   toNDJson() {
+     this.transforms.push(transformToNDJson());
+     this.objectMode = false;
+     return this;
+   }
+   parseNDJson() {
+     // It was said that transformJsonParse() separately is 10% or more slower than .map(line => JSON.parse(line))
+     // So, we can investigate a speedup
+     this.transforms.push(transformSplitOnNewline(), transformJsonParse());
+     this.objectMode = true;
+     return this;
+   }
+   gzip(opt) {
+     this.transforms.push(createGzip(opt));
+     this.objectMode = false;
+     return this;
+   }
+   gunzip(opt) {
+     this.transforms.push(createUnzip(opt));
+     this.objectMode = false;
+     return this;
+   }
+   async toArray(opt) {
+     const arr = [];
+     this.destination = writablePushToArray(arr, opt);
+     await this.run();
+     return arr;
+   }
+   async toFile(outputFilePath) {
+     fs2.ensureFile(outputFilePath);
+     this.destination = fs2.createWriteStream(outputFilePath);
+     await this.run();
+   }
+   async toNDJsonFile(outputFilePath) {
+     fs2.ensureFile(outputFilePath);
+     this.transforms.push(transformToNDJson());
+     if (outputFilePath.endsWith('.gz')) {
+       this.transforms.push(createGzip({
+         // chunkSize: 64 * 1024, // no observed speedup
+       }));
+     }
+     this.destination = fs2.createWriteStream(outputFilePath, {
+       // highWaterMark: 64 * 1024, // no observed speedup
+     });
+     await this.run();
+   }
+   async to(destination) {
+     this.destination = destination;
+     await this.run();
+   }
+   async forEach(fn, opt) {
+     this.transforms.push(transformMap(fn, {
+       ...opt,
+       signal: this.abortableSignal,
+     }));
+     await this.run();
+   }
+   async forEachSync(fn, opt) {
+     this.transforms.push(transformMapSync(fn, {
+       ...opt,
+       signal: this.abortableSignal,
+     }));
+     await this.run();
+   }
+   async run() {
+     this.destination ||= writableVoid();
+     let { source } = this;
+     if (this.readableLimit) {
+       source = source.take(this.readableLimit);
+     }
+     try {
+       await pipeline([source, ...this.transforms, this.destination], {
+         signal: this.abortableSignal,
+       });
+     }
+     catch (err) {
+       if (err instanceof Error && err.cause?.message === PIPELINE_GRACEFUL_ABORT) {
+         console.log('pipeline gracefully aborted'); // todo: this message may be removed later
+         return;
+       }
+       throw err;
+     }
+   }
+ }
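
Note how limit() and run() cooperate: limit() hands the shared abortableSignal to transformLimit, and run() treats a rejection whose cause carries PIPELINE_GRACEFUL_ABORT as successful completion. A sketch of the expected behavior (hypothetical generator):

  import { Pipeline } from '@naturalcycles/nodejs-lib/stream'

  async function* numbers(): AsyncGenerator<number> {
    for (let i = 0; ; i++) yield i // endless source
  }

  // Only the first two items reach the destination: the limit transform aborts
  // the signal, stream.pipeline() rejects, and run() recognises the
  // PIPELINE_GRACEFUL_ABORT cause and returns normally.
  const firstTwo = await Pipeline.fromIterable(numbers())
    .limit(2)
    .toArray() // expected: [0, 1]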
package/dist/stream/stream.util.d.ts
@@ -1,3 +1 @@
- import type { Readable } from 'node:stream';
- import type { CommonLogger } from '@naturalcycles/js-lib/log';
- export declare function pipelineClose(name: string, readableDownstream: Readable, sourceReadable: Readable | undefined, streamDone: Promise<void> | undefined, logger: CommonLogger): void;
+ export declare const PIPELINE_GRACEFUL_ABORT = "PIPELINE_GRACEFUL_ABORT";
package/dist/stream/stream.util.js
@@ -1,20 +1 @@
- export function pipelineClose(name, readableDownstream, sourceReadable, streamDone, logger) {
-   readableDownstream.push(null); // this closes the stream, so downstream Readable will receive `end` and won't write anything
-   if (!sourceReadable) {
-     logger.warn(`${name} sourceReadable is not provided, readable stream will not be stopped`);
-   }
-   else {
-     logger.log(`${name} is calling readable.unpipe() to pause the stream`);
-     sourceReadable.unpipe(); // it is expected to pause the stream
-     if (!streamDone) {
-       logger.log(`${name} streamDone is not provided, will do readable.destroy right away`);
-       sourceReadable.destroy();
-     }
-     else {
-       void streamDone.then(() => {
-         logger.log(`${name} streamDone, calling readable.destroy()`);
-         sourceReadable.destroy(); // this throws ERR_STREAM_PREMATURE_CLOSE
-       });
-     }
-   }
- }
+ export const PIPELINE_GRACEFUL_ABORT = 'PIPELINE_GRACEFUL_ABORT';
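
The 20-line pipelineClose() helper is thus replaced by a single marker constant: a transform aborts the shared signal with an Error whose message is PIPELINE_GRACEFUL_ABORT, and the runner inspects err.cause to tell a deliberate stop from a real failure. A sketch of that detection pattern, mirroring Pipeline.run() above (the abort call inside the transform is an assumption about transformLimit's internals):

  import { Readable, Transform, Writable } from 'node:stream'
  import { pipeline } from 'node:stream/promises'

  const PIPELINE_GRACEFUL_ABORT = 'PIPELINE_GRACEFUL_ABORT'

  async function runWithGracefulAbort(
    source: Readable,
    limiter: Transform, // assumed to call ac.abort(new Error(PIPELINE_GRACEFUL_ABORT)) on limit
    destination: Writable,
    ac: AbortController,
  ): Promise<void> {
    try {
      await pipeline([source, limiter, destination], { signal: ac.signal })
    } catch (err) {
      // per run() above, the abort reason surfaces as err.cause
      if (err instanceof Error && (err.cause as Error | undefined)?.message === PIPELINE_GRACEFUL_ABORT) {
        return // deliberate graceful stop, not a failure
      }
      throw err
    }
  }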
package/dist/stream/transform/transformChunk.d.ts
@@ -1,14 +1,11 @@
+ import type { PositiveInteger } from '@naturalcycles/js-lib/types';
  import type { TransformOptions, TransformTyped } from '../stream.model.js';
- export interface TransformChunkOptions extends TransformOptions {
-   /**
-    * How many items to include in each chunk.
-    * Last chunk will contain the remaining items, possibly less than chunkSize.
-    */
-   chunkSize: number;
- }
  /**
   * Similar to RxJS bufferCount(),
   * allows to "chunk" the input stream into chunks of `opt.chunkSize` size.
   * Last chunk will contain the remaining items, possibly less than chunkSize.
+  *
+  * `chunkSize` indicates how many items to include in each chunk.
+  * Last chunk will contain the remaining items, possibly less than chunkSize.
   */
- export declare function transformChunk<IN = any>(opt: TransformChunkOptions): TransformTyped<IN, IN[]>;
+ export declare function transformChunk<IN = any>(chunkSize: PositiveInteger, opt?: TransformOptions): TransformTyped<IN, IN[]>;
package/dist/stream/transform/transformChunk.js
@@ -3,9 +3,11 @@ import { Transform } from 'node:stream';
   * Similar to RxJS bufferCount(),
   * allows to "chunk" the input stream into chunks of `opt.chunkSize` size.
   * Last chunk will contain the remaining items, possibly less than chunkSize.
+  *
+  * `chunkSize` indicates how many items to include in each chunk.
+  * Last chunk will contain the remaining items, possibly less than chunkSize.
   */
- export function transformChunk(opt) {
-   const { chunkSize } = opt;
+ export function transformChunk(chunkSize, opt) {
    let buf = [];
    return new Transform({
      objectMode: true,
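
The chunk size moves from an options object to the first positional argument. A before/after sketch for callers (Pipeline.chunk() above already uses the new form):

  import { Pipeline, transformChunk } from '@naturalcycles/nodejs-lib/stream'

  // 15.22: transformChunk({ chunkSize: 500 })
  // 15.24: chunkSize is positional, remaining options are optional
  const chunker = transformChunk<number>(500)

  // or via the Pipeline wrapper:
  const chunks = await Pipeline.fromArray([1, 2, 3, 4, 5])
    .chunk(2)
    .toArray() // [[1, 2], [3, 4], [5]]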
package/dist/stream/transform/transformFlatten.d.ts
@@ -1,2 +1,3 @@
  import type { TransformTyped } from '../stream.model.js';
  export declare function transformFlatten<T>(): TransformTyped<T[], T>;
+ export declare function transformFlattenIfNeeded<T>(): TransformTyped<T[], T>;
package/dist/stream/transform/transformFlatten.js
@@ -3,15 +3,26 @@ export function transformFlatten() {
    return new Transform({
      objectMode: true,
      transform(chunk, _, cb) {
-       if (!Array.isArray(chunk)) {
-         // As a safety precaution, to not crash the pipeline - push as is
-         this.push(chunk);
+       for (const item of chunk) {
+         this.push(item);
        }
-       else {
+       cb(); // acknowledge
+     },
+   });
+ }
+ export function transformFlattenIfNeeded() {
+   return new Transform({
+     objectMode: true,
+     transform(chunk, _, cb) {
+       if (Array.isArray(chunk)) {
          for (const item of chunk) {
            this.push(item);
          }
        }
+       else {
+         // As a safety precaution, to not crash the pipeline - push as is
+         this.push(chunk);
+       }
        cb(); // acknowledge
      },
    });
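
transformFlatten() now assumes every chunk is an array, while the new transformFlattenIfNeeded() keeps the old pass-through behavior for non-array chunks. A small sketch of the difference via the Pipeline wrappers:

  import { Pipeline } from '@naturalcycles/nodejs-lib/stream'

  // flatten(): input must be a stream of arrays (enforced by the types)
  const flat = await Pipeline.fromArray([[1, 2], [3]])
    .flatten()
    .toArray() // [1, 2, 3]

  // flattenIfNeeded(): non-array chunks pass through unchanged
  const mixed = await Pipeline.fromArray<number | number[]>([1, [2, 3]])
    .flattenIfNeeded()
    .toArray() // [1, 2, 3]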
package/dist/stream/transform/transformLimit.d.ts
@@ -1,6 +1,4 @@
- import type { Readable } from 'node:stream';
- import type { CommonLogger } from '@naturalcycles/js-lib/log';
- import { AbortableTransform } from '../pipeline/pipeline.js';
+ import type { AbortableSignal } from '@naturalcycles/js-lib';
  import type { TransformOptions, TransformTyped } from '../stream.model.js';
  export interface TransformLimitOptions extends TransformOptions {
    /**
@@ -8,29 +6,8 @@ export interface TransformLimitOptions extends TransformOptions {
     */
    limit?: number;
    /**
-    * If provided (recommended!) - it will call readable.destroy() on limit.
-    * Without it - it will only stop the downstream consumers, but won't stop
-    * the Readable ("source" of the stream).
-    * It is almost always crucial to stop the Source too, so, please provide the Readable here!
+    * Allows to abort (gracefully stop) the stream from inside the Transform.
     */
-   sourceReadable?: Readable;
-   /**
-    * Please provide it (a Promise that resolves when the Stream is done, e.g finished consuming things)
-    * to be able to wait for Consumers before calling `readable.destroy`.
-    * Has no effect if `readable` is not provided.
-    */
-   streamDone?: Promise<void>;
-   logger?: CommonLogger;
-   /**
-    * Set to true to enable additional debug messages, e.g it'll log
-    * when readable still emits values after the limit is reached.
-    */
-   debug?: boolean;
- }
- /**
-  * Class only exists to be able to do `instanceof TransformLimit`
-  * and to set sourceReadable+streamDone to it in `_pipeline`.
-  */
- export declare class TransformLimit extends AbortableTransform {
+   signal: AbortableSignal;
  }
  export declare function transformLimit<IN>(opt: TransformLimitOptions): TransformTyped<IN, IN>;
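
Standalone transformLimit() now requires the AbortableSignal that replaces the old sourceReadable/streamDone machinery. A sketch of direct usage (createAbortableSignal is imported from @naturalcycles/js-lib, as pipeline.js above does):

  import { createAbortableSignal } from '@naturalcycles/js-lib'
  import { transformLimit } from '@naturalcycles/nodejs-lib/stream'

  const signal = createAbortableSignal()
  const limiter = transformLimit<number>({ limit: 100, signal })
  // Pass the same signal to stream.pipeline() so the abort actually stops
  // the source - this is exactly what Pipeline.limit() + run() do.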