@naturalcycles/nodejs-lib 15.25.0 → 15.27.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/slack/slack.service.d.ts +1 -0
- package/dist/slack/slack.service.js +4 -3
- package/dist/stream/index.d.ts +2 -8
- package/dist/stream/index.js +2 -8
- package/dist/stream/ndjson/createReadStreamAsNDJson.d.ts +16 -0
- package/dist/stream/ndjson/{createReadStreamAsNDJSON.js → createReadStreamAsNDJson.js} +10 -13
- package/dist/stream/ndjson/ndjsonMap.d.ts +2 -0
- package/dist/stream/ndjson/ndjsonMap.js +2 -0
- package/dist/stream/pipeline.d.ts +2 -1
- package/dist/stream/pipeline.js +25 -8
- package/dist/stream/progressLogger.d.ts +3 -3
- package/dist/stream/readable/readableCombined.d.ts +4 -2
- package/dist/stream/readable/readableCombined.js +16 -11
- package/dist/stream/readable/readableCreate.d.ts +1 -3
- package/dist/stream/readable/readableCreate.js +4 -4
- package/dist/stream/stream.model.d.ts +16 -0
- package/dist/stream/transform/transformFork.d.ts +10 -0
- package/dist/stream/transform/transformFork.js +62 -0
- package/dist/stream/transform/transformLimit.d.ts +2 -1
- package/dist/stream/transform/transformLimit.js +3 -3
- package/dist/stream/transform/transformLogProgress.js +3 -2
- package/dist/stream/transform/transformMap.d.ts +2 -4
- package/dist/stream/transform/transformMap.js +3 -2
- package/dist/stream/transform/transformMapSimple.d.ts +2 -4
- package/dist/stream/transform/transformMapSimple.js +3 -2
- package/dist/stream/transform/transformMapSync.d.ts +2 -4
- package/dist/stream/transform/transformMapSync.js +3 -1
- package/dist/stream/transform/transformSplit.js +2 -2
- package/dist/stream/transform/transformThrottle.d.ts +2 -3
- package/dist/stream/transform/transformThrottle.js +22 -27
- package/dist/stream/writable/writableVoid.d.ts +1 -8
- package/dist/stream/writable/writableVoid.js +0 -1
- package/package.json +1 -1
- package/src/slack/slack.service.ts +6 -3
- package/src/stream/index.ts +2 -8
- package/src/stream/ndjson/{createReadStreamAsNDJSON.ts → createReadStreamAsNDJson.ts} +10 -13
- package/src/stream/ndjson/ndjsonMap.ts +2 -0
- package/src/stream/pipeline.ts +33 -9
- package/src/stream/progressLogger.ts +3 -3
- package/src/stream/readable/readableCombined.ts +22 -11
- package/src/stream/readable/readableCreate.ts +4 -3
- package/src/stream/stream.model.ts +18 -0
- package/src/stream/transform/transformFork.ts +74 -0
- package/src/stream/transform/transformLimit.ts +5 -4
- package/src/stream/transform/transformLogProgress.ts +3 -2
- package/src/stream/transform/transformMap.ts +4 -8
- package/src/stream/transform/transformMapSimple.ts +10 -7
- package/src/stream/transform/transformMapSync.ts +4 -6
- package/src/stream/transform/transformSplit.ts +2 -2
- package/src/stream/transform/transformThrottle.ts +28 -36
- package/src/stream/writable/writableVoid.ts +1 -10
- package/dist/stream/ndjson/createReadStreamAsNDJSON.d.ts +0 -19
- package/dist/stream/ndjson/createWriteStreamAsNDJSON.d.ts +0 -11
- package/dist/stream/ndjson/createWriteStreamAsNDJSON.js +0 -27
- package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +0 -10
- package/dist/stream/ndjson/ndjsonStreamForEach.js +0 -15
- package/dist/stream/readable/readableToArray.d.ts +0 -9
- package/dist/stream/readable/readableToArray.js +0 -17
- package/dist/stream/transform/transformTee.d.ts +0 -13
- package/dist/stream/transform/transformTee.js +0 -37
- package/dist/stream/transform/transformToArray.d.ts +0 -5
- package/dist/stream/transform/transformToArray.js +0 -20
- package/dist/stream/writable/writableForEach.d.ts +0 -12
- package/dist/stream/writable/writableForEach.js +0 -15
- package/dist/stream/writable/writableFork.d.ts +0 -10
- package/dist/stream/writable/writableFork.js +0 -45
- package/dist/stream/writable/writableLimit.d.ts +0 -8
- package/dist/stream/writable/writableLimit.js +0 -25
- package/src/stream/ndjson/createWriteStreamAsNDJSON.ts +0 -30
- package/src/stream/ndjson/ndjsonStreamForEach.ts +0 -28
- package/src/stream/readable/readableToArray.ts +0 -19
- package/src/stream/transform/transformTee.ts +0 -48
- package/src/stream/transform/transformToArray.ts +0 -23
- package/src/stream/writable/writableForEach.ts +0 -25
- package/src/stream/writable/writableFork.ts +0 -56
- package/src/stream/writable/writableLimit.ts +0 -29
package/dist/slack/slack.service.js

@@ -1,6 +1,6 @@
 import { localTime } from '@naturalcycles/js-lib/datetime/localTime.js';
 import { getFetcher } from '@naturalcycles/js-lib/http';
-import {
+import { createCommonLoggerAtLevel, } from '@naturalcycles/js-lib/log';
 import { _omit } from '@naturalcycles/js-lib/object/object.util.js';
 import { PQueue } from '@naturalcycles/js-lib/promise/pQueue.js';
 import { _inspect } from '../index.js';
@@ -117,12 +117,13 @@ export class SlackService {
     * Returns a CommonLogger implementation based on this SlackService instance.
     */
    getCommonLogger(opt) {
-       const { minLogLevel = 'log', logChannel, warnChannel, errorChannel } = opt;
+       const { minLogLevel = 'log', debugChannel, logChannel, warnChannel, errorChannel } = opt;
        const defaultChannel = this.cfg.defaults?.channel || DEFAULTS.channel;
        const q = new PQueue({
            concurrency: 1,
        });
-       return
+       return createCommonLoggerAtLevel({
+           debug: (...args) => q.push(() => this.send({ items: args, channel: debugChannel || defaultChannel })),
            log: (...args) => q.push(() => this.send({ items: args, channel: logChannel || defaultChannel })),
            warn: (...args) => q.push(() => this.send({ items: args, channel: warnChannel || defaultChannel })),
            error: (...args) => q.push(() => this.send({ items: args, channel: errorChannel || defaultChannel })),
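
Usage sketch for the new debugChannel option (not part of the diff): the SlackService instance and channel names below are assumptions; only the option names and the new debug-to-debugChannel routing come from the change above.

    const logger = slackService.getCommonLogger({
      minLogLevel: 'debug',
      debugChannel: '#debug-feed', // new: debug() output gets its own channel
      errorChannel: '#alerts',
    })
    logger.debug('verbose detail') // queued and sent to #debug-feed
    logger.error('something failed') // queued and sent to #alerts
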
package/dist/stream/index.d.ts
CHANGED
@@ -1,8 +1,6 @@
-export * from './ndjson/
-export * from './ndjson/createWriteStreamAsNDJSON.js';
+export * from './ndjson/createReadStreamAsNDJson.js';
 export * from './ndjson/ndjson.model.js';
 export * from './ndjson/ndjsonMap.js';
-export * from './ndjson/ndjsonStreamForEach.js';
 export * from './ndjson/transformJsonParse.js';
 export * from './ndjson/transformToNDJson.js';
 export * from './pipeline.js';
@@ -10,11 +8,11 @@ export * from './progressLogger.js';
 export * from './readable/readableCombined.js';
 export * from './readable/readableCreate.js';
 export * from './readable/readableFromArray.js';
-export * from './readable/readableToArray.js';
 export * from './stream.model.js';
 export * from './transform/transformChunk.js';
 export * from './transform/transformFilter.js';
 export * from './transform/transformFlatten.js';
+export * from './transform/transformFork.js';
 export * from './transform/transformLimit.js';
 export * from './transform/transformLogProgress.js';
 export * from './transform/transformMap.js';
@@ -24,13 +22,9 @@ export * from './transform/transformNoOp.js';
 export * from './transform/transformOffset.js';
 export * from './transform/transformSplit.js';
 export * from './transform/transformTap.js';
-export * from './transform/transformTee.js';
 export * from './transform/transformThrottle.js';
-export * from './transform/transformToArray.js';
 export * from './transform/worker/baseWorkerClass.js';
 export * from './transform/worker/transformMultiThreaded.js';
 export * from './transform/worker/transformMultiThreaded.model.js';
-export * from './writable/writableForEach.js';
-export * from './writable/writableFork.js';
 export * from './writable/writablePushToArray.js';
 export * from './writable/writableVoid.js';
package/dist/stream/index.js
CHANGED
@@ -1,8 +1,6 @@
-export * from './ndjson/
-export * from './ndjson/createWriteStreamAsNDJSON.js';
+export * from './ndjson/createReadStreamAsNDJson.js';
 export * from './ndjson/ndjson.model.js';
 export * from './ndjson/ndjsonMap.js';
-export * from './ndjson/ndjsonStreamForEach.js';
 export * from './ndjson/transformJsonParse.js';
 export * from './ndjson/transformToNDJson.js';
 export * from './pipeline.js';
@@ -10,11 +8,11 @@ export * from './progressLogger.js';
 export * from './readable/readableCombined.js';
 export * from './readable/readableCreate.js';
 export * from './readable/readableFromArray.js';
-export * from './readable/readableToArray.js';
 export * from './stream.model.js';
 export * from './transform/transformChunk.js';
 export * from './transform/transformFilter.js';
 export * from './transform/transformFlatten.js';
+export * from './transform/transformFork.js';
 export * from './transform/transformLimit.js';
 export * from './transform/transformLogProgress.js';
 export * from './transform/transformMap.js';
@@ -24,13 +22,9 @@ export * from './transform/transformNoOp.js';
 export * from './transform/transformOffset.js';
 export * from './transform/transformSplit.js';
 export * from './transform/transformTap.js';
-export * from './transform/transformTee.js';
 export * from './transform/transformThrottle.js';
-export * from './transform/transformToArray.js';
 export * from './transform/worker/baseWorkerClass.js';
 export * from './transform/worker/transformMultiThreaded.js';
 export * from './transform/worker/transformMultiThreaded.model.js';
-export * from './writable/writableForEach.js';
-export * from './writable/writableFork.js';
 export * from './writable/writablePushToArray.js';
 export * from './writable/writableVoid.js';
package/dist/stream/ndjson/createReadStreamAsNDJson.d.ts

@@ -0,0 +1,16 @@
+import type { ReadableTyped } from '../stream.model.js';
+/**
+ Returns a Readable of [already parsed] NDJSON objects.
+
+ Replaces a list of operations:
+ - requireFileToExist(inputPath)
+ - fs.createReadStream
+ - createUnzip (only if path ends with '.gz')
+ - transformSplitOnNewline
+ - transformJsonParse
+
+ To add a Limit or Offset: just add .take() or .drop(), example:
+
+ createReadStreamAsNDJson().take(100)
+ */
+export declare function createReadStreamAsNDJson<ROW = any>(inputPath: string): ReadableTyped<ROW>;
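
Usage sketch for the new helper (not part of the diff): the subpath import and the file path are assumptions; the signature and the .take() hint come from the declaration above.

    import { createReadStreamAsNDJson } from '@naturalcycles/nodejs-lib/stream'

    const rows = createReadStreamAsNDJson<{ id: string }>('./data.ndjson.gz').take(100)
    for await (const row of rows) {
      console.log(row.id)
    }
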
package/dist/stream/ndjson/{createReadStreamAsNDJSON.js → createReadStreamAsNDJson.js}

@@ -2,23 +2,20 @@ import { createUnzip } from 'node:zlib';
 import { fs2 } from '../../fs/fs2.js';
 import { transformSplitOnNewline } from '../transform/transformSplit.js';
 /**
-
+ Returns a Readable of [already parsed] NDJSON objects.
 
-
-
-
-
-
-
+ Replaces a list of operations:
+ - requireFileToExist(inputPath)
+ - fs.createReadStream
+ - createUnzip (only if path ends with '.gz')
+ - transformSplitOnNewline
+ - transformJsonParse
 
-
+ To add a Limit or Offset: just add .take() or .drop(), example:
 
-
- fs2.createReadStreamAsNDJSON().take(100),
- transformX(),
- ])
+ createReadStreamAsNDJson().take(100)
 */
-export function
+export function createReadStreamAsNDJson(inputPath) {
    fs2.requireFileToExist(inputPath);
    let stream = fs2
        .createReadStream(inputPath, {
package/dist/stream/ndjson/ndjsonMap.d.ts

@@ -13,5 +13,7 @@ export interface NDJSONMapOptions<IN = any, OUT = IN> extends TransformMapOption
 /**
  * Unzips input file automatically, if it ends with `.gz`.
  * Zips output file automatically, if it ends with `.gz`.
+ *
+ * @deprecated use Pipeline directly
  */
 export declare function ndjsonMap<IN = any, OUT = any>(mapper: AbortableAsyncMapper<IN, OUT>, opt: NDJSONMapOptions<IN, OUT>): Promise<void>;
package/dist/stream/ndjson/ndjsonMap.js

@@ -3,6 +3,8 @@ import { Pipeline } from '../pipeline.js';
 /**
  * Unzips input file automatically, if it ends with `.gz`.
  * Zips output file automatically, if it ends with `.gz`.
+ *
+ * @deprecated use Pipeline directly
  */
 export async function ndjsonMap(mapper, opt) {
     const { inputFilePath, outputFilePath, logEveryOutput = 100_000, limitInput, limitOutput } = opt;
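
Sketch of the direction the @deprecated note points to (not part of the diff): only the read/map half is shown, since the NDJSON-writing side of Pipeline is not covered by this diff; the mapper and input path are placeholders.

    await Pipeline.fromNDJsonFile('./input.ndjson.gz')
      .transform(transformMap(mapper))
      .run()
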
package/dist/stream/pipeline.d.ts

@@ -26,8 +26,8 @@ export declare class Pipeline<T> {
     */
    static fromArray<T>(input: T[]): Pipeline<T>;
    static fromIterable<T>(input: Iterable<T> | AsyncIterable<T>): Pipeline<T>;
-   static fromFile(sourceFilePath: string): Pipeline<Uint8Array>;
    static fromNDJsonFile<T>(sourceFilePath: string): Pipeline<T>;
+   static fromFile(sourceFilePath: string): Pipeline<Uint8Array>;
    /**
     * Limits the source Readable, but using `.take(limit)` on it.
     * This is THE preferred way of limiting the source.
@@ -69,6 +69,7 @@ export declare class Pipeline<T> {
     * Not type safe! Prefer using singular `transform()` multiple times for type safety.
     */
    transformMany<TO>(transforms: Transform[]): Pipeline<TO>;
+   fork(fn: (pipeline: Pipeline<T>) => Pipeline<T>, opt?: TransformOptions): this;
    /**
     * Utility method just to conveniently type-cast the current Pipeline type.
     * No runtime effect.
package/dist/stream/pipeline.js
CHANGED
@@ -4,13 +4,14 @@ import { createUnzip } from 'node:zlib';
 import { createGzip } from 'node:zlib';
 import { createAbortableSignal } from '@naturalcycles/js-lib';
 import { fs2 } from '../fs/fs2.js';
-import {
+import { createReadStreamAsNDJson } from './ndjson/createReadStreamAsNDJson.js';
 import { transformJsonParse } from './ndjson/transformJsonParse.js';
 import { transformToNDJson } from './ndjson/transformToNDJson.js';
 import { PIPELINE_GRACEFUL_ABORT } from './stream.util.js';
 import { transformChunk } from './transform/transformChunk.js';
 import { transformFilterSync } from './transform/transformFilter.js';
 import { transformFlatten, transformFlattenIfNeeded } from './transform/transformFlatten.js';
+import { transformFork } from './transform/transformFork.js';
 import { transformLimit } from './transform/transformLimit.js';
 import { transformLogProgress, } from './transform/transformLogProgress.js';
 import { transformMap } from './transform/transformMap.js';
@@ -51,11 +52,18 @@ export class Pipeline {
    static fromIterable(input) {
        return new Pipeline(Readable.from(input));
    }
-   static fromFile(sourceFilePath) {
-       return new Pipeline(fs2.createReadStream(sourceFilePath), false);
-   }
    static fromNDJsonFile(sourceFilePath) {
-
+       // Important that createReadStreamAsNDJson function is used
+       // (and not Pipeline set of individual transforms),
+       // because createReadStreamAsNDJson returns a Readable,
+       // hence it allows to apply .take(limit) on it
+       // e.g like Pipeline.fromNDJsonFile().limitSource(limit)
+       return new Pipeline(createReadStreamAsNDJson(sourceFilePath));
+   }
+   static fromFile(sourceFilePath) {
+       return new Pipeline(fs2.createReadStream(sourceFilePath, {
+           highWaterMark: 64 * 1024, // no observed speedup
+       }), false);
    }
    /**
     * Limits the source Readable, but using `.take(limit)` on it.
@@ -148,7 +156,6 @@ export class Pipeline {
        this.transforms.push(transformThrottle(opt));
        return this;
    }
-   // todo: tee/fork
    transform(transform) {
        this.transforms.push(transform);
        return this;
@@ -161,6 +168,10 @@ export class Pipeline {
        this.transforms.push(...transforms);
        return this;
    }
+   fork(fn, opt) {
+       this.transforms.push(transformFork(fn, opt));
+       return this;
+   }
    /**
     * Utility method just to conveniently type-cast the current Pipeline type.
     * No runtime effect.
@@ -203,12 +214,18 @@
        return this;
    }
    gzip(opt) {
-       this.transforms.push(createGzip(
+       this.transforms.push(createGzip({
+           // chunkSize: 64 * 1024, // no observed speedup
+           ...opt,
+       }));
        this.objectMode = false;
        return this;
    }
    gunzip(opt) {
-       this.transforms.push(createUnzip(
+       this.transforms.push(createUnzip({
+           chunkSize: 64 * 1024, // speedup from ~3200 to 3800 rps!
+           ...opt,
+       }));
        this.objectMode = false;
        return this;
    }
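
Usage sketch for the new fork() method (not part of the diff): import names follow the stream index exports above; the file path, row type, and the use of transformLogProgress in both branches are illustrative only.

    import { Pipeline, transformLogProgress } from '@naturalcycles/nodejs-lib/stream'

    await Pipeline.fromNDJsonFile<{ id: string }>('./input.ndjson.gz')
      // every row is also pushed to the forked branch, with backpressure kept on both sides
      .fork(branch => branch.transform(transformLogProgress()))
      .transform(transformLogProgress())
      .run()
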
package/dist/stream/progressLogger.d.ts

@@ -1,5 +1,5 @@
 import type { CommonLogger } from '@naturalcycles/js-lib/log';
-import type { AnyObject } from '@naturalcycles/js-lib/types';
+import type { AnyObject, PositiveInteger } from '@naturalcycles/js-lib/types';
 import type { ReadableMapper } from './stream.model.js';
 export interface ProgressLoggerCfg<T = any> {
    /**
@@ -69,7 +69,7 @@ export interface ProgressLoggerCfg<T = any> {
     *
     * @default 1000
     */
-   logEvery?:
+   logEvery?: PositiveInteger;
    logger?: CommonLogger;
    /**
     * Function to return extra properties to the "progress object".
@@ -89,7 +89,7 @@ export interface ProgressLoggerCfg<T = any> {
     *
     * Defaults to 1.
     */
-   chunkSize?:
+   chunkSize?: PositiveInteger;
    /**
     * Experimental logging of item (shunk) sizes, when json-stringified.
     *
package/dist/stream/readable/readableCombined.d.ts

@@ -1,5 +1,6 @@
 import { Readable } from 'node:stream';
 import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
+import type { TransformOptions } from '../stream.model.js';
 /**
  * Allows to combine multiple Readables into 1 Readable.
  * As soon as any of the input Readables emit - the output Readable emits
@@ -12,8 +13,9 @@ import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
  */
 export declare class ReadableCombined<T> extends Readable implements ReadableTyped<T> {
    inputs: Readable[];
-   static create<T>(inputs: Readable[]): ReadableCombined<T>;
+   static create<T>(inputs: Readable[], opt?: TransformOptions): ReadableCombined<T>;
    private constructor();
+   private logger;
    /**
     * If defined - we are in Paused mode
     * and should await the lock to be resolved before proceeding.
@@ -24,7 +26,7 @@ export declare class ReadableCombined<T> extends Readable implements ReadableTyp
    private countIn;
    private countOut;
    private countReads;
-   private
+   private run;
    _read(): void;
    private logStats;
 }
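
Usage sketch (not part of the diff): the two input Readables are assumed to exist; the new second argument accepts the TransformOptions shown in stream.model.d.ts below.

    const combined = ReadableCombined.create<string>([inputA, inputB], {
      logLevel: 'debug', // surfaces the pause/resume log lines added in readableCombined.js below
    })
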
package/dist/stream/readable/readableCombined.js

@@ -1,4 +1,5 @@
 import { Readable } from 'node:stream';
+import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log';
 import { pDefer } from '@naturalcycles/js-lib/promise/pDefer.js';
 import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
 /**
@@ -13,14 +14,17 @@ import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
  */
 export class ReadableCombined extends Readable {
    inputs;
-   static create(inputs) {
-       return new ReadableCombined(inputs);
+   static create(inputs, opt = {}) {
+       return new ReadableCombined(inputs, opt);
    }
-   constructor(inputs) {
-
+   constructor(inputs, opt) {
+       const { objectMode = true, highWaterMark } = opt;
+       super({ objectMode, highWaterMark });
        this.inputs = inputs;
-
+       this.logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel);
+       void this.run();
    }
+   logger;
    /**
     * If defined - we are in Paused mode
     * and should await the lock to be resolved before proceeding.
@@ -34,7 +38,8 @@ export class ReadableCombined extends Readable {
    countOut = 0;
    // biome-ignore lint/correctness/noUnusedPrivateClassMembers: ok
    countReads = 0;
-   async
+   async run() {
+       const { logger } = this;
        await pMap(this.inputs, async (input, i) => {
            for await (const item of input) {
                this.countIn++;
@@ -47,18 +52,18 @@
                this.countOut++;
                if (!shouldContinue && !this.lock) {
                    this.lock = pDefer();
-
+                   logger.log(`ReadableCombined.push #${i} returned false, pausing the flow!`);
                }
            }
-
+           logger.log(`ReadableCombined: input #${i} done`);
        });
-
+       logger.log(`ReadableCombined: all inputs done!`);
        this.push(null);
    }
    _read() {
        this.countReads++;
        if (this.lock) {
-
+           this.logger.log(`ReadableCombined._read: resuming the flow!`);
            // calling it in this order is important!
            // this.lock should be undefined BEFORE we call lock.resolve()
            const { lock } = this;
@@ -68,7 +73,7 @@
    }
    logStats() {
        const { countIn, countOut, countReads } = this;
-
+       this.logger.debug({
            countIn,
            countOut,
            countReads,
package/dist/stream/readable/readableCreate.d.ts

@@ -11,10 +11,8 @@ import type { ReadableTyped } from '../stream.model.js';
 * e.g the read() method doesn't return anything, so, it will hang the Node process (or cause it to process.exit(0))
 * if read() will be called AFTER everything was pushed and Readable is closed (by pushing `null`).
 * Beware of it when e.g doing unit testing! Jest prefers to hang (not exit-0).
-*
-* @deprecated because of the caution above
 */
-export declare function readableCreate<T>(items?: Iterable<T>, opt?: ReadableOptions): ReadableTyped<T>;
+export declare function readableCreate<T>(items?: Iterable<T>, opt?: ReadableOptions, onRead?: () => void): ReadableTyped<T>;
 /**
 * Convenience type-safe wrapper around Readable.from() that infers the Type of input.
 */
package/dist/stream/readable/readableCreate.js

@@ -10,14 +10,14 @@ import { Readable } from 'node:stream';
 * e.g the read() method doesn't return anything, so, it will hang the Node process (or cause it to process.exit(0))
 * if read() will be called AFTER everything was pushed and Readable is closed (by pushing `null`).
 * Beware of it when e.g doing unit testing! Jest prefers to hang (not exit-0).
-*
-* @deprecated because of the caution above
 */
-export function readableCreate(items = [], opt) {
+export function readableCreate(items = [], opt, onRead) {
    const readable = new Readable({
        objectMode: true,
        ...opt,
-       read() {
+       read() {
+           onRead?.();
+       },
    });
    for (const item of items) {
        readable.push(item);
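
Minimal sketch of the new third argument (not part of the diff): onRead fires on every _read() call from the consumer; transformFork.js further below uses exactly this hook to resume a paused forked pipeline.

    const readable = readableCreate<number>([], {}, () => {
      // the consumer asked for more data
    })
    readable.push(1)
    readable.push(null) // ends the stream
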
package/dist/stream/stream.model.d.ts

@@ -1,4 +1,5 @@
 import type { Readable, Transform, Writable } from 'node:stream';
+import type { CommonLogger, CommonLogLevel } from '@naturalcycles/js-lib/log';
 import type { Promisable } from '@naturalcycles/js-lib/types';
 export interface ReadableSignalOptions {
    /** allows destroying the stream if the signal is aborted. */
@@ -46,4 +47,19 @@ export interface TransformOptions {
     * @default 16
     */
    highWaterMark?: number;
+   /**
+    * Defaults to `console`.
+    */
+   logger?: CommonLogger;
+   /**
+    * Not every Transform implements it.
+    * Can be one of:
+    * debug - most verbose, when debugging is needed
+    * log - default level
+    * error - logs errors and warnings only
+    *
+    * Default is 'log'.
+    *
+    */
+   logLevel?: CommonLogLevel;
 }
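
How the new logger/logLevel options are consumed by the transforms in this release (a sketch, not part of the diff; resolveLogger is a stand-in name): the same pattern appears in readableCombined.js, transformFork.js and transformMap.js.

    import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log'

    function resolveLogger(opt: TransformOptions = {}) {
      // both arguments may be undefined; documented defaults are console and the 'log' level
      return createCommonLoggerAtLevel(opt.logger, opt.logLevel)
    }
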
package/dist/stream/transform/transformFork.d.ts

@@ -0,0 +1,10 @@
+import { Pipeline } from '../pipeline.js';
+import type { TransformOptions, TransformTyped } from '../stream.model.js';
+/**
+ * Allows to "fork" away from the "main pipeline" into the "forked pipeline".
+ *
+ * Correctly keeps backpressure from both "downstreams" (main and forked).
+ *
+ * @experimental
+ */
+export declare function transformFork<T>(fn: (pipeline: Pipeline<T>) => Pipeline<T>, opt?: TransformOptions): TransformTyped<T, T>;
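
Usage sketch (not part of the diff): transformFork can be added to a pipeline by hand via Pipeline.transform(); the fork() method added in pipeline.js above is a shorthand for this. The row type and the branch body are illustrative.

    const fork = transformFork<{ id: string }>(branch =>
      // the branch receives its own Pipeline and runs concurrently with the main one
      branch.transform(transformLogProgress()),
    )
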
package/dist/stream/transform/transformFork.js

@@ -0,0 +1,62 @@
+import { Transform } from 'node:stream';
+import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log';
+import { pDefer } from '@naturalcycles/js-lib/promise/pDefer.js';
+import { Pipeline } from '../pipeline.js';
+import { readableCreate } from '../readable/readableCreate.js';
+/**
+ * Allows to "fork" away from the "main pipeline" into the "forked pipeline".
+ *
+ * Correctly keeps backpressure from both "downstreams" (main and forked).
+ *
+ * @experimental
+ */
+export function transformFork(fn, opt = {}) {
+    const { objectMode = true, highWaterMark } = opt;
+    const logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel);
+    let lock;
+    const fork = readableCreate([], {}, () => {
+        // `_read` is called
+        if (!lock)
+            return;
+        // We had a lock - let's Resume
+        logger.log(`TransformFork: resume`);
+        const lockCopy = lock;
+        lock = undefined;
+        lockCopy.resolve();
+    });
+    const p = fn(Pipeline.from(fork));
+    void p.run().then(() => {
+        logger.log('TransformFork: done');
+    });
+    return new Transform({
+        objectMode,
+        highWaterMark,
+        async transform(chunk, _, cb) {
+            // pass through to the "main" pipeline
+            // Main pipeline should handle backpressure "automatically",
+            // so, we're not maintaining a Lock for it
+            this.push(chunk);
+            if (lock) {
+                // Forked pipeline is locked - let's wait for it to call _read
+                await lock;
+                // lock is undefined at this point
+            }
+            // pass to the "forked" pipeline
+            const shouldContinue = fork.push(chunk);
+            if (!shouldContinue && !lock) {
+                // Forked pipeline indicates that we should Pause
+                lock = pDefer();
+                logger.log(`TransformFork: pause`);
+            }
+            // acknowledge that we've finished processing the input chunk
+            cb();
+        },
+        async final(cb) {
+            logger.log('TransformFork: final');
+            // Pushing null "closes"/ends the secondary pipeline correctly
+            fork.push(null);
+            // Acknowledge that we've received `null` and passed it through to the fork
+            cb();
+        },
+    });
+}
package/dist/stream/transform/transformLimit.d.ts

@@ -1,10 +1,11 @@
 import type { AbortableSignal } from '@naturalcycles/js-lib';
+import type { NonNegativeInteger } from '@naturalcycles/js-lib/types';
 import type { TransformOptions, TransformTyped } from '../stream.model.js';
 export interface TransformLimitOptions extends TransformOptions {
    /**
     * Nullish value (e.g 0 or undefined) would mean "no limit"
     */
-   limit?:
+   limit?: NonNegativeInteger;
    /**
     * Allows to abort (gracefully stop) the stream from inside the Transform.
     */
package/dist/stream/transform/transformLimit.js

@@ -2,15 +2,15 @@ import { Transform } from 'node:stream';
 import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js';
 import { transformNoOp } from './transformNoOp.js';
 export function transformLimit(opt) {
-   const { limit, signal } = opt;
+   const { limit, signal, objectMode = true, highWaterMark } = opt;
    if (!limit) {
        return transformNoOp();
    }
    let i = 0; // so we start first chunk with 1
    let ended = false;
    return new Transform({
-       objectMode
-
+       objectMode,
+       highWaterMark,
        transform(chunk, _, cb) {
            if (ended) {
                return;
package/dist/stream/transform/transformLogProgress.js

@@ -4,10 +4,11 @@ import { progressLogger } from '../progressLogger.js';
 * Pass-through transform that optionally logs progress.
 */
 export function transformLogProgress(opt = {}) {
+   const { objectMode = true, highWaterMark } = opt;
    const progress = progressLogger(opt);
    return new Transform({
-       objectMode
-
+       objectMode,
+       highWaterMark,
        transform(chunk, _, cb) {
            progress.log(chunk);
            cb(null, chunk); // pass-through
package/dist/stream/transform/transformMap.d.ts

@@ -1,9 +1,8 @@
 import { type AbortableSignal } from '@naturalcycles/js-lib';
 import { ErrorMode } from '@naturalcycles/js-lib/error';
-import type { CommonLogger } from '@naturalcycles/js-lib/log';
 import { type AbortableAsyncMapper, type AsyncPredicate, END, type PositiveInteger, type Promisable, SKIP, type StringMap, type UnixTimestampMillis } from '@naturalcycles/js-lib/types';
-import type { TransformTyped } from '../stream.model.js';
-export interface TransformMapOptions<IN = any, OUT = IN> {
+import type { TransformOptions, TransformTyped } from '../stream.model.js';
+export interface TransformMapOptions<IN = any, OUT = IN> extends TransformOptions {
    /**
     * Predicate to filter outgoing results (after mapper).
     * Allows to not emit all results.
@@ -57,7 +56,6 @@ export interface TransformMapOptions<IN = any, OUT = IN> {
     * @default `stream`
     */
    metric?: string;
-   logger?: CommonLogger;
    /**
     * Allows to abort (gracefully stop) the stream from inside the Transform.
     */
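
Usage sketch (not part of the diff): since TransformMapOptions now extends TransformOptions, logger/logLevel can be passed alongside the existing options; the mapper and option values below are illustrative.

    const mapRows = transformMap(async (row: { id: string }) => ({ ...row, seen: true }), {
      concurrency: 8,
      errorMode: ErrorMode.THROW_AGGREGATED,
      logLevel: 'debug',
    })
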
package/dist/stream/transform/transformMap.js

@@ -1,6 +1,7 @@
 import { _hc } from '@naturalcycles/js-lib';
 import { _since } from '@naturalcycles/js-lib/datetime/time.util.js';
 import { _anyToError, _assert, ErrorMode } from '@naturalcycles/js-lib/error';
+import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log';
 import { _stringify } from '@naturalcycles/js-lib/string/stringify.js';
 import { END, SKIP, } from '@naturalcycles/js-lib/types';
 import through2Concurrent from 'through2-concurrent';
@@ -22,7 +23,7 @@ import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js';
 */
 export function transformMap(mapper, opt = {}) {
    const { concurrency = 16, highWaterMark = 64, predicate, // we now default to "no predicate" (meaning pass-everything)
-   errorMode = ErrorMode.THROW_IMMEDIATELY, onError, onDone, metric = 'stream',
+   errorMode = ErrorMode.THROW_IMMEDIATELY, onError, onDone, metric = 'stream', signal, } = opt;
    const started = Date.now();
    let index = -1;
    let countOut = 0;
@@ -30,12 +31,12 @@ export function transformMap(mapper, opt = {}) {
    let ok = true;
    let errors = 0;
    const collectedErrors = []; // only used if errorMode == THROW_AGGREGATED
+   const logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel);
    return through2Concurrent.obj({
        maxConcurrency: concurrency,
        readableHighWaterMark: highWaterMark,
        writableHighWaterMark: highWaterMark,
        async final(cb) {
-           // console.log('transformMap final')
            logErrorStats(true);
            if (collectedErrors.length) {
                try {
package/dist/stream/transform/transformMapSimple.d.ts

@@ -1,15 +1,13 @@
 import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
-import type { CommonLogger } from '@naturalcycles/js-lib/log';
 import type { IndexedMapper } from '@naturalcycles/js-lib/types';
-import type { TransformTyped } from '../stream.model.js';
-export interface TransformMapSimpleOptions {
+import type { TransformOptions, TransformTyped } from '../stream.model.js';
+export interface TransformMapSimpleOptions extends TransformOptions {
    /**
     * Only supports THROW_IMMEDIATELY (default) and SUPPRESS.
     *
     * @default ErrorMode.THROW_IMMEDIATELY
     */
    errorMode?: ErrorMode.THROW_IMMEDIATELY | ErrorMode.SUPPRESS;
-   logger?: CommonLogger;
 }
 /**
  * Simplest version of `transformMap`.
|