@naturalcycles/nodejs-lib 15.26.0 → 15.27.0

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
Files changed (54)
  1. package/dist/slack/slack.service.d.ts +1 -0
  2. package/dist/slack/slack.service.js +4 -3
  3. package/dist/stream/index.d.ts +2 -3
  4. package/dist/stream/index.js +2 -3
  5. package/dist/stream/ndjson/createReadStreamAsNDJson.d.ts +16 -0
  6. package/dist/stream/ndjson/createReadStreamAsNDJson.js +35 -0
  7. package/dist/stream/pipeline.d.ts +1 -0
  8. package/dist/stream/pipeline.js +12 -10
  9. package/dist/stream/progressLogger.d.ts +3 -3
  10. package/dist/stream/readable/readableCombined.d.ts +4 -2
  11. package/dist/stream/readable/readableCombined.js +16 -11
  12. package/dist/stream/readable/readableCreate.d.ts +1 -3
  13. package/dist/stream/readable/readableCreate.js +4 -4
  14. package/dist/stream/stream.model.d.ts +16 -0
  15. package/dist/stream/transform/transformFork.d.ts +10 -0
  16. package/dist/stream/transform/transformFork.js +62 -0
  17. package/dist/stream/transform/transformLimit.d.ts +2 -1
  18. package/dist/stream/transform/transformLimit.js +3 -3
  19. package/dist/stream/transform/transformLogProgress.js +3 -2
  20. package/dist/stream/transform/transformMap.d.ts +2 -4
  21. package/dist/stream/transform/transformMap.js +3 -2
  22. package/dist/stream/transform/transformMapSimple.d.ts +2 -4
  23. package/dist/stream/transform/transformMapSimple.js +3 -2
  24. package/dist/stream/transform/transformMapSync.d.ts +2 -4
  25. package/dist/stream/transform/transformMapSync.js +3 -1
  26. package/dist/stream/transform/transformSplit.js +2 -2
  27. package/dist/stream/transform/transformThrottle.d.ts +2 -3
  28. package/dist/stream/transform/transformThrottle.js +22 -27
  29. package/package.json +1 -1
  30. package/src/slack/slack.service.ts +6 -3
  31. package/src/stream/index.ts +2 -3
  32. package/src/stream/ndjson/createReadStreamAsNDJson.ts +43 -0
  33. package/src/stream/pipeline.ts +13 -12
  34. package/src/stream/progressLogger.ts +3 -3
  35. package/src/stream/readable/readableCombined.ts +22 -11
  36. package/src/stream/readable/readableCreate.ts +4 -3
  37. package/src/stream/stream.model.ts +18 -0
  38. package/src/stream/transform/transformFork.ts +74 -0
  39. package/src/stream/transform/transformLimit.ts +5 -4
  40. package/src/stream/transform/transformLogProgress.ts +3 -2
  41. package/src/stream/transform/transformMap.ts +4 -8
  42. package/src/stream/transform/transformMapSimple.ts +10 -7
  43. package/src/stream/transform/transformMapSync.ts +4 -6
  44. package/src/stream/transform/transformSplit.ts +2 -2
  45. package/src/stream/transform/transformThrottle.ts +28 -36
  46. package/dist/stream/transform/transformTee.d.ts +0 -13
  47. package/dist/stream/transform/transformTee.js +0 -37
  48. package/dist/stream/transform/transformToArray.d.ts +0 -5
  49. package/dist/stream/transform/transformToArray.js +0 -20
  50. package/dist/stream/writable/writableFork.d.ts +0 -10
  51. package/dist/stream/writable/writableFork.js +0 -45
  52. package/src/stream/transform/transformTee.ts +0 -48
  53. package/src/stream/transform/transformToArray.ts +0 -23
  54. package/src/stream/writable/writableFork.ts +0 -56
package/src/stream/transform/transformMapSimple.ts
@@ -1,18 +1,15 @@
 import { Transform } from 'node:stream'
 import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js'
-import type { CommonLogger } from '@naturalcycles/js-lib/log'
 import type { IndexedMapper } from '@naturalcycles/js-lib/types'
-import type { TransformTyped } from '../stream.model.js'
+import type { TransformOptions, TransformTyped } from '../stream.model.js'
 
-export interface TransformMapSimpleOptions {
+export interface TransformMapSimpleOptions extends TransformOptions {
   /**
    * Only supports THROW_IMMEDIATELY (default) and SUPPRESS.
    *
    * @default ErrorMode.THROW_IMMEDIATELY
    */
   errorMode?: ErrorMode.THROW_IMMEDIATELY | ErrorMode.SUPPRESS
-
-  logger?: CommonLogger
 }
 
 /**
@@ -29,10 +26,16 @@ export function transformMapSimple<IN = any, OUT = IN>(
   opt: TransformMapSimpleOptions = {},
 ): TransformTyped<IN, OUT> {
   let index = -1
-  const { errorMode = ErrorMode.THROW_IMMEDIATELY, logger = console } = opt
+  const {
+    errorMode = ErrorMode.THROW_IMMEDIATELY,
+    logger = console,
+    objectMode = true,
+    highWaterMark,
+  } = opt
 
   return new Transform({
-    objectMode: true,
+    objectMode,
+    highWaterMark,
     transform(chunk: IN, _, cb) {
       try {
         cb(null, mapper(chunk, ++index))
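TransformMapSimpleOptions now extends the shared TransformOptions, so objectMode and highWaterMark can be tuned per transform. A minimal usage sketch — the import path and defaults are assumptions based on this diff, not the package's documented API:

import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
// import path assumed — the stream utilities live under src/stream in this package
import { transformMapSimple } from '@naturalcycles/nodejs-lib/stream'

await pipeline(
  Readable.from([1, 2, 3]),
  transformMapSimple<number, number>(n => n * 2, {
    objectMode: true, // inherited from TransformOptions; default remains true
    highWaterMark: 16, // newly configurable, forwarded to the underlying Transform
  }),
  async source => {
    for await (const n of source) console.log(n) // 2, 4, 6
  },
)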
package/src/stream/transform/transformMapSync.ts
@@ -1,15 +1,15 @@
 import { Transform } from 'node:stream'
 import type { AbortableSignal } from '@naturalcycles/js-lib'
 import { _anyToError, _assert, ErrorMode } from '@naturalcycles/js-lib/error'
-import type { CommonLogger } from '@naturalcycles/js-lib/log'
+import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log'
 import type { IndexedMapper, Predicate, UnixTimestampMillis } from '@naturalcycles/js-lib/types'
 import { END, SKIP } from '@naturalcycles/js-lib/types'
 import { yellow } from '../../colors/colors.js'
-import type { TransformTyped } from '../stream.model.js'
+import type { TransformOptions, TransformTyped } from '../stream.model.js'
 import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js'
 import type { TransformMapStats } from './transformMap.js'
 
-export interface TransformMapSyncOptions<IN = any, OUT = IN> {
+export interface TransformMapSyncOptions<IN = any, OUT = IN> extends TransformOptions {
   /**
    * @default true
    */
@@ -54,8 +54,6 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
    */
   metric?: string
 
-  logger?: CommonLogger
-
   /**
    * Allows to abort (gracefully stop) the stream from inside the Transform.
    */
@@ -77,7 +75,6 @@ export function transformMapSync<IN = any, OUT = IN>(
     onDone,
     metric = 'stream',
     objectMode = true,
-    logger = console,
     signal,
   } = opt
 
@@ -87,6 +84,7 @@
   let isSettled = false
   let errors = 0
   const collectedErrors: Error[] = [] // only used if errorMode == THROW_AGGREGATED
+  const logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel)
 
   return new Transform({
     objectMode,
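transformMapSync no longer takes a logger option of its own; it derives one via createCommonLoggerAtLevel(opt.logger, opt.logLevel), with both fields presumably coming from the shared TransformOptions. A hedged usage sketch (import path and logLevel values are assumptions):

import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { ErrorMode } from '@naturalcycles/js-lib/error'
// import path assumed
import { transformMapSync } from '@naturalcycles/nodejs-lib/stream'

await pipeline(
  Readable.from(['1', 'oops', '3']),
  transformMapSync<string, number>(
    s => {
      const n = Number(s)
      if (Number.isNaN(n)) throw new Error(`not a number: ${s}`)
      return n
    },
    {
      errorMode: ErrorMode.SUPPRESS, // drop failing items instead of throwing
      logger: console, // still supported, now sourced from TransformOptions
      // logLevel: 'warn', // hypothetical value; filters output via createCommonLoggerAtLevel
    },
  ),
  async source => {
    for await (const n of source) console.log(n) // expected: 1, 3 (failed item suppressed)
  },
)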
package/src/stream/transform/transformSplit.ts
@@ -19,7 +19,7 @@ export function transformSplitOnNewline(): TransformTyped<Buffer, Buffer> {
     writableHighWaterMark: 64 * 1024,
     readableObjectMode: true,
 
-    transform(buf: Buffer, _enc, done) {
+    transform(buf: Buffer, _enc, cb) {
       let offset = 0
       let lastMatch = 0
       if (buffered) {
@@ -42,7 +42,7 @@ export function transformSplitOnNewline(): TransformTyped<Buffer, Buffer> {
         }
       }
 
-      done()
+      cb()
     },
 
    flush(done) {
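The done → cb rename in transformSplitOnNewline is purely cosmetic; behavior is unchanged. For context, a usage sketch splitting a byte stream into one Buffer per line (import path assumed):

import { createReadStream } from 'node:fs'
import { pipeline } from 'node:stream/promises'
// import path assumed
import { transformSplitOnNewline } from '@naturalcycles/nodejs-lib/stream'

// Binary chunks in, one Buffer per line out
// (the transform is writable in buffer mode, readable in objectMode)
await pipeline(
  createReadStream('data.ndjson'),
  transformSplitOnNewline(),
  async source => {
    for await (const line of source) {
      console.log(line.toString())
    }
  },
)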
package/src/stream/transform/transformThrottle.ts
@@ -1,5 +1,6 @@
 import { Transform } from 'node:stream'
 import { _ms, _since, localTime } from '@naturalcycles/js-lib/datetime'
+import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log'
 import type { DeferredPromise } from '@naturalcycles/js-lib/promise'
 import { pDefer } from '@naturalcycles/js-lib/promise/pDefer.js'
 import type {
@@ -7,9 +8,9 @@ import type {
   PositiveInteger,
   UnixTimestampMillis,
 } from '@naturalcycles/js-lib/types'
-import type { TransformTyped } from '../stream.model.js'
+import type { TransformOptions, TransformTyped } from '../stream.model.js'
 
-export interface TransformThrottleOptions {
+export interface TransformThrottleOptions extends TransformOptions {
   /**
    * How many items to allow per `interval` of seconds.
    */
@@ -19,8 +20,6 @@
    * How long is the interval (in seconds) where number of items should not exceed `throughput`.
    */
   interval: NumberOfSeconds
-
-  debug?: boolean
 }
 
 /**
@@ -41,42 +40,40 @@
  * @experimental
  */
 export function transformThrottle<T>(opt: TransformThrottleOptions): TransformTyped<T, T> {
-  const { throughput, interval, debug } = opt
+  const { throughput, interval, objectMode = true, highWaterMark } = opt
 
   let count = 0
   let start: UnixTimestampMillis
-  let paused: DeferredPromise | undefined
+  let lock: DeferredPromise | undefined
   let timeout: NodeJS.Timeout | undefined
+  const logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel)
 
   return new Transform({
-    objectMode: true,
+    objectMode,
+    highWaterMark,
     async transform(item: T, _, cb) {
       // console.log('incoming', item, { paused: !!paused, count })
       if (!start) {
         start = Date.now() as UnixTimestampMillis
         timeout = setTimeout(() => onInterval(this), interval * 1000)
-        if (debug) {
-          console.log(`${localTime.now().toPretty()} transformThrottle started with`, {
-            throughput,
-            interval,
-            rps: Math.round(throughput / interval),
-          })
-        }
+        logger.log(`${localTime.now().toPretty()} transformThrottle started with`, {
+          throughput,
+          interval,
+          rps: Math.round(throughput / interval),
+        })
       }
 
-      if (paused) {
-        // console.log('awaiting pause', {item, count})
-        await paused
+      if (lock) {
+        // console.log('awaiting lock', {item, count})
+        await lock
       }
 
       if (++count >= throughput) {
         // console.log('pausing now after', {item, count})
-        paused = pDefer()
-        if (debug) {
-          console.log(
-            `${localTime.now().toPretty()} transformThrottle activated: ${count} items passed in ${_since(start)}, will pause for ${_ms(interval * 1000 - (Date.now() - start))}`,
-          )
-        }
+        lock = pDefer()
+        logger.log(
+          `${localTime.now().toPretty()} transformThrottle activated: ${count} items passed in ${_since(start)}, will pause for ${_ms(interval * 1000 - (Date.now() - start))}`,
+        )
       }
 
       cb(null, item) // pass the item through
@@ -88,23 +85,18 @@ export function transformThrottle<T>(opt: TransformThrottleOptions): TransformTyped<T, T> {
   })
 
   function onInterval(transform: Transform): void {
-    if (paused) {
-      if (debug) {
-        console.log(`${localTime.now().toPretty()} transformThrottle resumed`)
-      }
-
-      paused.resolve()
-      paused = undefined
+    if (lock) {
+      logger.log(`${localTime.now().toPretty()} transformThrottle resumed`)
+      lock.resolve()
+      lock = undefined
     } else {
-      if (debug) {
-        console.log(
-          `${localTime.now().toPretty()} transformThrottle passed ${count} (of max ${throughput}) items in ${_since(start)}`,
-        )
-      }
+      logger.log(
+        `${localTime.now().toPretty()} transformThrottle passed ${count} (of max ${throughput}) items in ${_since(start)}`,
+      )
     }
 
     count = 0
-    start = Date.now() as UnixTimestampMillis
+    start = localTime.nowUnixMillis()
     timeout = setTimeout(() => onInterval(transform), interval * 1000)
   }
 }
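transformThrottle drops its ad-hoc debug flag in favor of the shared logger/logLevel options, and gains configurable objectMode/highWaterMark. A usage sketch letting at most 10 items through per 2-second window (import path assumed):

import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
// import path assumed
import { transformThrottle } from '@naturalcycles/nodejs-lib/stream'

await pipeline(
  Readable.from(Array.from({ length: 100 }, (_, i) => i)),
  transformThrottle<number>({
    throughput: 10, // max items allowed...
    interval: 2, // ...per 2-second interval
    // logger/logLevel (from TransformOptions) replace the removed `debug` flag
  }),
  async source => {
    for await (const n of source) console.log(n)
  },
)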
package/dist/stream/transform/transformTee.d.ts
@@ -1,13 +0,0 @@
-import type { TransformTyped } from '../stream.model.js';
-type AnyStream = NodeJS.WritableStream | NodeJS.ReadWriteStream;
-/**
- * Allows to "tee"/"fork" away from the "main pipeline" into the "secondary pipeline".
- *
- * Important, that the main pipeline works "as normal", keeps backpressure, etc.
- * Secondary pipeline DOES NOT keep backpressure.
- * Therefor, the "slowest" pipeline should be made Primary (to keep backpressure).
- *
- * @experimental
- */
-export declare function transformTee<T>(streams: AnyStream[]): TransformTyped<T, T>;
-export {};
package/dist/stream/transform/transformTee.js
@@ -1,37 +0,0 @@
-import { Transform } from 'node:stream';
-import { pipeline } from 'node:stream/promises';
-import { readableCreate } from '../readable/readableCreate.js';
-/**
- * Allows to "tee"/"fork" away from the "main pipeline" into the "secondary pipeline".
- *
- * Important, that the main pipeline works "as normal", keeps backpressure, etc.
- * Secondary pipeline DOES NOT keep backpressure.
- * Therefor, the "slowest" pipeline should be made Primary (to keep backpressure).
- *
- * @experimental
- */
-export function transformTee(streams) {
-    const readable = readableCreate();
-    const secondPipelinePromise = pipeline([readable, ...streams]);
-    return new Transform({
-        objectMode: true,
-        transform(chunk, _, cb) {
-            // todo: it's possible to start respecting backpressure,
-            // if we start to listen to the boolean output of .push()
-            // pass to the "secondary" pipeline
-            readable.push(chunk);
-            // pass through to the "main" pipeline
-            cb(null, chunk);
-        },
-        async final(cb) {
-            console.log('transformTee final');
-            // Pushing null "closes"/ends the secondary pipeline correctly
-            readable.push(null);
-            // Second pipeline is expected to finish now, let's await it
-            await secondPipelinePromise;
-            console.log('transformTee final secondPipeline done');
-            // Because second pipeline is done - now we can signal main pipeline to be done as well
-            cb();
-        },
-    });
-}
package/dist/stream/transform/transformToArray.d.ts
@@ -1,5 +0,0 @@
-import type { TransformOptions, TransformTyped } from '../stream.model.js';
-/**
- * Will collect all stream results in the array (keeping it in memory) and emit in the end as one result.
- */
-export declare function transformToArray<IN>(opt?: TransformOptions): TransformTyped<IN, IN[]>;
package/dist/stream/transform/transformToArray.js
@@ -1,20 +0,0 @@
-import { Transform } from 'node:stream';
-/**
- * Will collect all stream results in the array (keeping it in memory) and emit in the end as one result.
- */
-export function transformToArray(opt = {}) {
-    const res = [];
-    return new Transform({
-        objectMode: true,
-        ...opt,
-        transform(chunk, _, cb) {
-            res.push(chunk);
-            // callback to signal that we processed input, but not emitting any output
-            cb();
-        },
-        final(cb) {
-            this.push(res);
-            cb();
-        },
-    });
-}
package/dist/stream/writable/writableFork.d.ts
@@ -1,10 +0,0 @@
-import type { TransformOptions, WritableTyped } from '../stream.model.js';
-/**
- * Allows "forking" a stream inside pipeline into a number of pipeline chains (2 or more).
- * Currently does NOT (!) maintain backpressure.
- * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
- * Will wait until all forked pipelines are completed before completing the stream.
- *
- * @experimental
- */
-export declare function writableFork<T>(chains: NodeJS.WritableStream[][], opt?: TransformOptions): WritableTyped<T>;
package/dist/stream/writable/writableFork.js
@@ -1,45 +0,0 @@
-import { Writable } from 'node:stream';
-import { pipeline } from 'node:stream/promises';
-import { readableCreate } from '../readable/readableCreate.js';
-/**
- * Allows "forking" a stream inside pipeline into a number of pipeline chains (2 or more).
- * Currently does NOT (!) maintain backpressure.
- * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
- * Will wait until all forked pipelines are completed before completing the stream.
- *
- * @experimental
- */
-export function writableFork(chains, opt) {
-    const readables = [];
-    const allChainsDone = Promise.all(chains.map(async (chain) => {
-        const readable = readableCreate();
-        readables.push(readable);
-        return await pipeline([readable, ...chain]);
-    })).catch(err => {
-        console.error(err); // ensure the error is logged
-        throw err;
-    });
-    return new Writable({
-        objectMode: true,
-        ...opt,
-        write(chunk, _, cb) {
-            // Push/fork to all sub-streams
-            // No backpressure is ensured here, it'll push regardless of the
-            readables.forEach(readable => readable.push(chunk));
-            cb();
-        },
-        async final(cb) {
-            try {
-                // Push null (complete) to all sub-streams
-                readables.forEach(readable => readable.push(null));
-                console.log(`writableFork.final is waiting for all chains to be done`);
-                await allChainsDone;
-                console.log(`writableFork.final all chains done`);
-                cb();
-            }
-            catch (err) {
-                cb(err);
-            }
-        },
-    });
-}
package/src/stream/transform/transformTee.ts
@@ -1,48 +0,0 @@
-import { Transform } from 'node:stream'
-import { pipeline } from 'node:stream/promises'
-import { readableCreate } from '../readable/readableCreate.js'
-import type { TransformTyped } from '../stream.model.js'
-
-type AnyStream = NodeJS.WritableStream | NodeJS.ReadWriteStream
-
-/**
- * Allows to "tee"/"fork" away from the "main pipeline" into the "secondary pipeline".
- *
- * Important, that the main pipeline works "as normal", keeps backpressure, etc.
- * Secondary pipeline DOES NOT keep backpressure.
- * Therefor, the "slowest" pipeline should be made Primary (to keep backpressure).
- *
- * @experimental
- */
-export function transformTee<T>(streams: AnyStream[]): TransformTyped<T, T> {
-  const readable = readableCreate<T>()
-
-  const secondPipelinePromise = pipeline([readable, ...streams])
-
-  return new Transform({
-    objectMode: true,
-    transform(chunk: T, _, cb) {
-      // todo: it's possible to start respecting backpressure,
-      // if we start to listen to the boolean output of .push()
-
-      // pass to the "secondary" pipeline
-      readable.push(chunk)
-
-      // pass through to the "main" pipeline
-      cb(null, chunk)
-    },
-    async final(cb) {
-      console.log('transformTee final')
-
-      // Pushing null "closes"/ends the secondary pipeline correctly
-      readable.push(null)
-
-      // Second pipeline is expected to finish now, let's await it
-      await secondPipelinePromise
-      console.log('transformTee final secondPipeline done')
-
-      // Because second pipeline is done - now we can signal main pipeline to be done as well
-      cb()
-    },
-  })
-}
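For migration context: the removed transformTee was used roughly as sketched below, against the 15.26.0 API (the transformFork.ts added in this release appears to take over this role, though its API is not shown in this diff). The main, slowest chain keeps backpressure; the tee'd chain does not:

import { createWriteStream } from 'node:fs'
import { Readable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { createGzip } from 'node:zlib'
// transformTee as exported up to 15.26.0 (removed in 15.27.0); import path assumed
import { transformTee } from '@naturalcycles/nodejs-lib/stream'

await pipeline(
  Readable.from(['a\n', 'b\n', 'c\n']),
  // each chunk passes through to the main pipeline AND is pushed
  // into the secondary chain; only the main pipeline keeps backpressure
  transformTee<string>([createGzip(), createWriteStream('copy.txt.gz')]),
  createWriteStream('main.txt'),
)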
package/src/stream/transform/transformToArray.ts
@@ -1,23 +0,0 @@
-import { Transform } from 'node:stream'
-import type { TransformOptions, TransformTyped } from '../stream.model.js'
-
-/**
- * Will collect all stream results in the array (keeping it in memory) and emit in the end as one result.
- */
-export function transformToArray<IN>(opt: TransformOptions = {}): TransformTyped<IN, IN[]> {
-  const res: IN[] = []
-
-  return new Transform({
-    objectMode: true,
-    ...opt,
-    transform(chunk: IN, _, cb) {
-      res.push(chunk)
-      // callback to signal that we processed input, but not emitting any output
-      cb()
-    },
-    final(this: Transform, cb) {
-      this.push(res)
-      cb()
-    },
-  })
-}
package/src/stream/writable/writableFork.ts
@@ -1,56 +0,0 @@
-import { Writable } from 'node:stream'
-import { pipeline } from 'node:stream/promises'
-import { readableCreate } from '../readable/readableCreate.js'
-import type { ReadableTyped, TransformOptions, WritableTyped } from '../stream.model.js'
-
-/**
- * Allows "forking" a stream inside pipeline into a number of pipeline chains (2 or more).
- * Currently does NOT (!) maintain backpressure.
- * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
- * Will wait until all forked pipelines are completed before completing the stream.
- *
- * @experimental
- */
-export function writableFork<T>(
-  chains: NodeJS.WritableStream[][],
-  opt?: TransformOptions,
-): WritableTyped<T> {
-  const readables: ReadableTyped<T>[] = []
-
-  const allChainsDone = Promise.all(
-    chains.map(async chain => {
-      const readable = readableCreate<T>()
-      readables.push(readable)
-
-      return await pipeline([readable, ...chain])
-    }),
-  ).catch(err => {
-    console.error(err) // ensure the error is logged
-    throw err
-  })
-
-  return new Writable({
-    objectMode: true,
-    ...opt,
-    write(chunk: T, _, cb) {
-      // Push/fork to all sub-streams
-      // No backpressure is ensured here, it'll push regardless of the
-      readables.forEach(readable => readable.push(chunk))
-
-      cb()
-    },
-    async final(cb) {
-      try {
-        // Push null (complete) to all sub-streams
-        readables.forEach(readable => readable.push(null))
-
-        console.log(`writableFork.final is waiting for all chains to be done`)
-        await allChainsDone
-        console.log(`writableFork.final all chains done`)
-        cb()
-      } catch (err) {
-        cb(err as Error)
-      }
-    },
-  })
-}