@naturalcycles/nodejs-lib 15.25.0 → 15.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/dist/slack/slack.service.d.ts +1 -0
  2. package/dist/slack/slack.service.js +4 -3
  3. package/dist/stream/index.d.ts +2 -8
  4. package/dist/stream/index.js +2 -8
  5. package/dist/stream/ndjson/createReadStreamAsNDJson.d.ts +16 -0
  6. package/dist/stream/ndjson/{createReadStreamAsNDJSON.js → createReadStreamAsNDJson.js} +10 -13
  7. package/dist/stream/ndjson/ndjsonMap.d.ts +2 -0
  8. package/dist/stream/ndjson/ndjsonMap.js +2 -0
  9. package/dist/stream/pipeline.d.ts +2 -1
  10. package/dist/stream/pipeline.js +25 -8
  11. package/dist/stream/progressLogger.d.ts +3 -3
  12. package/dist/stream/readable/readableCombined.d.ts +4 -2
  13. package/dist/stream/readable/readableCombined.js +16 -11
  14. package/dist/stream/readable/readableCreate.d.ts +1 -3
  15. package/dist/stream/readable/readableCreate.js +4 -4
  16. package/dist/stream/stream.model.d.ts +16 -0
  17. package/dist/stream/transform/transformFork.d.ts +10 -0
  18. package/dist/stream/transform/transformFork.js +62 -0
  19. package/dist/stream/transform/transformLimit.d.ts +2 -1
  20. package/dist/stream/transform/transformLimit.js +3 -3
  21. package/dist/stream/transform/transformLogProgress.js +3 -2
  22. package/dist/stream/transform/transformMap.d.ts +2 -4
  23. package/dist/stream/transform/transformMap.js +3 -2
  24. package/dist/stream/transform/transformMapSimple.d.ts +2 -4
  25. package/dist/stream/transform/transformMapSimple.js +3 -2
  26. package/dist/stream/transform/transformMapSync.d.ts +2 -4
  27. package/dist/stream/transform/transformMapSync.js +3 -1
  28. package/dist/stream/transform/transformSplit.js +2 -2
  29. package/dist/stream/transform/transformThrottle.d.ts +2 -3
  30. package/dist/stream/transform/transformThrottle.js +22 -27
  31. package/dist/stream/writable/writableVoid.d.ts +1 -8
  32. package/dist/stream/writable/writableVoid.js +0 -1
  33. package/package.json +1 -1
  34. package/src/slack/slack.service.ts +6 -3
  35. package/src/stream/index.ts +2 -8
  36. package/src/stream/ndjson/{createReadStreamAsNDJSON.ts → createReadStreamAsNDJson.ts} +10 -13
  37. package/src/stream/ndjson/ndjsonMap.ts +2 -0
  38. package/src/stream/pipeline.ts +33 -9
  39. package/src/stream/progressLogger.ts +3 -3
  40. package/src/stream/readable/readableCombined.ts +22 -11
  41. package/src/stream/readable/readableCreate.ts +4 -3
  42. package/src/stream/stream.model.ts +18 -0
  43. package/src/stream/transform/transformFork.ts +74 -0
  44. package/src/stream/transform/transformLimit.ts +5 -4
  45. package/src/stream/transform/transformLogProgress.ts +3 -2
  46. package/src/stream/transform/transformMap.ts +4 -8
  47. package/src/stream/transform/transformMapSimple.ts +10 -7
  48. package/src/stream/transform/transformMapSync.ts +4 -6
  49. package/src/stream/transform/transformSplit.ts +2 -2
  50. package/src/stream/transform/transformThrottle.ts +28 -36
  51. package/src/stream/writable/writableVoid.ts +1 -10
  52. package/dist/stream/ndjson/createReadStreamAsNDJSON.d.ts +0 -19
  53. package/dist/stream/ndjson/createWriteStreamAsNDJSON.d.ts +0 -11
  54. package/dist/stream/ndjson/createWriteStreamAsNDJSON.js +0 -27
  55. package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +0 -10
  56. package/dist/stream/ndjson/ndjsonStreamForEach.js +0 -15
  57. package/dist/stream/readable/readableToArray.d.ts +0 -9
  58. package/dist/stream/readable/readableToArray.js +0 -17
  59. package/dist/stream/transform/transformTee.d.ts +0 -13
  60. package/dist/stream/transform/transformTee.js +0 -37
  61. package/dist/stream/transform/transformToArray.d.ts +0 -5
  62. package/dist/stream/transform/transformToArray.js +0 -20
  63. package/dist/stream/writable/writableForEach.d.ts +0 -12
  64. package/dist/stream/writable/writableForEach.js +0 -15
  65. package/dist/stream/writable/writableFork.d.ts +0 -10
  66. package/dist/stream/writable/writableFork.js +0 -45
  67. package/dist/stream/writable/writableLimit.d.ts +0 -8
  68. package/dist/stream/writable/writableLimit.js +0 -25
  69. package/src/stream/ndjson/createWriteStreamAsNDJSON.ts +0 -30
  70. package/src/stream/ndjson/ndjsonStreamForEach.ts +0 -28
  71. package/src/stream/readable/readableToArray.ts +0 -19
  72. package/src/stream/transform/transformTee.ts +0 -48
  73. package/src/stream/transform/transformToArray.ts +0 -23
  74. package/src/stream/writable/writableForEach.ts +0 -25
  75. package/src/stream/writable/writableFork.ts +0 -56
  76. package/src/stream/writable/writableLimit.ts +0 -29

package/src/stream/transform/transformLimit.ts
@@ -1,5 +1,6 @@
  import { Transform } from 'node:stream'
  import type { AbortableSignal } from '@naturalcycles/js-lib'
+ import type { NonNegativeInteger } from '@naturalcycles/js-lib/types'
  import type { TransformOptions, TransformTyped } from '../stream.model.js'
  import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js'
  import { transformNoOp } from './transformNoOp.js'
@@ -8,7 +9,7 @@ export interface TransformLimitOptions extends TransformOptions {
  /**
  * Nullish value (e.g 0 or undefined) would mean "no limit"
  */
- limit?: number
+ limit?: NonNegativeInteger

  /**
  * Allows to abort (gracefully stop) the stream from inside the Transform.
@@ -17,7 +18,7 @@ export interface TransformLimitOptions extends TransformOptions {
  }

  export function transformLimit<IN>(opt: TransformLimitOptions): TransformTyped<IN, IN> {
- const { limit, signal } = opt
+ const { limit, signal, objectMode = true, highWaterMark } = opt

  if (!limit) {
  return transformNoOp()
@@ -26,8 +27,8 @@ export function transformLimit<IN>(opt: TransformLimitOptions): TransformTyped<I
  let i = 0 // so we start first chunk with 1
  let ended = false
  return new Transform({
- objectMode: true,
- ...opt,
+ objectMode,
+ highWaterMark,
  transform(chunk, _, cb) {
  if (ended) {
  return
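
A minimal usage sketch of the changed transformLimit API (not part of the package; the export path is an assumption, while _pipeline and writableVoid are referenced elsewhere in this diff):

    import { Readable } from 'node:stream'
    // Assumed export path; adjust to the package's actual exports
    import { _pipeline, transformLimit, writableVoid } from '@naturalcycles/nodejs-lib/stream'

    // Gracefully stops the stream after 100 items have passed through
    await _pipeline([
      Readable.from(rows), // rows: any iterable of objects (placeholder)
      transformLimit({ limit: 100 }),
      writableVoid(), // "null-terminator" that consumes the stream
    ])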

package/src/stream/transform/transformLogProgress.ts
@@ -13,11 +13,12 @@ export interface TransformLogProgressOptions<IN = any>
  export function transformLogProgress<IN = any>(
  opt: TransformLogProgressOptions = {},
  ): TransformTyped<IN, IN> {
+ const { objectMode = true, highWaterMark } = opt
  const progress = progressLogger(opt)

  return new Transform({
- objectMode: true,
- ...opt,
+ objectMode,
+ highWaterMark,
  transform(chunk: IN, _, cb) {
  progress.log(chunk)
  cb(null, chunk) // pass-through

package/src/stream/transform/transformMap.ts
@@ -1,7 +1,7 @@
  import { _hc, type AbortableSignal } from '@naturalcycles/js-lib'
  import { _since } from '@naturalcycles/js-lib/datetime/time.util.js'
  import { _anyToError, _assert, ErrorMode } from '@naturalcycles/js-lib/error'
- import type { CommonLogger } from '@naturalcycles/js-lib/log'
+ import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log'
  import { _stringify } from '@naturalcycles/js-lib/string/stringify.js'
  import {
  type AbortableAsyncMapper,
@@ -15,10 +15,10 @@ import {
  } from '@naturalcycles/js-lib/types'
  import through2Concurrent from 'through2-concurrent'
  import { yellow } from '../../colors/colors.js'
- import type { TransformTyped } from '../stream.model.js'
+ import type { TransformOptions, TransformTyped } from '../stream.model.js'
  import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js'

- export interface TransformMapOptions<IN = any, OUT = IN> {
+ export interface TransformMapOptions<IN = any, OUT = IN> extends TransformOptions {
  /**
  * Predicate to filter outgoing results (after mapper).
  * Allows to not emit all results.
@@ -79,8 +79,6 @@ export interface TransformMapOptions<IN = any, OUT = IN> {
  */
  metric?: string

- logger?: CommonLogger
-
  /**
  * Allows to abort (gracefully stop) the stream from inside the Transform.
  */
@@ -143,7 +141,6 @@ export function transformMap<IN = any, OUT = IN>(
  onError,
  onDone,
  metric = 'stream',
- logger = console,
  signal,
  } = opt

@@ -154,6 +151,7 @@ export function transformMap<IN = any, OUT = IN>(
  let ok = true
  let errors = 0
  const collectedErrors: Error[] = [] // only used if errorMode == THROW_AGGREGATED
+ const logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel)

  return through2Concurrent.obj(
  {
@@ -161,8 +159,6 @@ export function transformMap<IN = any, OUT = IN>(
  readableHighWaterMark: highWaterMark,
  writableHighWaterMark: highWaterMark,
  async final(cb) {
- // console.log('transformMap final')
-
  logErrorStats(true)

  if (collectedErrors.length) {
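
transformMap (like transformMapSync and transformThrottle below) drops its own logger field; logger and logLevel now come from the shared TransformOptions and are wrapped via createCommonLoggerAtLevel. A hedged sketch of passing a custom logger (export path assumed; the mapper and types are placeholders):

    // Assumed export path; adjust to the package's actual exports
    import { transformMap } from '@naturalcycles/nodejs-lib/stream'

    // logger (and logLevel) are now part of the shared TransformOptions, per this diff
    const mapLengths = transformMap<string, number>(async s => s.length, {
      logger: console, // any CommonLogger-compatible object
    })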

package/src/stream/transform/transformMapSimple.ts
@@ -1,18 +1,15 @@
  import { Transform } from 'node:stream'
  import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js'
- import type { CommonLogger } from '@naturalcycles/js-lib/log'
  import type { IndexedMapper } from '@naturalcycles/js-lib/types'
- import type { TransformTyped } from '../stream.model.js'
+ import type { TransformOptions, TransformTyped } from '../stream.model.js'

- export interface TransformMapSimpleOptions {
+ export interface TransformMapSimpleOptions extends TransformOptions {
  /**
  * Only supports THROW_IMMEDIATELY (default) and SUPPRESS.
  *
  * @default ErrorMode.THROW_IMMEDIATELY
  */
  errorMode?: ErrorMode.THROW_IMMEDIATELY | ErrorMode.SUPPRESS
-
- logger?: CommonLogger
  }

  /**
@@ -29,10 +26,16 @@ export function transformMapSimple<IN = any, OUT = IN>(
  opt: TransformMapSimpleOptions = {},
  ): TransformTyped<IN, OUT> {
  let index = -1
- const { errorMode = ErrorMode.THROW_IMMEDIATELY, logger = console } = opt
+ const {
+ errorMode = ErrorMode.THROW_IMMEDIATELY,
+ logger = console,
+ objectMode = true,
+ highWaterMark,
+ } = opt

  return new Transform({
- objectMode: true,
+ objectMode,
+ highWaterMark,
  transform(chunk: IN, _, cb) {
  try {
  cb(null, mapper(chunk, ++index))

package/src/stream/transform/transformMapSync.ts
@@ -1,15 +1,15 @@
  import { Transform } from 'node:stream'
  import type { AbortableSignal } from '@naturalcycles/js-lib'
  import { _anyToError, _assert, ErrorMode } from '@naturalcycles/js-lib/error'
- import type { CommonLogger } from '@naturalcycles/js-lib/log'
+ import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log'
  import type { IndexedMapper, Predicate, UnixTimestampMillis } from '@naturalcycles/js-lib/types'
  import { END, SKIP } from '@naturalcycles/js-lib/types'
  import { yellow } from '../../colors/colors.js'
- import type { TransformTyped } from '../stream.model.js'
+ import type { TransformOptions, TransformTyped } from '../stream.model.js'
  import { PIPELINE_GRACEFUL_ABORT } from '../stream.util.js'
  import type { TransformMapStats } from './transformMap.js'

- export interface TransformMapSyncOptions<IN = any, OUT = IN> {
+ export interface TransformMapSyncOptions<IN = any, OUT = IN> extends TransformOptions {
  /**
  * @default true
  */
@@ -54,8 +54,6 @@ export interface TransformMapSyncOptions<IN = any, OUT = IN> {
  */
  metric?: string

- logger?: CommonLogger
-
  /**
  * Allows to abort (gracefully stop) the stream from inside the Transform.
  */
@@ -77,7 +75,6 @@ export function transformMapSync<IN = any, OUT = IN>(
  onDone,
  metric = 'stream',
  objectMode = true,
- logger = console,
  signal,
  } = opt

@@ -87,6 +84,7 @@ export function transformMapSync<IN = any, OUT = IN>(
  let isSettled = false
  let errors = 0
  const collectedErrors: Error[] = [] // only used if errorMode == THROW_AGGREGATED
+ const logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel)

  return new Transform({
  objectMode,

package/src/stream/transform/transformSplit.ts
@@ -19,7 +19,7 @@ export function transformSplitOnNewline(): TransformTyped<Buffer, Buffer> {
  writableHighWaterMark: 64 * 1024,
  readableObjectMode: true,

- transform(buf: Buffer, _enc, done) {
+ transform(buf: Buffer, _enc, cb) {
  let offset = 0
  let lastMatch = 0
  if (buffered) {
@@ -42,7 +42,7 @@ export function transformSplitOnNewline(): TransformTyped<Buffer, Buffer> {
  }
  }

- done()
+ cb()
  },

  flush(done) {

package/src/stream/transform/transformThrottle.ts
@@ -1,5 +1,6 @@
  import { Transform } from 'node:stream'
  import { _ms, _since, localTime } from '@naturalcycles/js-lib/datetime'
+ import { createCommonLoggerAtLevel } from '@naturalcycles/js-lib/log'
  import type { DeferredPromise } from '@naturalcycles/js-lib/promise'
  import { pDefer } from '@naturalcycles/js-lib/promise/pDefer.js'
  import type {
@@ -7,9 +8,9 @@ import type {
  PositiveInteger,
  UnixTimestampMillis,
  } from '@naturalcycles/js-lib/types'
- import type { TransformTyped } from '../stream.model.js'
+ import type { TransformOptions, TransformTyped } from '../stream.model.js'

- export interface TransformThrottleOptions {
+ export interface TransformThrottleOptions extends TransformOptions {
  /**
  * How many items to allow per `interval` of seconds.
  */
@@ -19,8 +20,6 @@ export interface TransformThrottleOptions {
  * How long is the interval (in seconds) where number of items should not exceed `throughput`.
  */
  interval: NumberOfSeconds
-
- debug?: boolean
  }

  /**
@@ -41,42 +40,40 @@ export interface TransformThrottleOptions {
  * @experimental
  */
  export function transformThrottle<T>(opt: TransformThrottleOptions): TransformTyped<T, T> {
- const { throughput, interval, debug } = opt
+ const { throughput, interval, objectMode = true, highWaterMark } = opt

  let count = 0
  let start: UnixTimestampMillis
- let paused: DeferredPromise | undefined
+ let lock: DeferredPromise | undefined
  let timeout: NodeJS.Timeout | undefined
+ const logger = createCommonLoggerAtLevel(opt.logger, opt.logLevel)

  return new Transform({
- objectMode: true,
+ objectMode,
+ highWaterMark,
  async transform(item: T, _, cb) {
  // console.log('incoming', item, { paused: !!paused, count })
  if (!start) {
  start = Date.now() as UnixTimestampMillis
  timeout = setTimeout(() => onInterval(this), interval * 1000)
- if (debug) {
- console.log(`${localTime.now().toPretty()} transformThrottle started with`, {
- throughput,
- interval,
- rps: Math.round(throughput / interval),
- })
- }
+ logger.log(`${localTime.now().toPretty()} transformThrottle started with`, {
+ throughput,
+ interval,
+ rps: Math.round(throughput / interval),
+ })
  }

- if (paused) {
- // console.log('awaiting pause', {item, count})
- await paused
+ if (lock) {
+ // console.log('awaiting lock', {item, count})
+ await lock
  }

  if (++count >= throughput) {
  // console.log('pausing now after', {item, count})
- paused = pDefer()
- if (debug) {
- console.log(
- `${localTime.now().toPretty()} transformThrottle activated: ${count} items passed in ${_since(start)}, will pause for ${_ms(interval * 1000 - (Date.now() - start))}`,
- )
- }
+ lock = pDefer()
+ logger.log(
+ `${localTime.now().toPretty()} transformThrottle activated: ${count} items passed in ${_since(start)}, will pause for ${_ms(interval * 1000 - (Date.now() - start))}`,
+ )
  }

  cb(null, item) // pass the item through
@@ -88,23 +85,18 @@ export function transformThrottle<T>(opt: TransformThrottleOptions): TransformTy
  })

  function onInterval(transform: Transform): void {
- if (paused) {
- if (debug) {
- console.log(`${localTime.now().toPretty()} transformThrottle resumed`)
- }
-
- paused.resolve()
- paused = undefined
+ if (lock) {
+ logger.log(`${localTime.now().toPretty()} transformThrottle resumed`)
+ lock.resolve()
+ lock = undefined
  } else {
- if (debug) {
- console.log(
- `${localTime.now().toPretty()} transformThrottle passed ${count} (of max ${throughput}) items in ${_since(start)}`,
- )
- }
+ logger.log(
+ `${localTime.now().toPretty()} transformThrottle passed ${count} (of max ${throughput}) items in ${_since(start)}`,
+ )
  }

  count = 0
- start = Date.now() as UnixTimestampMillis
+ start = localTime.nowUnixMillis()
  timeout = setTimeout(() => onInterval(transform), interval * 1000)
  }
  }
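
transformThrottle replaces its debug flag with the shared logger/logLevel options and renames the internal paused deferred to lock. A usage sketch of the throttle itself (export path assumed; throughput and interval are the documented options):

    import { Readable } from 'node:stream'
    // Assumed export path; adjust to the package's actual exports
    import { _pipeline, transformThrottle, writableVoid } from '@naturalcycles/nodejs-lib/stream'

    // Lets at most 10 items through per 1-second interval, pausing the stream in between
    await _pipeline([
      Readable.from(items), // items is a placeholder iterable
      transformThrottle({ throughput: 10, interval: 1 }),
      writableVoid(),
    ])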

package/src/stream/writable/writableVoid.ts
@@ -1,20 +1,12 @@
  import { Writable } from 'node:stream'
- import type { DeferredPromise } from '@naturalcycles/js-lib/promise'
  import type { TransformOptions } from '../stream.model.js'

- export interface WritableVoidOptions extends TransformOptions {
- /**
- * If set - it will be Resolved when the Stream is done (after final.cb)
- */
- streamDone?: DeferredPromise
- }
-
  /**
  * Use as a "null-terminator" of stream.pipeline.
  * It consumes the stream as quickly as possible without doing anything.
  * Put it in the end of your pipeline in case it ends with Transform that needs a consumer.
  */
- export function writableVoid(opt: WritableVoidOptions = {}): Writable {
+ export function writableVoid(opt: TransformOptions = {}): Writable {
  return new Writable({
  objectMode: true,
  ...opt,
@@ -23,7 +15,6 @@ export function writableVoid(opt: WritableVoidOptions = {}): Writable {
  },
  final(cb) {
  cb()
- opt.streamDone?.resolve()
  },
  })
  }
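
writableVoid no longer accepts a streamDone DeferredPromise and now takes plain TransformOptions. Callers that used streamDone to learn when the stream finished can await the pipeline promise instead; a sketch under the same export-path assumption as above:

    // Assumed export path; adjust to the package's actual exports
    import { _pipeline, transformLogProgress, writableVoid } from '@naturalcycles/nodejs-lib/stream'

    // The awaited pipeline promise resolves after writableVoid's final callback,
    // which replaces the removed streamDone DeferredPromise
    await _pipeline([source, transformLogProgress(), writableVoid()]) // source is a placeholder Readable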

package/dist/stream/ndjson/createReadStreamAsNDJSON.d.ts (deleted)
@@ -1,19 +0,0 @@
- import type { ReadableTyped } from '../stream.model.js';
- /**
- Returns a Readable of [already parsed] NDJSON objects.
-
- Replaces a list of operations:
- - requireFileToExist(inputPath)
- - fs.createReadStream
- - createUnzip (only if path ends with '.gz')
- - transformSplitOnNewline
- - transformJsonParse
-
- To add a Limit or Offset: just add .take() or .drop(), example:
-
- _pipeline([
- fs2.createReadStreamAsNDJSON().take(100),
- transformX(),
- ])
- */
- export declare function createReadStreamAsNDJSON<ROW = any>(inputPath: string): ReadableTyped<ROW>;

package/dist/stream/ndjson/createWriteStreamAsNDJSON.d.ts (deleted)
@@ -1,11 +0,0 @@
- import type { TransformTyped } from '../stream.model.js';
- /**
- Returns an array of Transforms, so that you can ...destructure them at
- the end of the _pipeline.
-
- Replaces a list of operations:
- - transformToNDJson
- - createGzip (only if path ends with '.gz')
- - fs.createWriteStream
- */
- export declare function createWriteStreamAsNDJSON(outputPath: string): TransformTyped<any, any>[];

package/dist/stream/ndjson/createWriteStreamAsNDJSON.js (deleted)
@@ -1,27 +0,0 @@
- import { createGzip } from 'node:zlib';
- import { _isTruthy } from '@naturalcycles/js-lib';
- import { fs2 } from '../../fs/fs2.js';
- import { transformToNDJson } from './transformToNDJson.js';
- /**
- Returns an array of Transforms, so that you can ...destructure them at
- the end of the _pipeline.
-
- Replaces a list of operations:
- - transformToNDJson
- - createGzip (only if path ends with '.gz')
- - fs.createWriteStream
- */
- export function createWriteStreamAsNDJSON(outputPath) {
- fs2.ensureFile(outputPath);
- return [
- transformToNDJson(),
- outputPath.endsWith('.gz')
- ? createGzip({
- // chunkSize: 64 * 1024, // no observed speedup
- })
- : undefined,
- fs2.createWriteStream(outputPath, {
- // highWaterMark: 64 * 1024, // no observed speedup
- }),
- ].filter(_isTruthy);
- }

package/dist/stream/ndjson/ndjsonStreamForEach.d.ts (deleted)
@@ -1,10 +0,0 @@
- import type { AbortableAsyncMapper } from '@naturalcycles/js-lib/types';
- import type { TransformLogProgressOptions } from '../transform/transformLogProgress.js';
- import type { TransformMapOptions } from '../transform/transformMap.js';
- export interface NDJSONStreamForEachOptions<IN = any> extends TransformMapOptions<IN, void>, TransformLogProgressOptions<IN> {
- inputFilePath: string;
- }
- /**
- * Convenience function to `forEach` through an ndjson file.
- */
- export declare function ndjsonStreamForEach<T>(mapper: AbortableAsyncMapper<T, void>, opt: NDJSONStreamForEachOptions<T>): Promise<void>;

package/dist/stream/ndjson/ndjsonStreamForEach.js (deleted)
@@ -1,15 +0,0 @@
- import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
- import { Pipeline } from '../pipeline.js';
- /**
- * Convenience function to `forEach` through an ndjson file.
- */
- export async function ndjsonStreamForEach(mapper, opt) {
- await Pipeline.fromNDJsonFile(opt.inputFilePath)
- .map(mapper, {
- errorMode: ErrorMode.THROW_AGGREGATED,
- ...opt,
- predicate: () => true, // to log progress properly
- })
- .logProgress(opt)
- .run();
- }
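
The removed ndjsonStreamForEach wrapper's own body points at its replacement: the Pipeline builder (pipeline.ts is extended in this release, see the file list above). A sketch of calling Pipeline directly, mirroring the deleted code (export path assumed; handleRow is a placeholder):

    import { ErrorMode } from '@naturalcycles/js-lib/error'
    // Assumed export path; adjust to the package's actual exports
    import { Pipeline } from '@naturalcycles/nodejs-lib/stream'

    const handleRow = async (row: any): Promise<void> => {
      // process one parsed NDJSON row (placeholder)
    }

    // Roughly equivalent to the removed ndjsonStreamForEach(handleRow, { inputFilePath: './rows.ndjson.gz' })
    await Pipeline.fromNDJsonFile('./rows.ndjson.gz')
      .map(handleRow, { errorMode: ErrorMode.THROW_AGGREGATED })
      .logProgress({})
      .run()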

package/dist/stream/readable/readableToArray.d.ts (deleted)
@@ -1,9 +0,0 @@
- import type { ReadableTyped } from '../stream.model.js';
- /**
- * Convenience function to read the whole Readable stream into Array (in-memory)
- * and return that array.
- *
- * Native `await readable.toArray()` can be used instead.
- * This helper is kept for type-safery support.
- */
- export declare function readableToArray<T>(readable: ReadableTyped<T>): Promise<T[]>;

package/dist/stream/readable/readableToArray.js (deleted)
@@ -1,17 +0,0 @@
- /**
- * Convenience function to read the whole Readable stream into Array (in-memory)
- * and return that array.
- *
- * Native `await readable.toArray()` can be used instead.
- * This helper is kept for type-safery support.
- */
- export async function readableToArray(readable) {
- return await readable.toArray();
- // const a: T[] = []
- //
- // for await (const item of readable) {
- // a.push(item)
- // }
- //
- // return a
- }

package/dist/stream/transform/transformTee.d.ts (deleted)
@@ -1,13 +0,0 @@
- import type { TransformTyped } from '../stream.model.js';
- type AnyStream = NodeJS.WritableStream | NodeJS.ReadWriteStream;
- /**
- * Allows to "tee"/"fork" away from the "main pipeline" into the "secondary pipeline".
- *
- * Important, that the main pipeline works "as normal", keeps backpressure, etc.
- * Secondary pipeline DOES NOT keep backpressure.
- * Therefor, the "slowest" pipeline should be made Primary (to keep backpressure).
- *
- * @experimental
- */
- export declare function transformTee<T>(streams: AnyStream[]): TransformTyped<T, T>;
- export {};

package/dist/stream/transform/transformTee.js (deleted)
@@ -1,37 +0,0 @@
- import { Transform } from 'node:stream';
- import { pipeline } from 'node:stream/promises';
- import { readableCreate } from '../readable/readableCreate.js';
- /**
- * Allows to "tee"/"fork" away from the "main pipeline" into the "secondary pipeline".
- *
- * Important, that the main pipeline works "as normal", keeps backpressure, etc.
- * Secondary pipeline DOES NOT keep backpressure.
- * Therefor, the "slowest" pipeline should be made Primary (to keep backpressure).
- *
- * @experimental
- */
- export function transformTee(streams) {
- const readable = readableCreate();
- const secondPipelinePromise = pipeline([readable, ...streams]);
- return new Transform({
- objectMode: true,
- transform(chunk, _, cb) {
- // todo: it's possible to start respecting backpressure,
- // if we start to listen to the boolean output of .push()
- // pass to the "secondary" pipeline
- readable.push(chunk);
- // pass through to the "main" pipeline
- cb(null, chunk);
- },
- async final(cb) {
- console.log('transformTee final');
- // Pushing null "closes"/ends the secondary pipeline correctly
- readable.push(null);
- // Second pipeline is expected to finish now, let's await it
- await secondPipelinePromise;
- console.log('transformTee final secondPipeline done');
- // Because second pipeline is done - now we can signal main pipeline to be done as well
- cb();
- },
- });
- }

package/dist/stream/transform/transformToArray.d.ts (deleted)
@@ -1,5 +0,0 @@
- import type { TransformOptions, TransformTyped } from '../stream.model.js';
- /**
- * Will collect all stream results in the array (keeping it in memory) and emit in the end as one result.
- */
- export declare function transformToArray<IN>(opt?: TransformOptions): TransformTyped<IN, IN[]>;

package/dist/stream/transform/transformToArray.js (deleted)
@@ -1,20 +0,0 @@
- import { Transform } from 'node:stream';
- /**
- * Will collect all stream results in the array (keeping it in memory) and emit in the end as one result.
- */
- export function transformToArray(opt = {}) {
- const res = [];
- return new Transform({
- objectMode: true,
- ...opt,
- transform(chunk, _, cb) {
- res.push(chunk);
- // callback to signal that we processed input, but not emitting any output
- cb();
- },
- final(cb) {
- this.push(res);
- cb();
- },
- });
- }

package/dist/stream/writable/writableForEach.d.ts (deleted)
@@ -1,12 +0,0 @@
- import type { AsyncIndexedMapper, IndexedMapper } from '@naturalcycles/js-lib/types';
- import type { WritableTyped } from '../stream.model.js';
- import { type TransformMapOptions } from '../transform/transformMap.js';
- import { type TransformMapSyncOptions } from '../transform/transformMapSync.js';
- /**
- * Just an alias to transformMap that declares OUT as void.
- */
- export declare function writableForEach<IN = any>(mapper: AsyncIndexedMapper<IN, void>, opt?: TransformMapOptions<IN, void>): WritableTyped<IN>;
- /**
- * Just an alias to transformMap that declares OUT as void.
- */
- export declare function writableForEachSync<IN = any>(mapper: IndexedMapper<IN, void>, opt?: TransformMapSyncOptions<IN, void>): WritableTyped<IN>;

package/dist/stream/writable/writableForEach.js (deleted)
@@ -1,15 +0,0 @@
- import { _passNothingPredicate } from '@naturalcycles/js-lib/types';
- import { transformMap } from '../transform/transformMap.js';
- import { transformMapSync } from '../transform/transformMapSync.js';
- /**
- * Just an alias to transformMap that declares OUT as void.
- */
- export function writableForEach(mapper, opt = {}) {
- return transformMap(mapper, { ...opt, predicate: _passNothingPredicate });
- }
- /**
- * Just an alias to transformMap that declares OUT as void.
- */
- export function writableForEachSync(mapper, opt = {}) {
- return transformMapSync(mapper, { ...opt, predicate: _passNothingPredicate });
- }

package/dist/stream/writable/writableFork.d.ts (deleted)
@@ -1,10 +0,0 @@
- import type { TransformOptions, WritableTyped } from '../stream.model.js';
- /**
- * Allows "forking" a stream inside pipeline into a number of pipeline chains (2 or more).
- * Currently does NOT (!) maintain backpressure.
- * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
- * Will wait until all forked pipelines are completed before completing the stream.
- *
- * @experimental
- */
- export declare function writableFork<T>(chains: NodeJS.WritableStream[][], opt?: TransformOptions): WritableTyped<T>;

package/dist/stream/writable/writableFork.js (deleted)
@@ -1,45 +0,0 @@
- import { Writable } from 'node:stream';
- import { pipeline } from 'node:stream/promises';
- import { readableCreate } from '../readable/readableCreate.js';
- /**
- * Allows "forking" a stream inside pipeline into a number of pipeline chains (2 or more).
- * Currently does NOT (!) maintain backpressure.
- * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
- * Will wait until all forked pipelines are completed before completing the stream.
- *
- * @experimental
- */
- export function writableFork(chains, opt) {
- const readables = [];
- const allChainsDone = Promise.all(chains.map(async (chain) => {
- const readable = readableCreate();
- readables.push(readable);
- return await pipeline([readable, ...chain]);
- })).catch(err => {
- console.error(err); // ensure the error is logged
- throw err;
- });
- return new Writable({
- objectMode: true,
- ...opt,
- write(chunk, _, cb) {
- // Push/fork to all sub-streams
- // No backpressure is ensured here, it'll push regardless of the
- readables.forEach(readable => readable.push(chunk));
- cb();
- },
- async final(cb) {
- try {
- // Push null (complete) to all sub-streams
- readables.forEach(readable => readable.push(null));
- console.log(`writableFork.final is waiting for all chains to be done`);
- await allChainsDone;
- console.log(`writableFork.final all chains done`);
- cb();
- }
- catch (err) {
- cb(err);
- }
- },
- });
- }

package/dist/stream/writable/writableLimit.d.ts (deleted)
@@ -1,8 +0,0 @@
- import type { Readable } from 'node:stream';
- import type { WritableTyped } from '../stream.model.js';
- /**
- * Allows to stop the Readable stream after the pipeline has processed X number of rows.
- * It counts OUTPUT rows (not input), because this Writable is always at the end of the Pipeline.
- * It ensures that everything has been processed before issuing a STOP on the readable.
- */
- export declare function writableLimit<T>(readable: Readable, limit: number): WritableTyped<T>;