@naturalcycles/nodejs-lib 12.58.0 → 12.59.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/index.d.ts +19 -18
  2. package/dist/index.js +19 -92
  3. package/dist/log/log.util.d.ts +4 -0
  4. package/dist/log/log.util.js +11 -0
  5. package/dist/stream/ndjson/ndjsonMap.d.ts +2 -2
  6. package/dist/stream/ndjson/ndjsonMap.js +4 -3
  7. package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +2 -2
  8. package/dist/stream/pipeline/pipeline.d.ts +25 -3
  9. package/dist/stream/pipeline/pipeline.js +76 -9
  10. package/dist/stream/readable/readableCreate.d.ts +8 -0
  11. package/dist/stream/readable/readableCreate.js +9 -1
  12. package/dist/stream/readable/readableForEach.d.ts +2 -2
  13. package/dist/stream/readable/readableFromArray.d.ts +2 -2
  14. package/dist/stream/readable/readableFromArray.js +17 -13
  15. package/dist/stream/readable/readableMap.d.ts +2 -2
  16. package/dist/stream/readable/readableMap.js +22 -17
  17. package/dist/stream/stream.util.d.ts +4 -0
  18. package/dist/stream/stream.util.js +24 -0
  19. package/dist/stream/transform/transformLimit.d.ts +32 -1
  20. package/dist/stream/transform/transformLimit.js +33 -16
  21. package/dist/stream/transform/transformMap.d.ts +2 -11
  22. package/dist/stream/transform/transformMap.js +55 -67
  23. package/dist/stream/transform/worker/workerClassProxy.js +1 -0
  24. package/dist/stream/writable/writableLimit.d.ts +9 -0
  25. package/dist/stream/writable/writableLimit.js +29 -0
  26. package/dist/stream/writable/writableVoid.d.ts +8 -1
  27. package/dist/stream/writable/writableVoid.js +5 -1
  28. package/package.json +1 -1
  29. package/src/index.ts +17 -156
  30. package/src/log/log.util.ts +9 -0
  31. package/src/stream/ndjson/ndjsonMap.ts +7 -5
  32. package/src/stream/ndjson/ndjsonStreamForEach.ts +2 -2
  33. package/src/stream/pipeline/pipeline.ts +102 -9
  34. package/src/stream/readable/readableCreate.ts +9 -1
  35. package/src/stream/readable/readableForEach.ts +2 -2
  36. package/src/stream/readable/readableFromArray.ts +18 -21
  37. package/src/stream/readable/readableMap.ts +24 -21
  38. package/src/stream/stream.util.ts +29 -0
  39. package/src/stream/transform/transformLimit.ts +66 -20
  40. package/src/stream/transform/transformMap.ts +74 -93
  41. package/src/stream/transform/worker/workerClassProxy.js +1 -0
  42. package/src/stream/writable/writableLimit.ts +28 -0
  43. package/src/stream/writable/writableVoid.ts +13 -1
  44. package/dist/stream/transform/legacy/transformMap.d.ts +0 -17
  45. package/dist/stream/transform/legacy/transformMap.js +0 -94
  46. package/src/stream/transform/legacy/transformMap.ts +0 -133
package/src/index.ts CHANGED
@@ -4,41 +4,15 @@ import type { AfterResponseHook, BeforeErrorHook, BeforeRequestHook, Got } from
  import { AnySchema, ValidationErrorItem } from 'joi'
  import { _chunkBuffer } from './buffer/buffer.util'
  import { tableDiff, TableDiffOptions } from './diff/tableDiff'
- import { getGot } from './got/getGot'
+ export * from './got/getGot'
  import { GetGotOptions } from './got/got.model'
- import { memoryUsage, memoryUsageFull, processSharedUtil } from './infra/process.util'
+ export * from './infra/process.util'
  import { Debug, IDebug, IDebugger } from './log/debug'
- import {
- base64ToBuffer,
- base64ToString,
- bufferToBase64,
- hash,
- md5,
- hashAsBuffer,
- md5AsBuffer,
- stringToBase64,
- } from './security/hash.util'
- import {
- ALPHABET_ALPHANUMERIC,
- ALPHABET_ALPHANUMERIC_LOWERCASE,
- ALPHABET_ALPHANUMERIC_UPPERCASE,
- ALPHABET_LOWERCASE,
- ALPHABET_NUMBER,
- ALPHABET_UPPERCASE,
- stringId,
- stringIdAsync,
- stringIdUnsafe,
- } from './security/id.util'
- import {
- getSecretMap,
- loadSecretsFromEnv,
- loadSecretsFromJsonFile,
- removeSecretsFromEnv,
- secret,
- secretOptional,
- setSecretMap,
- } from './security/secret.util'
+ export * from './security/hash.util'
+ export * from './security/id.util'
+ export * from './security/secret.util'
  import { hasColors } from './colors/colors'
+ export * from './log/log.util'
  import { slackDefaultMessagePrefixHook, SlackService } from './slack/slack.service'
  import {
  SlackApiBody,
@@ -70,7 +44,7 @@ import {
  TransformJsonParseOptions,
  } from './stream/ndjson/transformJsonParse'
  import { transformToNDJson, TransformToNDJsonOptions } from './stream/ndjson/transformToNDJson'
- import { _pipeline } from './stream/pipeline/pipeline'
+ export * from './stream/pipeline/pipeline'
  import { readableCreate, readableFrom } from './stream/readable/readableCreate'
  import { readableForEach, readableForEachSync } from './stream/readable/readableForEach'
  import { readableFromArray } from './stream/readable/readableFromArray'
@@ -83,13 +57,10 @@ import {
  TransformTyped,
  WritableTyped,
  } from './stream/stream.model'
- import { transformBuffer } from './stream/transform/transformBuffer'
- import { transformFilter, transformFilterSync } from './stream/transform/transformFilter'
- import { transformLimit } from './stream/transform/transformLimit'
- import {
- transformLogProgress,
- TransformLogProgressOptions,
- } from './stream/transform/transformLogProgress'
+ export * from './stream/transform/transformBuffer'
+ export * from './stream/transform/transformFilter'
+ export * from './stream/transform/transformLimit'
+ export * from './stream/transform/transformLogProgress'
  import { transformMap, TransformMapOptions } from './stream/transform/transformMap'
  import { transformMapSimple } from './stream/transform/transformMapSimple'
  import { transformNoOp } from './stream/transform/transformNoOp'
@@ -105,22 +76,13 @@ import {
  } from './stream/transform/worker/transformMultiThreaded'
  import { WorkerInput, WorkerOutput } from './stream/transform/worker/transformMultiThreaded.model'
  export * from './stream/writable/writableForEach'
- import { writableFork } from './stream/writable/writableFork'
- import { writablePushToArray } from './stream/writable/writablePushToArray'
- import { writableVoid } from './stream/writable/writableVoid'
+ export * from './stream/writable/writableFork'
+ export * from './stream/writable/writablePushToArray'
+ export * from './stream/writable/writableVoid'
  import { inspectAny, InspectAnyOptions, inspectAnyStringifyFn } from './string/inspectAny'
- import { requireEnvKeys, requireFileToExist } from './util/env.util'
+ export * from './util/env.util'
  import { LRUMemoCache } from './util/lruMemoCache'
- import {
- gunzipBuffer,
- gunzipToString,
- gzipBuffer,
- gzipString,
- unzipBuffer,
- unzipToString,
- zipBuffer,
- zipString,
- } from './util/zip.util'
+ export * from './util/zip.util'
  import { readAjvSchemas, readJsonSchemas } from './validation/ajv/ajv.util'
  import { AjvSchema, AjvSchemaCfg, AjvValidationOptions } from './validation/ajv/ajvSchema'
  import { AjvValidationError, AjvValidationErrorData } from './validation/ajv/ajvValidationError'
@@ -135,34 +97,7 @@ import {
  SchemaTyped,
  StringSchemaTyped,
  } from './validation/joi/joi.model'
- import {
- anyObjectSchema,
- anySchema,
- arraySchema,
- oneOfSchema,
- binarySchema,
- booleanDefaultToFalseSchema,
- booleanSchema,
- dateStringSchema,
- emailSchema,
- baseDBEntitySchema,
- savedDBEntitySchema,
- idSchema,
- integerSchema,
- ipAddressSchema,
- numberSchema,
- objectSchema,
- percentageSchema,
- semVerSchema,
- SEM_VER_PATTERN,
- slugSchema,
- stringSchema,
- unixTimestampSchema,
- urlSchema,
- userAgentSchema,
- utcOffsetSchema,
- verSchema,
- } from './validation/joi/joi.shared.schemas'
+ export * from './validation/joi/joi.shared.schemas'
  import { JoiValidationError, JoiValidationErrorData } from './validation/joi/joi.validation.error'
  import {
  convert,
@@ -207,7 +142,6 @@ export type {
  TransformMapSyncOptions,
  NDJSONStreamForEachOptions,
  TransformOptions,
- TransformLogProgressOptions,
  TransformMultiThreadedOptions,
  WorkerClassInterface,
  WorkerInput,
@@ -233,71 +167,8 @@ export {
  undefinedIfInvalid,
  convert,
  Joi,
- booleanSchema,
- booleanDefaultToFalseSchema,
- stringSchema,
- numberSchema,
- integerSchema,
- percentageSchema,
- dateStringSchema,
- arraySchema,
- binarySchema,
- objectSchema,
- oneOfSchema,
- anySchema,
- anyObjectSchema,
- baseDBEntitySchema,
- savedDBEntitySchema,
- idSchema,
- unixTimestampSchema,
- verSchema,
- emailSchema,
- SEM_VER_PATTERN,
- semVerSchema,
- userAgentSchema,
- utcOffsetSchema,
- ipAddressSchema,
- slugSchema,
- urlSchema,
- processSharedUtil,
- zipBuffer,
- gzipBuffer,
- unzipBuffer,
- gunzipBuffer,
- zipString,
- gzipString,
- unzipToString,
- gunzipToString,
- requireEnvKeys,
- requireFileToExist,
  LRUMemoCache,
- stringId,
- stringIdAsync,
- stringIdUnsafe,
- ALPHABET_NUMBER,
- ALPHABET_LOWERCASE,
- ALPHABET_UPPERCASE,
- ALPHABET_ALPHANUMERIC_LOWERCASE,
- ALPHABET_ALPHANUMERIC_UPPERCASE,
- ALPHABET_ALPHANUMERIC,
- md5,
- hash,
- hashAsBuffer,
- md5AsBuffer,
- stringToBase64,
- base64ToString,
- bufferToBase64,
- base64ToBuffer,
  Debug,
- getSecretMap,
- setSecretMap,
- loadSecretsFromEnv,
- loadSecretsFromJsonFile,
- removeSecretsFromEnv,
- secret,
- secretOptional,
- memoryUsage,
- memoryUsageFull,
  SlackService,
  slackDefaultMessagePrefixHook,
  readableCreate,
@@ -308,8 +179,6 @@ export {
  readableForEachSync,
  readableMap,
  readableMapToArray,
- _pipeline,
- transformBuffer,
  ndjsonMap,
  ndJsonFileRead,
  ndJsonFileWrite,
@@ -321,27 +190,19 @@ export {
  transformJsonParse,
  bufferReviver,
  transformToNDJson,
- transformFilter,
- transformFilterSync,
  transformMap,
  transformMapSync,
  transformMapSimple,
  transformNoOp,
- writablePushToArray,
  transformSplit,
  transformToString,
  transformToArray,
  transformTap,
- transformLogProgress,
- transformLimit,
- writableVoid,
- writableFork,
  transformMultiThreaded,
  BaseWorkerClass,
  tableDiff,
  inspectAny,
  inspectAnyStringifyFn,
- getGot,
  HTTPError,
  TimeoutError,
  _chunkBuffer,
package/src/log/log.util.ts ADDED
@@ -0,0 +1,9 @@
+ import { commonLoggerCreate } from '@naturalcycles/js-lib'
+ import { inspectAny } from '../index'
+
+ /**
+ * CommonLogger that logs to process.stdout directly (bypassing console.log).
+ */
+ export const stdoutLogger = commonLoggerCreate((_level, args) => {
+ process.stdout.write(args.map(a => inspectAny(a)).join(' ') + '\n')
+ })
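
Note: stdoutLogger implements the CommonLogger interface from @naturalcycles/js-lib and is re-exported from the package root via the new `export * from './log/log.util'` line in index.ts above. A minimal usage sketch, assuming the standard CommonLogger methods log/warn/error:

import { stdoutLogger } from '@naturalcycles/nodejs-lib'

// Each argument is formatted with inspectAny and written straight to process.stdout
stdoutLogger.log('starting export', { rows: 3 })
stdoutLogger.warn('warnings bypass console.log too')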
package/src/stream/ndjson/ndjsonMap.ts CHANGED
@@ -1,6 +1,6 @@
  import { createReadStream, createWriteStream } from 'fs'
  import { createGzip, createUnzip } from 'zlib'
- import { AsyncMapper, ErrorMode } from '@naturalcycles/js-lib'
+ import { AbortableAsyncMapper, ErrorMode } from '@naturalcycles/js-lib'
  import {
  requireFileToExist,
  transformJsonParse,
@@ -41,7 +41,7 @@ export interface NDJSONMapOptions<IN = any, OUT = IN>
  * Zips output file automatically, if it ends with `.gz`.
  */
  export async function ndjsonMap<IN = any, OUT = any>(
- mapper: AsyncMapper<IN, OUT>,
+ mapper: AbortableAsyncMapper<IN, OUT>,
  opt: NDJSONMapOptions<IN, OUT>,
  ): Promise<void> {
  const { inputFilePath, outputFilePath, logEveryOutput = 100_000, limitInput, limitOutput } = opt
@@ -56,19 +56,21 @@ export async function ndjsonMap<IN = any, OUT = any>(
  const transformUnzip = inputFilePath.endsWith('.gz') ? [createUnzip()] : []
  const transformZip = outputFilePath.endsWith('.gz') ? [createGzip()] : []

+ const readable = createReadStream(inputFilePath)
+
  await _pipeline([
- createReadStream(inputFilePath),
+ readable,
  ...transformUnzip,
  transformSplit(), // splits by \n
  transformJsonParse(),
- transformLimit(limitInput),
+ transformLimit({ limit: limitInput, sourceReadable: readable }),
  transformLogProgress({ metric: 'read', ...opt }),
  transformMap(mapper, {
  flattenArrayOutput: true,
  errorMode: ErrorMode.SUPPRESS,
  ...opt,
  }),
- transformLimit(limitOutput),
+ transformLimit({ limit: limitOutput, sourceReadable: readable }),
  transformLogProgress({ metric: 'saved', logEvery: logEveryOutput }),
  transformToNDJson(),
  ...transformZip,
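
Note: as the hunk above shows, transformLimit now takes an options object ({ limit, sourceReadable }) so it can actually stop the upstream file read when the limit is hit. A hedged usage sketch of ndjsonMap with the limit options; file paths and the mapper are hypothetical, and the snippet assumes an async context:

import { ndjsonMap } from '@naturalcycles/nodejs-lib'

await ndjsonMap(async (row: any) => ({ ...row, migrated: true }), {
  inputFilePath: './data.ndjson.gz', // unzipped automatically because of the .gz extension
  outputFilePath: './data.migrated.ndjson',
  limitInput: 100_000, // stops reading input after 100k rows (via transformLimit)
  limitOutput: 10_000, // stops writing output after 10k rows
})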
package/src/stream/ndjson/ndjsonStreamForEach.ts CHANGED
@@ -1,6 +1,6 @@
  import * as fs from 'fs'
  import { createUnzip } from 'zlib'
- import { AsyncMapper, ErrorMode } from '@naturalcycles/js-lib'
+ import { AbortableAsyncMapper, ErrorMode } from '@naturalcycles/js-lib'
  import {
  requireFileToExist,
  transformJsonParse,
@@ -23,7 +23,7 @@ export interface NDJSONStreamForEachOptions<IN = any>
  * Convenience function to `forEach` through an ndjson file.
  */
  export async function ndjsonStreamForEach<T>(
- mapper: AsyncMapper<T, void>,
+ mapper: AbortableAsyncMapper<T, void>,
  opt: NDJSONStreamForEachOptions<T>,
  ): Promise<void> {
  requireFileToExist(opt.inputFilePath)
package/src/stream/pipeline/pipeline.ts CHANGED
@@ -1,16 +1,109 @@
- import { pipeline } from 'stream'
- import { promisify } from 'util'
+ import { pipeline, Readable, Transform, Writable } from 'stream'
+ import { _last, AnyFunction, DeferredPromise, pDefer } from '@naturalcycles/js-lib'
+ import { writablePushToArray } from '../../index'

  type AnyStream = NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream

+ // /**
+ // * Promisified stream.pipeline()
+ // export let _pipeline = promisify(pipeline)
+ //
+ //
+ // // Workaround https://github.com/nodejs/node/issues/40191
+ // // todo: remove it when fix is released in 16.x and in AppEngine 16.x
+ // if (process.version >= 'v16.10') {
+ // const { pipeline } = require('stream/promises')
+ // _pipeline = ((streams: AnyStream[]) => pipeline(...streams)) as any
+ // }
+
+ export interface PipelineOptions {
+ /**
+ * Set to true to allow ERR_STREAM_PREMATURE_CLOSE.
+ * Required to support graceful close when using transformLimit
+ */
+ allowClose?: boolean
+ }
+
+ /**
+ * Promisified `stream.pipeline`.
+ *
+ * Supports opt.allowClose, which allows transformLimit to work (to actually stop source Readable)
+ * without throwing an error (ERR_STREAM_PREMATURE_CLOSE).
+ */
+ export async function _pipeline(streams: AnyStream[], opt: PipelineOptions = {}): Promise<void> {
+ const first = streams[0] as any
+ const rest = streams.slice(1)
+
+ if (opt.allowClose) {
+ // Do the magic of making the pipeline "abortable"
+ //
+ // How does it work:
+ // It finds `sourceReadable` (basically, it's just first item in the passed array of streams)
+ // Finds last "writable" (last item), patches the `_final` method of it to detect when the whole pipeline is "done",
+ // sets the `streamDone` DeferredPromise that resolves when the pipeline is done.
+ // Scans through all passed items, finds those that are capable of "closing" the stream
+ // (currently its `transformLimit` or `transformMap`)
+ // Patches them by attaching `sourceReadable` and `streamDone`.
+ // These items (transformLimit and transformMap), when they need to "close the stream" - call `pipelineClose`.
+ // `pipelineClose` is the result of 2 sleepless nights of googling and experimentation:)
+ // It does:
+ // 1. Stops the "downstream" by doing `this.push(null)`.
+ // 2. Pauses the `sourceReadable` by calling sourceReadable.unpipe()
+ // 3. Waits for `streamDone` to ensure that downstream chunks are fully processed (e.g written to disk).
+ // 4. Calls `sourceReadable.destroy()`, which emits ERR_STREAM_PREMATURE_CLOSE
+ // 5. _pipeline (this function) catches that specific error and suppresses it (because it's expected and
+ // inevitable in this flow). Know a better way to close the stream? Tell me!
+ const streamDone = pDefer()
+ const sourceReadable = first as Readable
+ const last = _last(streams) as Writable
+ const lastFinal = last._final?.bind(last) || ((cb: AnyFunction) => cb())
+ last._final = cb => {
+ lastFinal(() => {
+ cb()
+ streamDone.resolve()
+ })
+ }
+
+ rest.forEach(s => {
+ // console.log(s)
+ if (s instanceof AbortableTransform || s.constructor.name === 'DestroyableTransform') {
+ // console.log(`found ${s.constructor.name}, setting props`)
+ ;(s as AbortableTransform).sourceReadable = sourceReadable
+ ;(s as AbortableTransform).streamDone = streamDone
+ }
+ })
+ }
+
+ return new Promise<void>((resolve, reject) => {
+ pipeline(first, ...(rest as any[]), (err: Error) => {
+ if (err) {
+ if (opt.allowClose && (err as any)?.code === 'ERR_STREAM_PREMATURE_CLOSE') {
+ console.log('_pipeline closed (as expected)')
+ return resolve()
+ }
+ // console.log(`_pipeline error`, err)
+ return reject(err)
+ }
+
+ resolve()
+ })
+ })
+ }
+
  /**
- * Promisified stream.pipeline()
+ * Convenience function to make _pipeline collect all items at the end of the stream (should be Transform, not Writeable!)
+ * and return.
  */
- export let _pipeline = promisify(pipeline)
+ export async function _pipelineToArray<T>(
+ streams: AnyStream[],
+ opt: PipelineOptions = {},
+ ): Promise<T[]> {
+ const a: T[] = []
+ await _pipeline([...streams, writablePushToArray(a)], opt)
+ return a
+ }

- // Workaround https://github.com/nodejs/node/issues/40191
- // todo: remove it when fix is released in 16.x and in AppEngine 16.x
- if (process.version >= 'v16.10') {
- const { pipeline } = require('stream/promises')
- _pipeline = ((streams: AnyStream[]) => pipeline(...streams)) as any
+ export class AbortableTransform extends Transform {
+ sourceReadable?: Readable
+ streamDone?: DeferredPromise
  }
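
Note: a hedged sketch of the new allowClose option and the _pipelineToArray helper, based only on the code above. The transformLimit option names ({ limit, sourceReadable }) are taken from the ndjsonMap hunk earlier in this diff; the file path is hypothetical and the snippet assumes an async context:

import { createReadStream } from 'fs'
import {
  _pipeline,
  _pipelineToArray,
  transformSplit,
  transformJsonParse,
  transformLimit,
  writableVoid,
} from '@naturalcycles/nodejs-lib'

const readable = createReadStream('./rows.ndjson')

// allowClose lets transformLimit destroy `readable` early without the pipeline
// rejecting on ERR_STREAM_PREMATURE_CLOSE
await _pipeline(
  [
    readable,
    transformSplit(), // split by \n
    transformJsonParse(),
    transformLimit({ limit: 10, sourceReadable: readable }),
    writableVoid(),
  ],
  { allowClose: true },
)

// _pipelineToArray appends writablePushToArray() for you and returns the collected items
const rows = await _pipelineToArray<any>([
  createReadStream('./rows.ndjson'),
  transformSplit(),
  transformJsonParse(),
])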
package/src/stream/readable/readableCreate.ts CHANGED
@@ -6,6 +6,14 @@ import { ReadableTyped } from '../stream.model'
  * Push `null` to it to complete (similar to RxJS `.complete()`).
  *
  * Difference from Readable.from() is that this readable is not "finished" yet and allows pushing more to it.
+ *
+ * Caution!
+ * The implementation of this Readable is not fully compliant,
+ * e.g the read() method doesn't return anything, so, it will hand the Node process (or cause it to process.exit(0))
+ * if read() will be called AFTER everything was pushed and Readable is closed (by pushing `null`).
+ * Beware of it when e.g doing unit testing! Jest prefers to hang (not exit-0).
+ *
+ * @deprecated because of the caution above
  */
  export function readableCreate<T>(
  items: Iterable<T> = [],
@@ -14,7 +22,7 @@ export function readableCreate<T>(
  const readable = new Readable({
  objectMode: true,
  ...opt,
- read() {},
+ read() {}, // Caution, if this is called and Readable has not finished yet (null wasn't pushed) - it'll hang the process!
  })
  for (const item of items) {
  readable.push(item)
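
Note: the new caution above means the readable must always be completed by pushing `null`; otherwise a read() call issued after all items were consumed can hang the process. A minimal sketch of the (now deprecated) API used safely:

import { readableCreate, _pipeline, writableVoid } from '@naturalcycles/nodejs-lib'

const readable = readableCreate<number>()

// Push asynchronously, but ALWAYS terminate with `null`
setImmediate(() => {
  readable.push(1)
  readable.push(2)
  readable.push(null) // complete, otherwise the process may hang (see caution)
})

await _pipeline([readable, writableVoid()])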
package/src/stream/readable/readableForEach.ts CHANGED
@@ -1,4 +1,4 @@
- import { AsyncMapper, Mapper, _passNothingPredicate } from '@naturalcycles/js-lib'
+ import { Mapper, _passNothingPredicate, AbortableAsyncMapper } from '@naturalcycles/js-lib'
  import { ReadableTyped, _pipeline } from '../../index'
  import { transformMap, TransformMapOptions } from '../transform/transformMap'

@@ -10,7 +10,7 @@ import { transformMap, TransformMapOptions } from '../transform/transformMap'
  */
  export async function readableForEach<T>(
  readable: ReadableTyped<T>,
- mapper: AsyncMapper<T, void>,
+ mapper: AbortableAsyncMapper<T, void>,
  opt: TransformMapOptions<T, void> = {},
  ): Promise<void> {
  await _pipeline([
package/src/stream/readable/readableFromArray.ts CHANGED
@@ -1,5 +1,5 @@
  import { Readable, ReadableOptions } from 'stream'
- import { AsyncMapper, pMap, _passthroughMapper } from '@naturalcycles/js-lib'
+ import { _passthroughMapper, AbortableAsyncMapper } from '@naturalcycles/js-lib'
  import { ReadableTyped } from '../stream.model'

  /**
@@ -10,29 +10,26 @@ import { ReadableTyped } from '../stream.model'
  */
  export function readableFromArray<IN, OUT>(
  items: IN[],
- mapper: AsyncMapper<IN, OUT> = _passthroughMapper,
+ mapper: AbortableAsyncMapper<IN, OUT> = _passthroughMapper,
  opt?: ReadableOptions,
  ): ReadableTyped<OUT> {
- const readable = new Readable({
+ let i = -1
+
+ return new Readable({
  objectMode: true,
  ...opt,
- read() {},
- })
-
- void pMap(
- items,
- async (item, index) => {
- readable.push(await mapper(item, index))
+ async read() {
+ i++
+ if (i < items.length) {
+ try {
+ this.push(await mapper(items[i]!, i))
+ } catch (err) {
+ console.error(err)
+ this.destroy(err as Error)
+ }
+ } else {
+ this.push(null) // end
+ }
  },
- { concurrency: 1 },
- )
- .then(() => {
- readable.push(null) // done
- })
- .catch(err => {
- console.error(err)
- readable.push(err)
- })
-
- return readable
+ })
  }
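
Note: readableFromArray now maps items lazily, one per read() call (pull-based), instead of eagerly pushing them via pMap, so backpressure is respected and a mapper error destroys the stream instead of being pushed as a value. Call-site usage is unchanged; a small sketch:

import { readableFromArray, _pipelineToArray } from '@naturalcycles/nodejs-lib'

// Mapper runs only when the consumer asks for the next item
const readable = readableFromArray([1, 2, 3], async n => n * 10)
const out = await _pipelineToArray<number>([readable])
// out: [10, 20, 30]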
package/src/stream/readable/readableMap.ts CHANGED
@@ -1,28 +1,31 @@
- import { AsyncMapper } from '@naturalcycles/js-lib'
- import { readableCreate } from '../../index'
+ import { Transform } from 'stream'
+ import { AbortableAsyncMapper, SKIP } from '@naturalcycles/js-lib'
  import { ReadableTyped } from '../stream.model'

  export function readableMap<IN, OUT>(
  readable: ReadableTyped<IN>,
- mapper: AsyncMapper<IN, OUT>,
+ mapper: AbortableAsyncMapper<IN, OUT>,
  ): ReadableTyped<OUT> {
- const out = readableCreate<OUT>()
+ let i = -1

- void (async () => {
- try {
- let index = 0
- for await (const item of readable) {
- const v = await mapper(item, index++)
- out.push(v)
- }
-
- // We're done
- out.push(null)
- } catch (err) {
- console.error(err)
- out.emit('error', err)
- }
- })()
-
- return out
+ // todo: check if we need to handle errors somehow specifically
+ return readable.pipe(
+ new Transform({
+ objectMode: true,
+ async transform(chunk, _enc, cb) {
+ try {
+ const r = await mapper(chunk, ++i)
+ if (r === SKIP) {
+ cb()
+ } else {
+ // _assert(r !== END, `readableMap END not supported`)
+ cb(null, r)
+ }
+ } catch (err) {
+ console.error(err)
+ cb(err as Error)
+ }
+ },
+ }),
+ )
  }
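
Note: readableMap is now a Transform piped from the source readable, and the AbortableAsyncMapper can return the SKIP token from @naturalcycles/js-lib to drop an item. A hedged sketch; explicit generics are added only to keep the example self-contained:

import { SKIP } from '@naturalcycles/js-lib'
import { readableFromArray, readableMap, _pipelineToArray } from '@naturalcycles/nodejs-lib'

const source = readableFromArray<number, number>([1, 2, 3, 4])
// Returning SKIP drops the item; anything else is pushed downstream
const mapped = readableMap(source, async n => (n % 2 ? SKIP : n * 10))
const out = await _pipelineToArray<number>([mapped])
// out: [20, 40]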
package/src/stream/stream.util.ts ADDED
@@ -0,0 +1,29 @@
+ import { Readable } from 'stream'
+ import { CommonLogger } from '@naturalcycles/js-lib'
+
+ export function pipelineClose(
+ name: string,
+ readableDownstream: Readable,
+ sourceReadable: Readable | undefined,
+ streamDone: Promise<void> | undefined,
+ logger: CommonLogger,
+ ): void {
+ readableDownstream.push(null) // this closes the stream, so downstream Readable will receive `end` and won't write anything
+
+ if (!sourceReadable) {
+ logger.warn(`${name} sourceReadable is not provided, readable stream will not be stopped`)
+ } else {
+ logger.log(`${name} is calling readable.unpipe() to pause the stream`)
+ sourceReadable.unpipe() // it is expected to pause the stream
+
+ if (!streamDone) {
+ logger.log(`${name} streamDone is not provided, will do readable.destroy right away`)
+ sourceReadable.destroy()
+ } else {
+ void streamDone.then(() => {
+ logger.log(`${name} streamDone, calling readable.destroy()`)
+ sourceReadable.destroy() // this throws ERR_STREAM_PREMATURE_CLOSE
+ })
+ }
+ }
+ }
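
Note: pipelineClose is the shared helper that the limiting transforms are described (in the pipeline.ts comments above) to call when they want to stop a pipeline early. The sketch below is illustrative only: the counting transform is not the library's transformLimit, and the root import of pipelineClose is an assumption (it may need to be imported from the stream.util module directly):

import { createReadStream } from 'fs'
import { TransformCallback } from 'stream'
import {
  AbortableTransform,
  _pipeline,
  pipelineClose,
  transformSplit,
  writableVoid,
} from '@naturalcycles/nodejs-lib'

// Pass through `max` chunks, then close the pipeline the way described above:
// push(null) downstream, unpipe the source, destroy it once streamDone resolves
class StopAfter extends AbortableTransform {
  private count = 0

  constructor(private max: number) {
    super({ objectMode: true })
  }

  override _transform(chunk: any, _enc: BufferEncoding, cb: TransformCallback): void {
    if (++this.count <= this.max) {
      return cb(null, chunk)
    }
    pipelineClose('StopAfter', this, this.sourceReadable, this.streamDone, console)
    cb()
  }
}

const readable = createReadStream('./big.ndjson') // hypothetical input
await _pipeline([readable, transformSplit(), new StopAfter(100), writableVoid()], {
  allowClose: true, // suppress the expected ERR_STREAM_PREMATURE_CLOSE
})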