@engine9-io/input-tools 2.0.0 → 2.0.1

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as published in their respective public registries.
package/ForEachEntry.js CHANGED
@@ -1,21 +1,19 @@
  import fs from 'node:fs';
  import nodestream from 'node:stream';
  import promises from 'node:stream/promises';
- import throttleDebounce from 'throttle-debounce';
+ import { throttle } from 'throttle-debounce';
  import parallelTransform from 'parallel-transform';
  import debug$0 from 'debug';
- import asyncMutex from 'async-mutex';
- import csv from 'csv';
+ import { Mutex } from 'async-mutex';
+ import { stringify, parse } from 'csv';
  import handlebars from 'handlebars';
  import ValidatingReadable from './ValidatingReadable.js';
  import FileUtilities from './file/FileUtilities.js';
  import { getTempFilename, getBatchTransform, getFile, streamPacket } from './file/tools.js';
  const { Transform, Writable } = nodestream;
  const { pipeline } = promises;
- const { throttle } = throttleDebounce;
  const debug = debug$0('@engine9-io/input-tools');
  const debugThrottle = throttle(1000, debug, { noLeading: false, noTrailing: false });
- const { Mutex } = asyncMutex;
  class ForEachEntry {
  constructor({ accountId } = {}) {
  this.fileUtilities = new FileUtilities({ accountId });
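
This hunk consolidates the module bindings: instead of importing each library's default export and destructuring the needed symbols afterwards (e.g. `const { throttle } = throttleDebounce;`), the named exports are imported directly. A minimal standalone sketch of the two equivalent patterns, assuming only the published throttle-debounce API (names outside the diff are illustrative):

  // Before: default import, then destructure the CJS-style export object.
  import throttleDebounce from 'throttle-debounce';
  const { throttle } = throttleDebounce;

  // After: bind the named export in one statement; unused exports are
  // easier for bundlers to tree-shake.
  import { throttle as throttleDirect } from 'throttle-debounce';

  // Either binding refers to the same function, e.g. a logger limited
  // to one call per second (same shape as the debugThrottle line above):
  const throttledLog = throttleDirect(1000, console.log, { noLeading: false, noTrailing: false });
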
@@ -65,7 +63,7 @@ class ForEachEntry {
  }
  })
  )
- .pipe(csv.stringify({ header: true }))
+ .pipe(stringify({ header: true }))
  .pipe(writeStream);
  return this.outputStreams[name].items;
  });
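
For context, `stringify({ header: true })` from the csv package returns a Transform stream that serializes records to CSV text, emitting a header row derived from the record keys; the hunk merely drops the `csv.` namespace now that `stringify` is imported by name. A minimal sketch of the same pipe pattern (the records and output path are hypothetical):

  import fs from 'node:fs';
  import { Readable } from 'node:stream';
  import { pipeline } from 'node:stream/promises';
  import { stringify } from 'csv';

  // Objects in, CSV text out; header: true writes "id,name" first.
  await pipeline(
    Readable.from([{ id: 1, name: 'a' }, { id: 2, name: 'b' }]),
    stringify({ header: true }),
    fs.createWriteStream('out.csv'), // hypothetical output file
  );
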
@@ -162,7 +160,7 @@ class ForEachEntry {
  );
  await pipeline(
  inStream,
- csv.parse({
+ parse({
  relax: true,
  skip_empty_lines: true,
  max_limit_on_data_read: 10000000,
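
Likewise, `parse(...)` is the csv package's streaming parser, and this hunk only switches to the named import. Note that `relax` and `max_limit_on_data_read` are older csv-parse option names (newer csv-parse majors renamed them, to `relax_quotes` and `max_record_size` respectively), so they depend on the csv version this package pins. A minimal sketch of driving the parser through the same `pipeline` call, using only the stable `skip_empty_lines` option (the input path and consumer are hypothetical):

  import fs from 'node:fs';
  import { Writable } from 'node:stream';
  import { pipeline } from 'node:stream/promises';
  import { parse } from 'csv';

  // Each parsed row arrives as one object-mode chunk.
  await pipeline(
    fs.createReadStream('in.csv'), // hypothetical input file
    parse({ skip_empty_lines: true }),
    new Writable({
      objectMode: true,
      write(record, _encoding, callback) {
        console.log(record); // e.g. ['id', 'name'], then ['1', 'a'], ...
        callback();
      },
    }),
  );
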