web-csv-toolbox 0.4.0 → 0.5.0

This diff shows the published contents of the two package versions as they appear in their respective public registries; it is provided for informational purposes only.
package/lib/index.js CHANGED
@@ -5,83 +5,76 @@ const CR = "\r";
  const CRLF = "\r\n";
  const LF = "\n";
  const COMMA = ",";
- const DOUBLE_QUATE = '"';
+ const DOUBLE_QUOTE = '"';
  function assertCommonOptions(options) {
  if (typeof options.quotation === "string" && options.quotation.length === 0) {
  throw new Error("quotation must not be empty");
  }
- if (typeof options.demiliter === "string" && options.demiliter.length === 0) {
- throw new Error("demiliter must not be empty");
+ if (typeof options.delimiter === "string" && options.delimiter.length === 0) {
+ throw new Error("delimiter must not be empty");
  }
  if (options.quotation.includes(LF) || options.quotation.includes(CR)) {
  throw new Error("quotation must not include CR or LF");
  }
- if (options.demiliter.includes(LF) || options.demiliter.includes(CR)) {
- throw new Error("demiliter must not include CR or LF");
+ if (options.delimiter.includes(LF) || options.delimiter.includes(CR)) {
+ throw new Error("delimiter must not include CR or LF");
  }
  if (
- options.demiliter.includes(options.quotation) ||
- options.quotation.includes(options.demiliter)
+ options.delimiter.includes(options.quotation) ||
+ options.quotation.includes(options.delimiter)
  ) {
  throw new Error(
- "demiliter and quotation must not include each other as a substring",
+ "delimiter and quotation must not include each other as a substring",
  );
  }
  }
  function escapeRegExp(v) {
  return v.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  }
- class LexerTransformer extends TransformStream {
- #demiliter;
- #demiliterLength;
+ class Lexer {
+ #delimiter;
+ #delimiterLength;
  #quotation;
  #quotationLength;
  #matcher;
  #buffer = "";
- get demiliter() {
- return this.#demiliter;
- }
- get quotation() {
- return this.#quotation;
- }
- constructor({ demiliter = COMMA, quotation = DOUBLE_QUATE } = {}) {
- assertCommonOptions({ demiliter, quotation });
- super({
- transform: (chunk, controller) => {
- if (chunk.length !== 0) {
- this.#buffer += chunk;
- for (const token of this.#tokens({ flush: false })) {
- controller.enqueue(token);
- }
- }
- },
- flush: (controller) => {
- for (const token of this.#tokens({ flush: true })) {
- controller.enqueue(token);
- }
- },
- });
- this.#demiliter = demiliter;
- this.#demiliterLength = demiliter.length;
+ #flush = false;
+ constructor({ delimiter = COMMA, quotation = DOUBLE_QUOTE } = {}) {
+ assertCommonOptions({ delimiter, quotation });
+ this.#delimiter = delimiter;
+ this.#delimiterLength = delimiter.length;
  this.#quotation = quotation;
  this.#quotationLength = quotation.length;
- const d = escapeRegExp(demiliter);
+ const d = escapeRegExp(delimiter);
  const q = escapeRegExp(quotation);
  this.#matcher = new RegExp(
  `^(?:(?!${q})(?!${d})(?![\\r\\n]))([\\S\\s\\uFEFF\\xA0]+?)(?=${q}|${d}|\\r|\\n|$)`,
  );
  }
- *#tokens({ flush }) {
+ lex(chunk, buffering = false) {
+ if (!buffering) {
+ this.#flush = true;
+ }
+ if (typeof chunk === "string" && chunk.length !== 0) {
+ this.#buffer += chunk;
+ }
+ return this.#tokens();
+ }
+ flush() {
+ this.#flush = true;
+ return [...this.#tokens()];
+ }
+ *#tokens() {
+ if (this.#flush) {
+ if (this.#buffer.endsWith(CRLF)) {
+ this.#buffer = this.#buffer.slice(0, -CRLF.length);
+ } else if (this.#buffer.endsWith(LF)) {
+ this.#buffer = this.#buffer.slice(0, -LF.length);
+ }
+ }
  let currentField = null;
- for (let token; (token = this.#nextToken({ flush })); ) {
- switch (token.type) {
- case Field:
- if (currentField) {
- currentField.value += token.value;
- } else {
- currentField = token;
- }
- break;
+ for (let token; (token = this.#nextToken()); ) {
+ switch (token) {
  case FieldDelimiter:
  if (currentField) {
  yield currentField;
@@ -96,37 +89,50 @@ class LexerTransformer extends TransformStream {
  }
  yield token;
  break;
+ default:
+ if (currentField) {
+ currentField.value += token.value;
+ } else {
+ currentField = token;
+ }
+ break;
  }
  }
  if (currentField) {
  yield currentField;
  }
  }
- #nextToken({ flush = false } = {}) {
+ #nextToken() {
  if (this.#buffer.length === 0) {
  return null;
  }
+ if (
+ this.#flush === false &&
+ (this.#buffer === CRLF || this.#buffer === LF)
+ ) {
+ return null;
+ }
  if (this.#buffer.startsWith(CRLF)) {
  this.#buffer = this.#buffer.slice(2);
- return { type: RecordDelimiter, value: CRLF };
+ return RecordDelimiter;
  }
  if (this.#buffer.startsWith(LF)) {
  this.#buffer = this.#buffer.slice(1);
- return { type: RecordDelimiter, value: LF };
+ return RecordDelimiter;
  }
- if (this.#buffer.startsWith(this.#demiliter)) {
- this.#buffer = this.#buffer.slice(this.#demiliterLength);
- return { type: FieldDelimiter, value: this.#demiliter };
+ if (this.#buffer.startsWith(this.#delimiter)) {
+ this.#buffer = this.#buffer.slice(this.#delimiterLength);
+ return FieldDelimiter;
  }
  if (this.#buffer.startsWith(this.#quotation)) {
- if (flush === false && this.#buffer.endsWith(this.#quotation)) {
+ if (this.#flush === false && this.#buffer.endsWith(this.#quotation)) {
  return null;
  }
- return this.extractQuotedString(flush);
+ return this.#extractQuotedString();
  }
  const match = this.#matcher.exec(this.#buffer);
  if (match) {
- if (flush === false && match[0].length === this.#buffer.length) {
+ if (this.#flush === false && match[0].length === this.#buffer.length) {
  return null;
  }
  this.#buffer = this.#buffer.slice(match[0].length);
@@ -134,32 +140,32 @@ class LexerTransformer extends TransformStream {
  }
  return null;
  }
- extractQuotedString(flush) {
+ #extractQuotedString() {
  let end = this.#quotationLength;
  let value = "";
  while (end < this.#buffer.length) {
  if (
  this.#buffer.slice(end, end + this.#quotationLength) ===
- this.quotation &&
+ this.#quotation &&
  this.#buffer.slice(
  end + this.#quotationLength,
  end + this.#quotationLength * 2,
- ) === this.quotation
+ ) === this.#quotation
  ) {
- value += this.quotation;
+ value += this.#quotation;
  end += this.#quotationLength * 2;
  continue;
  }
  if (
- this.#buffer.slice(end, end + this.#quotationLength) === this.quotation
+ this.#buffer.slice(end, end + this.#quotationLength) === this.#quotation
  ) {
  if (
- flush === false &&
+ this.#flush === false &&
  end + this.#quotationLength < this.#buffer.length &&
  this.#buffer.slice(
  end + this.#quotationLength,
- this.#demiliterLength,
- ) !== this.demiliter &&
+ this.#delimiterLength,
+ ) !== this.#delimiter &&
  this.#buffer.slice(
  end + this.#quotationLength,
  end + this.#quotationLength + 2,
@@ -180,58 +186,80 @@ class LexerTransformer extends TransformStream {
  return null;
  }
  }
- class RecordAssemblerTransformar extends TransformStream {
- #fieldIndex = 0;
- #row = [];
- #header;
- #darty = false;
+ class LexerTransformer extends TransformStream {
  constructor(options = {}) {
+ const lexer = new Lexer(options);
  super({
- transform: (token, controller) => {
- switch (token.type) {
- case Field:
- this.#darty = true;
- this.#row[this.#fieldIndex] = token.value;
- break;
- case FieldDelimiter:
- this.#fieldIndex++;
- break;
- case RecordDelimiter:
- if (this.#header === undefined) {
- this.#setHeader(this.#row);
- } else {
- if (this.#darty) {
- const record = Object.fromEntries(
- this.#header
- .filter((v) => v)
- .map((header, index) => [header, this.#row.at(index)]),
- );
- controller.enqueue(record);
- }
- }
- this.#fieldIndex = 0;
- this.#row = new Array(this.#header?.length);
- this.#darty = false;
- break;
+ transform: (chunk, controller) => {
+ if (chunk.length !== 0) {
+ controller.enqueue([...lexer.lex(chunk, true)]);
  }
  },
  flush: (controller) => {
- if (this.#fieldIndex !== 0 && this.#header !== undefined) {
- if (this.#darty) {
- const record = Object.fromEntries(
- this.#header
- .filter((v) => v)
- .map((header, index) => [header, this.#row.at(index)]),
- );
- controller.enqueue(record);
- }
- }
+ controller.enqueue(lexer.flush());
  },
  });
+ }
+ }
+ class RecordAssembler {
+ #fieldIndex = 0;
+ #row = [];
+ #header;
+ #dirty = false;
+ constructor(options = {}) {
  if (options.header !== undefined && Array.isArray(options.header)) {
  this.#setHeader(options.header);
  }
  }
+ *assemble(tokens, flush = true) {
+ for (const token of tokens) {
+ switch (token) {
+ case FieldDelimiter:
+ this.#fieldIndex++;
+ this.#dirty = true;
+ break;
+ case RecordDelimiter:
+ if (this.#header === undefined) {
+ this.#setHeader(this.#row);
+ } else {
+ if (this.#dirty) {
+ yield Object.fromEntries(
+ this.#header.map((header, index) => [
+ header,
+ this.#row.at(index),
+ ]),
+ );
+ } else {
+ yield Object.fromEntries(
+ this.#header.map((header) => [header, ""]),
+ );
+ }
+ }
+ this.#fieldIndex = 0;
+ this.#row = new Array(this.#header?.length).fill("");
+ this.#dirty = false;
+ break;
+ default:
+ this.#dirty = true;
+ this.#row[this.#fieldIndex] = token.value;
+ break;
+ }
+ }
+ if (flush) {
+ yield* this.flush();
+ }
+ }
+ *flush() {
+ if (this.#header !== undefined) {
+ if (this.#dirty) {
+ yield Object.fromEntries(
+ this.#header
+ .filter((v) => v)
+ .map((header, index) => [header, this.#row.at(index)]),
+ );
+ }
+ }
+ }
  #setHeader(header) {
  this.#header = header;
  if (this.#header.length === 0) {
@@ -242,16 +270,48 @@ class RecordAssemblerTransformar extends TransformStream {
  }
  }
  }
- class SingleValueReadableStream extends ReadableStream {
- constructor(value) {
+ class RecordAssemblerTransformer extends TransformStream {
+ constructor(options = {}) {
+ const assembler = new RecordAssembler(options);
  super({
- start(controller) {
- controller.enqueue(value);
- controller.close();
+ transform: (tokens, controller) => {
+ for (const token of assembler.assemble(tokens, false)) {
+ controller.enqueue(token);
+ }
+ },
+ flush: (controller) => {
+ for (const token of assembler.flush()) {
+ controller.enqueue(token);
+ }
  },
  });
  }
  }
+ function parseStringToArraySync(csv, options) {
+ const lexer = new Lexer(options);
+ const assembler = new RecordAssembler(options);
+ const tokens = lexer.lex(csv);
+ return [...assembler.assemble(tokens)];
+ }
+ function parseStringToIterableIterator(csv, options) {
+ const lexer = new Lexer(options);
+ const assembler = new RecordAssembler(options);
+ const tokens = lexer.lex(csv);
+ return assembler.assemble(tokens);
+ }
+ function parseStringToStream(csv, options) {
+ const lexer = new Lexer(options);
+ const assembler = new RecordAssembler(options);
+ return new ReadableStream({
+ start(controller) {
+ const tokens = lexer.lex(csv);
+ for (const record of assembler.assemble(tokens)) {
+ controller.enqueue(record);
+ }
+ controller.close();
+ },
+ });
+ }
  async function toArray(...args) {
  const rows = [];
  for await (const row of this(...args)) {
@@ -259,57 +319,132 @@ async function toArray(...args) {
  }
  return rows;
  }
- async function* parseStringStream(stream, options) {
- let controller;
- const readable = new ReadableStream({
- start: (controller_) => (controller = controller_),
- });
- await stream
- .pipeThrough(new LexerTransformer(options))
- .pipeThrough(new RecordAssemblerTransformar(options))
- .pipeTo(
- new WritableStream({
- write: (row) => controller.enqueue(row),
- close: () => controller.close(),
- }),
- );
- const reader = readable.getReader();
- try {
- while (true) {
- const { value, done } = await reader.read();
- if (done) break;
- yield value;
- }
- } finally {
- reader.releaseLock();
- }
+ async function* parseString(csv, options) {
+ yield* parseStringToIterableIterator(csv, options);
  }
- ((parseStringStream) => {
- Object.defineProperty(parseStringStream, "toArray", {
+ ((parseString) => {
+ Object.defineProperty(parseString, "toArray", {
  enumerable: true,
  writable: false,
  value: toArray,
  });
- })(parseStringStream || (parseStringStream = {}));
- async function* parseString(csv, options) {
- yield* parseStringStream(new SingleValueReadableStream(csv), options);
+ Object.defineProperty(parseString, "toArraySync", {
+ enumerable: true,
+ writable: false,
+ value: parseStringToArraySync,
+ });
+ Object.defineProperty(parseString, "toIterableIterator", {
+ enumerable: true,
+ writable: false,
+ value: parseStringToIterableIterator,
+ });
+ Object.defineProperty(parseString, "toStream", {
+ enumerable: true,
+ writable: false,
+ value: parseStringToStream,
+ });
+ })(parseString || (parseString = {}));
+ function convertBinaryToString(binary, options) {
+ return new TextDecoder(options?.charset, {
+ ignoreBOM: options?.ignoreBOM,
+ fatal: options?.fatal,
+ }).decode(binary instanceof ArrayBuffer ? new Uint8Array(binary) : binary);
  }
- ((parseString) => {
- Object.defineProperty(parseString, "toArray", {
+ function parseBinaryToArraySync(binary, options = {}) {
+ const csv = convertBinaryToString(binary, options);
+ return parseStringToArraySync(csv, options);
+ }
+ function parseBinaryToIterableIterator(binary, options = {}) {
+ const csv = convertBinaryToString(binary, options);
+ return parseStringToIterableIterator(csv, options);
+ }
+ function parseBinaryToStream(binary, options = {}) {
+ const csv = convertBinaryToString(binary, options);
+ return parseStringToStream(csv, options);
+ }
+ function iterableIteratorToAsync(iterator) {
+ return {
+ async next() {
+ const result = iterator.next();
+ return Promise.resolve(result);
+ },
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ };
+ }
+ function parseBinary(bytes, options) {
+ const iterator = parseBinaryToIterableIterator(bytes, options);
+ return iterableIteratorToAsync(iterator);
+ }
+ ((parseBinary) => {
+ Object.defineProperty(parseBinary, "toArray", {
  enumerable: true,
  writable: false,
  value: toArray,
  });
- })(parseString || (parseString = {}));
- async function* parseUint8ArrayStream(stream, options) {
+ Object.defineProperty(parseBinary, "toArraySync", {
+ enumerable: true,
+ writable: false,
+ value: parseBinaryToArraySync,
+ });
+ Object.defineProperty(parseBinary, "toIterableIterator", {
+ enumerable: true,
+ writable: false,
+ value: parseBinaryToIterableIterator,
+ });
+ Object.defineProperty(parseBinary, "toStream", {
+ enumerable: true,
+ writable: false,
+ value: parseBinaryToStream,
+ });
+ })(parseBinary || (parseBinary = {}));
+ function pipeline(stream, ...transformers) {
+ return new ReadableStream({
+ start: (controller) => {
+ (() =>
+ transformers
+ .reduce(
+ (stream, transformer) => stream.pipeThrough(transformer),
+ stream,
+ )
+ .pipeTo(
+ new WritableStream({
+ write: (v) => controller.enqueue(v),
+ close: () => controller.close(),
+ }),
+ ))();
+ },
+ });
+ }
+ function parseUint8ArrayStreamToStream(stream, options) {
  const { charset, fatal, ignoreBOM, decomposition } = options ?? {};
- yield* parseStringStream(
- [
- ...(decomposition ? [new DecompressionStream(decomposition)] : []),
- new TextDecoderStream(charset, { fatal, ignoreBOM }),
- ].reduce((stream, transformer) => stream.pipeThrough(transformer), stream),
- options,
- );
+ return decomposition
+ ? pipeline(
+ stream,
+ new DecompressionStream(decomposition),
+ new TextDecoderStream(charset, { fatal, ignoreBOM }),
+ new LexerTransformer(options),
+ new RecordAssemblerTransformer(options),
+ )
+ : pipeline(
+ stream,
+ new TextDecoderStream(charset, { fatal, ignoreBOM }),
+ new LexerTransformer(options),
+ new RecordAssemblerTransformer(options),
+ );
+ }
+ async function* streamToAsyncIterableIterator(stream) {
+ const reader = stream.getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ yield value;
+ }
+ }
+ function parseUint8ArrayStream(stream, options) {
+ const recordStream = parseUint8ArrayStreamToStream(stream, options);
+ return streamToAsyncIterableIterator(recordStream);
  }
  ((parseUint8ArrayStream) => {
  Object.defineProperty(parseUint8ArrayStream, "toArray", {
@@ -317,27 +452,35 @@ async function* parseUint8ArrayStream(stream, options) {
  writable: false,
  value: toArray,
  });
+ Object.defineProperty(parseUint8ArrayStream, "toStream", {
+ enumerable: true,
+ writable: false,
+ value: parseUint8ArrayStreamToStream,
+ });
  })(parseUint8ArrayStream || (parseUint8ArrayStream = {}));
- function parseUint8Array(bytes, options) {
- return parseUint8ArrayStream(new SingleValueReadableStream(bytes), options);
+ function parseStringStreamToStream(stream, options) {
+ return pipeline(
+ stream,
+ new LexerTransformer(options),
+ new RecordAssemblerTransformer(options),
+ );
+ }
+ function parseStringStream(stream, options) {
+ const recordStream = parseStringStreamToStream(stream, options);
+ return streamToAsyncIterableIterator(recordStream);
  }
- ((parseUint8Array) => {
- Object.defineProperty(parseUint8Array, "toArray", {
+ ((parseStringStream) => {
+ Object.defineProperty(parseStringStream, "toArray", {
  enumerable: true,
  writable: false,
  value: toArray,
  });
- })(parseUint8Array || (parseUint8Array = {}));
- function parseArrayBuffer(buffer, options) {
- return parseUint8Array(new Uint8Array(buffer), options);
- }
- ((parseArrayBuffer) => {
- Object.defineProperty(parseArrayBuffer, "toArray", {
+ Object.defineProperty(parseStringStream, "toStream", {
  enumerable: true,
  writable: false,
- value: toArray,
+ value: parseStringStreamToStream,
  });
- })(parseArrayBuffer || (parseArrayBuffer = {}));
+ })(parseStringStream || (parseStringStream = {}));
  function parseMime(contentType) {
  const [type, ...parameters] = contentType.split(";");
  const result = {
@@ -350,7 +493,7 @@ function parseMime(contentType) {
  }
  return result;
  }
- function parseResponse(response, options) {
+ function getOptionsFromResponse(response, options = {}) {
  const { headers } = response;
  const contentType = headers.get("content-type") ?? "text/csv";
  const mime = parseMime(contentType);
@@ -359,14 +502,25 @@ function parseResponse(response, options) {
  }
  const decomposition = headers.get("content-encoding") ?? undefined;
  const charset = mime.parameters.charset ?? "utf-8";
- if (response.body === null) {
- throw new Error("Response body is null");
- }
- return parseUint8ArrayStream(response.body, {
+ return {
  decomposition,
  charset,
  ...options,
- });
+ };
+ }
+ function parseResponseToStream(response, options) {
+ const options_ = getOptionsFromResponse(response, options);
+ if (response.body === null) {
+ throw new Error("Response body is null");
+ }
+ return parseUint8ArrayStreamToStream(response.body, options_);
+ }
+ function parseResponse(response, options) {
+ const options_ = getOptionsFromResponse(response, options);
+ if (response.body === null) {
+ throw new Error("Response body is null");
+ }
+ return parseUint8ArrayStream(response.body, options_);
  }
  ((parseResponse) => {
  Object.defineProperty(parseResponse, "toArray", {
@@ -374,34 +528,27 @@ function parseResponse(response, options) {
  writable: false,
  value: toArray,
  });
- })(parseResponse || (parseResponse = {}));
- async function* parseStream(stream, options) {
- const [branch1, branch2] = stream.tee();
- const reader1 = branch1.getReader();
- const { value: firstChunk } = await reader1.read();
- reader1.releaseLock();
- if (typeof firstChunk === "string") {
- yield* parseStringStream(branch2, options);
- } else if (firstChunk instanceof Uint8Array) {
- yield* parseUint8ArrayStream(branch2, options);
- }
- }
- ((parseStream) => {
- Object.defineProperty(parseStream, "toArray", {
+ Object.defineProperty(parseResponse, "toStream", {
  enumerable: true,
  writable: false,
- value: toArray,
+ value: parseResponseToStream,
  });
- })(parseStream || (parseStream = {}));
+ })(parseResponse || (parseResponse = {}));
  async function* parse(csv, options) {
  if (typeof csv === "string") {
  yield* parseString(csv, options);
- } else if (csv instanceof Uint8Array) {
- yield* parseUint8Array(csv, options);
- } else if (csv instanceof ArrayBuffer) {
- yield* parseArrayBuffer(csv, options);
+ } else if (csv instanceof Uint8Array || csv instanceof ArrayBuffer) {
+ yield* parseBinary(csv, options);
  } else if (csv instanceof ReadableStream) {
- yield* parseStream(csv, options);
+ const [branch1, branch2] = csv.tee();
+ const reader1 = branch1.getReader();
+ const { value: firstChunk } = await reader1.read();
+ reader1.releaseLock();
+ if (typeof firstChunk === "string") {
+ yield* parseStringStream(branch2, options);
+ } else if (firstChunk instanceof Uint8Array) {
+ yield* parseUint8ArrayStream(branch2, options);
+ }
  } else if (csv instanceof Response) {
  yield* parseResponse(csv, options);
  }
@@ -417,14 +564,12 @@ export {
  Field,
  FieldDelimiter,
  LexerTransformer,
- RecordAssemblerTransformar,
+ RecordAssemblerTransformer,
  RecordDelimiter,
  parse,
- parseArrayBuffer,
+ parseBinary,
  parseResponse,
- parseStream,
  parseString,
  parseStringStream,
- parseUint8Array,
  parseUint8ArrayStream,
  };