web-csv-toolbox 0.3.2 → 0.5.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/lib/index.d.ts CHANGED
@@ -15,26 +15,32 @@ declare const RecordDelimiter: unique symbol;
  declare const Field: unique symbol;

  /**
- * Token is a atomic unit of a CSV file.
- * It can be a field, field delimiter, or record delimiter.
+ * Field token type.
  * @category Types
- *
- * @example
- * ```ts
- * const fieldToken: Token = { type: Field, value: "foo" };
- * const fieldDelimiterToken: Token = { type: FieldDelimiter, value: "," };
- * const recordDelimiterToken: Token = { type: RecordDelimiter, value: "\n" };
- * ```
  */
- interface Token<T extends TokenType = TokenType> {
- type: T;
+ interface FieldToken {
+ type: typeof Field;
  value: string;
  }
  /**
- * Type of a token for CSV.
+ * Field delimiter token type.
+ * @category Types
+ */
+ interface FieldDelimiterToken {
+ type: typeof FieldDelimiter;
+ }
+ /**
+ * Record delimiter token type.
+ */
+ interface RecordDelimiterToken {
+ type: typeof RecordDelimiter;
+ }
+ /**
+ * Token is a atomic unit of a CSV file.
+ * It can be a field, field delimiter, or record delimiter.
  * @category Types
  */
- type TokenType = typeof FieldDelimiter | typeof RecordDelimiter | typeof Field;
+ type Token = FieldToken | typeof FieldDelimiter | typeof RecordDelimiter;
  /**
  * CSV Common Options.
  * @category Types
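The hunk above splits the old parameterized `Token` interface into per-kind types; in the new `Token` union, field tokens stay objects while delimiters are represented by the bare `FieldDelimiter` and `RecordDelimiter` symbols. A minimal narrowing sketch against these declarations (all imported names appear in the export list at the end of this diff):

```ts
import {
  FieldDelimiter,
  RecordDelimiter,
  type FieldToken,
  type Token,
} from 'web-csv-toolbox';

// Narrow the 0.5.0 Token union: delimiters are plain unique symbols,
// field tokens are objects carrying the field text.
function describeToken(token: Token): string {
  if (token === FieldDelimiter) return 'field delimiter';
  if (token === RecordDelimiter) return 'record delimiter';
  const field: FieldToken = token; // only FieldToken remains after the checks
  return `field "${field.value}"`;
}
```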
@@ -49,7 +55,7 @@ interface CommonOptions {
  * This library supports multi-character delimiters.
  * @default ','
  */
- demiliter?: string;
+ delimiter?: string;
  /**
  * CSV field quotation.
  *
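The misspelled `demiliter` option becomes `delimiter`. A hedged migration sketch, assuming the `ParseOptions` accepted by the high-level `parse` function carries the `CommonOptions` fields shown here:

```ts
import { parse } from 'web-csv-toolbox';

const csv = 'name;age\nAlice;42';

// 0.3.x spelled this option `demiliter`; 0.5.0 expects `delimiter`.
// Assumption: ParseOptions includes the CommonOptions fields.
for await (const record of parse(csv, { delimiter: ';' })) {
  console.log(record); // { name: 'Alice', age: '42' }
}
```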
@@ -170,6 +176,28 @@ type CSVRecord<Header extends ReadonlyArray<string>> = Record<
  Header[number],
  string
  >;
+ /**
+ * CSV String.
+ *
+ * @category Types
+ */
+ type CSVString = string | ReadableStream<string>;
+ /**
+ * CSV Binary.
+ *
+ * @category Types
+ */
+ type CSVBinary =
+ | ReadableStream<Uint8Array>
+ | Response
+ | ArrayBuffer
+ | Uint8Array;
+ /**
+ * CSV.
+ *
+ * @category Types
+ */
+ type CSV = CSVString | CSVBinary;

  /**
  * A transform stream that converts a stream of tokens into a stream of rows.
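The new `CSVString`, `CSVBinary`, and `CSV` aliases name the input shapes the parsers accept. A small sketch of values assignable to each, using only members listed in the declarations above:

```ts
import type { CSV, CSVBinary, CSVString } from 'web-csv-toolbox';

// A plain string (or a ReadableStream<string>) satisfies CSVString.
const text: CSVString = 'name,age\nAlice,42';

// A Uint8Array is one CSVBinary member, alongside ArrayBuffer, Response,
// and ReadableStream<Uint8Array>.
const bytes: CSVBinary = new TextEncoder().encode('name,age\nAlice,42');

// CSV is the union of both, so either value is assignable.
const input: CSV = Math.random() < 0.5 ? text : bytes;
console.log(input);
```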
@@ -179,30 +207,30 @@ type CSVRecord<Header extends ReadonlyArray<string>> = Record<
  * @example Parse a CSV with headers by data
  * ```ts
  * new ReadableStream({
- * start(controller) {
- * controller.enqueue("name,age\r\n");
- * controller.enqueue("Alice,20\r\n");
- * controller.close();
- * }
+ * start(controller) {
+ * controller.enqueue("name,age\r\n");
+ * controller.enqueue("Alice,20\r\n");
+ * controller.close();
+ * }
  * })
- * .pipeThrough(new LexerTransformer())
- * .pipeTo(new WritableStream({ write(token) { console.log(token); }}));
+ * .pipeThrough(new LexerTransformer())
+ * .pipeTo(new WritableStream({ write(tokens) {
+ * for (const token of tokens) {
+ * console.log(token);
+ * }
+ * }}));
  * // { type: Field, value: "name" }
- * // { type: FieldDelimiter, value: "," }
+ * // FieldDelimiter
  * // { type: Field, value: "age" }
- * // { type: RecordDelimiter, value: "\r\n" }
+ * // RecordDelimiter
  * // { type: Field, value: "Alice" }
- * // { type: FieldDelimiter, value: "," }
+ * // FieldDelimiter
  * // { type: Field, value: "20" }
- * // { type: RecordDelimiter, value: "\r\n" }
+ * // RecordDelimiter
  * ```
  */
- declare class LexerTransformer extends TransformStream<string, Token> {
- #private;
- get demiliter(): string;
- get quotation(): string;
- constructor({ demiliter, quotation }?: CommonOptions);
- private extractQuotedString;
+ declare class LexerTransformer extends TransformStream<string, Token[]> {
+ constructor(options?: CommonOptions);
  }

  /**
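`LexerTransformer` now extends `TransformStream<string, Token[]>`: each downstream chunk is a batch of tokens, and the constructor takes a plain options object. A sketch of consuming those batches, following the updated doc example above:

```ts
import {
  FieldDelimiter,
  LexerTransformer,
  RecordDelimiter,
  type Token,
} from 'web-csv-toolbox';

await new ReadableStream<string>({
  start(controller) {
    controller.enqueue('name,age\r\nAlice,20\r\n');
    controller.close();
  },
})
  .pipeThrough(new LexerTransformer())
  .pipeTo(
    new WritableStream<Token[]>({
      write(tokens) {
        // Each chunk is a Token[]; delimiters arrive as bare symbols.
        for (const token of tokens) {
          if (token === FieldDelimiter || token === RecordDelimiter) continue;
          console.log(token.value); // "name", "age", "Alice", "20"
        }
      },
    }),
  );
```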
@@ -223,7 +251,7 @@ declare class LexerTransformer extends TransformStream<string, Token> {
  * controller.close();
  * })
  * .pipeThrough(new LexerTransformer())
- * .pipeThrough(new RecordAssemblerTransformar())
+ * .pipeThrough(new RecordAssemblerTransformer())
  * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
  * // { name: "Alice", age: "20" }
  * // { name: "Bob", age: "25" }
@@ -241,17 +269,16 @@ declare class LexerTransformer extends TransformStream<string, Token> {
  * }
  * })
  * .pipeThrough(new LexerTransformer())
- * .pipeThrough(new RecordAssemblerTransformar({ header: ["name", "age"] }))
+ * .pipeThrough(new RecordAssemblerTransformer({ header: ["name", "age"] }))
  * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
  * // { name: "Alice", age: "20" }
  * // { name: "Bob", age: "25" }
  * // { name: "Charlie", age: "30" }
  * ```
  */
- declare class RecordAssemblerTransformar<
+ declare class RecordAssemblerTransformer<
  Header extends ReadonlyArray<string>,
- > extends TransformStream<Token, Record<Header[number], string | undefined>> {
- #private;
+ > extends TransformStream<Token[], CSVRecord<Header>> {
  constructor(options?: RecordAssemblerOptions<Header>);
  }

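The assembler class is renamed from `RecordAssemblerTransformar` to `RecordAssemblerTransformer`; it now consumes `Token[]` batches and emits `CSVRecord<Header>` objects. A minimal end-to-end pipeline against these declarations:

```ts
import { LexerTransformer, RecordAssemblerTransformer } from 'web-csv-toolbox';

await new ReadableStream<string>({
  start(controller) {
    controller.enqueue('name,age\r\nAlice,20\r\nBob,25\r\n');
    controller.close();
  },
})
  .pipeThrough(new LexerTransformer())
  // 0.3.x spelled this class RecordAssemblerTransformar.
  .pipeThrough(new RecordAssemblerTransformer())
  .pipeTo(
    new WritableStream({
      write(record) {
        console.log(record); // { name: 'Alice', age: '20' }, then { name: 'Bob', age: '25' }
      },
    }),
  );
```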
@@ -309,6 +336,213 @@ declare namespace parseString {
  csv: string,
  options?: ParseOptions<Header>,
  ): Promise<CSVRecord<Header>[]>;
+ /**
+ * Parse CSV string to records.
+ *
+ * @returns Array of records
+ *
+ * @example
+ *
+ * ```ts
+ * import { parseString } from 'web-csv-toolbox';
+ *
+ * const csv = `name,age
+ * Alice,42
+ * Bob,69`;
+ *
+ * const records = parseString.toArraySync(csv);
+ * console.log(records);
+ * // Prints:
+ * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
+ * ```
+ */
+ function toArraySync<Header extends ReadonlyArray<string>>(
+ csv: string,
+ options?: ParseOptions<Header>,
+ ): CSVRecord<Header>[];
+ /**
+ * Parse CSV string to records.
+ *
+ * @returns Async iterable iterator of records
+ *
+ * @example
+ * ```ts
+ * import { parseString } from 'web-csv-toolbox';
+ *
+ * const csv = `name,age
+ * Alice,42
+ * Bob,69`;
+ *
+ * for (const record of parseString.toIterableIterator(csv)) {
+ * console.log(record);
+ * }
+ * // Prints:
+ * // { name: 'Alice', age: '42' }
+ * // { name: 'Bob', age: '69' }
+ * ```
+ */
+ function toIterableIterator<Header extends ReadonlyArray<string>>(
+ csv: string,
+ options?: ParseOptions<Header>,
+ ): IterableIterator<CSVRecord<Header>>;
+ /**
+ * Parse CSV string to records.
+ *
+ * @returns Readable stream of records
+ *
+ * @example
+ * ```ts
+ * import { parseString } from 'web-csv-toolbox';
+ *
+ * const csv = `name,age
+ * Alice,42
+ * Bob,69`;
+ *
+ * await parseString.toStream(csv)
+ * .pipeTo(
+ * new WritableStream({
+ * write(record) {
+ * console.log(record);
+ * },
+ * }),
+ * );
+ * // Prints:
+ * // { name: 'Alice', age: '42' }
+ * // { name: 'Bob', age: '69' }
+ * ```
+ */
+ function toStream<Header extends ReadonlyArray<string>>(
+ csv: string,
+ options?: ParseOptions<Header>,
+ ): ReadableStream<CSVRecord<Header>>;
+ }
+
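Alongside the existing `toArray`, the `parseString` namespace gains `toArraySync`, `toIterableIterator`, and `toStream`. A short sketch of the two synchronous entry points, mirroring the doc examples added above:

```ts
import { parseString } from 'web-csv-toolbox';

const csv = `name,age
Alice,42
Bob,69`;

// Fully synchronous: materialize every record at once.
const records = parseString.toArraySync(csv);
console.log(records); // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]

// Pull-based: iterate records lazily without awaiting.
for (const record of parseString.toIterableIterator(csv)) {
  console.log(record);
}
```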
+ /**
+ * Parse a binary from an {@link !Uint8Array}.
+ *
+ * @category Middle-level API
+ *
+ * @param bytes CSV bytes to parse.
+ * @param options Parsing options
+ * @returns Async iterable iterator of records.
+ *
+ * @example Parsing CSV binary
+ *
+ * ```ts
+ * import { parseUint8Array } from 'web-csv-toolbox';
+ *
+ * const csv = Uint8Array.from([
+ * // ...
+ * ]);
+ *
+ * for await (const record of parseUint8Array(csv)) {
+ * console.log(record);
+ * }
+ * ```
+ */
+ declare function parseBinary<Header extends ReadonlyArray<string>>(
+ bytes: Uint8Array | ArrayBuffer,
+ options?: ParseBinaryOptions<Header>,
+ ): AsyncIterableIterator<CSVRecord<Header>>;
+ declare namespace parseBinary {
+ /**
+ * Parse a binary from an {@link !Uint8Array} to an array of records.
+ *
+ * @param bytes CSV bytes to parse.
+ * @param options Parsing options
+ * @returns Array of records
+ *
+ * @example
+ * ```ts
+ * import { parseUint8Array } from 'web-csv-toolbox';
+ *
+ * const csv = Uint8Array.from([
+ * // ...
+ * ]);
+ *
+ * const records = await parseUint8Array.toArray(csv);
+ * ```
+ */
+ function toArray<Header extends ReadonlyArray<string>>(
+ bytes: Uint8Array | ArrayBuffer,
+ options?: ParseBinaryOptions<Header>,
+ ): Promise<CSVRecord<Header>[]>;
+ /**
+ * Parse a binary from an {@link !Uint8Array} to an array of records.
+ *
+ * @param bytes CSV bytes to parse.
+ * @param options Parsing options
+ * @returns Array of records
+ * @example
+ *
+ * ```ts
+ * import { parseUint8Array } from 'web-csv-toolbox';
+ *
+ * const csv = Uint8Array.from([
+ * // ...
+ * ]);
+ *
+ * const records = parseUint8Array.toArraySync(csv);
+ * ```
+ */
+ function toArraySync<Header extends ReadonlyArray<string>>(
+ bytes: Uint8Array | ArrayBuffer,
+ options?: ParseBinaryOptions<Header>,
+ ): CSVRecord<Header>[];
+ /**
+ * Parse a binary from an {@link !Uint8Array} to an iterable iterator of records.
+ *
+ * @param bytes CSV bytes to parse.
+ * @param options Parsing options
+ * @returns Async iterable iterator of records.
+ * @example
+ * ```ts
+ * import { parseUint8Array } from 'web-csv-toolbox';
+ *
+ * const csv = Uint8Array.from([
+ * // ...
+ * ]);
+ *
+ * for (const record of parseUint8Array.toIterableIterator(csv)) {
+ * console.log(record);
+ * }
+ * ```
+ */
+ function toIterableIterator<Header extends ReadonlyArray<string>>(
+ bytes: Uint8Array,
+ options?: ParseBinaryOptions<Header>,
+ ): IterableIterator<CSVRecord<Header>>;
+ /**
+ * Parse a binary from an {@link !Uint8Array} to a stream of records.
+ *
+ * @param bytes CSV bytes to parse.
+ * @param options Parsing options
+ * @returns Stream of records.
+ *
+ * @example
+ *
+ * ```ts
+ * import { parseUint8Array } from 'web-csv-toolbox';
+ *
+ * const csv = Uint8Array.from([
+ * // ...
+ * ]);
+ *
+ * const stream = parseUint8Array.toStream(csv);
+ *
+ * await stream.pipeTo(
+ * new WritableStream({
+ * write(record) {
+ * console.log(record);
+ * },
+ * }),
+ * );
+ * ```
+ */
+ function toStream<Header extends ReadonlyArray<string>>(
+ bytes: Uint8Array,
+ options?: ParseBinaryOptions<Header>,
+ ): ReadableStream<CSVRecord<Header>>;
  }

  /**
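The new middle-level `parseBinary` accepts a `Uint8Array` or `ArrayBuffer` directly (its doc examples still import `parseUint8Array`, which looks like a leftover name). A sketch against the declared `parseBinary` signatures:

```ts
import { parseBinary } from 'web-csv-toolbox';

// Any Uint8Array or ArrayBuffer matches the declared parameter type.
const bytes = new TextEncoder().encode('name,age\nAlice,42\nBob,69');

for await (const record of parseBinary(bytes)) {
  console.log(record); // { name: 'Alice', age: '42' }, then { name: 'Bob', age: '69' }
}

// Or collect everything up front:
const records = await parseBinary.toArray(bytes);
console.log(records.length); // 2
```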
@@ -319,15 +553,15 @@ declare namespace parseString {
  * @remarks
  * If you want to parse a string, use {@link parseStringStream}.
  * @param stream CSV string to parse
- * @param options Parsing options. See {@link ParseBinaryOptions}.
+ * @param options Parsing options.
  * @returns Async iterable iterator of records.
  *
- * If you want array of records, use {@link parseBinaryStream.toArray} function.
+ * If you want array of records, use {@link parseUint8ArrayStream.toArray} function.
  *
  * @example Parsing CSV binary
  *
  * ```ts
- * import { parseBinaryStream } from 'web-csv-toolbox';
+ * import { parseUint8ArrayStream } from 'web-csv-toolbox';
  *
  * const csv = Uint8Array.from([
  * // ...
@@ -340,16 +574,16 @@ declare namespace parseString {
  * },
  * });
  *
- * for await (const record of parseBinaryStream(csv)) {
- * console.log(record);
+ * for await (const record of parseUint8ArrayStream(csv)) {
+ * console.log(record);
  * }
  * ```
  */
- declare function parseBinaryStream<Header extends ReadonlyArray<string>>(
+ declare function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(
  stream: ReadableStream<Uint8Array>,
  options?: ParseBinaryOptions<Header>,
  ): AsyncIterableIterator<CSVRecord<Header>>;
- declare namespace parseBinaryStream {
+ declare namespace parseUint8ArrayStream {
  /**
  * Parse CSV binary to array of records,
  * ideal for smaller data sets.
@@ -358,7 +592,7 @@ declare namespace parseBinaryStream {
  *
  * @example Parsing CSV binary
  * ```ts
- * import { parseBinaryStream } from 'web-csv-toolbox';
+ * import { parseUint8ArrayStream } from 'web-csv-toolbox';
  *
  * const csv = Uint8Array.from([
  * // ...
@@ -371,7 +605,7 @@ declare namespace parseBinaryStream {
  * },
  * });
  *
- * const records = await parseBinaryStream.toArray(stream);
+ * const records = await parseUint8ArrayStream.toArray(stream);
  * console.log(records);
  * ```
  */
@@ -379,6 +613,39 @@ declare namespace parseBinaryStream {
  stream: ReadableStream<Uint8Array>,
  options?: ParseBinaryOptions<Header>,
  ): Promise<CSVRecord<Header>[]>;
+ /**
+ * Parse CSV binary to array of records.
+ *
+ * @returns Stream of records
+ *
+ * @example Parsing CSV binary
+ * ```ts
+ * import { parseUint8ArrayStream } from 'web-csv-toolbox';
+ *
+ * const csv = Uint8Array.from([
+ * // ...
+ * ]);
+ *
+ * const stream = new ReadableStream({
+ * start(controller) {
+ * controller.enqueue(csv);
+ * controller.close();
+ * },
+ * });
+ *
+ * await parseUint8ArrayStream.toStream(stream)
+ * .pipeTo(new WritableStream({
+ * write(record) {
+ * console.log(record);
+ * },
+ * }),
+ * );
+ * ```
+ */
+ function toStream<Header extends ReadonlyArray<string>>(
+ stream: ReadableStream<Uint8Array>,
+ options?: ParseBinaryOptions<Header>,
+ ): ReadableStream<CSVRecord<Header>[]>;
  }

  /**
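`parseBinaryStream` is renamed to `parseUint8ArrayStream`, and the namespace gains `toStream` (declared here as returning `ReadableStream<CSVRecord<Header>[]>`, i.e. batches of records). A hedged sketch of the async-iterator form:

```ts
import { parseUint8ArrayStream } from 'web-csv-toolbox';

const bytes = new TextEncoder().encode('name,age\nAlice,42\nBob,69');

const stream = new ReadableStream<Uint8Array>({
  start(controller) {
    controller.enqueue(bytes);
    controller.close();
  },
});

// 0.3.x exported this function as parseBinaryStream.
for await (const record of parseUint8ArrayStream(stream)) {
  console.log(record); // { name: 'Alice', age: '42' }, then { name: 'Bob', age: '69' }
}
```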
@@ -408,7 +675,7 @@ declare namespace parseBinaryStream {
  * });
  *
  * for await (const record of parseStringStream(csv)) {
- * console.log(record);
+ * console.log(record);
  * }
  * // Prints:
  * // { name: 'Alice', age: '42' }
@@ -451,6 +718,41 @@ declare namespace parseStringStream {
  stream: ReadableStream<string>,
  options?: ParseOptions<Header>,
  ): Promise<CSVRecord<Header>[]>;
+ /**
+ * Parse CSV string stream to records.
+ *
+ * @returns Array of records
+ *
+ * @example
+ *
+ * ```ts
+ * import { parseStringStream } from 'web-csv-toolbox';
+ *
+ * const csv = `name,age
+ * Alice,42
+ * Bob,69`;
+ *
+ * const stream = new ReadableStream({
+ * start(controller) {
+ * controller.enqueue(csv);
+ * controller.close();
+ * },
+ * });
+ *
+ * await parseStringStream.toStream(stream)
+ * .pipeTo(
+ * new WritableStream({
+ * write(record) {
+ * console.log(record);
+ * },
+ * }),
+ * );
+ * ```
+ */
+ function toStream<Header extends ReadonlyArray<string>>(
+ stream: ReadableStream<string>,
+ options?: ParseOptions<Header>,
+ ): ReadableStream<CSVRecord<Header>>;
  }

  /**
@@ -510,88 +812,36 @@ declare namespace parseResponse {
  response: Response,
  options?: ParseOptions<Header>,
  ): Promise<CSVRecord<Header>[]>;
- }
-
- /**
- * Parse CSV Stream to records,
- * ideal for smaller data sets.
- *
- * {@link !ReadableStream} of {@link !String} and {@link !Uint8Array} are supported.
- *
- * @remarks
- * {@link parseStringStream} and {@link parseBinaryStream} are used internally.
- * If you known the type of the stream, it performs better to use them directly.
- *
- * If you want to parse a string, use {@link parseStringStream}.
- * If you want to parse a Uint8Array, use {@link parseBinaryStream}.
- *
- * @category Middle-level API
- * @param csv CSV string to parse
- * @param options Parsing options. See {@link ParseOptions}.
- * @returns Async iterable iterator of records.
- *
- * If you want array of records, use {@link parseStream.toArray} function.
- *
- * @example Parsing CSV string stream
- *
- * ```ts
- *
- * import { parseStream } from 'web-csv-toolbox';
- *
- * const csv = `name,age
- * Alice,42
- * Bob,69`;
- *
- * const stream = new ReadableStream({
- * start(controller) {
- * controller.enqueue(csv);
- * controller.close();
- * },
- * });
- *
- * for await (const record of parseStream(stream)) {
- * console.log(record);
- * }
- * // Prints:
- * // { name: 'Alice', age: '42' }
- * // { name: 'Bob', age: '69' }
- * ```
- *
- * @example Parsing CSV binary stream
- *
- * ```ts
- * import { parseStream } from 'web-csv-toolbox';
- *
- * const csv = Uint8Array.from([
- * // ...
- * ]);
- *
- * const stream = new ReadableStream({
- * start(controller) {
- * controller.enqueue(csv);
- * controller.close();
- * },
- * });
- *
- * for await (const record of parseStream(stream)) {
- * console.log(record);
- * }
- * ```
- */
- declare function parseStream<Header extends ReadonlyArray<string>>(
- stream: ReadableStream<Uint8Array | string>,
- options?: ParseBinaryOptions<Header>,
- ): AsyncIterableIterator<CSVRecord<Header>>;
- declare namespace parseStream {
  /**
- * Parse CSV Stream to array of records.
+ * Parse CSV Response to stream of records.
  *
- * @returns Array of records
+ * @param response Response to parse
+ * @returns Stream of records
+ *
+ * @example Parsing CSV Response
+ *
+ * ```ts
+ * import { parseResponse } from 'web-csv-toolbox';
+ *
+ * const response = await fetch('https://example.com/data.csv');
+ *
+ * await parseResponse.toStream(response)
+ * .pipeTo(
+ * new WritableStream({
+ * write(record) {
+ * console.log(record);
+ * },
+ * }),
+ * );
+ * // Prints:
+ * // { name: 'Alice', age: '42' }
+ * // { name: 'Bob', age: '69' }
+ * ```
  */
- function toArray<Header extends ReadonlyArray<string>>(
- stream: ReadableStream<Uint8Array>,
- options?: ParseBinaryOptions<Header>,
- ): Promise<CSVRecord<Header>[]>;
+ function toStream<Header extends ReadonlyArray<string>>(
+ response: Response,
+ options?: ParseOptions<Header>,
+ ): ReadableStream<CSVRecord<Header>[]>;
  }

  /**
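This hunk removes the generic `parseStream` entry point and adds `parseResponse.toStream`. For fetched CSVs, `parseResponse` covers the same ground; note that `toStream` is declared to emit `CSVRecord<Header>[]` chunks, so the sink below may receive record batches. A migration sketch (the URL is the placeholder from the package's own example):

```ts
import { parseResponse } from 'web-csv-toolbox';

// 0.3.x: for await (const record of parseStream(stream)) { ... }
// 0.5.0: pick the concrete function; for fetch results, use parseResponse.
const response = await fetch('https://example.com/data.csv');

await parseResponse.toStream(response).pipeTo(
  new WritableStream({
    write(chunk) {
      // Per the declaration above, chunk may be an array of records.
      console.log(chunk);
    },
  }),
);
```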
@@ -610,17 +860,18 @@ declare namespace parseStream {
  * @category High-level API
  *
  * @remarks
- * {@link parseString}, {@link parseBinaryStream},
+ * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},
  * {@link parseStringStream} and {@link parseResponse} are used internally.
  *
  * If you known the type of the CSV, it performs better to use them directly.
  *
- * | If you want to parse a... | Use... | Data are treated as... |
- * | ----------------------------------- | ------------------------- | ---------------------- |
- * | {@link !String} | {@link parseString} | String |
- * | {@link !ReadableStream}<string> | {@link parseStringStream} | String |
- * | {@link !ReadableStream}<Uint8Array> | {@link parseBinaryStream} | Binary |
- * | {@link !Response} | {@link parseResponse} | Binary |
+ * | If you want to parse a... | Use... | Options... |
+ * | -------------------------------------------- | ----------------------------- | -------------------------- |
+ * | {@link !String} | {@link parseString} | {@link ParseOptions} |
+ * | {@link !ReadableStream}<{@link !String}> | {@link parseStringStream} | {@link ParseOptions} |
+ * | {@link !Uint8Array} \| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |
+ * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |
+ * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |
  *
  * @example Parsing CSV files from strings
  *
@@ -699,7 +950,7 @@ declare namespace parseStream {
  * ```
  */
  declare function parse<Header extends ReadonlyArray<string>>(
- csv: string | ReadableStream<string>,
+ csv: CSVString,
  options?: ParseOptions<Header>,
  ): AsyncIterableIterator<CSVRecord<Header>>;
  /**
@@ -739,7 +990,7 @@ declare function parse<Header extends ReadonlyArray<string>>(
  * ```
  */
  declare function parse<Header extends ReadonlyArray<string>>(
- csv: ReadableStream<Uint8Array> | Response,
+ csv: CSVBinary,
  options?: ParseBinaryOptions<Header>,
  ): AsyncIterableIterator<CSVRecord<Header>>;
  declare namespace parse {
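The `parse` overloads now take the `CSVString` and `CSVBinary` aliases, so a string and a fetched `Response` go through the same high-level call (the binary overload accepting `ParseBinaryOptions`). A brief sketch of both overloads, reusing the placeholder URL from the examples above:

```ts
import { parse } from 'web-csv-toolbox';

// String overload: the argument matches CSVString.
const csv = 'name,age\nAlice,42\nBob,69';
for await (const record of parse(csv)) {
  console.log(record);
}

// Binary overload: a Response matches CSVBinary.
const response = await fetch('https://example.com/data.csv');
for await (const record of parse(response)) {
  console.log(record);
}
```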
@@ -763,7 +1014,7 @@ declare namespace parse {
  * ```
  */
  function toArray<Header extends ReadonlyArray<string>>(
- csv: string | ReadableStream<string>,
+ csv: CSVString,
  options?: ParseOptions<Header>,
  ): Promise<CSVRecord<Header>[]>;
  /**
@@ -782,29 +1033,34 @@ declare namespace parse {
  * ```
  */
  function toArray<Header extends ReadonlyArray<string>>(
- csv: ReadableStream<Uint8Array> | Response,
- options?: ParseOptions<Header>,
+ csv: CSVBinary,
+ options?: ParseBinaryOptions<Header>,
  ): Promise<CSVRecord<Header>[]>;
  }

  export {
  type BinaryOptions,
+ type CSV,
+ type CSVBinary,
  type CSVRecord,
+ type CSVString,
  type CommonOptions,
  Field,
  FieldDelimiter,
+ type FieldDelimiterToken,
+ type FieldToken,
  LexerTransformer,
  type ParseBinaryOptions,
  type ParseOptions,
  type RecordAssemblerOptions,
- RecordAssemblerTransformar,
+ RecordAssemblerTransformer,
  RecordDelimiter,
+ type RecordDelimiterToken,
  type Token,
- type TokenType,
  parse,
- parseBinaryStream,
+ parseBinary,
  parseResponse,
- parseStream,
  parseString,
  parseStringStream,
+ parseUint8ArrayStream,
  };
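Taken together, the export changes amount to a small rename pass for 0.3.x imports: `RecordAssemblerTransformar` becomes `RecordAssemblerTransformer`, `parseBinaryStream` becomes `parseUint8ArrayStream`, `parseStream` and `TokenType` are gone, and `parseBinary` plus the new token and CSV type aliases are added. A sketch of an updated import:

```ts
// 0.3.2 (no longer compiles against 0.5.0):
// import {
//   parseBinaryStream,
//   parseStream,
//   RecordAssemblerTransformar,
//   type TokenType,
// } from 'web-csv-toolbox';

// 0.5.0:
import {
  parseBinary,
  parseUint8ArrayStream,
  RecordAssemblerTransformer,
  type CSV,
  type FieldToken,
  type RecordDelimiterToken,
  type Token,
} from 'web-csv-toolbox';
```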