web-csv-toolbox 0.4.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -253,10 +253,8 @@ catering to users who need more detailed and fine-tuned functionality.
 
 - **`function parseString(string[, options])`**: [📑](https://kamiazya.github.io/web-csv-toolbox/functions/parseString-1.html)
   - Efficient parsing of CSV strings.
-- **`function parseArrayBuffer(buffer[, options])`**: [📑](https://kamiazya.github.io/web-csv-toolbox/functions/parseArrayBuffer-1.html)
-  - Parse CSV Binary of ArrayBuffer.
-- **`function parseUint8Array(buffer[, options])`**: [📑](https://kamiazya.github.io/web-csv-toolbox/functions/parseUint8Array-1.html)
-  - Parse CSV Binary of Uint8Array.
+- **`function parseBinary(buffer[, options])`**: [📑](https://kamiazya.github.io/web-csv-toolbox/functions/parseBinary-1.html)
+  - Parse CSV Binary of ArrayBuffer or Uint8Array.
 - **`function parseResponse(response[, options])`**: [📑](https://kamiazya.github.io/web-csv-toolbox/functions/parseResponse-1.html)
   - Customized parsing directly from `Response` objects.
 - **`function parseStream(stream[, options])`**: [📑](https://kamiazya.github.io/web-csv-toolbox/functions/parseStream-1.html)
@@ -273,7 +271,7 @@ ideal for developers looking for in-depth control and flexibility.
 
 - **`class LexerTransformer`**: [📑](https://kamiazya.github.io/web-csv-toolbox/classes/LexerTransformer.html)
   - A TransformStream class for lexical analysis of CSV data.
-- **`class RecordAssemblerTransformer`**: [📑](https://kamiazya.github.io/web-csv-toolbox/classes/RecordAssemblerTransformar.html)
+- **`class RecordAssemblerTransformer`**: [📑](https://kamiazya.github.io/web-csv-toolbox/classes/RecordAssemblerTransformer.html)
   - Handles the assembly of parsed data into records.
 
 ## Options Configuration 🛠️
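The two binary entry points collapse into one. A minimal usage sketch of `parseBinary`, based on its declaration further down in this diff (the sample CSV bytes are illustrative, not from the package):

```ts
import { parseBinary } from 'web-csv-toolbox';

// Illustrative payload; parseBinary accepts either a Uint8Array or an ArrayBuffer.
const bytes = new TextEncoder().encode('name,age\r\nAlice,42\r\nBob,69');

for await (const record of parseBinary(bytes)) {
  console.log(record);
}
// { name: 'Alice', age: '42' }
// { name: 'Bob', age: '69' }
```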
package/lib/index.d.ts CHANGED
@@ -15,26 +15,32 @@ declare const RecordDelimiter: unique symbol;
 declare const Field: unique symbol;
 
 /**
- * Token is a atomic unit of a CSV file.
- * It can be a field, field delimiter, or record delimiter.
+ * Field token type.
  * @category Types
- *
- * @example
- * ```ts
- * const fieldToken: Token = { type: Field, value: "foo" };
- * const fieldDelimiterToken: Token = { type: FieldDelimiter, value: "," };
- * const recordDelimiterToken: Token = { type: RecordDelimiter, value: "\n" };
- * ```
 */
-interface Token<T extends TokenType = TokenType> {
-  type: T;
+interface FieldToken {
+  type: typeof Field;
   value: string;
 }
 /**
- * Type of a token for CSV.
+ * Field delimiter token type.
+ * @category Types
+ */
+interface FieldDelimiterToken {
+  type: typeof FieldDelimiter;
+}
+/**
+ * Record delimiter token type.
+ */
+interface RecordDelimiterToken {
+  type: typeof RecordDelimiter;
+}
+/**
+ * Token is a atomic unit of a CSV file.
+ * It can be a field, field delimiter, or record delimiter.
 * @category Types
 */
-type TokenType = typeof FieldDelimiter | typeof RecordDelimiter | typeof Field;
+type Token = FieldToken | typeof FieldDelimiter | typeof RecordDelimiter;
 /**
 * CSV Common Options.
 * @category Types
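A short sketch of what the reworked token types permit, using the `Field`, `FieldDelimiter`, and `RecordDelimiter` symbols exported by the package (the field values are made up):

```ts
import {
  Field,
  FieldDelimiter,
  RecordDelimiter,
  type Token,
} from 'web-csv-toolbox';

// Only field tokens carry a value in 0.5.0; the delimiters are the exported
// symbols themselves rather than { type, value } objects as in 0.4.0.
const tokens: Token[] = [
  { type: Field, value: 'name' },
  FieldDelimiter,
  { type: Field, value: 'age' },
  RecordDelimiter,
];
console.log(tokens.length); // 4
```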
@@ -49,7 +55,7 @@ interface CommonOptions {
    * This library supports multi-character delimiters.
    * @default ','
    */
-  demiliter?: string;
+  delimiter?: string;
   /**
    * CSV field quotation.
    *
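The `demiliter` option becomes `delimiter`. A hedged sketch of passing the renamed option through the high-level `parse` function, assuming `delimiter` is accepted in its options object (the tab-separated input is illustrative):

```ts
import { parse } from 'web-csv-toolbox';

const tsv = 'name\tage\r\nAlice\t42';

// 0.5.0 spelling of the option; 0.4.0 exposed the same setting as `demiliter`.
for await (const record of parse(tsv, { delimiter: '\t' })) {
  console.log(record); // { name: 'Alice', age: '42' }
}
```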
@@ -201,30 +207,30 @@ type CSV = CSVString | CSVBinary;
  * @example Parse a CSV with headers by data
  * ```ts
  * new ReadableStream({
- *   start(controller) {
- *     controller.enqueue("name,age\r\n");
- *     controller.enqueue("Alice,20\r\n");
- *     controller.close();
- *   }
+ *   start(controller) {
+ *     controller.enqueue("name,age\r\n");
+ *     controller.enqueue("Alice,20\r\n");
+ *     controller.close();
+ *   }
 * })
- *   .pipeThrough(new LexerTransformer())
- *   .pipeTo(new WritableStream({ write(token) { console.log(token); }}));
+ *   .pipeThrough(new LexerTransformer())
+ *   .pipeTo(new WritableStream({ write(tokens) {
+ *     for (const token of tokens) {
+ *       console.log(token);
+ *     }
+ *   }}));
 * // { type: Field, value: "name" }
- * // { type: FieldDelimiter, value: "," }
+ * // FieldDelimiter
 * // { type: Field, value: "age" }
- * // { type: RecordDelimiter, value: "\r\n" }
+ * // RecordDelimiter
 * // { type: Field, value: "Alice" }
- * // { type: FieldDelimiter, value: "," }
+ * // FieldDelimiter
 * // { type: Field, value: "20" }
- * // { type: RecordDelimiter, value: "\r\n" }
+ * // RecordDelimiter
 * ```
 */
-declare class LexerTransformer extends TransformStream<string, Token> {
-  #private;
-  get demiliter(): string;
-  get quotation(): string;
-  constructor({ demiliter, quotation }?: CommonOptions);
-  private extractQuotedString;
+declare class LexerTransformer extends TransformStream<string, Token[]> {
+  constructor(options?: CommonOptions);
 }
 
 /**
@@ -245,7 +251,7 @@ declare class LexerTransformer extends TransformStream<string, Token> {
  *   controller.close();
  * })
  *   .pipeThrough(new LexerTransformer())
- *   .pipeThrough(new RecordAssemblerTransformar())
+ *   .pipeThrough(new RecordAssemblerTransformer())
  *   .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
  * // { name: "Alice", age: "20" }
  * // { name: "Bob", age: "25" }
@@ -263,17 +269,16 @@ declare class LexerTransformer extends TransformStream<string, Token> {
  *   }
  * })
  *   .pipeThrough(new LexerTransformer())
- *   .pipeThrough(new RecordAssemblerTransformar({ header: ["name", "age"] }))
+ *   .pipeThrough(new RecordAssemblerTransformer({ header: ["name", "age"] }))
  *   .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
  * // { name: "Alice", age: "20" }
  * // { name: "Bob", age: "25" }
  * // { name: "Charlie", age: "30" }
  * ```
  */
-declare class RecordAssemblerTransformar<
+declare class RecordAssemblerTransformer<
   Header extends ReadonlyArray<string>,
-> extends TransformStream<Token, Record<Header[number], string | undefined>> {
-  #private;
+> extends TransformStream<Token[], CSVRecord<Header>> {
   constructor(options?: RecordAssemblerOptions<Header>);
 }
 
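For reference, a runnable version of the pipeline sketched in the JSDoc above, assuming the 0.5.0 signatures (`LexerTransformer` emitting `Token[]`, consumed by `RecordAssemblerTransformer`):

```ts
import { LexerTransformer, RecordAssemblerTransformer } from 'web-csv-toolbox';

// Tokenize the CSV text, then assemble tokens into records keyed by the header row.
await new ReadableStream<string>({
  start(controller) {
    controller.enqueue('name,age\r\n');
    controller.enqueue('Alice,20\r\n');
    controller.enqueue('Bob,25\r\n');
    controller.close();
  },
})
  .pipeThrough(new LexerTransformer())
  .pipeThrough(new RecordAssemblerTransformer())
  .pipeTo(new WritableStream({ write(record) { console.log(record); } }));
// { name: 'Alice', age: '20' }
// { name: 'Bob', age: '25' }
```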
@@ -331,6 +336,85 @@ declare namespace parseString {
     csv: string,
     options?: ParseOptions<Header>,
   ): Promise<CSVRecord<Header>[]>;
+  /**
+   * Parse CSV string to records.
+   *
+   * @returns Array of records
+   *
+   * @example
+   *
+   * ```ts
+   * import { parseString } from 'web-csv-toolbox';
+   *
+   * const csv = `name,age
+   * Alice,42
+   * Bob,69`;
+   *
+   * const records = parseString.toArraySync(csv);
+   * console.log(records);
+   * // Prints:
+   * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
+   * ```
+   */
+  function toArraySync<Header extends ReadonlyArray<string>>(
+    csv: string,
+    options?: ParseOptions<Header>,
+  ): CSVRecord<Header>[];
+  /**
+   * Parse CSV string to records.
+   *
+   * @returns Async iterable iterator of records
+   *
+   * @example
+   * ```ts
+   * import { parseString } from 'web-csv-toolbox';
+   *
+   * const csv = `name,age
+   * Alice,42
+   * Bob,69`;
+   *
+   * for (const record of parseString.toIterableIterator(csv)) {
+   *   console.log(record);
+   * }
+   * // Prints:
+   * // { name: 'Alice', age: '42' }
+   * // { name: 'Bob', age: '69' }
+   * ```
+   */
+  function toIterableIterator<Header extends ReadonlyArray<string>>(
+    csv: string,
+    options?: ParseOptions<Header>,
+  ): IterableIterator<CSVRecord<Header>>;
+  /**
+   * Parse CSV string to records.
+   *
+   * @returns Readable stream of records
+   *
+   * @example
+   * ```ts
+   * import { parseString } from 'web-csv-toolbox';
+   *
+   * const csv = `name,age
+   * Alice,42
+   * Bob,69`;
+   *
+   * await parseString.toStream(csv)
+   *   .pipeTo(
+   *     new WritableStream({
+   *       write(record) {
+   *         console.log(record);
+   *       },
+   *     }),
+   *   );
+   * // Prints:
+   * // { name: 'Alice', age: '42' }
+   * // { name: 'Bob', age: '69' }
+   * ```
+   */
+  function toStream<Header extends ReadonlyArray<string>>(
+    csv: string,
+    options?: ParseOptions<Header>,
+  ): ReadableStream<CSVRecord<Header>>;
 }
 
 /**
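The new `parseString` helpers sit alongside the existing async `toArray`. A hedged sketch contrasting the asynchronous helper with the new synchronous variant, using illustrative data:

```ts
import { parseString } from 'web-csv-toolbox';

const csv = 'name,age\nAlice,42\nBob,69';

// Existing helper: resolves asynchronously.
const viaPromise = await parseString.toArray(csv);

// New in 0.5.0: same result computed synchronously.
const viaSync = parseString.toArraySync(csv);

console.log(viaPromise);
console.log(viaSync);
// [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ] (both)
```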
@@ -356,11 +440,11 @@ declare namespace parseString {
  * }
  * ```
  */
-declare function parseUint8Array<Header extends ReadonlyArray<string>>(
-  bytes: Uint8Array,
+declare function parseBinary<Header extends ReadonlyArray<string>>(
+  bytes: Uint8Array | ArrayBuffer,
   options?: ParseBinaryOptions<Header>,
 ): AsyncIterableIterator<CSVRecord<Header>>;
-declare namespace parseUint8Array {
+declare namespace parseBinary {
   /**
    * Parse a binary from an {@link !Uint8Array} to an array of records.
    *
@@ -380,9 +464,85 @@ declare namespace parseUint8Array {
   * ```
   */
  function toArray<Header extends ReadonlyArray<string>>(
-    bytes: Uint8Array,
+    bytes: Uint8Array | ArrayBuffer,
     options?: ParseBinaryOptions<Header>,
   ): Promise<CSVRecord<Header>[]>;
+  /**
+   * Parse a binary from an {@link !Uint8Array} to an array of records.
+   *
+   * @param bytes CSV bytes to parse.
+   * @param options Parsing options
+   * @returns Array of records
+   * @example
+   *
+   * ```ts
+   * import { parseUint8Array } from 'web-csv-toolbox';
+   *
+   * const csv = Uint8Array.from([
+   *   // ...
+   * ]);
+   *
+   * const records = parseUint8Array.toArraySync(csv);
+   * ```
+   */
+  function toArraySync<Header extends ReadonlyArray<string>>(
+    bytes: Uint8Array | ArrayBuffer,
+    options?: ParseBinaryOptions<Header>,
+  ): CSVRecord<Header>[];
+  /**
+   * Parse a binary from an {@link !Uint8Array} to an iterable iterator of records.
+   *
+   * @param bytes CSV bytes to parse.
+   * @param options Parsing options
+   * @returns Async iterable iterator of records.
+   * @example
+   * ```ts
+   * import { parseUint8Array } from 'web-csv-toolbox';
+   *
+   * const csv = Uint8Array.from([
+   *   // ...
+   * ]);
+   *
+   * for (const record of parseUint8Array.toIterableIterator(csv)) {
+   *   console.log(record);
+   * }
+   * ```
+   */
+  function toIterableIterator<Header extends ReadonlyArray<string>>(
+    bytes: Uint8Array,
+    options?: ParseBinaryOptions<Header>,
+  ): IterableIterator<CSVRecord<Header>>;
+  /**
+   * Parse a binary from an {@link !Uint8Array} to a stream of records.
+   *
+   * @param bytes CSV bytes to parse.
+   * @param options Parsing options
+   * @returns Stream of records.
+   *
+   * @example
+   *
+   * ```ts
+   * import { parseUint8Array } from 'web-csv-toolbox';
+   *
+   * const csv = Uint8Array.from([
+   *   // ...
+   * ]);
+   *
+   * const stream = parseUint8Array.toStream(csv);
+   *
+   * await stream.pipeTo(
+   *   new WritableStream({
+   *     write(record) {
+   *       console.log(record);
+   *     },
+   *   }),
+   * );
+   * ```
+   */
+  function toStream<Header extends ReadonlyArray<string>>(
+    bytes: Uint8Array,
+    options?: ParseBinaryOptions<Header>,
+  ): ReadableStream<CSVRecord<Header>>;
 }
 
 /**
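Although the JSDoc samples above still import `parseUint8Array`, the namespace is exported as `parseBinary` (see the export list at the end of this diff). A hedged sketch of the new helpers with illustrative bytes:

```ts
import { parseBinary } from 'web-csv-toolbox';

// Illustrative CSV payload.
const csv = new TextEncoder().encode('name,age\r\nAlice,42\r\nBob,69');

// Synchronous helper returning an array of records.
const records = parseBinary.toArraySync(csv);
console.log(records);

// Streaming helper returning a ReadableStream of records.
await parseBinary.toStream(csv).pipeTo(
  new WritableStream({
    write(record) {
      console.log(record);
    },
  }),
);
```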
@@ -453,66 +613,39 @@ declare namespace parseUint8ArrayStream {
     stream: ReadableStream<Uint8Array>,
     options?: ParseBinaryOptions<Header>,
   ): Promise<CSVRecord<Header>[]>;
-}
-
-/**
- * Parse a binary from an {@link !ArrayBuffer}.
- *
- * @category Middle-level API
-
- * @param buffer CSV ArrayBuffer to parse.
- * @param options Parsing options
- * @returns Async iterable iterator of records.
- *
- * @example Parsing CSV files from ArrayBuffers
- *
- * ```ts
- * import { parseArrayBuffer } from 'web-csv-toolbox';
- *
- * const csv = `name,age
- * Alice,42
- * Bob,69`;
- *
- * const buffer = new TextEncoder().encode(csv).buffer;
- *
- * for await (const record of parseArrayBuffer(buffer)) {
- *   console.log(record);
- * }
- * // Prints:
- * // { name: 'Alice', age: '42' }
- * // { name: 'Bob', age: '69' }
- * ```
- */
-declare function parseArrayBuffer<Header extends ReadonlyArray<string>>(
-  buffer: ArrayBuffer,
-  options?: ParseBinaryOptions<Header>,
-): AsyncIterableIterator<CSVRecord<Header>>;
-declare namespace parseArrayBuffer {
   /**
-   * Parse a binary from an {@link !ArrayBuffer} to an array of records.
-   * @param buffer CSV ArrayBuffer to parse.
-   * @param options Parsing options
-   * @returns Array of records
-   * @example
+   * Parse CSV binary to array of records.
+   *
+   * @returns Stream of records
+   *
+   * @example Parsing CSV binary
   * ```ts
-   * import { parseArrayBuffer } from 'web-csv-toolbox';
+   * import { parseUint8ArrayStream } from 'web-csv-toolbox';
   *
-   * const csv = `name,age
-   * Alice,42
-   * Bob,69`;
+   * const csv = Uint8Array.from([
+   *   // ...
+   * ]);
   *
-   * const buffer = new TextEncoder().encode(csv).buffer;
+   * const stream = new ReadableStream({
+   *   start(controller) {
+   *     controller.enqueue(csv);
+   *     controller.close();
+   *   },
+   * });
   *
-   * const records = await parseArrayBuffer.toArray(buffer);
-   * console.log(records);
-   * // Prints:
-   * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
+   * await parseUint8ArrayStream.toStream(stream)
+   *   .pipeTo(new WritableStream({
+   *     write(record) {
+   *       console.log(record);
+   *     },
+   *   }),
+   * );
   * ```
   */
-  function toArray<Header extends ReadonlyArray<string>>(
-    buffer: ArrayBuffer,
+  function toStream<Header extends ReadonlyArray<string>>(
+    stream: ReadableStream<Uint8Array>,
     options?: ParseBinaryOptions<Header>,
-  ): Promise<CSVRecord<Header>[]>;
+  ): ReadableStream<CSVRecord<Header>[]>;
 }
 
 /**
@@ -542,7 +675,7 @@ declare namespace parseArrayBuffer {
  * });
  *
  * for await (const record of parseStringStream(csv)) {
- *   console.log(record);
+ *   console.log(record);
  * }
  * // Prints:
  * // { name: 'Alice', age: '42' }
@@ -585,6 +718,41 @@ declare namespace parseStringStream {
     stream: ReadableStream<string>,
     options?: ParseOptions<Header>,
   ): Promise<CSVRecord<Header>[]>;
+  /**
+   * Parse CSV string stream to records.
+   *
+   * @returns Array of records
+   *
+   * @example
+   *
+   * ```ts
+   * import { parseStringStream } from 'web-csv-toolbox';
+   *
+   * const csv = `name,age
+   * Alice,42
+   * Bob,69`;
+   *
+   * const stream = new ReadableStream({
+   *   start(controller) {
+   *     controller.enqueue(csv);
+   *     controller.close();
+   *   },
+   * });
+   *
+   * await parseStringStream.toStream(stream)
+   *   .pipeTo(
+   *     new WritableStream({
+   *       write(record) {
+   *         console.log(record);
+   *       },
+   *     }),
+   *   );
+   * ```
+   */
+  function toStream<Header extends ReadonlyArray<string>>(
+    stream: ReadableStream<string>,
+    options?: ParseOptions<Header>,
+  ): ReadableStream<CSVRecord<Header>>;
 }
 
 /**
@@ -644,88 +812,36 @@ declare namespace parseResponse {
     response: Response,
     options?: ParseOptions<Header>,
   ): Promise<CSVRecord<Header>[]>;
-}
-
-/**
- * Parse CSV Stream to records,
- * ideal for smaller data sets.
- *
- * {@link !ReadableStream} of {@link !String} and {@link !Uint8Array} are supported.
- *
- * @remarks
- * {@link parseStringStream} and {@link parseUint8ArrayStream} are used internally.
- * If you known the type of the stream, it performs better to use them directly.
- *
- * If you want to parse a string, use {@link parseStringStream}.
- * If you want to parse a Uint8Array, use {@link parseUint8ArrayStream}.
- *
- * @category Middle-level API
- * @param csv CSV string to parse
- * @param options Parsing options. See {@link ParseOptions}.
- * @returns Async iterable iterator of records.
- *
- * If you want array of records, use {@link parseStream.toArray} function.
- *
- * @example Parsing CSV string stream
- *
- * ```ts
- *
- * import { parseStream } from 'web-csv-toolbox';
- *
- * const csv = `name,age
- * Alice,42
- * Bob,69`;
- *
- * const stream = new ReadableStream({
- *   start(controller) {
- *     controller.enqueue(csv);
- *     controller.close();
- *   },
- * });
- *
- * for await (const record of parseStream(stream)) {
- *   console.log(record);
- * }
- * // Prints:
- * // { name: 'Alice', age: '42' }
- * // { name: 'Bob', age: '69' }
- * ```
- *
- * @example Parsing CSV binary stream
- *
- * ```ts
- * import { parseStream } from 'web-csv-toolbox';
- *
- * const csv = Uint8Array.from([
- *   // ...
- * ]);
- *
- * const stream = new ReadableStream({
- *   start(controller) {
- *     controller.enqueue(csv);
- *     controller.close();
- *   },
- * });
- *
- * for await (const record of parseStream(stream)) {
- *   console.log(record);
- * }
- * ```
- */
-declare function parseStream<Header extends ReadonlyArray<string>>(
-  stream: ReadableStream<Uint8Array | string>,
-  options?: ParseBinaryOptions<Header>,
-): AsyncIterableIterator<CSVRecord<Header>>;
-declare namespace parseStream {
   /**
-   * Parse CSV Stream to array of records.
+   * Parse CSV Response to stream of records.
   *
-   * @returns Array of records
+   * @param response Response to parse
+   * @returns Stream of records
+   *
+   * @example Parsing CSV Response
+   *
+   * ```ts
+   * import { parseResponse } from 'web-csv-toolbox';
+   *
+   * const response = await fetch('https://example.com/data.csv');
+   *
+   * await parseResponse.toStream(response)
+   *   .pipeTo(
+   *     new WritableStream({
+   *       write(record) {
+   *         console.log(record);
+   *       },
+   *     }),
+   *   );
+   * // Prints:
+   * // { name: 'Alice', age: '42' }
+   * // { name: 'Bob', age: '69' }
+   * ```
   */
-  function toArray<Header extends ReadonlyArray<string>>(
-    stream: ReadableStream<Uint8Array>,
-    options?: ParseBinaryOptions<Header>,
-  ): Promise<CSVRecord<Header>[]>;
+  function toStream<Header extends ReadonlyArray<string>>(
+    response: Response,
+    options?: ParseOptions<Header>,
+  ): ReadableStream<CSVRecord<Header>[]>;
 }
 
 /**
@@ -744,20 +860,18 @@ declare namespace parseStream {
  * @category High-level API
  *
  * @remarks
- * {@link parseString}, {@link parseUint8ArrayStream},
- * {@link parseArrayBuffer}, {@link parseUint8Array},
+ * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},
  * {@link parseStringStream} and {@link parseResponse} are used internally.
 *
 * If you known the type of the CSV, it performs better to use them directly.
 *
- * | If you want to parse a...           | Use...                        | Data are treated as... |
- * | ----------------------------------- | ----------------------------- | ---------------------- |
- * | {@link !String}                     | {@link parseString}           | String                 |
- * | {@link !ReadableStream}<string>     | {@link parseStringStream}     | String                 |
- * | {@link !ReadableStream}<Uint8Array> | {@link parseUint8ArrayStream} | Binary                 |
- * | {@link !Response}                   | {@link parseResponse}         | Binary                 |
- * | {@link !ArrayBuffer}                | {@link parseArrayBuffer}      | Binary                 |
- * | {@link !Uint8Array}                 | {@link parseUint8Array}       | Binary                 |
+ * | If you want to parse a...                    | Use...                        | Options...                 |
+ * | -------------------------------------------- | ----------------------------- | -------------------------- |
+ * | {@link !String}                              | {@link parseString}           | {@link ParseOptions}       |
+ * | {@link !ReadableStream}<{@link !String}>     | {@link parseStringStream}     | {@link ParseOptions}       |
+ * | {@link !Uint8Array} \| {@link !ArrayBuffer}  | {@link parseBinary}           | {@link ParseBinaryOptions} |
+ * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |
+ * | {@link !Response}                            | {@link parseResponse}         | {@link ParseBinaryOptions} |
 *
 * @example Parsing CSV files from strings
 *
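Putting the routing table above to work, a hedged sketch of the high-level `parse` entry point handling a `Response`; the URL is a placeholder:

```ts
import { parse } from 'web-csv-toolbox';

// parse() defers to parseResponse internally when handed a Response.
const response = await fetch('https://example.com/data.csv');

for await (const record of parse(response)) {
  console.log(record);
}
```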
@@ -933,20 +1047,20 @@ export {
   type CommonOptions,
   Field,
   FieldDelimiter,
+  type FieldDelimiterToken,
+  type FieldToken,
   LexerTransformer,
   type ParseBinaryOptions,
   type ParseOptions,
   type RecordAssemblerOptions,
-  RecordAssemblerTransformar,
+  RecordAssemblerTransformer,
   RecordDelimiter,
+  type RecordDelimiterToken,
   type Token,
-  type TokenType,
   parse,
-  parseArrayBuffer,
+  parseBinary,
   parseResponse,
-  parseStream,
   parseString,
   parseStringStream,
-  parseUint8Array,
   parseUint8ArrayStream,
 };
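Finally, a sketch of an import statement that resolves against the 0.5.0 export list above; `parseArrayBuffer`, `parseUint8Array`, `parseStream`, and `TokenType` are no longer exported, with `parseBinary` and the new token interfaces taking their place:

```ts
import {
  parse,
  parseBinary,
  LexerTransformer,
  RecordAssemblerTransformer,
  type FieldToken,
  type Token,
} from 'web-csv-toolbox';

// Minimal check that the renamed exports are wired up.
for await (const record of parse('name,age\nAlice,42')) {
  console.log(record); // { name: 'Alice', age: '42' }
}
```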