web-csv-toolbox 0.5.0 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.d.ts DELETED
@@ -1,1066 +0,0 @@
1
- /**
2
- * FieldDelimiter is a symbol for field delimiter of CSV.
3
- * @category Constants
4
- */
5
- declare const FieldDelimiter: unique symbol;
6
- /**
7
- * RecordDelimiter is a symbol for record delimiter of CSV.
8
- * @category Constants
9
- */
10
- declare const RecordDelimiter: unique symbol;
11
- /**
12
- * Field is a symbol for field of CSV.
13
- * @category Constants
14
- */
15
- declare const Field: unique symbol;
16
-
17
- /**
18
- * Field token type.
19
- * @category Types
20
- */
21
- interface FieldToken {
22
- type: typeof Field;
23
- value: string;
24
- }
25
- /**
26
- * Field delimiter token type.
27
- * @category Types
28
- */
29
- interface FieldDelimiterToken {
30
- type: typeof FieldDelimiter;
31
- }
32
- /**
33
- * Record delimiter token type.
34
- */
35
- interface RecordDelimiterToken {
36
- type: typeof RecordDelimiter;
37
- }
38
- /**
39
- * Token is an atomic unit of a CSV file.
40
- * It can be a field, field delimiter, or record delimiter.
41
- * @category Types
42
- */
43
- type Token = FieldToken | typeof FieldDelimiter | typeof RecordDelimiter;
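
For illustration, a minimal sketch of how the `Token` union above can be narrowed using the exported `FieldDelimiter` and `RecordDelimiter` symbols; `describeToken` is a hypothetical helper name, not part of the package.

```ts
import { FieldDelimiter, RecordDelimiter, type Token } from "web-csv-toolbox";

// Hypothetical helper: turn a token into a readable label by narrowing the
// Token union. Delimiter tokens are the unique symbols themselves; the
// remaining case is a FieldToken carrying the field's string value.
function describeToken(token: Token): string {
  if (token === FieldDelimiter) return "<field-delimiter>";
  if (token === RecordDelimiter) return "<record-delimiter>";
  return `field("${token.value}")`;
}
```
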
44
- /**
45
- * CSV Common Options.
46
- * @category Types
47
- */
48
- interface CommonOptions {
49
- /**
50
- * CSV field delimiter.
51
- *
52
- * @remarks
53
- * If you want to parse TSV, specify `'\t'`.
54
- *
55
- * This library supports multi-character delimiters.
56
- * @default ','
57
- */
58
- delimiter?: string;
59
- /**
60
- * CSV field quotation.
61
- *
62
- * @remarks
63
- * This library supports multi-character quotations.
64
- *
65
- * @default '"'
66
- */
67
- quotation?: string;
68
- }
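
For illustration, a minimal sketch of passing these common options through `parseString` (declared further down in this file; its `ParseOptions` extends `CommonOptions`). The TSV sample data is made up, and an ES-module context with top-level `await` is assumed.

```ts
import { parseString } from "web-csv-toolbox";

// Tab-separated input: override the default ',' delimiter with '\t'.
// The quotation option is shown with its default value for illustration.
const tsv = "name\tage\nAlice\t42\nBob\t69";

for await (const record of parseString(tsv, { delimiter: "\t", quotation: '"' })) {
  console.log(record);
}
// { name: 'Alice', age: '42' }
// { name: 'Bob', age: '69' }
```
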
69
- /**
70
- * CSV Parsing Options for binary.
71
- * @category Types
72
- */
73
- interface BinaryOptions {
74
- /**
75
- * If the binary is compressed with a compression algorithm,
76
- * specifying that algorithm lets the CSV be decompressed and parsed.
77
- *
78
- * @remarks
79
- * Make sure the runtime you are using supports stream decompression.
80
- *
81
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream#browser_compatibility | DecompressionStream Compatibility}.
82
- */
83
- decomposition?: CompressionFormat;
84
- /**
85
- * You can specify the character encoding of the binary.
86
- *
87
- * @remarks
88
- * {@link !TextDecoderStream} is used internally.
89
- *
90
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/Encoding_API/Encodings | Encoding API Compatibility}
91
- * for the encoding formats that can be specified.
92
- *
93
- * @default 'utf-8'
94
- */
95
- charset?: string;
96
- /**
97
- * If the binary has a BOM, you can specify whether to ignore it.
98
- *
99
- * @remarks
100
- * If you specify true, the BOM will be ignored.
101
- * If you specify false or do not specify it, the BOM will be treated as a normal character.
102
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/ignoreBOM | TextDecoderOptions.ignoreBOM} for more information about the BOM.
103
- * @default false
104
- */
105
- ignoreBOM?: boolean;
106
- /**
107
- * If the binary has an invalid character, you can specify whether to throw an error.
108
- *
109
- * @remarks
110
- * If the property is `true` then a decoder will throw a {@link !TypeError}
111
- * if it encounters malformed data while decoding.
112
- *
113
- * If `false` the decoder will substitute the invalid data
114
- * with the replacement character `U+FFFD` (�).
115
- *
116
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/fatal | TextDecoderOptions.fatal} for more information.
117
- *
118
- * @default false
119
- */
120
- fatal?: boolean;
121
- }
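
For illustration, a minimal sketch of these binary options passed to `parseUint8ArrayStream` (declared further down in this file). The URL is hypothetical, a runtime with DecompressionStream support is assumed, and note that these typings spell the decompression option `decomposition`.

```ts
import { parseUint8ArrayStream } from "web-csv-toolbox";

// Hypothetical endpoint serving a gzip-compressed, Shift_JIS-encoded CSV.
const response = await fetch("https://example.com/data.csv.gz");

for await (const record of parseUint8ArrayStream(response.body!, {
  decomposition: "gzip", // option name as declared in these 0.5.0 typings
  charset: "shift-jis",
  ignoreBOM: true,
  fatal: true, // throw on malformed bytes instead of substituting U+FFFD
})) {
  console.log(record);
}
```
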
122
- /**
123
- * Record Assembler Options for CSV.
124
- * @category Types
125
- *
126
- * @remarks
127
- * If you specify `header: ['foo', 'bar']`,
128
- * the first record will be treated as a normal record.
129
- *
130
- * If you don't specify `header`,
131
- * the first record will be treated as a header.
132
- */
133
- interface RecordAssemblerOptions<Header extends ReadonlyArray<string>> {
134
- /**
135
- * CSV header.
136
- *
137
- * @remarks
138
- * If you specify this option,
139
- * the first record will be treated as a normal record.
140
- *
141
- * If you don't specify this option,
142
- * the first record will be treated as a header.
143
- *
144
- * @default undefined
145
- */
146
- header?: Header;
147
- }
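
For illustration, a minimal sketch of the `header` option described above, using `parseString.toArray` (declared further down in this file); the data and header names are made up.

```ts
import { parseString } from "web-csv-toolbox";

// The CSV has no header row, so one is supplied via options and the first
// record ("Alice,42") is treated as data, not as a header.
const csv = "Alice,42\nBob,69";

const records = await parseString.toArray(csv, { header: ["name", "age"] as const });
console.log(records);
// [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
```
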
148
- /**
149
- * Parse options for CSV string.
150
- * @category Types
151
- */
152
- interface ParseOptions<Header extends ReadonlyArray<string>>
153
- extends CommonOptions,
154
- RecordAssemblerOptions<Header> {}
155
- /**
156
- * Parse options for CSV binary.
157
- * @category Types
158
- */
159
- interface ParseBinaryOptions<Header extends ReadonlyArray<string>>
160
- extends ParseOptions<Header>,
161
- BinaryOptions {}
162
- /**
163
- * CSV Record.
164
- * @category Types
165
- * @template Header Header of the CSV.
166
- *
167
- * @example Header is ["foo", "bar"]
168
- * ```ts
169
- * const record: CSVRecord<["foo", "bar"]> = {
170
- * foo: "1",
171
- * bar: "2",
172
- * };
173
- * ```
174
- */
175
- type CSVRecord<Header extends ReadonlyArray<string>> = Record<
176
- Header[number],
177
- string
178
- >;
179
- /**
180
- * CSV String.
181
- *
182
- * @category Types
183
- */
184
- type CSVString = string | ReadableStream<string>;
185
- /**
186
- * CSV Binary.
187
- *
188
- * @category Types
189
- */
190
- type CSVBinary =
191
- | ReadableStream<Uint8Array>
192
- | Response
193
- | ArrayBuffer
194
- | Uint8Array;
195
- /**
196
- * CSV.
197
- *
198
- * @category Types
199
- */
200
- type CSV = CSVString | CSVBinary;
201
-
202
- /**
203
- * A transform stream that converts a stream of strings into a stream of tokens.
204
- *
205
- * @category Low-level API
206
- *
207
- * @example Parse a CSV with headers by data
208
- * ```ts
209
- * new ReadableStream({
210
- * start(controller) {
211
- * controller.enqueue("name,age\r\n");
212
- * controller.enqueue("Alice,20\r\n");
213
- * controller.close();
214
- * }
215
- * })
216
- * .pipeThrough(new LexerTransformer())
217
- * .pipeTo(new WritableStream({ write(tokens) {
218
- * for (const token of tokens) {
219
- * console.log(token);
220
- * }
221
- * }}));
222
- * // { type: Field, value: "name" }
223
- * // FieldDelimiter
224
- * // { type: Field, value: "age" }
225
- * // RecordDelimiter
226
- * // { type: Field, value: "Alice" }
227
- * // FieldDelimiter
228
- * // { type: Field, value: "20" }
229
- * // RecordDelimiter
230
- * ```
231
- */
232
- declare class LexerTransformer extends TransformStream<string, Token[]> {
233
- constructor(options?: CommonOptions);
234
- }
235
-
236
- /**
237
- * A transform stream that converts a stream of tokens into a stream of rows.
238
- * @template Header The type of the header row.
239
- * @param options The options for the parser.
240
- *
241
- * @category Low-level API
242
- *
243
- * @example Parse a CSV with headers by data
244
- * ```ts
245
- * new ReadableStream({
246
- * start(controller) {
247
- * controller.enqueue("name,age\r\n");
248
- * controller.enqueue("Alice,20\r\n");
249
- * controller.enqueue("Bob,25\r\n");
250
- * controller.enqueue("Charlie,30\r\n");
251
- * controller.close();
252
- * })
253
- * .pipeThrough(new LexerTransformer())
254
- * .pipeThrough(new RecordAssemblerTransformer())
255
- * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
256
- * // { name: "Alice", age: "20" }
257
- * // { name: "Bob", age: "25" }
258
- * // { name: "Charlie", age: "30" }
259
- * ```
260
- *
261
- * @example Parse a CSV with headers by options
262
- * ```ts
263
- * new ReadableStream({
264
- * start(controller) {
265
- * controller.enqueue("Alice,20\r\n");
266
- * controller.enqueue("Bob,25\r\n");
267
- * controller.enqueue("Charlie,30\r\n");
268
- * controller.close();
269
- * }
270
- * })
271
- * .pipeThrough(new LexerTransformer())
272
- * .pipeThrough(new RecordAssemblerTransformer({ header: ["name", "age"] }))
273
- * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
274
- * // { name: "Alice", age: "20" }
275
- * // { name: "Bob", age: "25" }
276
- * // { name: "Charlie", age: "30" }
277
- * ```
278
- */
279
- declare class RecordAssemblerTransformer<
280
- Header extends ReadonlyArray<string>,
281
- > extends TransformStream<Token[], CSVRecord<Header>> {
282
- constructor(options?: RecordAssemblerOptions<Header>);
283
- }
284
-
285
- /**
286
- * Parse CSV string to records.
287
- *
288
- * @category Middle-level API
289
- * @param csv CSV string to parse
290
- * @param options Parsing options. See {@link ParseOptions}.
291
- * @returns Async iterable iterator of records.
292
- *
293
- * If you want an array of records, use the {@link parseString.toArray} function.
294
- * @example Parsing CSV files from strings
295
- *
296
- * ```ts
297
- * import { parseString } from 'web-csv-toolbox';
298
- *
299
- * const csv = `name,age
300
- * Alice,42
301
- * Bob,69`;
302
- *
303
- * for await (const record of parseString(csv)) {
304
- * console.log(record);
305
- * }
306
- * // Prints:
307
- * // { name: 'Alice', age: '42' }
308
- * // { name: 'Bob', age: '69' }
309
- * ```
310
- */
311
- declare function parseString<Header extends ReadonlyArray<string>>(
312
- csv: string,
313
- options?: ParseOptions<Header>,
314
- ): AsyncIterableIterator<CSVRecord<Header>>;
315
- declare namespace parseString {
316
- /**
317
- * Parse CSV string to records.
318
- *
319
- * @returns Array of records
320
- *
321
- * @example
322
- * ```ts
323
- * import { parseString } from 'web-csv-toolbox';
324
- *
325
- * const csv = `name,age
326
- * Alice,42
327
- * Bob,69`;
328
- *
329
- * const records = await parseString.toArray(csv);
330
- * console.log(records);
331
- * // Prints:
332
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
333
- * ```
334
- */
335
- function toArray<Header extends ReadonlyArray<string>>(
336
- csv: string,
337
- options?: ParseOptions<Header>,
338
- ): Promise<CSVRecord<Header>[]>;
339
- /**
340
- * Parse CSV string to records.
341
- *
342
- * @returns Array of records
343
- *
344
- * @example
345
- *
346
- * ```ts
347
- * import { parseString } from 'web-csv-toolbox';
348
- *
349
- * const csv = `name,age
350
- * Alice,42
351
- * Bob,69`;
352
- *
353
- * const records = parseString.toArraySync(csv);
354
- * console.log(records);
355
- * // Prints:
356
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
357
- * ```
358
- */
359
- function toArraySync<Header extends ReadonlyArray<string>>(
360
- csv: string,
361
- options?: ParseOptions<Header>,
362
- ): CSVRecord<Header>[];
363
- /**
364
- * Parse CSV string to records.
365
- *
366
- * @returns Async iterable iterator of records
367
- *
368
- * @example
369
- * ```ts
370
- * import { parseString } from 'web-csv-toolbox';
371
- *
372
- * const csv = `name,age
373
- * Alice,42
374
- * Bob,69`;
375
- *
376
- * for (const record of parseString.toIterableIterator(csv)) {
377
- * console.log(record);
378
- * }
379
- * // Prints:
380
- * // { name: 'Alice', age: '42' }
381
- * // { name: 'Bob', age: '69' }
382
- * ```
383
- */
384
- function toIterableIterator<Header extends ReadonlyArray<string>>(
385
- csv: string,
386
- options?: ParseOptions<Header>,
387
- ): IterableIterator<CSVRecord<Header>>;
388
- /**
389
- * Parse CSV string to records.
390
- *
391
- * @returns Readable stream of records
392
- *
393
- * @example
394
- * ```ts
395
- * import { parseString } from 'web-csv-toolbox';
396
- *
397
- * const csv = `name,age
398
- * Alice,42
399
- * Bob,69`;
400
- *
401
- * await parseString.toStream(csv)
402
- * .pipeTo(
403
- * new WritableStream({
404
- * write(record) {
405
- * console.log(record);
406
- * },
407
- * }),
408
- * );
409
- * // Prints:
410
- * // { name: 'Alice', age: '42' }
411
- * // { name: 'Bob', age: '69' }
412
- * ```
413
- */
414
- function toStream<Header extends ReadonlyArray<string>>(
415
- csv: string,
416
- options?: ParseOptions<Header>,
417
- ): ReadableStream<CSVRecord<Header>>;
418
- }
419
-
420
- /**
421
- * Parse a binary from an {@link !Uint8Array}.
422
- *
423
- * @category Middle-level API
424
- *
425
- * @param bytes CSV bytes to parse.
426
- * @param options Parsing options
427
- * @returns Async iterable iterator of records.
428
- *
429
- * @example Parsing CSV binary
430
- *
431
- * ```ts
432
- * import { parseBinary } from 'web-csv-toolbox';
433
- *
434
- * const csv = Uint8Array.from([
435
- * // ...
436
- * ]);
437
- *
438
- * for await (const record of parseBinary(csv)) {
439
- * console.log(record);
440
- * }
441
- * ```
442
- */
443
- declare function parseBinary<Header extends ReadonlyArray<string>>(
444
- bytes: Uint8Array | ArrayBuffer,
445
- options?: ParseBinaryOptions<Header>,
446
- ): AsyncIterableIterator<CSVRecord<Header>>;
447
- declare namespace parseBinary {
448
- /**
449
- * Parse a binary from an {@link !Uint8Array} to an array of records.
450
- *
451
- * @param bytes CSV bytes to parse.
452
- * @param options Parsing options
453
- * @returns Array of records
454
- *
455
- * @example
456
- * ```ts
457
- * import { parseBinary } from 'web-csv-toolbox';
458
- *
459
- * const csv = Uint8Array.from([
460
- * // ...
461
- * ]);
462
- *
463
- * const records = await parseBinary.toArray(csv);
464
- * ```
465
- */
466
- function toArray<Header extends ReadonlyArray<string>>(
467
- bytes: Uint8Array | ArrayBuffer,
468
- options?: ParseBinaryOptions<Header>,
469
- ): Promise<CSVRecord<Header>[]>;
470
- /**
471
- * Parse a binary from an {@link !Uint8Array} to an array of records.
472
- *
473
- * @param bytes CSV bytes to parse.
474
- * @param options Parsing options
475
- * @returns Array of records
476
- * @example
477
- *
478
- * ```ts
479
- * import { parseBinary } from 'web-csv-toolbox';
480
- *
481
- * const csv = Uint8Array.from([
482
- * // ...
483
- * ]);
484
- *
485
- * const records = parseBinary.toArraySync(csv);
486
- * ```
487
- */
488
- function toArraySync<Header extends ReadonlyArray<string>>(
489
- bytes: Uint8Array | ArrayBuffer,
490
- options?: ParseBinaryOptions<Header>,
491
- ): CSVRecord<Header>[];
492
- /**
493
- * Parse a binary from an {@link !Uint8Array} to an iterable iterator of records.
494
- *
495
- * @param bytes CSV bytes to parse.
496
- * @param options Parsing options
497
- * @returns Async iterable iterator of records.
498
- * @example
499
- * ```ts
500
- * import { parseBinary } from 'web-csv-toolbox';
501
- *
502
- * const csv = Uint8Array.from([
503
- * // ...
504
- * ]);
505
- *
506
- * for (const record of parseBinary.toIterableIterator(csv)) {
507
- * console.log(record);
508
- * }
509
- * ```
510
- */
511
- function toIterableIterator<Header extends ReadonlyArray<string>>(
512
- bytes: Uint8Array,
513
- options?: ParseBinaryOptions<Header>,
514
- ): IterableIterator<CSVRecord<Header>>;
515
- /**
516
- * Parse a binary from an {@link !Uint8Array} to a stream of records.
517
- *
518
- * @param bytes CSV bytes to parse.
519
- * @param options Parsing options
520
- * @returns Stream of records.
521
- *
522
- * @example
523
- *
524
- * ```ts
525
- * import { parseBinary } from 'web-csv-toolbox';
526
- *
527
- * const csv = Uint8Array.from([
528
- * // ...
529
- * ]);
530
- *
531
- * const stream = parseBinary.toStream(csv);
532
- *
533
- * await stream.pipeTo(
534
- * new WritableStream({
535
- * write(record) {
536
- * console.log(record);
537
- * },
538
- * }),
539
- * );
540
- * ```
541
- */
542
- function toStream<Header extends ReadonlyArray<string>>(
543
- bytes: Uint8Array,
544
- options?: ParseBinaryOptions<Header>,
545
- ): ReadableStream<CSVRecord<Header>>;
546
- }
547
-
548
- /**
549
- * Parse CSV to records.
550
- * This function is for parsing a binary stream.
551
- *
552
- * @category Middle-level API
553
- * @remarks
554
- * If you want to parse a string, use {@link parseStringStream}.
555
- * @param stream CSV binary stream to parse
556
- * @param options Parsing options.
557
- * @returns Async iterable iterator of records.
558
- *
559
- * If you want an array of records, use the {@link parseUint8ArrayStream.toArray} function.
560
- *
561
- * @example Parsing CSV binary
562
- *
563
- * ```ts
564
- * import { parseUint8ArrayStream } from 'web-csv-toolbox';
565
- *
566
- * const csv = Uint8Array.from([
567
- * // ...
568
- * ]);
569
- *
570
- * const stream = new ReadableStream({
571
- * start(controller) {
572
- * controller.enqueue(csv);
573
- * controller.close();
574
- * },
575
- * });
576
- *
577
- * for await (const record of parseUint8ArrayStream(stream)) {
578
- * console.log(record);
579
- * }
580
- * ```
581
- */
582
- declare function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(
583
- stream: ReadableStream<Uint8Array>,
584
- options?: ParseBinaryOptions<Header>,
585
- ): AsyncIterableIterator<CSVRecord<Header>>;
586
- declare namespace parseUint8ArrayStream {
587
- /**
588
- * Parse CSV binary to array of records,
589
- * ideal for smaller data sets.
590
- *
591
- * @returns Array of records
592
- *
593
- * @example Parsing CSV binary
594
- * ```ts
595
- * import { parseUint8ArrayStream } from 'web-csv-toolbox';
596
- *
597
- * const csv = Uint8Array.from([
598
- * // ...
599
- * ]);
600
- *
601
- * const stream = new ReadableStream({
602
- * start(controller) {
603
- * controller.enqueue(csv);
604
- * controller.close();
605
- * },
606
- * });
607
- *
608
- * const records = await parseUint8ArrayStream.toArray(stream);
609
- * console.log(records);
610
- * ```
611
- */
612
- function toArray<Header extends ReadonlyArray<string>>(
613
- stream: ReadableStream<Uint8Array>,
614
- options?: ParseBinaryOptions<Header>,
615
- ): Promise<CSVRecord<Header>[]>;
616
- /**
617
- * Parse CSV binary to a stream of records.
618
- *
619
- * @returns Stream of records
620
- *
621
- * @example Parsing CSV binary
622
- * ```ts
623
- * import { parseUint8ArrayStream } from 'web-csv-toolbox';
624
- *
625
- * const csv = Uint8Array.from([
626
- * // ...
627
- * ]);
628
- *
629
- * const stream = new ReadableStream({
630
- * start(controller) {
631
- * controller.enqueue(csv);
632
- * controller.close();
633
- * },
634
- * });
635
- *
636
- * await parseUint8ArrayStream.toStream(stream)
637
- * .pipeTo(new WritableStream({
638
- * write(record) {
639
- * console.log(record);
640
- * },
641
- * }),
642
- * );
643
- * ```
644
- */
645
- function toStream<Header extends ReadonlyArray<string>>(
646
- stream: ReadableStream<Uint8Array>,
647
- options?: ParseBinaryOptions<Header>,
648
- ): ReadableStream<CSVRecord<Header>[]>;
649
- }
650
-
651
- /**
652
- * Parse CSV string stream to records.
653
- *
654
- * @category Middle-level API
655
- * @param stream CSV string stream to parse
656
- * @param options Parsing options.
657
- * @returns Async iterable iterator of records.
658
- *
659
- * If you want an array of records, use the {@link parseStringStream.toArray} function.
660
- *
661
- * @example Parsing CSV files from strings
662
- *
663
- * ```ts
664
- * import { parseStringStream } from 'web-csv-toolbox';
665
- *
666
- * const csv = `name,age
667
- * Alice,42
668
- * Bob,69`;
669
- *
670
- * const stream = new ReadableStream({
671
- * start(controller) {
672
- * controller.enqueue(csv);
673
- * controller.close();
674
- * },
675
- * });
676
- *
677
- * for await (const record of parseStringStream(stream)) {
678
- * console.log(record);
679
- * }
680
- * // Prints:
681
- * // { name: 'Alice', age: '42' }
682
- * // { name: 'Bob', age: '69' }
683
- * ```
684
- */
685
- declare function parseStringStream<Header extends ReadonlyArray<string>>(
686
- stream: ReadableStream<string>,
687
- options?: ParseOptions<Header>,
688
- ): AsyncIterableIterator<CSVRecord<Header>>;
689
- declare namespace parseStringStream {
690
- /**
691
- * Parse CSV string stream to records.
692
- *
693
- * @returns Array of records
694
- *
695
- * @example
696
- *
697
- * ```ts
698
- * import { parseStringStream } from 'web-csv-toolbox';
699
- *
700
- * const csv = `name,age
701
- * Alice,42
702
- * Bob,69`;
703
- *
704
- * const stream = new ReadableStream({
705
- * start(controller) {
706
- * controller.enqueue(csv);
707
- * controller.close();
708
- * },
709
- * });
710
- *
711
- * const records = await parseStringStream.toArray(stream);
712
- * console.log(records);
713
- * // Prints:
714
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
715
- * ```
716
- */
717
- function toArray<Header extends ReadonlyArray<string>>(
718
- stream: ReadableStream<string>,
719
- options?: ParseOptions<Header>,
720
- ): Promise<CSVRecord<Header>[]>;
721
- /**
722
- * Parse CSV string stream to records.
723
- *
724
- * @returns Stream of records
725
- *
726
- * @example
727
- *
728
- * ```ts
729
- * import { parseStringStream } from 'web-csv-toolbox';
730
- *
731
- * const csv = `name,age
732
- * Alice,42
733
- * Bob,69`;
734
- *
735
- * const stream = new ReadableStream({
736
- * start(controller) {
737
- * controller.enqueue(csv);
738
- * controller.close();
739
- * },
740
- * });
741
- *
742
- * await parseStringStream.toStream(stream)
743
- * .pipeTo(
744
- * new WritableStream({
745
- * write(record) {
746
- * console.log(record);
747
- * },
748
- * }),
749
- * );
750
- * ```
751
- */
752
- function toStream<Header extends ReadonlyArray<string>>(
753
- stream: ReadableStream<string>,
754
- options?: ParseOptions<Header>,
755
- ): ReadableStream<CSVRecord<Header>>;
756
- }
757
-
758
- /**
759
- * Parse an HTTP Response that contains CSV into records,
760
- * ideal for smaller data sets.
761
- *
762
- * @remarks
763
- * This function automatically handles response headers.
764
- *
765
- * - If `Content-Type` header is not set, it assumes `text/csv`.
766
- * - If `Content-Type` header is not `text/csv`, it throws an error.
767
- * - If `Content-Type` header has charset parameter, it uses it for decoding.
768
- * - If `Content-Encoding` header is set, it decompresses the response.
769
- * - Should there be any conflicting information between the header and the options, the option's value will take precedence.
770
- *
771
- * @category Middle-level API
772
- * @param response
773
- * @param options
774
- * @returns Async iterable iterator of records.
775
- *
776
- * If you want an array of records, use the {@link parseResponse.toArray} function.
777
- *
778
- * @example Parsing CSV Response
779
- *
780
- * ```ts
781
- * import { parseResponse } from 'web-csv-toolbox';
782
- *
783
- * const response = await fetch('https://example.com/data.csv');
784
- *
785
- * for await (const record of parseResponse(response)) {
786
- * console.log(record);
787
- * }
788
- * ```
789
- */
790
- declare function parseResponse<Header extends ReadonlyArray<string>>(
791
- response: Response,
792
- options?: ParseOptions<Header>,
793
- ): AsyncIterableIterator<CSVRecord<Header>>;
794
- declare namespace parseResponse {
795
- /**
796
- * Parse CSV Response to array of records.
797
- *
798
- * @returns Array of records
799
- *
800
- * @example Parsing CSV Response
801
- *
802
- * ```ts
803
- * import { parseResponse } from 'web-csv-toolbox';
804
- *
805
- * const response = await fetch('https://example.com/data.csv');
806
- *
807
- * const records = await parseResponse.toArray(response);
808
- * console.log(records);
809
- * ```
810
- */
811
- function toArray<Header extends ReadonlyArray<string>>(
812
- response: Response,
813
- options?: ParseOptions<Header>,
814
- ): Promise<CSVRecord<Header>[]>;
815
- /**
816
- * Parse CSV Response to stream of records.
817
- *
818
- * @param response Response to parse
819
- * @returns Stream of records
820
- *
821
- * @example Parsing CSV Response
822
- *
823
- * ```ts
824
- * import { parseResponse } from 'web-csv-toolbox';
825
- *
826
- * const response = await fetch('https://example.com/data.csv');
827
- *
828
- * await parseResponse.toStream(response)
829
- * .pipeTo(
830
- * new WritableStream({
831
- * write(record) {
832
- * console.log(record);
833
- * },
834
- * }),
835
- * );
836
- * // Prints:
837
- * // { name: 'Alice', age: '42' }
838
- * // { name: 'Bob', age: '69' }
839
- * ```
840
- */
841
- function toStream<Header extends ReadonlyArray<string>>(
842
- response: Response,
843
- options?: ParseOptions<Header>,
844
- ): ReadableStream<CSVRecord<Header>[]>;
845
- }
846
-
847
- /**
848
- * Parse CSV to records.
849
- *
850
- * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.
851
- *
852
- *
853
- * @typeParam Header Header type like `['name', 'age']`.
854
- *
855
- * @param csv CSV string to parse.
856
- * @param options Parsing options for CSV string parsing.
857
- * @returns Async iterable iterator of records.
858
- *
859
- * If you want an array of records, use the {@link parse.toArray} function.
860
- * @category High-level API
861
- *
862
- * @remarks
863
- * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},
864
- * {@link parseStringStream} and {@link parseResponse} are used internally.
865
- *
866
- * If you know the type of the CSV, it is more efficient to use them directly.
867
- *
868
- * | If you want to parse a... | Use... | Options... |
869
- * | -------------------------------------------- | ----------------------------- | -------------------------- |
870
- * | {@link !String} | {@link parseString} | {@link ParseOptions} |
871
- * | {@link !ReadableStream}<{@link !String}> | {@link parseStringStream} | {@link ParseOptions} |
872
- * | {@link !Uint8Array} \| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |
873
- * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |
874
- * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |
875
- *
876
- * @example Parsing CSV files from strings
877
- *
878
- * ```ts
879
- * import { parse } from 'web-csv-toolbox';
880
- *
881
- * const csv = `name,age
882
- * Alice,42
883
- * Bob,69`;
884
- *
885
- * for await (const record of parse(csv)) {
886
- * console.log(record);
887
- * }
888
- * // Prints:
889
- * // { name: 'Alice', age: '42' }
890
- * // { name: 'Bob', age: '69' }
891
- * ```
892
- *
893
- * @example Parsing CSV files from streams
894
- *
895
- * ```ts
896
- * import { parse } from 'web-csv-toolbox';
897
- *
898
- * const csv = `name,age
899
- * Alice,42
900
- * Bob,69`;
901
- *
902
- * const stream = new ReadableStream({
903
- * start(controller) {
904
- * controller.enqueue(csv);
905
- * controller.close();
906
- * }
907
- * });
908
- *
909
- * for await (const record of parse(stream)) {
910
- * console.log(record);
911
- * }
912
- * // Prints:
913
- * // { name: 'Alice', age: '42' }
914
- * // { name: 'Bob', age: '69' }
915
- * ```
916
- *
917
- *
918
- * @example Parsing CSV files with headers
919
- *
920
- * ```ts
921
- * import { parse } from 'web-csv-toolbox';
922
- *
923
- * // This CSV has no header.
924
- * const csv = `Alice,42
925
- * Bob,69`;
926
- *
927
- * for await (const record of parse(csv, { header: ['name', 'age'] })) {
928
- * console.log(record);
929
- * }
930
- * // Prints:
931
- * // { name: 'Alice', age: '42' }
932
- * // { name: 'Bob', age: '69' }
933
- * ```
934
- *
935
- * @example Parsing CSV files with different delimiter characters
936
- *
937
- * ```ts
938
- * import { parse } from 'web-csv-toolbox';
939
- *
940
- * const csv = `name\tage
941
- * Alice\t42
942
- * Bob\t69`;
943
- *
944
- * for await (const record of parse(csv, { delimiter: '\t' })) {
945
- * console.log(record);
946
- * }
947
- * // Prints:
948
- * // { name: 'Alice', age: '42' }
949
- * // { name: 'Bob', age: '69' }
950
- * ```
951
- */
952
- declare function parse<Header extends ReadonlyArray<string>>(
953
- csv: CSVString,
954
- options?: ParseOptions<Header>,
955
- ): AsyncIterableIterator<CSVRecord<Header>>;
956
- /**
957
- * Parse CSV binary to records.
958
- *
959
- * @param csv CSV binary to parse.
960
- * @param options Parsing options for CSV binary parsing.
961
- *
962
- * @example Parsing CSV files from responses
963
- *
964
- * ```ts
965
- * import { parse } from 'web-csv-toolbox';
966
- *
967
- * // This CSV data is not gzipped and encoded in utf-8.
968
- * const response = await fetch('https://example.com/data.csv');
969
- *
970
- * for await (const record of parse(response)) {
971
- * // ...
972
- * }
973
- * ```
974
- *
975
- * @example Parsing CSV files with options specialized for binary
976
- *
977
- * ```ts
978
- * import { parse } from 'web-csv-toolbox';
979
- *
980
- * // This CSV data is gzipped, encoded in shift-jis, and has a BOM.
981
- * const response = await fetch('https://example.com/data.csv.gz');
982
- *
983
- * for await (const record of parse(response, {
984
- * charset: 'shift-jis',
985
- * ignoreBOM: true,
986
- * decomposition: 'gzip',
987
- * })) {
988
- * // ...
989
- * }
990
- * ```
991
- */
992
- declare function parse<Header extends ReadonlyArray<string>>(
993
- csv: CSVBinary,
994
- options?: ParseBinaryOptions<Header>,
995
- ): AsyncIterableIterator<CSVRecord<Header>>;
996
- declare namespace parse {
997
- /**
998
- * Parse CSV string to array of records,
999
- * ideal for smaller data sets.
1000
- *
1001
- * @example Parse a CSV as array of records
1002
- *
1003
- * ```ts
1004
- * import { parse } from 'web-csv-toolbox';
1005
- *
1006
- * const csv = `name,age
1007
- * Alice,42
1008
- * Bob,69`;
1009
- *
1010
- * const records = await parse.toArray(csv);
1011
- * console.log(records);
1012
- * // Prints:
1013
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
1014
- * ```
1015
- */
1016
- function toArray<Header extends ReadonlyArray<string>>(
1017
- csv: CSVString,
1018
- options?: ParseOptions<Header>,
1019
- ): Promise<CSVRecord<Header>[]>;
1020
- /**
1021
- * Parse CSV binary to array of records,
1022
- * ideal for smaller data sets.
1023
- *
1024
- * @example Parse a CSV as array of records
1025
- *
1026
- * ```ts
1027
- * import { parse } from 'web-csv-toolbox';
1028
- *
1029
- * const response = await fetch('https://example.com/data.csv');
1030
- *
1031
- * const records = await parse.toArray(response);
1032
- * console.log(records);
1033
- * ```
1034
- */
1035
- function toArray<Header extends ReadonlyArray<string>>(
1036
- csv: CSVBinary,
1037
- options?: ParseBinaryOptions<Header>,
1038
- ): Promise<CSVRecord<Header>[]>;
1039
- }
1040
-
1041
- export {
1042
- type BinaryOptions,
1043
- type CSV,
1044
- type CSVBinary,
1045
- type CSVRecord,
1046
- type CSVString,
1047
- type CommonOptions,
1048
- Field,
1049
- FieldDelimiter,
1050
- type FieldDelimiterToken,
1051
- type FieldToken,
1052
- LexerTransformer,
1053
- type ParseBinaryOptions,
1054
- type ParseOptions,
1055
- type RecordAssemblerOptions,
1056
- RecordAssemblerTransformer,
1057
- RecordDelimiter,
1058
- type RecordDelimiterToken,
1059
- type Token,
1060
- parse,
1061
- parseBinary,
1062
- parseResponse,
1063
- parseString,
1064
- parseStringStream,
1065
- parseUint8ArrayStream,
1066
- };