web-csv-toolbox 0.4.0 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.d.ts DELETED
@@ -1,952 +0,0 @@
1
- /**
2
- * FieldDelimiter is a symbol for field delimiter of CSV.
3
- * @category Constants
4
- */
5
- declare const FieldDelimiter: unique symbol;
6
- /**
7
- * RecordDelimiter is a symbol for record delimiter of CSV.
8
- * @category Constants
9
- */
10
- declare const RecordDelimiter: unique symbol;
11
- /**
12
- * Field is a symbol for field of CSV.
13
- * @category Constants
14
- */
15
- declare const Field: unique symbol;
16
-
17
- /**
18
- * Token is an atomic unit of a CSV file.
19
- * It can be a field, field delimiter, or record delimiter.
20
- * @category Types
21
- *
22
- * @example
23
- * ```ts
24
- * const fieldToken: Token = { type: Field, value: "foo" };
25
- * const fieldDelimiterToken: Token = { type: FieldDelimiter, value: "," };
26
- * const recordDelimiterToken: Token = { type: RecordDelimiter, value: "\n" };
27
- * ```
28
- */
29
- interface Token<T extends TokenType = TokenType> {
30
- type: T;
31
- value: string;
32
- }
33
- /**
34
- * Type of a token for CSV.
35
- * @category Types
36
- */
37
- type TokenType = typeof FieldDelimiter | typeof RecordDelimiter | typeof Field;
38
- /**
39
- * CSV Common Options.
40
- * @category Types
41
- */
42
- interface CommonOptions {
43
- /**
44
- * CSV field delimiter.
45
- *
46
- * @remarks
47
- * If you want to parse TSV, specify `'\t'`.
48
- *
49
- * This library supports multi-character delimiters.
50
- * @default ','
51
- */
52
- demiliter?: string;
53
- /**
54
- * CSV field quotation.
55
- *
56
- * @remarks
57
- * This library supports multi-character quotations.
58
- *
59
- * @default '"'
60
- */
61
- quotation?: string;
62
- }
63
- /**
64
- * CSV Parsing Options for binary.
65
- * @category Types
66
- */
67
- interface BinaryOptions {
68
- /**
69
- * If the binary is compressed by a compression algorithm,
70
- * specify the algorithm and the CSV will be decompressed before parsing.
71
- *
72
- * @remarks
73
- * Make sure the runtime you are running supports stream decompression.
74
- *
75
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream#browser_compatibility | DecompressionStream Compatibility}.
76
- */
77
- decomposition?: CompressionFormat;
78
- /**
79
- * You can specify the character encoding of the binary.
80
- *
81
- * @remarks
82
- * {@link !TextDecoderStream} is used internally.
83
- *
84
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/Encoding_API/Encodings | Encoding API Compatibility}
85
- * for the encoding formats that can be specified.
86
- *
87
- * @default 'utf-8'
88
- */
89
- charset?: string;
90
- /**
91
- * If the binary has a BOM, you can specify whether to ignore it.
92
- *
93
- * @remarks
94
- * If you specify true, the BOM will be ignored.
95
- * If you specify false or do not specify it, the BOM will be treated as a normal character.
96
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/ignoreBOM | TextDecoderOptions.ignoreBOM} for more information about the BOM.
97
- * @default false
98
- */
99
- ignoreBOM?: boolean;
100
- /**
101
- * If the binary has an invalid character, you can specify whether to throw an error.
102
- *
103
- * @remarks
104
- * If the property is `true` then a decoder will throw a {@link !TypeError}
105
- * if it encounters malformed data while decoding.
106
- *
107
- * If `false` the decoder will substitute the invalid data
108
- * with the replacement character `U+FFFD` (�).
109
- *
110
- * See {@link https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/fatal | TextDecoderOptions.fatal} for more information.
111
- *
112
- * @default false
113
- */
114
- fatal?: boolean;
115
- }
116
- /**
117
- * Record Assembler Options for CSV.
118
- * @category Types
119
- *
120
- * @remarks
121
- * If you specify `header: ['foo', 'bar']`,
122
- * the first record will be treated as a normal record.
123
- *
124
- * If you don't specify `header`,
125
- * the first record will be treated as a header.
126
- */
127
- interface RecordAssemblerOptions<Header extends ReadonlyArray<string>> {
128
- /**
129
- * CSV header.
130
- *
131
- * @remarks
132
- * If you specify this option,
133
- * the first record will be treated as a normal record.
134
- *
135
- * If you don't specify this option,
136
- * the first record will be treated as a header.
137
- *
138
- * @default undefined
139
- */
140
- header?: Header;
141
- }
142
- /**
143
- * Parse options for CSV string.
144
- * @category Types
145
- */
146
- interface ParseOptions<Header extends ReadonlyArray<string>>
147
- extends CommonOptions,
148
- RecordAssemblerOptions<Header> {}
149
- /**
150
- * Parse options for CSV binary.
151
- * @category Types
152
- */
153
- interface ParseBinaryOptions<Header extends ReadonlyArray<string>>
154
- extends ParseOptions<Header>,
155
- BinaryOptions {}
156
- /**
157
- * CSV Record.
158
- * @category Types
159
- * @template Header Header of the CSV.
160
- *
161
- * @example Header is ["foo", "bar"]
162
- * ```ts
163
- * const record: CSVRecord<["foo", "bar"]> = {
164
- * foo: "1",
165
- * bar: "2",
166
- * };
167
- * ```
168
- */
169
- type CSVRecord<Header extends ReadonlyArray<string>> = Record<
170
- Header[number],
171
- string
172
- >;
173
- /**
174
- * CSV String.
175
- *
176
- * @category Types
177
- */
178
- type CSVString = string | ReadableStream<string>;
179
- /**
180
- * CSV Binary.
181
- *
182
- * @category Types
183
- */
184
- type CSVBinary =
185
- | ReadableStream<Uint8Array>
186
- | Response
187
- | ArrayBuffer
188
- | Uint8Array;
189
- /**
190
- * CSV.
191
- *
192
- * @category Types
193
- */
194
- type CSV = CSVString | CSVBinary;
195
-
196
- /**
197
- * A transform stream that converts a stream of strings into a stream of tokens.
198
- *
199
- * @category Low-level API
200
- *
201
- * @example Parse a CSV with headers by data
202
- * ```ts
203
- * new ReadableStream({
204
- * start(controller) {
205
- * controller.enqueue("name,age\r\n");
206
- * controller.enqueue("Alice,20\r\n");
207
- * controller.close();
208
- * }
209
- * })
210
- * .pipeThrough(new LexerTransformer())
211
- * .pipeTo(new WritableStream({ write(token) { console.log(token); }}));
212
- * // { type: Field, value: "name" }
213
- * // { type: FieldDelimiter, value: "," }
214
- * // { type: Field, value: "age" }
215
- * // { type: RecordDelimiter, value: "\r\n" }
216
- * // { type: Field, value: "Alice" }
217
- * // { type: FieldDelimiter, value: "," }
218
- * // { type: Field, value: "20" }
219
- * // { type: RecordDelimiter, value: "\r\n" }
220
- * ```
221
- */
222
- declare class LexerTransformer extends TransformStream<string, Token> {
223
- #private;
224
- get demiliter(): string;
225
- get quotation(): string;
226
- constructor({ demiliter, quotation }?: CommonOptions);
227
- private extractQuotedString;
228
- }
229
-
230
- /**
231
- * A transform stream that converts a stream of tokens into a stream of rows.
232
- * @template Header The type of the header row.
233
- * @param options The options for the parser.
234
- *
235
- * @category Low-level API
236
- *
237
- * @example Parse a CSV with headers by data
238
- * ```ts
239
- * new ReadableStream({
240
- * start(controller) {
241
- * controller.enqueue("name,age\r\n");
242
- * controller.enqueue("Alice,20\r\n");
243
- * controller.enqueue("Bob,25\r\n");
244
- * controller.enqueue("Charlie,30\r\n");
245
- * controller.close();
246
- * }
- * })
247
- * .pipeThrough(new LexerTransformer())
248
- * .pipeThrough(new RecordAssemblerTransformar())
249
- * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
250
- * // { name: "Alice", age: "20" }
251
- * // { name: "Bob", age: "25" }
252
- * // { name: "Charlie", age: "30" }
253
- * ```
254
- *
255
- * @example Parse a CSV with headers by options
256
- * ```ts
257
- * new ReadableStream({
258
- * start(controller) {
259
- * controller.enqueue("Alice,20\r\n");
260
- * controller.enqueue("Bob,25\r\n");
261
- * controller.enqueue("Charlie,30\r\n");
262
- * controller.close();
263
- * }
264
- * })
265
- * .pipeThrough(new LexerTransformer())
266
- * .pipeThrough(new RecordAssemblerTransformar({ header: ["name", "age"] }))
267
- * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
268
- * // { name: "Alice", age: "20" }
269
- * // { name: "Bob", age: "25" }
270
- * // { name: "Charlie", age: "30" }
271
- * ```
272
- */
273
- declare class RecordAssemblerTransformar<
274
- Header extends ReadonlyArray<string>,
275
- > extends TransformStream<Token, Record<Header[number], string | undefined>> {
276
- #private;
277
- constructor(options?: RecordAssemblerOptions<Header>);
278
- }
279
-
280
- /**
281
- * Parse CSV string to records.
282
- *
283
- * @category Middle-level API
284
- * @param csv CSV string to parse
285
- * @param options Parsing options. See {@link ParseOptions}.
286
- * @returns Async iterable iterator of records.
287
- *
288
- * If you want an array of records, use the {@link parseString.toArray} function.
289
- * @example Parsing CSV files from strings
290
- *
291
- * ```ts
292
- * import { parseString } from 'web-csv-toolbox';
293
- *
294
- * const csv = `name,age
295
- * Alice,42
296
- * Bob,69`;
297
- *
298
- * for await (const record of parseString(csv)) {
299
- * console.log(record);
300
- * }
301
- * // Prints:
302
- * // { name: 'Alice', age: '42' }
303
- * // { name: 'Bob', age: '69' }
304
- * ```
305
- */
306
- declare function parseString<Header extends ReadonlyArray<string>>(
307
- csv: string,
308
- options?: ParseOptions<Header>,
309
- ): AsyncIterableIterator<CSVRecord<Header>>;
310
- declare namespace parseString {
311
- /**
312
- * Parse CSV string to records.
313
- *
314
- * @returns Array of records
315
- *
316
- * @example
317
- * ```ts
318
- * import { parseString } from 'web-csv-toolbox';
319
- *
320
- * const csv = `name,age
321
- * Alice,42
322
- * Bob,69`;
323
- *
324
- * const records = await parseString.toArray(csv);
325
- * console.log(records);
326
- * // Prints:
327
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
328
- * ```
329
- */
330
- function toArray<Header extends ReadonlyArray<string>>(
331
- csv: string,
332
- options?: ParseOptions<Header>,
333
- ): Promise<CSVRecord<Header>[]>;
334
- }
335
-
336
- /**
337
- * Parse a binary from an {@link !Uint8Array}.
338
- *
339
- * @category Middle-level API
340
- *
341
- * @param bytes CSV bytes to parse.
342
- * @param options Parsing options
343
- * @returns Async iterable iterator of records.
344
- *
345
- * @example Parsing CSV binary
346
- *
347
- * ```ts
348
- * import { parseUint8Array } from 'web-csv-toolbox';
349
- *
350
- * const csv = Uint8Array.from([
351
- * // ...
352
- * ]);
353
- *
354
- * for await (const record of parseUint8Array(csv)) {
355
- * console.log(record);
356
- * }
357
- * ```
358
- */
359
- declare function parseUint8Array<Header extends ReadonlyArray<string>>(
360
- bytes: Uint8Array,
361
- options?: ParseBinaryOptions<Header>,
362
- ): AsyncIterableIterator<CSVRecord<Header>>;
363
- declare namespace parseUint8Array {
364
- /**
365
- * Parse a binary from an {@link !Uint8Array} to an array of records.
366
- *
367
- * @param bytes CSV bytes to parse.
368
- * @param options Parsing options
369
- * @returns Array of records
370
- *
371
- * @example
372
- * ```ts
373
- * import { parseUint8Array } from 'web-csv-toolbox';
374
- *
375
- * const csv = Uint8Array.from([
376
- * // ...
377
- * ]);
378
- *
379
- * const records = await parseUint8Array.toArray(csv);
380
- * ```
381
- */
382
- function toArray<Header extends ReadonlyArray<string>>(
383
- bytes: Uint8Array,
384
- options?: ParseBinaryOptions<Header>,
385
- ): Promise<CSVRecord<Header>[]>;
386
- }
387
-
388
- /**
389
- * Parse CSV to records.
390
- * This function is for parsing a binary stream.
391
- *
392
- * @category Middle-level API
393
- * @remarks
394
- * If you want to parse a string, use {@link parseStringStream}.
395
- * @param stream CSV binary stream to parse
396
- * @param options Parsing options.
397
- * @returns Async iterable iterator of records.
398
- *
399
- * If you want an array of records, use the {@link parseUint8ArrayStream.toArray} function.
400
- *
401
- * @example Parsing CSV binary
402
- *
403
- * ```ts
404
- * import { parseUint8ArrayStream } from 'web-csv-toolbox';
405
- *
406
- * const csv = Uint8Array.from([
407
- * // ...
408
- * ]);
409
- *
410
- * const stream = new ReadableStream({
411
- * start(controller) {
412
- * controller.enqueue(csv);
413
- * controller.close();
414
- * },
415
- * });
416
- *
417
- * for await (const record of parseUint8ArrayStream(stream)) {
418
- * console.log(record);
419
- * }
420
- * ```
421
- */
422
- declare function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(
423
- stream: ReadableStream<Uint8Array>,
424
- options?: ParseBinaryOptions<Header>,
425
- ): AsyncIterableIterator<CSVRecord<Header>>;
426
- declare namespace parseUint8ArrayStream {
427
- /**
428
- * Parse CSV binary to array of records,
429
- * ideal for smaller data sets.
430
- *
431
- * @returns Array of records
432
- *
433
- * @example Parsing CSV binary
434
- * ```ts
435
- * import { parseUint8ArrayStream } from 'web-csv-toolbox';
436
- *
437
- * const csv = Uint8Array.from([
438
- * // ...
439
- * ]);
440
- *
441
- * const stream = new ReadableStream({
442
- * start(controller) {
443
- * controller.enqueue(csv);
444
- * controller.close();
445
- * },
446
- * });
447
- *
448
- * const records = await parseUint8ArrayStream.toArray(stream);
449
- * console.log(records);
450
- * ```
451
- */
452
- function toArray<Header extends ReadonlyArray<string>>(
453
- stream: ReadableStream<Uint8Array>,
454
- options?: ParseBinaryOptions<Header>,
455
- ): Promise<CSVRecord<Header>[]>;
456
- }
457
-
458
- /**
459
- * Parse a binary from an {@link !ArrayBuffer}.
460
- *
461
- * @category Middle-level API
462
-
463
- * @param buffer CSV ArrayBuffer to parse.
464
- * @param options Parsing options
465
- * @returns Async iterable iterator of records.
466
- *
467
- * @example Parsing CSV files from ArrayBuffers
468
- *
469
- * ```ts
470
- * import { parseArrayBuffer } from 'web-csv-toolbox';
471
- *
472
- * const csv = `name,age
473
- * Alice,42
474
- * Bob,69`;
475
- *
476
- * const buffer = new TextEncoder().encode(csv).buffer;
477
- *
478
- * for await (const record of parseArrayBuffer(buffer)) {
479
- * console.log(record);
480
- * }
481
- * // Prints:
482
- * // { name: 'Alice', age: '42' }
483
- * // { name: 'Bob', age: '69' }
484
- * ```
485
- */
486
- declare function parseArrayBuffer<Header extends ReadonlyArray<string>>(
487
- buffer: ArrayBuffer,
488
- options?: ParseBinaryOptions<Header>,
489
- ): AsyncIterableIterator<CSVRecord<Header>>;
490
- declare namespace parseArrayBuffer {
491
- /**
492
- * Parse a binary from an {@link !ArrayBuffer} to an array of records.
493
- * @param buffer CSV ArrayBuffer to parse.
494
- * @param options Parsing options
495
- * @returns Array of records
496
- * @example
497
- * ```ts
498
- * import { parseArrayBuffer } from 'web-csv-toolbox';
499
- *
500
- * const csv = `name,age
501
- * Alice,42
502
- * Bob,69`;
503
- *
504
- * const buffer = new TextEncoder().encode(csv).buffer;
505
- *
506
- * const records = await parseArrayBuffer.toArray(buffer);
507
- * console.log(records);
508
- * // Prints:
509
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
510
- * ```
511
- */
512
- function toArray<Header extends ReadonlyArray<string>>(
513
- buffer: ArrayBuffer,
514
- options?: ParseBinaryOptions<Header>,
515
- ): Promise<CSVRecord<Header>[]>;
516
- }
517
-
518
- /**
519
- * Parse CSV string stream to records.
520
- *
521
- * @category Middle-level API
522
- * @param stream CSV string stream to parse
523
- * @param options Parsing options.
524
- * @returns Async iterable iterator of records.
525
- *
526
- * If you want an array of records, use the {@link parseStringStream.toArray} function.
527
- *
528
- * @example Parsing CSV files from strings
529
- *
530
- * ```ts
531
- * import { parseStringStream } from 'web-csv-toolbox';
532
- *
533
- * const csv = `name,age
534
- * Alice,42
535
- * Bob,69`;
536
- *
537
- * const stream = new ReadableStream({
538
- * start(controller) {
539
- * controller.enqueue(csv);
540
- * controller.close();
541
- * },
542
- * });
543
- *
544
- * for await (const record of parseStringStream(stream)) {
545
- * console.log(record);
546
- * }
547
- * // Prints:
548
- * // { name: 'Alice', age: '42' }
549
- * // { name: 'Bob', age: '69' }
550
- * ```
551
- */
552
- declare function parseStringStream<Header extends ReadonlyArray<string>>(
553
- stream: ReadableStream<string>,
554
- options?: ParseOptions<Header>,
555
- ): AsyncIterableIterator<CSVRecord<Header>>;
556
- declare namespace parseStringStream {
557
- /**
558
- * Parse CSV string stream to records.
559
- *
560
- * @returns Array of records
561
- *
562
- * @example
563
- *
564
- * ```ts
565
- * import { parseStringStream } from 'web-csv-toolbox';
566
- *
567
- * const csv = `name,age
568
- * Alice,42
569
- * Bob,69`;
570
- *
571
- * const stream = new ReadableStream({
572
- * start(controller) {
573
- * controller.enqueue(csv);
574
- * controller.close();
575
- * },
576
- * });
577
- *
578
- * const records = await parseStringStream.toArray(stream);
579
- * console.log(records);
580
- * // Prints:
581
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
582
- * ```
583
- */
584
- function toArray<Header extends ReadonlyArray<string>>(
585
- stream: ReadableStream<string>,
586
- options?: ParseOptions<Header>,
587
- ): Promise<CSVRecord<Header>[]>;
588
- }
589
-
590
- /**
591
- * Parse HTTP Response what contains CSV to records,
592
- * ideal for smaller data sets.
593
- *
594
- * @remarks
595
- * This function automatically handles response headers.
596
- *
597
- * - If `Content-Type` header is not set, it assumes `text/csv`.
598
- * - If `Content-Type` header is not `text/csv`, it throws an error.
599
- * - If `Content-Type` header has charset parameter, it uses it for decoding.
600
- * - If `Content-Encoding` header is set, it decompresses the response.
601
- * - Should there be any conflicting information between the header and the options, the option's value will take precedence.
602
- *
603
- * @category Middle-level API
604
- * @param response
605
- * @param options
606
- * @returns Async iterable iterator of records.
607
- *
608
- * If you want an array of records, use the {@link parseResponse.toArray} function.
609
- *
610
- * @example Parsing CSV Response
611
- *
612
- * ```ts
613
- * import { parseResponse } from 'web-csv-toolbox';
614
- *
615
- * const response = await fetch('https://example.com/data.csv');
616
- *
617
- * for await (const record of parseResponse(response)) {
618
- * console.log(record);
619
- * }
620
- * ```
621
- */
622
- declare function parseResponse<Header extends ReadonlyArray<string>>(
623
- response: Response,
624
- options?: ParseOptions<Header>,
625
- ): AsyncIterableIterator<CSVRecord<Header>>;
626
- declare namespace parseResponse {
627
- /**
628
- * Parse CSV Response to array of records.
629
- *
630
- * @returns Array of records
631
- *
632
- * @example Parsing CSV Response
633
- *
634
- * ```ts
635
- * import { parseResponse } from 'web-csv-toolbox';
636
- *
637
- * const response = await fetch('https://example.com/data.csv');
638
- *
639
- * const records = await parseResponse.toArray(response);
640
- * console.log(records);
641
- * ```
642
- */
643
- function toArray<Header extends ReadonlyArray<string>>(
644
- response: Response,
645
- options?: ParseOptions<Header>,
646
- ): Promise<CSVRecord<Header>[]>;
647
- }
648
-
649
- /**
650
- * Parse CSV Stream to records,
651
- * ideal for smaller data sets.
652
- *
653
- * {@link !ReadableStream} of {@link !String} and {@link !Uint8Array} are supported.
654
- *
655
- * @remarks
656
- * {@link parseStringStream} and {@link parseUint8ArrayStream} are used internally.
657
- * If you know the type of the stream, it performs better to use them directly.
658
- *
659
- * If you want to parse a string, use {@link parseStringStream}.
660
- * If you want to parse a Uint8Array, use {@link parseUint8ArrayStream}.
661
- *
662
- * @category Middle-level API
663
- * @param stream CSV stream to parse
664
- * @param options Parsing options. See {@link ParseOptions}.
665
- * @returns Async iterable iterator of records.
666
- *
667
- * If you want an array of records, use the {@link parseStream.toArray} function.
668
- *
669
- * @example Parsing CSV string stream
670
- *
671
- * ```ts
672
- *
673
- * import { parseStream } from 'web-csv-toolbox';
674
- *
675
- * const csv = `name,age
676
- * Alice,42
677
- * Bob,69`;
678
- *
679
- * const stream = new ReadableStream({
680
- * start(controller) {
681
- * controller.enqueue(csv);
682
- * controller.close();
683
- * },
684
- * });
685
- *
686
- * for await (const record of parseStream(stream)) {
687
- * console.log(record);
688
- * }
689
- * // Prints:
690
- * // { name: 'Alice', age: '42' }
691
- * // { name: 'Bob', age: '69' }
692
- * ```
693
- *
694
- * @example Parsing CSV binary stream
695
- *
696
- * ```ts
697
- * import { parseStream } from 'web-csv-toolbox';
698
- *
699
- * const csv = Uint8Array.from([
700
- * // ...
701
- * ]);
702
- *
703
- * const stream = new ReadableStream({
704
- * start(controller) {
705
- * controller.enqueue(csv);
706
- * controller.close();
707
- * },
708
- * });
709
- *
710
- * for await (const record of parseStream(stream)) {
711
- * console.log(record);
712
- * }
713
- * ```
714
- */
715
- declare function parseStream<Header extends ReadonlyArray<string>>(
716
- stream: ReadableStream<Uint8Array | string>,
717
- options?: ParseBinaryOptions<Header>,
718
- ): AsyncIterableIterator<CSVRecord<Header>>;
719
- declare namespace parseStream {
720
- /**
721
- * Parse CSV Stream to array of records.
722
- *
723
- * @returns Array of records
724
- */
725
- function toArray<Header extends ReadonlyArray<string>>(
726
- stream: ReadableStream<Uint8Array>,
727
- options?: ParseBinaryOptions<Header>,
728
- ): Promise<CSVRecord<Header>[]>;
729
- }
730
-
731
- /**
732
- * Parse CSV to records.
733
- *
734
- * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.
735
- *
736
- *
737
- * @typeParam Header Header type like `['name', 'age']`.
738
- *
739
- * @param csv CSV string to parse.
740
- * @param options Parsing options for CSV string parsing.
741
- * @returns Async iterable iterator of records.
742
- *
743
- * If you want an array of records, use the {@link parse.toArray} function.
744
- * @category High-level API
745
- *
746
- * @remarks
747
- * {@link parseString}, {@link parseUint8ArrayStream},
748
- * {@link parseArrayBuffer}, {@link parseUint8Array},
749
- * {@link parseStringStream} and {@link parseResponse} are used internally.
750
- *
751
- * If you know the type of the CSV, it performs better to use them directly.
752
- *
753
- * | If you want to parse a... | Use... | Data are treated as... |
754
- * | ----------------------------------- | ----------------------------- | ---------------------- |
755
- * | {@link !String} | {@link parseString} | String |
756
- * | {@link !ReadableStream}<string> | {@link parseStringStream} | String |
757
- * | {@link !ReadableStream}<Uint8Array> | {@link parseUint8ArrayStream} | Binary |
758
- * | {@link !Response} | {@link parseResponse} | Binary |
759
- * | {@link !ArrayBuffer} | {@link parseArrayBuffer} | Binary |
760
- * | {@link !Uint8Array} | {@link parseUint8Array} | Binary |
761
- *
762
- * @example Parsing CSV files from strings
763
- *
764
- * ```ts
765
- * import { parse } from 'web-csv-toolbox';
766
- *
767
- * const csv = `name,age
768
- * Alice,42
769
- * Bob,69`;
770
- *
771
- * for await (const record of parse(csv)) {
772
- * console.log(record);
773
- * }
774
- * // Prints:
775
- * // { name: 'Alice', age: '42' }
776
- * // { name: 'Bob', age: '69' }
777
- * ```
778
- *
779
- * @example Parsing CSV files from streams
780
- *
781
- * ```ts
782
- * import { parse } from 'web-csv-toolbox';
783
- *
784
- * const csv = `name,age
785
- * Alice,42
786
- * Bob,69`;
787
- *
788
- * const stream = new ReadableStream({
789
- * start(controller) {
790
- * controller.enqueue(csv);
791
- * controller.close();
792
- * }
793
- * });
794
- *
795
- * for await (const record of parse(stream)) {
796
- * console.log(record);
797
- * }
798
- * // Prints:
799
- * // { name: 'Alice', age: '42' }
800
- * // { name: 'Bob', age: '69' }
801
- * ```
802
- *
803
- *
804
- * @example Parsing CSV files with headers
805
- *
806
- * ```ts
807
- * import { parse } from 'web-csv-toolbox';
808
- *
809
- * // This CSV has no header.
810
- * const csv = `Alice,42
811
- * Bob,69`;
812
- *
813
- * for await (const record of parse(csv, { header: ['name', 'age'] })) {
814
- * console.log(record);
815
- * }
816
- * // Prints:
817
- * // { name: 'Alice', age: '42' }
818
- * // { name: 'Bob', age: '69' }
819
- * ```
820
- *
821
- * @example Parsing CSV files with different delimiter characters
822
- *
823
- * ```ts
824
- * import { parse } from 'web-csv-toolbox';
825
- *
826
- * const csv = `name\tage
827
- * Alice\t42
828
- * Bob\t69`;
829
- *
830
- * for await (const record of parse(csv, { delimiter: '\t' })) {
831
- * console.log(record);
832
- * }
833
- * // Prints:
834
- * // { name: 'Alice', age: '42' }
835
- * // { name: 'Bob', age: '69' }
836
- * ```
837
- */
838
- declare function parse<Header extends ReadonlyArray<string>>(
839
- csv: CSVString,
840
- options?: ParseOptions<Header>,
841
- ): AsyncIterableIterator<CSVRecord<Header>>;
842
- /**
843
- * Parse CSV binary to records.
844
- *
845
- * @param csv CSV binary to parse.
846
- * @param options Parsing options for CSV binary parsing.
847
- *
848
- * @example Parsing CSV files from responses
849
- *
850
- * ```ts
851
- * import { parse } from 'web-csv-toolbox';
852
- *
853
- * // This CSV data is not gzipped and is encoded in utf-8.
854
- * const response = await fetch('https://example.com/data.csv');
855
- *
856
- * for await (const record of parse(response)) {
857
- * // ...
858
- * }
859
- * ```
860
- *
861
- * @example Parsing CSV files with options specialized for binary
862
- *
863
- * ```ts
864
- * import { parse } from 'web-csv-toolbox';
865
- *
866
- * // This CSV data is gzipped and encoded in shift-jis and has BOM.
867
- * const response = await fetch('https://example.com/data.csv.gz');
868
- *
869
- * for await (const record of parse(response, {
870
- * charset: 'shift-jis',
871
- * ignoreBOM: true,
872
- * decomposition: 'gzip',
873
- * })) {
874
- * // ...
875
- * }
876
- * ```
877
- */
878
- declare function parse<Header extends ReadonlyArray<string>>(
879
- csv: CSVBinary,
880
- options?: ParseBinaryOptions<Header>,
881
- ): AsyncIterableIterator<CSVRecord<Header>>;
882
- declare namespace parse {
883
- /**
884
- * Parse CSV string to array of records,
885
- * ideal for smaller data sets.
886
- *
887
- * @example Parse a CSV as array of records
888
- *
889
- * ```ts
890
- * import { parse } from 'web-csv-toolbox';
891
- *
892
- * const csv = `name,age
893
- * Alice,42
894
- * Bob,69`;
895
- *
896
- * const records = await parse.toArray(csv);
897
- * console.log(records);
898
- * // Prints:
899
- * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
900
- * ```
901
- */
902
- function toArray<Header extends ReadonlyArray<string>>(
903
- csv: CSVString,
904
- options?: ParseOptions<Header>,
905
- ): Promise<CSVRecord<Header>[]>;
906
- /**
907
- * Parse CSV string to array of records,
908
- * ideal for smaller data sets.
909
- *
910
- * @example Parse a CSV as array of records
911
- *
912
- * ```ts
913
- * import { parse } from 'web-csv-toolbox';
914
- *
915
- * const response = await fetch('https://example.com/data.csv');
916
- *
917
- * const records = await parse.toArray(response);
918
- * console.log(records);
919
- * ```
920
- */
921
- function toArray<Header extends ReadonlyArray<string>>(
922
- csv: CSVBinary,
923
- options?: ParseBinaryOptions<Header>,
924
- ): Promise<CSVRecord<Header>[]>;
925
- }
926
-
927
- export {
928
- type BinaryOptions,
929
- type CSV,
930
- type CSVBinary,
931
- type CSVRecord,
932
- type CSVString,
933
- type CommonOptions,
934
- Field,
935
- FieldDelimiter,
936
- LexerTransformer,
937
- type ParseBinaryOptions,
938
- type ParseOptions,
939
- type RecordAssemblerOptions,
940
- RecordAssemblerTransformar,
941
- RecordDelimiter,
942
- type Token,
943
- type TokenType,
944
- parse,
945
- parseArrayBuffer,
946
- parseResponse,
947
- parseStream,
948
- parseString,
949
- parseStringStream,
950
- parseUint8Array,
951
- parseUint8ArrayStream,
952
- };
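
For reference, the deleted `index.d.ts` above describes the full 0.4.0 API surface that this release replaces. The sketch below is assembled from the JSDoc examples embedded in those declarations and shows how the exported `parse` and `parseString` functions were typically used; it assumes an ESM context with top-level await and is illustrative only, not an excerpt from the package.

```ts
import { parse, parseString } from 'web-csv-toolbox';

const csv = `name,age
Alice,42
Bob,69`;

// High-level API: iterate records one at a time.
for await (const record of parse(csv)) {
  console.log(record);
  // { name: 'Alice', age: '42' }
  // { name: 'Bob', age: '69' }
}

// Collect all records at once, which the docs recommend for smaller data sets.
const records = await parseString.toArray(csv);
console.log(records);
// [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]
```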