web-csv-toolbox 0.12.1-next-290a5172a9f724fd09008b078b7472caefbba3cb → 0.12.1-next-7c7903ff7d0e716f84a2595c20c3a509beccb8e9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -383,6 +383,130 @@ console.log(result);
  | `ignoreBOM` | Whether to ignore Byte Order Mark (BOM) | `false` | See [TextDecoderOptions.ignoreBOM](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/ignoreBOM) for more information about the BOM. |
  | `fatal` | Throw an error on invalid characters | `false` | See [TextDecoderOptions.fatal](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/fatal) for more information. |
 
+ ## Performance & Best Practices ⚡
+
+ ### Memory Characteristics
+
+ web-csv-toolbox uses different memory patterns depending on the API you choose:
+
+ #### 🌊 Streaming APIs (Memory Efficient)
+
+ ##### Recommended for large files (> 10MB)
+
+ ```js
+ import { parse } from 'web-csv-toolbox';
+
+ // ✅ Memory efficient: processes one record at a time
+ const response = await fetch('https://example.com/large-data.csv');
+ for await (const record of parse(response)) {
+   console.log(record);
+   // Memory footprint: roughly a few KB per iteration
+ }
+ ```
+
+ - **Memory usage**: O(1) - constant per record
+ - **Suitable for**: Files of any size, browser environments
+ - **Max file size**: Limited only by available storage/network
+
+ #### 📦 Array-Based APIs (Memory Intensive)
+
+ ##### Recommended for small files (< 1MB)
+
+ ```js
+ import { parse } from 'web-csv-toolbox';
+
+ // ⚠️ Loads entire result into memory
+ const csv = await fetch('data.csv').then(r => r.text());
+ const records = await parse.toArray(csv);
+ // Memory footprint: entire file + parsed array
+ ```
+
+ - **Memory usage**: O(n) - proportional to file size
+ - **Suitable for**: Small datasets, quick prototyping
+ - **Recommended max**: ~10MB (browser), ~100MB (Node.js)
+
+ ### Platform-Specific Considerations
+
+ | Platform | Streaming | Array-Based | Notes |
+ |----------|-----------|-------------|-------|
+ | **Browser** | Any size | < 10MB | Browser heap limits apply (~100MB-4GB depending on browser) |
+ | **Node.js** | Any size | < 100MB | Use `--max-old-space-size` flag for larger heaps |
+ | **Deno** | Any size | < 100MB | Similar to Node.js |
+
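To make the table above actionable, here is a minimal, illustrative sketch (not part of the package) that checks the `Content-Length` header against a hypothetical 10MB threshold before choosing between `parse.toArray()` and the streaming `parse()` API. The header may be absent, and the right limit depends on your platform.

```js
import { parse } from 'web-csv-toolbox';

// Hypothetical threshold: fall back to streaming above ~10MB.
const MAX_ARRAY_BYTES = 10 * 1024 * 1024;

const response = await fetch('https://example.com/data.csv');
// Content-Length may be missing; treat unknown sizes as "large".
const size = Number(response.headers.get('Content-Length') ?? Number.MAX_SAFE_INTEGER);

if (size <= MAX_ARRAY_BYTES) {
  // Small file: buffering everything is acceptable.
  const records = await parse.toArray(response);
  console.log(records.length);
} else {
  // Large or unknown size: stream one record at a time.
  for await (const record of parse(response)) {
    console.log(record);
  }
}
```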
+ ### Performance Tips
+
+ #### 1. Use streaming for large files
+
+ ```js
+ import { parse } from 'web-csv-toolbox';
+
+ const response = await fetch('https://example.com/large-data.csv');
+
+ // ✅ Good: Streaming approach (constant memory usage)
+ for await (const record of parse(response)) {
+   // Process each record immediately
+   console.log(record);
+   // Memory footprint: O(1) - only one record in memory at a time
+ }
+
+ // ❌ Avoid: Loading entire file into memory first
+ const response2 = await fetch('https://example.com/large-data.csv');
+ const text = await response2.text(); // Loads entire file into memory
+ const records = await parse.toArray(text); // Loads all records into memory
+ for (const record of records) {
+   console.log(record);
+   // Memory footprint: O(n) - entire file + all records in memory
+ }
+ ```
+
+ #### 2. Enable AbortSignal for timeout protection
+
+ ```js
+ import { parse } from 'web-csv-toolbox';
+
+ // Set up a timeout of 30 seconds (30000 milliseconds)
+ const signal = AbortSignal.timeout(30000);
+
+ const response = await fetch('https://example.com/large-data.csv');
+
+ try {
+   for await (const record of parse(response, { signal })) {
+     // Process each record
+     console.log(record);
+   }
+ } catch (error) {
+   if (error instanceof DOMException && error.name === 'TimeoutError') {
+     // Handle timeout
+     console.log('CSV processing was aborted due to timeout.');
+   } else {
+     // Handle other errors
+     console.error('An error occurred during CSV processing:', error);
+   }
+ }
+ ```
+
+ #### 3. Use WebAssembly parser for CPU-intensive workloads (Experimental)
+
+ ```js
+ import { loadWASM, parseStringToArraySyncWASM } from 'web-csv-toolbox';
+
+ // Load the WASM module before calling the WASM parser
+ await loadWASM();
+
+ // 2-3x faster for large CSV strings (UTF-8 only)
+ const records = parseStringToArraySyncWASM(csvString);
+ ```
+
+ ### Known Limitations
+
+ - **Delimiter/Quotation**: Must be a single character (multi-character delimiters are not supported); see the sketch below
+ - **WASM Parser**: UTF-8 encoding only, double-quote (`"`) only
+ - **Streaming**: Best performance with chunk sizes > 1KB
+
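A short illustrative sketch of the single-character delimiter constraint mentioned above. The exact error thrown for an invalid delimiter is not specified here and may differ between the JavaScript and WASM parsers.

```js
import { parse } from 'web-csv-toolbox';

const csv = 'name;age\nAlice;42\nBob;69';

// ✅ Single-character delimiters are supported
for await (const record of parse(csv, { delimiter: ';' })) {
  console.log(record); // { name: 'Alice', age: '42' }, ...
}

// ❌ Multi-character delimiters are not supported and are rejected
try {
  for await (const record of parse(csv, { delimiter: '::' })) {
    console.log(record);
  }
} catch (error) {
  console.error('Invalid delimiter:', error);
}
```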
+ ### Security Considerations
+
+ For production use with untrusted input, consider the following (a sketch combining these checks follows below):
+ - Setting timeouts using `AbortSignal.timeout()` to prevent resource exhaustion
+ - Implementing file size limits at the application level
+ - Validating parsed data before use
+
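A minimal sketch combining the three measures above, assuming a hypothetical 50MB limit and an application-specific `isValidRow()` check (both are placeholders, not part of the package):

```js
import { parse } from 'web-csv-toolbox';

// Hypothetical application-level limits
const MAX_BYTES = 50 * 1024 * 1024; // 50 MB
const signal = AbortSignal.timeout(30_000); // 30-second budget

const response = await fetch('https://example.com/untrusted.csv');
// Content-Length may be absent on some servers; this check is best-effort.
const size = Number(response.headers.get('Content-Length') ?? 0);
if (size > MAX_BYTES) {
  throw new Error(`CSV too large: ${size} bytes`);
}

// Placeholder validation: accept only rows with a numeric "age" field
const isValidRow = (record) => Number.isFinite(Number(record.age));

for await (const record of parse(response, { signal })) {
  if (!isValidRow(record)) continue; // drop or reject invalid rows
  // ...use the validated record
}
```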
  ## How to Contribute 💪
 
  ## Star ⭐
package/dist/parse.d.ts CHANGED
@@ -30,6 +30,14 @@ import { PickCSVHeader } from './utils/types.ts';
  * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |
  * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |
  *
+ * **Performance Characteristics:**
+ * - **Memory usage**: O(1) - constant per record (streaming approach)
+ * - **Suitable for**: Files of any size, browser and server environments
+ * - **Recommended for**: Large files (> 10MB) or memory-constrained environments
+ *
+ * This function processes CSV data as an async iterable iterator, yielding one record at a time.
+ * Memory footprint remains constant regardless of file size, making it ideal for large datasets.
+ *
  * @example Parsing CSV files from strings
  *
  * ```ts
@@ -152,6 +160,15 @@ export declare namespace parse {
  * Parse CSV string to array of records,
  * ideal for smaller data sets.
  *
+ * @remarks
+ * **Performance Characteristics:**
+ * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)
+ * - **Suitable for**: Small datasets, quick prototyping
+ * - **Recommended max**: ~10MB (browser), ~100MB (Node.js/Deno)
+ *
+ * This function collects all records into an array before returning.
+ * For large files, consider using the streaming {@link parse} function instead.
+ *
  * @example Parse a CSV as array of records
  *
  * ```ts
@@ -169,9 +186,18 @@ export declare namespace parse {
  */
  function toArray<Header extends ReadonlyArray<string>>(csv: CSVString, options?: ParseOptions<Header>): Promise<CSVRecord<Header>[]>;
  /**
- * Parse CSV string to array of records,
+ * Parse CSV binary to array of records,
  * ideal for smaller data sets.
  *
+ * @remarks
+ * **Performance Characteristics:**
+ * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)
+ * - **Suitable for**: Small datasets, quick prototyping
+ * - **Recommended max**: ~10MB (browser), ~100MB (Node.js/Deno)
+ *
+ * This function collects all records into an array before returning.
+ * For large files, consider using the streaming {@link parse} function instead.
+ *
  * @example Parse a CSV as array of records
  *
  * ```ts
package/dist/parse.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"parse.js","sources":["../src/parse.ts"],"sourcesContent":["import type {\n CSV,\n CSVBinary,\n CSVRecord,\n CSVString,\n ParseBinaryOptions,\n ParseOptions,\n} from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { parseBinary } from \"./parseBinary.ts\";\nimport { parseResponse } from \"./parseResponse.ts\";\nimport { parseString } from \"./parseString.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV to records.\n *\n * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.\n *\n *\n * @typeParam Header Header type like `['name', 'age']`.\n *\n * @param csv CSV string to parse.\n * @param options Parsing options for CSV string parsing.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parse.toArray} function.\n * @category High-level API\n *\n * @remarks\n * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},\n * {@link parseStringStream} and {@link parseResponse} are used internally.\n *\n * If you known the type of the CSV, it performs better to use them directly.\n *\n * | If you want to parse a... | Use... | Options... |\n * | -------------------------------------------- | ----------------------------- | -------------------------- |\n * | {@link !String} | {@link parseString} | {@link ParseOptions} |\n * | {@link !ReadableStream}<{@link !String}> | {@link parseStringStream} | {@link ParseOptions} |\n * | {@link !Uint8Array} \\| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |\n * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |\n * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files from streams\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * }\n * });\n *\n * for await (const record of parse(stream)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n *\n * @example Parsing CSV files with headers\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV has no header.\n * const csv = `Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv, { header: ['name', 'age'] })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files with different delimiters characters\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name\\tage\n * Alice\\t42\n * Bob\\t69`;\n *\n * for await (const record of parse(csv, { delimiter: '\\t' })) {\n * 
console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parse<const CSVSource extends CSVString>(\n csv: CSVSource,\n): AsyncIterableIterator<CSVRecord<PickCSVHeader<CSVSource>>>;\nexport function parse<const Header extends ReadonlyArray<string>>(\n csv: CSVString,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parse<const Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parse<\n const CSVSource extends CSVString,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\n/**\n * Parse CSV binary to records.\n *\n * @param csv CSV binary to parse.\n * @param options Parsing options for CSV binary parsing.\n *\n * @example Parsing CSV files from responses\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is not gzipped and encoded in utf-8.\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parse(response)) {\n * // ...\n * }\n * ```\n *\n * @example Parsing CSV files with options spcialized for binary\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is gzipped and encoded in shift-jis and has BOM.\n * const response = await fetch('https://example.com/data.csv.gz');\n *\n * for await (const record of parse(response, {\n * charset: 'shift-jis',\n * ignoreBOM: true,\n * decomposition: 'gzip',\n * })) {\n * // ...\n * }\n * ```\n */\nexport function parse<const Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport async function* parse<const Header extends ReadonlyArray<string>>(\n csv: CSV,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n if (typeof csv === \"string\") {\n yield* parseString(csv, options);\n } else if (csv instanceof Uint8Array || csv instanceof ArrayBuffer) {\n yield* parseBinary(csv, options);\n } else if (csv instanceof ReadableStream) {\n const [branch1, branch2] = csv.tee();\n const reader1 = branch1.getReader();\n const { value: firstChunk } = await reader1.read();\n reader1.releaseLock();\n if (typeof firstChunk === \"string\") {\n yield* parseStringStream(branch2 as ReadableStream<string>, options);\n } else if (firstChunk instanceof Uint8Array) {\n yield* parseUint8ArrayStream(\n branch2 as ReadableStream<Uint8Array>,\n options,\n );\n }\n } else if (csv instanceof Response) {\n yield* parseResponse(csv, options);\n }\n}\n\nexport declare namespace parse {\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parse.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string 
to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parse.toArray(response);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n}\n\nObject.defineProperties(parse, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;;AAyLuB,gBAAA,KAAA,CACrB,KACA,OAC0C,EAAA;AAC1C,EAAI,IAAA,OAAO,QAAQ,QAAU,EAAA;AAC3B,IAAO,OAAA,WAAA,CAAY,KAAK,OAAO,CAAA;AAAA,GACtB,MAAA,IAAA,GAAA,YAAe,UAAc,IAAA,GAAA,YAAe,WAAa,EAAA;AAClE,IAAO,OAAA,WAAA,CAAY,KAAK,OAAO,CAAA;AAAA,GACjC,MAAA,IAAW,eAAe,cAAgB,EAAA;AACxC,IAAA,MAAM,CAAC,OAAA,EAAS,OAAO,CAAA,GAAI,IAAI,GAAI,EAAA;AACnC,IAAM,MAAA,OAAA,GAAU,QAAQ,SAAU,EAAA;AAClC,IAAA,MAAM,EAAE,KAAO,EAAA,UAAA,EAAe,GAAA,MAAM,QAAQ,IAAK,EAAA;AACjD,IAAA,OAAA,CAAQ,WAAY,EAAA;AACpB,IAAI,IAAA,OAAO,eAAe,QAAU,EAAA;AAClC,MAAO,OAAA,iBAAA,CAAkB,SAAmC,OAAO,CAAA;AAAA,KACrE,MAAA,IAAW,sBAAsB,UAAY,EAAA;AAC3C,MAAO,OAAA,qBAAA;AAAA,QACL,OAAA;AAAA,QACA;AAAA,OACF;AAAA;AACF,GACF,MAAA,IAAW,eAAe,QAAU,EAAA;AAClC,IAAO,OAAA,aAAA,CAAc,KAAK,OAAO,CAAA;AAAA;AAErC;AA+CA,MAAA,CAAO,iBAAiB,KAAO,EAAA;AAAA,EAC7B,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA;AAAS;AAEpB,CAAC,CAAA;;;;"}
+ {"version":3,"file":"parse.js","sources":["../src/parse.ts"],"sourcesContent":["import type {\n CSV,\n CSVBinary,\n CSVRecord,\n CSVString,\n ParseBinaryOptions,\n ParseOptions,\n} from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { parseBinary } from \"./parseBinary.ts\";\nimport { parseResponse } from \"./parseResponse.ts\";\nimport { parseString } from \"./parseString.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV to records.\n *\n * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.\n *\n *\n * @typeParam Header Header type like `['name', 'age']`.\n *\n * @param csv CSV string to parse.\n * @param options Parsing options for CSV string parsing.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parse.toArray} function.\n * @category High-level API\n *\n * @remarks\n * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},\n * {@link parseStringStream} and {@link parseResponse} are used internally.\n *\n * If you known the type of the CSV, it performs better to use them directly.\n *\n * | If you want to parse a... | Use... | Options... |\n * | -------------------------------------------- | ----------------------------- | -------------------------- |\n * | {@link !String} | {@link parseString} | {@link ParseOptions} |\n * | {@link !ReadableStream}<{@link !String}> | {@link parseStringStream} | {@link ParseOptions} |\n * | {@link !Uint8Array} \\| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |\n * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |\n * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |\n *\n * **Performance Characteristics:**\n * - **Memory usage**: O(1) - constant per record (streaming approach)\n * - **Suitable for**: Files of any size, browser and server environments\n * - **Recommended for**: Large files (> 10MB) or memory-constrained environments\n *\n * This function processes CSV data as an async iterable iterator, yielding one record at a time.\n * Memory footprint remains constant regardless of file size, making it ideal for large datasets.\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files from streams\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * }\n * });\n *\n * for await (const record of parse(stream)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n *\n * @example Parsing CSV files with headers\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV has no header.\n * const csv = `Alice,42\n * Bob,69`;\n 
*\n * for await (const record of parse(csv, { header: ['name', 'age'] })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files with different delimiters characters\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name\\tage\n * Alice\\t42\n * Bob\\t69`;\n *\n * for await (const record of parse(csv, { delimiter: '\\t' })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parse<const CSVSource extends CSVString>(\n csv: CSVSource,\n): AsyncIterableIterator<CSVRecord<PickCSVHeader<CSVSource>>>;\nexport function parse<const Header extends ReadonlyArray<string>>(\n csv: CSVString,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parse<const Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parse<\n const CSVSource extends CSVString,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\n/**\n * Parse CSV binary to records.\n *\n * @param csv CSV binary to parse.\n * @param options Parsing options for CSV binary parsing.\n *\n * @example Parsing CSV files from responses\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is not gzipped and encoded in utf-8.\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parse(response)) {\n * // ...\n * }\n * ```\n *\n * @example Parsing CSV files with options spcialized for binary\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is gzipped and encoded in shift-jis and has BOM.\n * const response = await fetch('https://example.com/data.csv.gz');\n *\n * for await (const record of parse(response, {\n * charset: 'shift-jis',\n * ignoreBOM: true,\n * decomposition: 'gzip',\n * })) {\n * // ...\n * }\n * ```\n */\nexport function parse<const Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport async function* parse<const Header extends ReadonlyArray<string>>(\n csv: CSV,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n if (typeof csv === \"string\") {\n yield* parseString(csv, options);\n } else if (csv instanceof Uint8Array || csv instanceof ArrayBuffer) {\n yield* parseBinary(csv, options);\n } else if (csv instanceof ReadableStream) {\n const [branch1, branch2] = csv.tee();\n const reader1 = branch1.getReader();\n const { value: firstChunk } = await reader1.read();\n reader1.releaseLock();\n if (typeof firstChunk === \"string\") {\n yield* parseStringStream(branch2 as ReadableStream<string>, options);\n } else if (firstChunk instanceof Uint8Array) {\n yield* parseUint8ArrayStream(\n branch2 as ReadableStream<Uint8Array>,\n options,\n );\n }\n } else if (csv instanceof Response) {\n yield* parseResponse(csv, options);\n }\n}\n\nexport declare namespace parse {\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @remarks\n * **Performance 
Characteristics:**\n * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)\n * - **Suitable for**: Small datasets, quick prototyping\n * - **Recommended max**: ~10MB (browser), ~100MB (Node.js/Deno)\n *\n * This function collects all records into an array before returning.\n * For large files, consider using the streaming {@link parse} function instead.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parse.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV binary to array of records,\n * ideal for smaller data sets.\n *\n * @remarks\n * **Performance Characteristics:**\n * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)\n * - **Suitable for**: Small datasets, quick prototyping\n * - **Recommended max**: ~10MB (browser), ~100MB (Node.js/Deno)\n *\n * This function collects all records into an array before returning.\n * For large files, consider using the streaming {@link parse} function instead.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parse.toArray(response);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n}\n\nObject.defineProperties(parse, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;;AAiMuB,gBAAA,KAAA,CACrB,KACA,OAC0C,EAAA;AAC1C,EAAI,IAAA,OAAO,QAAQ,QAAU,EAAA;AAC3B,IAAO,OAAA,WAAA,CAAY,KAAK,OAAO,CAAA;AAAA,GACtB,MAAA,IAAA,GAAA,YAAe,UAAc,IAAA,GAAA,YAAe,WAAa,EAAA;AAClE,IAAO,OAAA,WAAA,CAAY,KAAK,OAAO,CAAA;AAAA,GACjC,MAAA,IAAW,eAAe,cAAgB,EAAA;AACxC,IAAA,MAAM,CAAC,OAAA,EAAS,OAAO,CAAA,GAAI,IAAI,GAAI,EAAA;AACnC,IAAM,MAAA,OAAA,GAAU,QAAQ,SAAU,EAAA;AAClC,IAAA,MAAM,EAAE,KAAO,EAAA,UAAA,EAAe,GAAA,MAAM,QAAQ,IAAK,EAAA;AACjD,IAAA,OAAA,CAAQ,WAAY,EAAA;AACpB,IAAI,IAAA,OAAO,eAAe,QAAU,EAAA;AAClC,MAAO,OAAA,iBAAA,CAAkB,SAAmC,OAAO,CAAA;AAAA,KACrE,MAAA,IAAW,sBAAsB,UAAY,EAAA;AAC3C,MAAO,OAAA,qBAAA;AAAA,QACL,OAAA;AAAA,QACA;AAAA,OACF;AAAA;AACF,GACF,MAAA,IAAW,eAAe,QAAU,EAAA;AAClC,IAAO,OAAA,aAAA,CAAc,KAAK,OAAO,CAAA;AAAA;AAErC;AAiEA,MAAA,CAAO,iBAAiB,KAAO,EAAA;AAAA,EAC7B,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA;AAAS;AAEpB,CAAC,CAAA;;;;"}
package/dist/parseString.d.ts CHANGED
@@ -10,6 +10,13 @@ import { PickCSVHeader } from './utils/types.ts';
  * @returns Async iterable iterator of records.
  *
  * If you want array of records, use {@link parseString.toArray} function.
+ *
+ * @remarks
+ * **Performance Characteristics:**
+ * - **Memory usage**: O(1) - constant per record (streaming approach)
+ * - **Suitable for**: Files of any size
+ * - **Recommended for**: Large CSV strings (> 10MB) or memory-constrained environments
+ *
  * @example Parsing CSV files from strings
  *
  * ```ts
package/dist/parseString.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"parseString.js","sources":["../src/parseString.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parseString<const CSVSource extends string>(\n csv: CSVSource,\n): AsyncIterableIterator<CSVRecord<PickCSVHeader<CSVSource>>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n options: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options?: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString(\n csv: string,\n options?: ParseOptions,\n): AsyncIterableIterator<CSVRecord<string[]>>;\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n yield* parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * 
```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;AA8DuB,gBAAA,WAAA,CACrB,KACA,OAC0C,EAAA;AAC1C,EAAI,IAAA;AACF,IAAO,OAAA,6BAAA,CAA8B,KAAK,OAAO,CAAA;AAAA,WAC1C,KAAO,EAAA;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA;AAElC;AAyGA,MAAA,CAAO,iBAAiB,WAAa,EAAA;AAAA,EACnC,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,WAAa,EAAA;AAAA,IACX,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA,GACT;AAAA,EACA,kBAAoB,EAAA;AAAA,IAClB,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA,GACT;AAAA,EACA,QAAU,EAAA;AAAA,IACR,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA;AAEX,CAAC,CAAA;;;;"}
+ {"version":3,"file":"parseString.js","sources":["../src/parseString.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n *\n * @remarks\n * **Performance Characteristics:**\n * - **Memory usage**: O(1) - constant per record (streaming approach)\n * - **Suitable for**: Files of any size\n * - **Recommended for**: Large CSV strings (> 10MB) or memory-constrained environments\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parseString<const CSVSource extends string>(\n csv: CSVSource,\n): AsyncIterableIterator<CSVRecord<PickCSVHeader<CSVSource>>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n options: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options?: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString(\n csv: string,\n options?: ParseOptions,\n): AsyncIterableIterator<CSVRecord<string[]>>;\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n yield* parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { 
parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;AAqEuB,gBAAA,WAAA,CACrB,KACA,OAC0C,EAAA;AAC1C,EAAI,IAAA;AACF,IAAO,OAAA,6BAAA,CAA8B,KAAK,OAAO,CAAA;AAAA,WAC1C,KAAO,EAAA;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA;AAElC;AAyGA,MAAA,CAAO,iBAAiB,WAAa,EAAA;AAAA,EACnC,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,WAAa,EAAA;AAAA,IACX,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA,GACT;AAAA,EACA,kBAAoB,EAAA;AAAA,IAClB,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA,GACT;AAAA,EACA,QAAU,EAAA;AAAA,IACR,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA;AAEX,CAAC,CAAA;;;;"}
package/dist/parseStringToArraySyncWASM.d.ts CHANGED
@@ -12,6 +12,17 @@ import { PickCSVHeader } from './utils/types.ts';
  * This function uses WebAssembly to parse CSV string.
  * Before calling this function, you must call {@link loadWASM} function.
  *
+ * **Performance Characteristics:**
+ * - **Speed**: 2-3x faster than JavaScript parser for large CSV strings
+ * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)
+ * - **Suitable for**: CPU-intensive workloads, large CSV strings on server-side
+ * - **Recommended max**: ~100MB (Node.js/Deno)
+ *
+ * **Limitations:**
+ * - Only supports UTF-8 string (not UTF-16)
+ * - Only supports double quote (`"`) as quotation character
+ * - Only supports single character as delimiter
+ *
  * This function only supports UTF-8 string.
  * If you pass a string that is not UTF-8, like UTF-16, it throws an error.
  * This function only supports double quote as quotation.
package/dist/parseStringToArraySyncWASM.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"parseStringToArraySyncWASM.js","sources":["../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport {\n DEFAULT_DELIMITER,\n DEFAULT_QUOTATION,\n DOUBLE_QUOTE,\n} from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n * @throws {RangeError | TypeError} - If provided options are invalid.\n */\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n csv: CSVSource,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends ReadonlyArray<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends readonly string[],\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options: CommonOptions<Delimiter, Quotation> = {},\n): CSVRecord<Header>[] {\n const { delimiter = DEFAULT_DELIMITER, quotation = DEFAULT_QUOTATION } =\n options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new RangeError(\n \"Invalid delimiter, must be a single character on WASM.\",\n );\n }\n if (quotation !== DOUBLE_QUOTE) {\n throw new RangeError(\"Invalid quotation, must be double quote on WASM.\");\n }\n assertCommonOptions({ delimiter, quotation });\n const demiliterCode = delimiter.charCodeAt(0);\n return JSON.parse(parseStringToArraySync(csv, 
demiliterCode));\n}\n"],"names":[],"mappings":";;;;AA6EO,SAAS,0BAKd,CAAA,GAAA,EACA,OAA+C,GAAA,EAC1B,EAAA;AACrB,EAAA,MAAM,EAAE,SAAA,GAAY,iBAAmB,EAAA,SAAA,GAAY,mBACjD,GAAA,OAAA;AACF,EAAA,IAAI,OAAO,SAAA,KAAc,QAAY,IAAA,SAAA,CAAU,WAAW,CAAG,EAAA;AAC3D,IAAA,MAAM,IAAI,UAAA;AAAA,MACR;AAAA,KACF;AAAA;AAEF,EAAA,IAAI,cAAc,YAAc,EAAA;AAC9B,IAAM,MAAA,IAAI,WAAW,kDAAkD,CAAA;AAAA;AAEzE,EAAoB,mBAAA,CAAA,EAAE,SAAW,EAAA,SAAA,EAAW,CAAA;AAC5C,EAAM,MAAA,aAAA,GAAgB,SAAU,CAAA,UAAA,CAAW,CAAC,CAAA;AAC5C,EAAA,OAAO,IAAK,CAAA,KAAA,CAAM,sBAAuB,CAAA,GAAA,EAAK,aAAa,CAAC,CAAA;AAC9D;;;;"}
+ {"version":3,"file":"parseStringToArraySyncWASM.js","sources":["../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport {\n DEFAULT_DELIMITER,\n DEFAULT_QUOTATION,\n DOUBLE_QUOTE,\n} from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * **Performance Characteristics:**\n * - **Speed**: 2-3x faster than JavaScript parser for large CSV strings\n * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)\n * - **Suitable for**: CPU-intensive workloads, large CSV strings on server-side\n * - **Recommended max**: ~100MB (Node.js/Deno)\n *\n * **Limitations:**\n * - Only supports UTF-8 string (not UTF-16)\n * - Only supports double quote (`\"`) as quotation character\n * - Only supports single character as delimiter\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n * @throws {RangeError | TypeError} - If provided options are invalid.\n */\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n csv: CSVSource,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends ReadonlyArray<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends readonly string[],\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options: CommonOptions<Delimiter, Quotation> = {},\n): CSVRecord<Header>[] {\n const { delimiter = DEFAULT_DELIMITER, quotation = 
DEFAULT_QUOTATION } =\n options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new RangeError(\n \"Invalid delimiter, must be a single character on WASM.\",\n );\n }\n if (quotation !== DOUBLE_QUOTE) {\n throw new RangeError(\"Invalid quotation, must be double quote on WASM.\");\n }\n assertCommonOptions({ delimiter, quotation });\n const demiliterCode = delimiter.charCodeAt(0);\n return JSON.parse(parseStringToArraySync(csv, demiliterCode));\n}\n"],"names":[],"mappings":";;;;AAwFO,SAAS,0BAKd,CAAA,GAAA,EACA,OAA+C,GAAA,EAC1B,EAAA;AACrB,EAAA,MAAM,EAAE,SAAA,GAAY,iBAAmB,EAAA,SAAA,GAAY,mBACjD,GAAA,OAAA;AACF,EAAA,IAAI,OAAO,SAAA,KAAc,QAAY,IAAA,SAAA,CAAU,WAAW,CAAG,EAAA;AAC3D,IAAA,MAAM,IAAI,UAAA;AAAA,MACR;AAAA,KACF;AAAA;AAEF,EAAA,IAAI,cAAc,YAAc,EAAA;AAC9B,IAAM,MAAA,IAAI,WAAW,kDAAkD,CAAA;AAAA;AAEzE,EAAoB,mBAAA,CAAA,EAAE,SAAW,EAAA,SAAA,EAAW,CAAA;AAC5C,EAAM,MAAA,aAAA,GAAgB,SAAU,CAAA,UAAA,CAAW,CAAC,CAAA;AAC5C,EAAA,OAAO,IAAK,CAAA,KAAA,CAAM,sBAAuB,CAAA,GAAA,EAAK,aAAa,CAAC,CAAA;AAC9D;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "web-csv-toolbox",
- "version": "0.12.1-next-290a5172a9f724fd09008b078b7472caefbba3cb",
+ "version": "0.12.1-next-7c7903ff7d0e716f84a2595c20c3a509beccb8e9",
  "description": "A CSV Toolbox utilizing Web Standard APIs.",
  "type": "module",
  "module": "dist/web-csv-toolbox.js",