web-csv-toolbox 0.13.0-next-7d51d5285be9cffa5103de58469d8de0c98959d7 → 0.13.0-next-9da8ea20512f2a1e07c4d78092cecedb63cd5455

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (189)
  1. package/README.md +6 -6
  2. package/dist/CSVLexer.js.map +1 -1
  3. package/dist/CSVLexerTransformer.d.ts +12 -14
  4. package/dist/CSVLexerTransformer.js +10 -14
  5. package/dist/CSVLexerTransformer.js.map +1 -1
  6. package/dist/CSVRecordAssembler.d.ts +2 -2
  7. package/dist/CSVRecordAssembler.js +62 -44
  8. package/dist/CSVRecordAssembler.js.map +1 -1
  9. package/dist/CSVRecordAssemblerTransformer.d.ts +10 -10
  10. package/dist/CSVRecordAssemblerTransformer.js +4 -4
  11. package/dist/CSVRecordAssemblerTransformer.js.map +1 -1
  12. package/dist/_virtual/web_csv_toolbox_wasm_bg.wasm.js +1 -1
  13. package/dist/assertCommonOptions.js.map +1 -1
  14. package/dist/common/constants.js.map +1 -1
  15. package/dist/common/errors.js.map +1 -1
  16. package/dist/common/types.d.ts +311 -14
  17. package/dist/commonParseErrorHandling.js.map +1 -1
  18. package/dist/constants.js.map +1 -1
  19. package/dist/createWorker.node.d.ts +2 -0
  20. package/dist/createWorker.web.d.ts +2 -0
  21. package/dist/execution/EnginePresets.d.ts +143 -0
  22. package/dist/execution/EnginePresets.js +129 -0
  23. package/dist/execution/EnginePresets.js.map +1 -0
  24. package/dist/execution/InternalEngineConfig.d.ts +89 -0
  25. package/dist/execution/InternalEngineConfig.js +175 -0
  26. package/dist/execution/InternalEngineConfig.js.map +1 -0
  27. package/dist/execution/main/parseBinaryInMain.d.ts +12 -0
  28. package/dist/execution/main/parseStreamInMain.d.ts +12 -0
  29. package/dist/execution/main/parseStringInMain.d.ts +12 -0
  30. package/dist/execution/main/parseUint8ArrayStreamInMain.d.ts +12 -0
  31. package/dist/execution/wasm/parseBinaryInWASM.d.ts +18 -0
  32. package/dist/execution/wasm/parseBinaryInWASM.js +15 -0
  33. package/dist/execution/wasm/parseBinaryInWASM.js.map +1 -0
  34. package/dist/execution/wasm/parseStringInWASM.d.ts +16 -0
  35. package/dist/execution/worker/helpers/ReusableWorkerPool.d.ts +152 -0
  36. package/dist/execution/worker/helpers/ReusableWorkerPool.js +238 -0
  37. package/dist/execution/worker/helpers/ReusableWorkerPool.js.map +1 -0
  38. package/dist/execution/worker/helpers/TransientWorkerPool.d.ts +89 -0
  39. package/dist/execution/worker/helpers/WorkerManager.d.ts +27 -0
  40. package/dist/execution/worker/helpers/WorkerPool.d.ts +50 -0
  41. package/dist/execution/worker/helpers/WorkerSession.d.ts +78 -0
  42. package/dist/execution/worker/helpers/WorkerSession.js +58 -0
  43. package/dist/execution/worker/helpers/WorkerSession.js.map +1 -0
  44. package/dist/execution/worker/helpers/createWorker.node.d.ts +8 -0
  45. package/dist/execution/worker/helpers/createWorker.node.js +15 -0
  46. package/dist/execution/worker/helpers/createWorker.node.js.map +1 -0
  47. package/dist/execution/worker/helpers/createWorker.web.d.ts +8 -0
  48. package/dist/execution/worker/helpers/createWorker.web.js +11 -0
  49. package/dist/execution/worker/helpers/createWorker.web.js.map +1 -0
  50. package/dist/execution/worker/helpers/worker.node.d.ts +1 -0
  51. package/dist/execution/worker/helpers/worker.node.js +11 -0
  52. package/dist/execution/worker/helpers/worker.node.js.map +1 -0
  53. package/dist/execution/worker/helpers/worker.shared.d.ts +90 -0
  54. package/dist/execution/worker/helpers/worker.shared.js +241 -0
  55. package/dist/execution/worker/helpers/worker.shared.js.map +1 -0
  56. package/dist/execution/worker/helpers/worker.web.d.ts +1 -0
  57. package/dist/execution/worker/helpers/worker.web.js +16 -0
  58. package/dist/execution/worker/helpers/worker.web.js.map +1 -0
  59. package/dist/execution/worker/parseBinaryInWorker.node.d.ts +8 -0
  60. package/dist/execution/worker/parseBinaryInWorker.node.js +24 -0
  61. package/dist/execution/worker/parseBinaryInWorker.node.js.map +1 -0
  62. package/dist/execution/worker/parseBinaryInWorker.web.d.ts +8 -0
  63. package/dist/execution/worker/parseBinaryInWorker.web.js +24 -0
  64. package/dist/execution/worker/parseBinaryInWorker.web.js.map +1 -0
  65. package/dist/execution/worker/parseBinaryInWorkerWASM.node.d.ts +8 -0
  66. package/dist/execution/worker/parseBinaryInWorkerWASM.node.js +24 -0
  67. package/dist/execution/worker/parseBinaryInWorkerWASM.node.js.map +1 -0
  68. package/dist/execution/worker/parseBinaryInWorkerWASM.web.d.ts +8 -0
  69. package/dist/execution/worker/parseBinaryInWorkerWASM.web.js +24 -0
  70. package/dist/execution/worker/parseBinaryInWorkerWASM.web.js.map +1 -0
  71. package/dist/execution/worker/parseStreamInWorker.node.d.ts +15 -0
  72. package/dist/execution/worker/parseStreamInWorker.node.js +26 -0
  73. package/dist/execution/worker/parseStreamInWorker.node.js.map +1 -0
  74. package/dist/execution/worker/parseStreamInWorker.web.d.ts +12 -0
  75. package/dist/execution/worker/parseStreamInWorker.web.js +25 -0
  76. package/dist/execution/worker/parseStreamInWorker.web.js.map +1 -0
  77. package/dist/execution/worker/parseStringInWorker.node.d.ts +11 -0
  78. package/dist/execution/worker/parseStringInWorker.node.js +24 -0
  79. package/dist/execution/worker/parseStringInWorker.node.js.map +1 -0
  80. package/dist/execution/worker/parseStringInWorker.web.d.ts +11 -0
  81. package/dist/execution/worker/parseStringInWorker.web.js +24 -0
  82. package/dist/execution/worker/parseStringInWorker.web.js.map +1 -0
  83. package/dist/execution/worker/parseStringInWorkerWASM.node.d.ts +8 -0
  84. package/dist/execution/worker/parseStringInWorkerWASM.node.js +24 -0
  85. package/dist/execution/worker/parseStringInWorkerWASM.node.js.map +1 -0
  86. package/dist/execution/worker/parseStringInWorkerWASM.web.d.ts +8 -0
  87. package/dist/execution/worker/parseStringInWorkerWASM.web.js +24 -0
  88. package/dist/execution/worker/parseStringInWorkerWASM.web.js.map +1 -0
  89. package/dist/execution/worker/parseUint8ArrayStreamInWorker.node.d.ts +12 -0
  90. package/dist/execution/worker/parseUint8ArrayStreamInWorker.node.js +26 -0
  91. package/dist/execution/worker/parseUint8ArrayStreamInWorker.node.js.map +1 -0
  92. package/dist/execution/worker/parseUint8ArrayStreamInWorker.web.d.ts +9 -0
  93. package/dist/execution/worker/parseUint8ArrayStreamInWorker.web.js +25 -0
  94. package/dist/execution/worker/parseUint8ArrayStreamInWorker.web.js.map +1 -0
  95. package/dist/execution/worker/strategies/MessageStreamingStrategy.d.ts +17 -0
  96. package/dist/execution/worker/strategies/MessageStreamingStrategy.js +58 -0
  97. package/dist/execution/worker/strategies/MessageStreamingStrategy.js.map +1 -0
  98. package/dist/execution/worker/strategies/TransferableStreamStrategy.d.ts +25 -0
  99. package/dist/execution/worker/strategies/TransferableStreamStrategy.js +159 -0
  100. package/dist/execution/worker/strategies/TransferableStreamStrategy.js.map +1 -0
  101. package/dist/execution/worker/strategies/WorkerStrategy.d.ts +27 -0
  102. package/dist/execution/worker/strategies/WorkerStrategySelector.d.ts +43 -0
  103. package/dist/execution/worker/strategies/WorkerStrategySelector.js +89 -0
  104. package/dist/execution/worker/strategies/WorkerStrategySelector.js.map +1 -0
  105. package/dist/execution/worker/utils/messageHandler.d.ts +21 -0
  106. package/dist/execution/worker/utils/messageHandler.js +109 -0
  107. package/dist/execution/worker/utils/messageHandler.js.map +1 -0
  108. package/dist/execution/worker/utils/serializeOptions.d.ts +9 -0
  109. package/dist/execution/worker/utils/serializeOptions.js +14 -0
  110. package/dist/execution/worker/utils/serializeOptions.js.map +1 -0
  111. package/dist/execution/worker/utils/streamCollector.node.d.ts +14 -0
  112. package/dist/execution/worker/utils/streamCollector.node.js +78 -0
  113. package/dist/execution/worker/utils/streamCollector.node.js.map +1 -0
  114. package/dist/execution/worker/utils/workerUtils.d.ts +14 -0
  115. package/dist/execution/worker/utils/workerUtils.js +25 -0
  116. package/dist/execution/worker/utils/workerUtils.js.map +1 -0
  117. package/dist/getOptionsFromResponse.constants.node.d.ts +10 -0
  118. package/dist/getOptionsFromResponse.constants.node.js +8 -0
  119. package/dist/getOptionsFromResponse.constants.node.js.map +1 -0
  120. package/dist/getOptionsFromResponse.constants.web.d.ts +30 -0
  121. package/dist/getOptionsFromResponse.constants.web.js +7 -0
  122. package/dist/getOptionsFromResponse.constants.web.js.map +1 -0
  123. package/dist/getOptionsFromResponse.d.ts +2 -1
  124. package/dist/getOptionsFromResponse.js +5 -9
  125. package/dist/getOptionsFromResponse.js.map +1 -1
  126. package/dist/loadWASM.js.map +1 -1
  127. package/dist/loadWASM.web.js.map +1 -1
  128. package/dist/parse.d.ts +1 -1
  129. package/dist/parse.js +29 -5
  130. package/dist/parse.js.map +1 -1
  131. package/dist/parseBinary.d.ts +2 -1
  132. package/dist/parseBinary.js +32 -3
  133. package/dist/parseBinary.js.map +1 -1
  134. package/dist/parseBinaryInWorker.node.d.ts +2 -0
  135. package/dist/parseBinaryInWorker.web.d.ts +2 -0
  136. package/dist/parseBinaryInWorkerWASM.node.d.ts +2 -0
  137. package/dist/parseBinaryInWorkerWASM.web.d.ts +2 -0
  138. package/dist/parseBinaryToArraySync.d.ts +2 -1
  139. package/dist/parseBinaryToArraySync.js.map +1 -1
  140. package/dist/parseBinaryToIterableIterator.d.ts +2 -1
  141. package/dist/parseBinaryToIterableIterator.js.map +1 -1
  142. package/dist/parseBinaryToStream.d.ts +2 -1
  143. package/dist/parseBinaryToStream.js.map +1 -1
  144. package/dist/parseResponse.d.ts +1 -1
  145. package/dist/parseResponse.js +15 -8
  146. package/dist/parseResponse.js.map +1 -1
  147. package/dist/parseResponseToStream.d.ts +2 -1
  148. package/dist/parseResponseToStream.js.map +1 -1
  149. package/dist/parseStreamInWorker.node.d.ts +2 -0
  150. package/dist/parseStreamInWorker.web.d.ts +2 -0
  151. package/dist/parseString.d.ts +31 -0
  152. package/dist/parseString.js +27 -1
  153. package/dist/parseString.js.map +1 -1
  154. package/dist/parseStringInWorker.node.d.ts +2 -0
  155. package/dist/parseStringInWorker.web.d.ts +2 -0
  156. package/dist/parseStringInWorkerWASM.node.d.ts +2 -0
  157. package/dist/parseStringInWorkerWASM.web.d.ts +2 -0
  158. package/dist/parseStringStream.d.ts +43 -1
  159. package/dist/parseStringStream.js +24 -3
  160. package/dist/parseStringStream.js.map +1 -1
  161. package/dist/parseStringStreamToStream.js.map +1 -1
  162. package/dist/parseStringToArraySync.js.map +1 -1
  163. package/dist/parseStringToArraySyncWASM.js.map +1 -1
  164. package/dist/parseStringToIterableIterator.js.map +1 -1
  165. package/dist/parseStringToStream.js.map +1 -1
  166. package/dist/parseUint8ArrayStream.d.ts +4 -3
  167. package/dist/parseUint8ArrayStream.js +24 -3
  168. package/dist/parseUint8ArrayStream.js.map +1 -1
  169. package/dist/parseUint8ArrayStreamInWorker.node.d.ts +2 -0
  170. package/dist/parseUint8ArrayStreamInWorker.web.d.ts +2 -0
  171. package/dist/parseUint8ArrayStreamToStream.d.ts +2 -1
  172. package/dist/parseUint8ArrayStreamToStream.js +11 -5
  173. package/dist/parseUint8ArrayStreamToStream.js.map +1 -1
  174. package/dist/utils/convertBinaryToString.js.map +1 -1
  175. package/dist/utils/convertIterableIteratorToAsync.js.map +1 -1
  176. package/dist/utils/convertStreamToAsyncIterableIterator.js +2 -2
  177. package/dist/utils/convertStreamToAsyncIterableIterator.js.map +1 -1
  178. package/dist/utils/convertThisAsyncIterableIteratorToArray.d.ts +1 -1
  179. package/dist/utils/convertThisAsyncIterableIteratorToArray.js.map +1 -1
  180. package/dist/utils/escapeRegExp.js.map +1 -1
  181. package/dist/utils/parseMime.js.map +1 -1
  182. package/dist/utils/pipeline.js.map +1 -1
  183. package/dist/web-csv-toolbox.d.ts +4 -0
  184. package/dist/web-csv-toolbox.js +3 -0
  185. package/dist/web-csv-toolbox.js.map +1 -1
  186. package/dist/web_csv_toolbox_wasm_bg.wasm +0 -0
  187. package/dist/worker.node.d.ts +1 -0
  188. package/dist/worker.web.d.ts +1 -0
  189. package/package.json +53 -10
@@ -1 +1 @@
- {"version":3,"file":"parseString.js","sources":["../src/parseString.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n *\n * @remarks\n * **Performance Characteristics:**\n * - **Memory usage**: O(1) - constant per record (streaming approach)\n * - **Suitable for**: Files of any size\n * - **Recommended for**: Large CSV strings (> 10MB) or memory-constrained environments\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parseString<const CSVSource extends string>(\n csv: CSVSource,\n): AsyncIterableIterator<CSVRecord<PickCSVHeader<CSVSource>>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n options: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options?: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString(\n csv: string,\n options?: ParseOptions,\n): AsyncIterableIterator<CSVRecord<string[]>>;\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n yield* parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { 
parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;AAqEuB,gBAAA,WAAA,CACrB,KACA,OAC0C,EAAA;AAC1C,EAAI,IAAA;AACF,IAAO,OAAA,6BAAA,CAA8B,KAAK,OAAO,CAAA;AAAA,WAC1C,KAAO,EAAA;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA;AAElC;AAyGA,MAAA,CAAO,iBAAiB,WAAa,EAAA;AAAA,EACnC,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,WAAa,EAAA;AAAA,IACX,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA,GACT;AAAA,EACA,kBAAoB,EAAA;AAAA,IAClB,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA,GACT;AAAA,EACA,QAAU,EAAA;AAAA,IACR,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA;AAEX,CAAC,CAAA;;;;"}
+ {"version":3,"file":"parseString.js","sources":["../src/parseString.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { InternalEngineConfig } from \"./execution/InternalEngineConfig.ts\";\nimport { WorkerSession } from \"./execution/worker/helpers/WorkerSession.ts\";\nimport { executeWithWorkerStrategy } from \"./execution/worker/strategies/WorkerStrategySelector.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToArraySyncWASM } from \"./parseStringToArraySyncWASM.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n *\n * @remarks\n * **Performance Characteristics:**\n * - **Memory usage**: O(1) - constant per record (streaming approach)\n * - **Suitable for**: Files of any size\n * - **Recommended for**: Large CSV strings (> 10MB) or memory-constrained environments\n *\n * **Execution Strategies:**\n * Control how parsing is executed using the `engine` option:\n * - **Main thread** (default): `engine: { worker: false }` - No overhead, good for small files\n * - **Worker thread**: `engine: { worker: true }` - Offloads parsing, good for large files\n * - **WebAssembly**: `engine: { wasm: true }` - Fast parsing, limited to UTF-8 and double-quotes\n * - **Combined**: `engine: { worker: true, wasm: true }` - Worker + WASM for maximum performance\n *\n * Use {@link EnginePresets} for convenient configurations:\n * ```ts\n * import { parseString, EnginePresets } from 'web-csv-toolbox';\n *\n * // Use fastest available execution method\n * for await (const record of parseString(csv, {\n * engine: EnginePresets.fastest()\n * })) {\n * console.log(record);\n * }\n * ```\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Using worker execution for better performance\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * // Offload parsing to a worker thread\n * for await (const record of parseString(largeCSV, {\n * engine: { worker: true }\n * })) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseString<const CSVSource extends string>(\n csv: CSVSource,\n): AsyncIterableIterator<CSVRecord<PickCSVHeader<CSVSource>>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<const Header extends ReadonlyArray<string>>(\n csv: string,\n options: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString<\n const CSVSource 
extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options?: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseString(\n csv: string,\n options?: ParseOptions,\n): AsyncIterableIterator<CSVRecord<string[]>>;\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n // Parse engine configuration\n const engineConfig = new InternalEngineConfig(options?.engine);\n\n if (engineConfig.hasWorker()) {\n // Worker execution\n const session = engineConfig.workerPool\n ? await WorkerSession.create({\n workerPool: engineConfig.workerPool,\n workerURL: engineConfig.workerURL,\n })\n : null;\n\n try {\n yield* executeWithWorkerStrategy<CSVRecord<Header>>(\n csv,\n options,\n session,\n engineConfig,\n );\n } finally {\n session?.[Symbol.dispose]();\n }\n } else {\n // Main thread execution\n if (engineConfig.hasWasm()) {\n yield* parseStringToArraySyncWASM(csv, options);\n } else {\n yield* parseStringToIterableIterator(csv, options);\n }\n }\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n 
export function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;;;;;AAwGA,gBAAuB,WAAA,CACrB,KACA,OAAA,EAC0C;AAC1C,EAAA,IAAI;AAEF,IAAA,MAAM,YAAA,GAAe,IAAI,oBAAA,CAAqB,OAAA,EAAS,MAAM,CAAA;AAE7D,IAAA,IAAI,YAAA,CAAa,WAAU,EAAG;AAE5B,MAAA,MAAM,OAAA,GAAU,YAAA,CAAa,UAAA,GACzB,MAAM,cAAc,MAAA,CAAO;AAAA,QACzB,YAAY,YAAA,CAAa,UAAA;AAAA,QACzB,WAAW,YAAA,CAAa;AAAA,OACzB,CAAA,GACD,IAAA;AAEJ,MAAA,IAAI;AACF,QAAA,OAAO,yBAAA;AAAA,UACL,GAAA;AAAA,UACA,OAAA;AAAA,UACA,OAAA;AAAA,UACA;AAAA,SACF;AAAA,MACF,CAAA,SAAE;AACA,QAAA,OAAA,GAAU,MAAA,CAAO,OAAO,CAAA,EAAE;AAAA,MAC5B;AAAA,IACF,CAAA,MAAO;AAEL,MAAA,IAAI,YAAA,CAAa,SAAQ,EAAG;AAC1B,QAAA,OAAO,0BAAA,CAA2B,KAAK,OAAO,CAAA;AAAA,MAChD,CAAA,MAAO;AACL,QAAA,OAAO,6BAAA,CAA8B,KAAK,OAAO,CAAA;AAAA,MACnD;AAAA,IACF;AAAA,EACF,SAAS,KAAA,EAAO;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA,EAChC;AACF;AAyGA,MAAA,CAAO,iBAAiB,WAAA,EAAa;AAAA,EACnC,OAAA,EAAS;AAAA,IACP,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,WAAA,EAAa;AAAA,IACX,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA,GACT;AAAA,EACA,kBAAA,EAAoB;AAAA,IAClB,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA,GACT;AAAA,EACA,QAAA,EAAU;AAAA,IACR,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA;AAEX,CAAC,CAAA;;;;"}
@@ -0,0 +1,2 @@
+ export * from './execution/worker/parseStringInWorker.node'
+ export {}
@@ -0,0 +1,2 @@
+ export * from './execution/worker/parseStringInWorker.web'
+ export {}
@@ -0,0 +1,2 @@
+ export * from './execution/worker/parseStringInWorkerWASM.node'
+ export {}
@@ -0,0 +1,2 @@
+ export * from './execution/worker/parseStringInWorkerWASM.web'
+ export {}
@@ -11,6 +11,32 @@ import { PickCSVHeader } from './utils/types.ts';
  *
  * If you want array of records, use {@link parseStringStream.toArray} function.
  *
+ * @remarks
+ * **Stream Execution Strategies:**
+ *
+ * For streams, the engine configuration supports two worker strategies:
+ * - **stream-transfer** (recommended): Zero-copy stream transfer to worker
+ * - Supported on Chrome, Firefox, Edge
+ * - Automatically falls back to message-streaming on Safari
+ * - **message-streaming**: Records sent via postMessage
+ * - Works on all browsers including Safari
+ * - Slightly higher overhead but more compatible
+ *
+ * By default, streams use main thread execution. To use workers with streams:
+ * ```ts
+ * import { parseStringStream, EnginePresets } from 'web-csv-toolbox';
+ *
+ * // Use worker with automatic stream-transfer (falls back if not supported)
+ * for await (const record of parseStringStream(stream, {
+ * engine: EnginePresets.workerStreamTransfer()
+ * })) {
+ * console.log(record);
+ * }
+ * ```
+ *
+ * Note: WASM execution is not supported for streams. If you specify
+ * `engine: { wasm: true }` with a stream, it will fall back to main thread.
+ *
  * @example Parsing CSV files from strings
  *
  * ```ts
@@ -27,13 +53,29 @@ import { PickCSVHeader } from './utils/types.ts';
  * },
  * });
  *
- * for await (const record of parseStringStream(csv)) {
+ * for await (const record of parseStringStream(stream)) {
  * console.log(record);
  * }
  * // Prints:
  * // { name: 'Alice', age: '42' }
  * // { name: 'Bob', age: '69' }
  * ```
+ *
+ * @example Using worker with stream transfer for large files
+ * ```ts
+ * import { parseStringStream } from 'web-csv-toolbox';
+ *
+ * const response = await fetch('large-file.csv');
+ * const stream = response.body
+ * .pipeThrough(new TextDecoderStream());
+ *
+ * // Use worker with stream-transfer strategy
+ * for await (const record of parseStringStream(stream, {
+ * engine: { worker: true, workerStrategy: 'stream-transfer' }
+ * })) {
+ * console.log(record);
+ * }
+ * ```
  */
  export declare function parseStringStream<const CSVSource extends ReadableStream<string>, const Delimiter extends string = DEFAULT_DELIMITER, const Quotation extends string = DEFAULT_QUOTATION, const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource, Delimiter, Quotation>>(csv: CSVSource, options: ParseOptions<Header, Delimiter, Quotation>): AsyncIterableIterator<CSVRecord<Header>>;
  export declare function parseStringStream<const CSVSource extends ReadableStream<string>, const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>>(csv: CSVSource, options?: ParseOptions<Header>): AsyncIterableIterator<CSVRecord<Header>>;
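The remarks added to `parseStringStream.d.ts` above name two worker strategies for streams. The sketch below requests the Safari-compatible path explicitly; passing `'message-streaming'` as the `workerStrategy` literal is an assumption inferred from the strategy name in the remarks (only `'stream-transfer'` appears verbatim in the diff).

```ts
import { parseStringStream } from 'web-csv-toolbox';

const csv = `name,age
Alice,42
Bob,69`;

const stream = new ReadableStream<string>({
  start(controller) {
    controller.enqueue(csv);
    controller.close();
  },
});

// Per the remarks, message-streaming sends records via postMessage and works
// on all browsers, including Safari; the exact option literal is assumed here.
for await (const record of parseStringStream(stream, {
  engine: { worker: true, workerStrategy: 'message-streaming' },
})) {
  console.log(record);
}
```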
@@ -1,10 +1,31 @@
+ import { InternalEngineConfig } from './execution/InternalEngineConfig.js';
+ import { WorkerSession } from './execution/worker/helpers/WorkerSession.js';
+ import { executeWithWorkerStrategy } from './execution/worker/strategies/WorkerStrategySelector.js';
  import { parseStringStreamToStream } from './parseStringStreamToStream.js';
  import { convertStreamToAsyncIterableIterator } from './utils/convertStreamToAsyncIterableIterator.js';
  import { convertThisAsyncIterableIteratorToArray } from './utils/convertThisAsyncIterableIteratorToArray.js';
 
- function parseStringStream(stream, options) {
- const recordStream = parseStringStreamToStream(stream, options);
- return convertStreamToAsyncIterableIterator(recordStream);
+ async function* parseStringStream(stream, options) {
+ const engineConfig = new InternalEngineConfig(options?.engine);
+ if (engineConfig.hasWorker() && engineConfig.hasStreamTransfer()) {
+ const session = engineConfig.workerPool ? await WorkerSession.create({
+ workerPool: engineConfig.workerPool,
+ workerURL: engineConfig.workerURL
+ }) : null;
+ try {
+ yield* executeWithWorkerStrategy(
+ stream,
+ options,
+ session,
+ engineConfig
+ );
+ } finally {
+ session?.[Symbol.dispose]();
+ }
+ } else {
+ const recordStream = parseStringStreamToStream(stream, options);
+ yield* convertStreamToAsyncIterableIterator(recordStream);
+ }
  }
  Object.defineProperties(parseStringStream, {
  toArray: {
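The rewritten `parseStringStream` body above acquires a `WorkerSession` only when a worker pool is configured and releases it via `Symbol.dispose` in a `finally` block. Below is a condensed, hypothetical sketch of that acquire/iterate/dispose pattern; `DisposableSession` and `parseWithSession` are illustrative stand-ins rather than package APIs, and the code assumes an environment where `Symbol.dispose` exists (as the diffed code itself does).

```ts
// Illustrative stand-ins; the package's real WorkerSession and
// executeWithWorkerStrategy helpers live under dist/execution/.
interface DisposableSession {
  [Symbol.dispose](): void;
}

async function* parseWithSession<T>(
  createSession: () => Promise<DisposableSession | null>,
  run: (session: DisposableSession | null) => AsyncIterable<T>,
): AsyncIterableIterator<T> {
  const session = await createSession();
  try {
    // Produce records through the selected strategy while the session is live.
    yield* run(session);
  } finally {
    // Release the pooled worker even if iteration throws or the consumer stops early.
    session?.[Symbol.dispose]();
  }
}
```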
@@ -1 +1 @@
- {"version":3,"file":"parseStringStream.js","sources":["../src/parseStringStream.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { parseStringStreamToStream } from \"./parseStringStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string stream to records.\n *\n * @category Middle-level API\n * @param stream CSV string stream to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseStringStream.toArray} function.\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseStringStream(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parseStringStream<\n const CSVSource extends ReadableStream<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseStringStream<\n const CSVSource extends ReadableStream<string>,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n csv: CSVSource,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseStringStream<const Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseStringStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseStringStreamToStream(stream, options);\n return convertStreamToAsyncIterableIterator(recordStream);\n}\n\nexport declare namespace parseStringStream {\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseStringStream.toArray(stream);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * 
Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseStringStream.toStream(stream)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseStringStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringStreamToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;AAiEgB,SAAA,iBAAA,CACd,QACA,OAC0C,EAAA;AAC1C,EAAM,MAAA,YAAA,GAAe,yBAA0B,CAAA,MAAA,EAAQ,OAAO,CAAA;AAC9D,EAAA,OAAO,qCAAqC,YAAY,CAAA;AAC1D;AAuEA,MAAA,CAAO,iBAAiB,iBAAmB,EAAA;AAAA,EACzC,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,QAAU,EAAA;AAAA,IACR,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA;AAEX,CAAC,CAAA;;;;"}
+ {"version":3,"file":"parseStringStream.js","sources":["../src/parseStringStream.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { InternalEngineConfig } from \"./execution/InternalEngineConfig.ts\";\nimport { WorkerSession } from \"./execution/worker/helpers/WorkerSession.ts\";\nimport { executeWithWorkerStrategy } from \"./execution/worker/strategies/WorkerStrategySelector.ts\";\nimport { parseStringStreamToStream } from \"./parseStringStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string stream to records.\n *\n * @category Middle-level API\n * @param stream CSV string stream to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseStringStream.toArray} function.\n *\n * @remarks\n * **Stream Execution Strategies:**\n *\n * For streams, the engine configuration supports two worker strategies:\n * - **stream-transfer** (recommended): Zero-copy stream transfer to worker\n * - Supported on Chrome, Firefox, Edge\n * - Automatically falls back to message-streaming on Safari\n * - **message-streaming**: Records sent via postMessage\n * - Works on all browsers including Safari\n * - Slightly higher overhead but more compatible\n *\n * By default, streams use main thread execution. To use workers with streams:\n * ```ts\n * import { parseStringStream, EnginePresets } from 'web-csv-toolbox';\n *\n * // Use worker with automatic stream-transfer (falls back if not supported)\n * for await (const record of parseStringStream(stream, {\n * engine: EnginePresets.workerStreamTransfer()\n * })) {\n * console.log(record);\n * }\n * ```\n *\n * Note: WASM execution is not supported for streams. 
If you specify\n * `engine: { wasm: true }` with a stream, it will fall back to main thread.\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseStringStream(stream)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Using worker with stream transfer for large files\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const response = await fetch('large-file.csv');\n * const stream = response.body\n * .pipeThrough(new TextDecoderStream());\n *\n * // Use worker with stream-transfer strategy\n * for await (const record of parseStringStream(stream, {\n * engine: { worker: true, workerStrategy: 'stream-transfer' }\n * })) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseStringStream<\n const CSVSource extends ReadableStream<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseStringStream<\n const CSVSource extends ReadableStream<string>,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n csv: CSVSource,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport function parseStringStream<const Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport async function* parseStringStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n // Parse engine configuration\n const engineConfig = new InternalEngineConfig(options?.engine);\n\n // Note: Worker execution with ReadableStream requires TransferableStream support\n // which is not available in Safari. For now, always use main thread execution.\n // TODO: Implement stream-transfer strategy for browsers that support it\n if (engineConfig.hasWorker() && engineConfig.hasStreamTransfer()) {\n // Worker execution with stream-transfer strategy\n const session = engineConfig.workerPool\n ? 
await WorkerSession.create({\n workerPool: engineConfig.workerPool,\n workerURL: engineConfig.workerURL,\n })\n : null;\n\n try {\n yield* executeWithWorkerStrategy<CSVRecord<Header>>(\n stream,\n options,\n session,\n engineConfig,\n );\n } finally {\n session?.[Symbol.dispose]();\n }\n } else {\n // Main thread execution (default for streams)\n const recordStream = parseStringStreamToStream(stream, options);\n yield* convertStreamToAsyncIterableIterator(recordStream);\n }\n}\n\nexport declare namespace parseStringStream {\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseStringStream.toArray(stream);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseStringStream.toStream(stream)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseStringStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringStreamToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;;AA8GA,gBAAuB,iBAAA,CACrB,QACA,OAAA,EAC0C;AAE1C,EAAA,MAAM,YAAA,GAAe,IAAI,oBAAA,CAAqB,OAAA,EAAS,MAAM,CAAA;AAK7D,EAAA,IAAI,YAAA,CAAa,SAAA,EAAU,IAAK,YAAA,CAAa,mBAAkB,EAAG;AAEhE,IAAA,MAAM,OAAA,GAAU,YAAA,CAAa,UAAA,GACzB,MAAM,cAAc,MAAA,CAAO;AAAA,MACzB,YAAY,YAAA,CAAa,UAAA;AAAA,MACzB,WAAW,YAAA,CAAa;AAAA,KACzB,CAAA,GACD,IAAA;AAEJ,IAAA,IAAI;AACF,MAAA,OAAO,yBAAA;AAAA,QACL,MAAA;AAAA,QACA,OAAA;AAAA,QACA,OAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF,CAAA,SAAE;AACA,MAAA,OAAA,GAAU,MAAA,CAAO,OAAO,CAAA,EAAE;AAAA,IAC5B;AAAA,EACF,CAAA,MAAO;AAEL,IAAA,MAAM,YAAA,GAAe,yBAAA,CAA0B,MAAA,EAAQ,OAAO,CAAA;AAC9D,IAAA,OAAO,qCAAqC,YAAY,CAAA;AAAA,EAC1D;AACF;AAuEA,MAAA,CAAO,iBAAiB,iBAAA,EAAmB;AAAA,EACzC,OAAA,EAAS;AAAA,IACP,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,QAAA,EAAU;AAAA,IACR,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA;AAEX,CAAC,CAAA;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"parseStringStreamToStream.js","sources":["../src/parseStringStreamToStream.ts"],"sourcesContent":["import { CSVLexerTransformer } from \"./CSVLexerTransformer.ts\";\nimport { CSVRecordAssemblerTransformer } from \"./CSVRecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\nexport function parseStringStreamToStream<\n const CSVSource extends ReadableStream<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n stream: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringStreamToStream<\n const CSVSource extends ReadableStream<string>,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n stream: CSVSource,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringStreamToStream<\n const Header extends ReadonlyArray<string>,\n>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringStreamToStream<\n const Header extends ReadonlyArray<string>,\n>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n return pipeline(\n stream,\n new CSVLexerTransformer(options),\n new CSVRecordAssemblerTransformer(options),\n );\n}\n"],"names":[],"mappings":";;;;AAiCgB,SAAA,yBAAA,CAGd,QACA,OACmC,EAAA;AACnC,EAAO,OAAA,QAAA;AAAA,IACL,MAAA;AAAA,IACA,IAAI,oBAAoB,OAAO,CAAA;AAAA,IAC/B,IAAI,8BAA8B,OAAO;AAAA,GAC3C;AACF;;;;"}
+ {"version":3,"file":"parseStringStreamToStream.js","sources":["../src/parseStringStreamToStream.ts"],"sourcesContent":["import { CSVLexerTransformer } from \"./CSVLexerTransformer.ts\";\nimport { CSVRecordAssemblerTransformer } from \"./CSVRecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\nexport function parseStringStreamToStream<\n const CSVSource extends ReadableStream<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n stream: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringStreamToStream<\n const CSVSource extends ReadableStream<string>,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n stream: CSVSource,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringStreamToStream<\n const Header extends ReadonlyArray<string>,\n>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringStreamToStream<\n const Header extends ReadonlyArray<string>,\n>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n return pipeline(\n stream,\n new CSVLexerTransformer(options),\n new CSVRecordAssemblerTransformer(options),\n );\n}\n"],"names":[],"mappings":";;;;AAiCO,SAAS,yBAAA,CAGd,QACA,OAAA,EACmC;AACnC,EAAA,OAAO,QAAA;AAAA,IACL,MAAA;AAAA,IACA,IAAI,oBAAoB,OAAO,CAAA;AAAA,IAC/B,IAAI,8BAA8B,OAAO;AAAA,GAC3C;AACF;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"parseStringToArraySync.js","sources":["../src/parseStringToArraySync.ts"],"sourcesContent":["import { CSVLexer } from \"./CSVLexer.ts\";\nimport { CSVRecordAssembler } from \"./CSVRecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Synchronously parses a CSV string into an array of records.\n *\n * @param csv - The CSV string to parse.\n * @param options - Parsing options including delimiter, quotation, header, etc.\n * @returns An array of CSV records.\n * @throws {ParseError} If the CSV data is malformed.\n *\n * @remarks\n * **WARNING**: This function loads all parsed records into memory as an array.\n * For CSV data with a large number of records, consider using `parseStringToIterableIterator()`\n * to iterate over records without loading them all into memory at once.\n *\n * @example\n * ```ts\n * const csv = \"name,age\\nAlice,30\\nBob,25\";\n * const records = parseStringToArraySync(csv);\n * // [{ name: \"Alice\", age: \"30\" }, { name: \"Bob\", age: \"25\" }]\n * ```\n */\nexport function parseStringToArraySync<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySync<\n const CSVSource extends string,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(csv: CSVSource, options?: ParseOptions<Header>): CSVRecord<Header>[];\nexport function parseStringToArraySync<\n const Header extends ReadonlyArray<string>,\n>(csv: string, options?: ParseOptions<Header>): CSVRecord<Header>[];\nexport function parseStringToArraySync<\n const Header extends ReadonlyArray<string>,\n>(csv: string, options?: ParseOptions<Header>): CSVRecord<Header>[] {\n try {\n const lexer = new CSVLexer(options);\n const assembler = new CSVRecordAssembler(options);\n const tokens = lexer.lex(csv);\n return [...assembler.assemble(tokens)];\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":[],"mappings":";;;;AA+CgB,SAAA,sBAAA,CAEd,KAAa,OAAqD,EAAA;AAClE,EAAI,IAAA;AACF,IAAM,MAAA,KAAA,GAAQ,IAAI,QAAA,CAAS,OAAO,CAAA;AAClC,IAAM,MAAA,SAAA,GAAY,IAAI,kBAAA,CAAmB,OAAO,CAAA;AAChD,IAAM,MAAA,MAAA,GAAS,KAAM,CAAA,GAAA,CAAI,GAAG,CAAA;AAC5B,IAAA,OAAO,CAAC,GAAG,SAAU,CAAA,QAAA,CAAS,MAAM,CAAC,CAAA;AAAA,WAC9B,KAAO,EAAA;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA;AAElC;;;;"}
+ {"version":3,"file":"parseStringToArraySync.js","sources":["../src/parseStringToArraySync.ts"],"sourcesContent":["import { CSVLexer } from \"./CSVLexer.ts\";\nimport { CSVRecordAssembler } from \"./CSVRecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Synchronously parses a CSV string into an array of records.\n *\n * @param csv - The CSV string to parse.\n * @param options - Parsing options including delimiter, quotation, header, etc.\n * @returns An array of CSV records.\n * @throws {ParseError} If the CSV data is malformed.\n *\n * @remarks\n * **WARNING**: This function loads all parsed records into memory as an array.\n * For CSV data with a large number of records, consider using `parseStringToIterableIterator()`\n * to iterate over records without loading them all into memory at once.\n *\n * @example\n * ```ts\n * const csv = \"name,age\\nAlice,30\\nBob,25\";\n * const records = parseStringToArraySync(csv);\n * // [{ name: \"Alice\", age: \"30\" }, { name: \"Bob\", age: \"25\" }]\n * ```\n */\nexport function parseStringToArraySync<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySync<\n const CSVSource extends string,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(csv: CSVSource, options?: ParseOptions<Header>): CSVRecord<Header>[];\nexport function parseStringToArraySync<\n const Header extends ReadonlyArray<string>,\n>(csv: string, options?: ParseOptions<Header>): CSVRecord<Header>[];\nexport function parseStringToArraySync<\n const Header extends ReadonlyArray<string>,\n>(csv: string, options?: ParseOptions<Header>): CSVRecord<Header>[] {\n try {\n const lexer = new CSVLexer(options);\n const assembler = new CSVRecordAssembler(options);\n const tokens = lexer.lex(csv);\n return [...assembler.assemble(tokens)];\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":[],"mappings":";;;;AA+CO,SAAS,sBAAA,CAEd,KAAa,OAAA,EAAqD;AAClE,EAAA,IAAI;AACF,IAAA,MAAM,KAAA,GAAQ,IAAI,QAAA,CAAS,OAAO,CAAA;AAClC,IAAA,MAAM,SAAA,GAAY,IAAI,kBAAA,CAAmB,OAAO,CAAA;AAChD,IAAA,MAAM,MAAA,GAAS,KAAA,CAAM,GAAA,CAAI,GAAG,CAAA;AAC5B,IAAA,OAAO,CAAC,GAAG,SAAA,CAAU,QAAA,CAAS,MAAM,CAAC,CAAA;AAAA,EACvC,SAAS,KAAA,EAAO;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA,EAChC;AACF;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"parseStringToArraySyncWASM.js","sources":["../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport {\n DEFAULT_DELIMITER,\n DEFAULT_QUOTATION,\n DOUBLE_QUOTE,\n} from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * **Performance Characteristics:**\n * - **Speed**: 2-3x faster than JavaScript parser for large CSV strings\n * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)\n * - **Suitable for**: CPU-intensive workloads, large CSV strings on server-side\n * - **Recommended max**: ~100MB (Node.js/Deno)\n *\n * **Limitations:**\n * - Only supports UTF-8 string (not UTF-16)\n * - Only supports double quote (`\"`) as quotation character\n * - Only supports single character as delimiter\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n * @throws {RangeError | TypeError} - If provided options are invalid.\n */\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n csv: CSVSource,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends ReadonlyArray<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends readonly string[],\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options: CommonOptions<Delimiter, Quotation> = {},\n): CSVRecord<Header>[] {\n const {\n delimiter = DEFAULT_DELIMITER,\n quotation = 
DEFAULT_QUOTATION,\n maxBufferSize = 10485760,\n } = options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new RangeError(\n \"Invalid delimiter, must be a single character on WASM.\",\n );\n }\n if (quotation !== DOUBLE_QUOTE) {\n throw new RangeError(\"Invalid quotation, must be double quote on WASM.\");\n }\n assertCommonOptions({ delimiter, quotation, maxBufferSize });\n const demiliterCode = delimiter.charCodeAt(0);\n return JSON.parse(parseStringToArraySync(csv, demiliterCode));\n}\n"],"names":[],"mappings":";;;;AAwFO,SAAS,0BAKd,CAAA,GAAA,EACA,OAA+C,GAAA,EAC1B,EAAA;AACrB,EAAM,MAAA;AAAA,IACJ,SAAY,GAAA,iBAAA;AAAA,IACZ,SAAY,GAAA,iBAAA;AAAA,IACZ,aAAgB,GAAA;AAAA,GACd,GAAA,OAAA;AACJ,EAAA,IAAI,OAAO,SAAA,KAAc,QAAY,IAAA,SAAA,CAAU,WAAW,CAAG,EAAA;AAC3D,IAAA,MAAM,IAAI,UAAA;AAAA,MACR;AAAA,KACF;AAAA;AAEF,EAAA,IAAI,cAAc,YAAc,EAAA;AAC9B,IAAM,MAAA,IAAI,WAAW,kDAAkD,CAAA;AAAA;AAEzE,EAAA,mBAAA,CAAoB,EAAE,SAAA,EAAW,SAAW,EAAA,aAAA,EAAe,CAAA;AAC3D,EAAM,MAAA,aAAA,GAAgB,SAAU,CAAA,UAAA,CAAW,CAAC,CAAA;AAC5C,EAAA,OAAO,IAAK,CAAA,KAAA,CAAM,sBAAuB,CAAA,GAAA,EAAK,aAAa,CAAC,CAAA;AAC9D;;;;"}
+ {"version":3,"file":"parseStringToArraySyncWASM.js","sources":["../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport {\n DEFAULT_DELIMITER,\n DEFAULT_QUOTATION,\n DOUBLE_QUOTE,\n} from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * **Performance Characteristics:**\n * - **Speed**: 2-3x faster than JavaScript parser for large CSV strings\n * - **Memory usage**: O(n) - proportional to file size (loads entire result into memory)\n * - **Suitable for**: CPU-intensive workloads, large CSV strings on server-side\n * - **Recommended max**: ~100MB (Node.js/Deno)\n *\n * **Limitations:**\n * - Only supports UTF-8 string (not UTF-16)\n * - Only supports double quote (`\"`) as quotation character\n * - Only supports single character as delimiter\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n * @throws {RangeError | TypeError} - If provided options are invalid.\n */\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n csv: CSVSource,\n options: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n csv: CSVSource,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends ReadonlyArray<string>,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options?: CommonOptions<Delimiter, Quotation>,\n): CSVRecord<Header>[];\nexport function parseStringToArraySyncWASM<\n const Header extends readonly string[],\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n>(\n csv: string,\n options: CommonOptions<Delimiter, Quotation> = {},\n): CSVRecord<Header>[] {\n const {\n delimiter = DEFAULT_DELIMITER,\n quotation = 
DEFAULT_QUOTATION,\n maxBufferSize = 10485760,\n } = options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new RangeError(\n \"Invalid delimiter, must be a single character on WASM.\",\n );\n }\n if (quotation !== DOUBLE_QUOTE) {\n throw new RangeError(\"Invalid quotation, must be double quote on WASM.\");\n }\n assertCommonOptions({ delimiter, quotation, maxBufferSize });\n const demiliterCode = delimiter.charCodeAt(0);\n return JSON.parse(parseStringToArraySync(csv, demiliterCode));\n}\n"],"names":[],"mappings":";;;;AAwFO,SAAS,0BAAA,CAKd,GAAA,EACA,OAAA,GAA+C,EAAC,EAC3B;AACrB,EAAA,MAAM;AAAA,IACJ,SAAA,GAAY,iBAAA;AAAA,IACZ,SAAA,GAAY,iBAAA;AAAA,IACZ,aAAA,GAAgB;AAAA,GAClB,GAAI,OAAA;AACJ,EAAA,IAAI,OAAO,SAAA,KAAc,QAAA,IAAY,SAAA,CAAU,WAAW,CAAA,EAAG;AAC3D,IAAA,MAAM,IAAI,UAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACA,EAAA,IAAI,cAAc,YAAA,EAAc;AAC9B,IAAA,MAAM,IAAI,WAAW,kDAAkD,CAAA;AAAA,EACzE;AACA,EAAA,mBAAA,CAAoB,EAAE,SAAA,EAAW,SAAA,EAAW,aAAA,EAAe,CAAA;AAC3D,EAAA,MAAM,aAAA,GAAgB,SAAA,CAAU,UAAA,CAAW,CAAC,CAAA;AAC5C,EAAA,OAAO,IAAA,CAAK,KAAA,CAAM,sBAAA,CAAuB,GAAA,EAAK,aAAa,CAAC,CAAA;AAC9D;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"parseStringToIterableIterator.js","sources":["../src/parseStringToIterableIterator.ts"],"sourcesContent":["import { CSVLexer } from \"./CSVLexer.ts\";\nimport { CSVRecordAssembler } from \"./CSVRecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\nexport function parseStringToIterableIterator<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n stream: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): IterableIterator<CSVRecord<Header>>;\nexport function parseStringToIterableIterator<\n const CSVSource extends string,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n stream: CSVSource,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>>;\nexport function parseStringToIterableIterator<\n const Header extends ReadonlyArray<string>,\n>(\n stream: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>>;\nexport function parseStringToIterableIterator<\n const Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n try {\n const lexer = new CSVLexer(options);\n const assembler = new CSVRecordAssembler(options);\n const tokens = lexer.lex(csv);\n return assembler.assemble(tokens);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":[],"mappings":";;;;AAiCgB,SAAA,6BAAA,CAGd,KACA,OACqC,EAAA;AACrC,EAAI,IAAA;AACF,IAAM,MAAA,KAAA,GAAQ,IAAI,QAAA,CAAS,OAAO,CAAA;AAClC,IAAM,MAAA,SAAA,GAAY,IAAI,kBAAA,CAAmB,OAAO,CAAA;AAChD,IAAM,MAAA,MAAA,GAAS,KAAM,CAAA,GAAA,CAAI,GAAG,CAAA;AAC5B,IAAO,OAAA,SAAA,CAAU,SAAS,MAAM,CAAA;AAAA,WACzB,KAAO,EAAA;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA;AAElC;;;;"}
+ {"version":3,"file":"parseStringToIterableIterator.js","sources":["../src/parseStringToIterableIterator.ts"],"sourcesContent":["import { CSVLexer } from \"./CSVLexer.ts\";\nimport { CSVRecordAssembler } from \"./CSVRecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\nexport function parseStringToIterableIterator<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n stream: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): IterableIterator<CSVRecord<Header>>;\nexport function parseStringToIterableIterator<\n const CSVSource extends string,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n stream: CSVSource,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>>;\nexport function parseStringToIterableIterator<\n const Header extends ReadonlyArray<string>,\n>(\n stream: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>>;\nexport function parseStringToIterableIterator<\n const Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n try {\n const lexer = new CSVLexer(options);\n const assembler = new CSVRecordAssembler(options);\n const tokens = lexer.lex(csv);\n return assembler.assemble(tokens);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":[],"mappings":";;;;AAiCO,SAAS,6BAAA,CAGd,KACA,OAAA,EACqC;AACrC,EAAA,IAAI;AACF,IAAA,MAAM,KAAA,GAAQ,IAAI,QAAA,CAAS,OAAO,CAAA;AAClC,IAAA,MAAM,SAAA,GAAY,IAAI,kBAAA,CAAmB,OAAO,CAAA;AAChD,IAAA,MAAM,MAAA,GAAS,KAAA,CAAM,GAAA,CAAI,GAAG,CAAA;AAC5B,IAAA,OAAO,SAAA,CAAU,SAAS,MAAM,CAAA;AAAA,EAClC,SAAS,KAAA,EAAO;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA,EAChC;AACF;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"parseStringToStream.js","sources":["../src/parseStringToStream.ts"],"sourcesContent":["import { CSVLexer } from \"./CSVLexer.ts\";\nimport { CSVRecordAssembler } from \"./CSVRecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\nexport function parseStringToStream<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n stream: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringToStream<\n const CSVSource extends string,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n stream: CSVSource,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringToStream<const Header extends ReadonlyArray<string>>(\n stream: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringToStream<const Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const lexer = new CSVLexer(options);\n const assembler = new CSVRecordAssembler(options);\n return new ReadableStream({\n start(controller) {\n const tokens = lexer.lex(csv);\n for (const record of assembler.assemble(tokens)) {\n controller.enqueue(record);\n }\n controller.close();\n },\n });\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":[],"mappings":";;;;AA+BgB,SAAA,mBAAA,CACd,KACA,OACmC,EAAA;AACnC,EAAI,IAAA;AACF,IAAM,MAAA,KAAA,GAAQ,IAAI,QAAA,CAAS,OAAO,CAAA;AAClC,IAAM,MAAA,SAAA,GAAY,IAAI,kBAAA,CAAmB,OAAO,CAAA;AAChD,IAAA,OAAO,IAAI,cAAe,CAAA;AAAA,MACxB,MAAM,UAAY,EAAA;AAChB,QAAM,MAAA,MAAA,GAAS,KAAM,CAAA,GAAA,CAAI,GAAG,CAAA;AAC5B,QAAA,KAAA,MAAW,MAAU,IAAA,SAAA,CAAU,QAAS,CAAA,MAAM,CAAG,EAAA;AAC/C,UAAA,UAAA,CAAW,QAAQ,MAAM,CAAA;AAAA;AAE3B,QAAA,UAAA,CAAW,KAAM,EAAA;AAAA;AACnB,KACD,CAAA;AAAA,WACM,KAAO,EAAA;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA;AAElC;;;;"}
+ {"version":3,"file":"parseStringToStream.js","sources":["../src/parseStringToStream.ts"],"sourcesContent":["import { CSVLexer } from \"./CSVLexer.ts\";\nimport { CSVRecordAssembler } from \"./CSVRecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport type { PickCSVHeader } from \"./utils/types.ts\";\n\nexport function parseStringToStream<\n const CSVSource extends string,\n const Delimiter extends string = DEFAULT_DELIMITER,\n const Quotation extends string = DEFAULT_QUOTATION,\n const Header extends ReadonlyArray<string> = PickCSVHeader<\n CSVSource,\n Delimiter,\n Quotation\n >,\n>(\n stream: CSVSource,\n options: ParseOptions<Header, Delimiter, Quotation>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringToStream<\n const CSVSource extends string,\n const Header extends ReadonlyArray<string> = PickCSVHeader<CSVSource>,\n>(\n stream: CSVSource,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringToStream<const Header extends ReadonlyArray<string>>(\n stream: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>>;\nexport function parseStringToStream<const Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const lexer = new CSVLexer(options);\n const assembler = new CSVRecordAssembler(options);\n return new ReadableStream({\n start(controller) {\n const tokens = lexer.lex(csv);\n for (const record of assembler.assemble(tokens)) {\n controller.enqueue(record);\n }\n controller.close();\n },\n });\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":[],"mappings":";;;;AA+BO,SAAS,mBAAA,CACd,KACA,OAAA,EACmC;AACnC,EAAA,IAAI;AACF,IAAA,MAAM,KAAA,GAAQ,IAAI,QAAA,CAAS,OAAO,CAAA;AAClC,IAAA,MAAM,SAAA,GAAY,IAAI,kBAAA,CAAmB,OAAO,CAAA;AAChD,IAAA,OAAO,IAAI,cAAA,CAAe;AAAA,MACxB,MAAM,UAAA,EAAY;AAChB,QAAA,MAAM,MAAA,GAAS,KAAA,CAAM,GAAA,CAAI,GAAG,CAAA;AAC5B,QAAA,KAAA,MAAW,MAAA,IAAU,SAAA,CAAU,QAAA,CAAS,MAAM,CAAA,EAAG;AAC/C,UAAA,UAAA,CAAW,QAAQ,MAAM,CAAA;AAAA,QAC3B;AACA,QAAA,UAAA,CAAW,KAAA,EAAM;AAAA,MACnB;AAAA,KACD,CAAA;AAAA,EACH,SAAS,KAAA,EAAO;AACd,IAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA,EAChC;AACF;;;;"}
@@ -1,4 +1,5 @@
  import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
+ import { DEFAULT_DELIMITER } from './constants.ts';
  /**
  * Parse CSV to records.
  * This function is for parsing a binary stream.
@@ -28,12 +29,12 @@ import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
  * },
  * });
  *
- * for await (const record of parseUint8ArrayStream(csv)) {
+ * for await (const record of parseUint8ArrayStream(stream)) {
  * console.log(record);
  * }
  * ```
  */
- export declare function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(stream: ReadableStream<Uint8Array>, options?: ParseBinaryOptions<Header>): AsyncIterableIterator<CSVRecord<Header>>;
+ export declare function parseUint8ArrayStream<Header extends ReadonlyArray<string>, Delimiter extends string = DEFAULT_DELIMITER, Quotation extends string = '"'>(stream: ReadableStream<Uint8Array>, options?: ParseBinaryOptions<Header, Delimiter, Quotation>): AsyncIterableIterator<CSVRecord<Header>>;
  export declare namespace parseUint8ArrayStream {
  /**
  * Parse CSV binary to array of records,
@@ -90,5 +91,5 @@ export declare namespace parseUint8ArrayStream {
  * );
  * ```
  */
- function toStream<Header extends ReadonlyArray<string>>(stream: ReadableStream<Uint8Array>, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>[]>;
+ function toStream<Header extends ReadonlyArray<string>>(stream: ReadableStream<Uint8Array>, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>>;
  }
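The declaration change above threads `Delimiter` and `Quotation` type parameters through `ParseBinaryOptions`, so a non-default delimiter is now reflected in the types as well as at runtime. A short sketch with a semicolon-delimited payload (the input data and the output comment are illustrative):

```ts
import { parseUint8ArrayStream } from "web-csv-toolbox";

// Illustrative semicolon-separated CSV encoded as bytes.
const bytes = new TextEncoder().encode("a;b;c\n1;2;3\n");
const stream = new ReadableStream<Uint8Array>({
  start(controller) {
    controller.enqueue(bytes);
    controller.close();
  },
});

// The delimiter is forwarded through ParseBinaryOptions and now also
// instantiates the new Delimiter type parameter.
for await (const record of parseUint8ArrayStream(stream, { delimiter: ";" })) {
  console.log(record); // e.g. { a: "1", b: "2", c: "3" }
}
```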
@@ -1,10 +1,31 @@
+ import { InternalEngineConfig } from './execution/InternalEngineConfig.js';
+ import { WorkerSession } from './execution/worker/helpers/WorkerSession.js';
+ import { executeWithWorkerStrategy } from './execution/worker/strategies/WorkerStrategySelector.js';
  import { parseUint8ArrayStreamToStream } from './parseUint8ArrayStreamToStream.js';
  import { convertStreamToAsyncIterableIterator } from './utils/convertStreamToAsyncIterableIterator.js';
  import { convertThisAsyncIterableIteratorToArray } from './utils/convertThisAsyncIterableIteratorToArray.js';

- function parseUint8ArrayStream(stream, options) {
- const recordStream = parseUint8ArrayStreamToStream(stream, options);
- return convertStreamToAsyncIterableIterator(recordStream);
+ async function* parseUint8ArrayStream(stream, options) {
+ const engineConfig = new InternalEngineConfig(options?.engine);
+ if (engineConfig.hasWorker() && engineConfig.hasStreamTransfer()) {
+ const session = engineConfig.workerPool ? await WorkerSession.create({
+ workerPool: engineConfig.workerPool,
+ workerURL: engineConfig.workerURL
+ }) : null;
+ try {
+ yield* executeWithWorkerStrategy(
+ stream,
+ options,
+ session,
+ engineConfig
+ );
+ } finally {
+ session?.[Symbol.dispose]();
+ }
+ } else {
+ const recordStream = parseUint8ArrayStreamToStream(stream, options);
+ yield* convertStreamToAsyncIterableIterator(recordStream);
+ }
  }
  Object.defineProperties(parseUint8ArrayStream, {
  toArray: {
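The new generator body above only creates a `WorkerSession` when the engine config asks for a worker with stream transfer, and always releases it through `Symbol.dispose` in a `finally` block. A stripped-down sketch of that acquire-and-dispose pattern, using a stand-in session interface rather than the library's actual `WorkerSession` API:

```ts
// Stand-in for a disposable resource such as the WorkerSession used above;
// Symbol.dispose assumes a runtime (or polyfill) with explicit resource
// management support.
interface DisposableSession {
  records(): AsyncIterable<Record<string, string>>;
  [Symbol.dispose](): void;
}

async function* consumeWithCleanup(
  session: DisposableSession,
): AsyncIterableIterator<Record<string, string>> {
  try {
    // Delegate to the session-backed iterator while the session is alive.
    yield* session.records();
  } finally {
    // Runs on normal completion, on a thrown error, and when the caller
    // breaks out of a for await...of loop early.
    session[Symbol.dispose]();
  }
}
```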
@@ -1 +1 @@
- {"version":3,"file":"parseUint8ArrayStream.js","sources":["../src/parseUint8ArrayStream.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV to records.\n * This function is for parsing a binary stream.\n *\n * @category Middle-level API\n * @remarks\n * If you want to parse a string, use {@link parseStringStream}.\n * @param stream CSV string to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseUint8ArrayStream.toArray} function.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseUint8ArrayStream(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseUint8ArrayStreamToStream(stream, options);\n return convertStreamToAsyncIterableIterator(recordStream);\n}\n\nexport declare namespace parseUint8ArrayStream {\n /**\n * Parse CSV binary to array of records,\n * ideal for smaller data sets.\n *\n * @returns Array of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseUint8ArrayStream.toArray(stream);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV binary to array of records.\n *\n * @returns Stream of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseUint8ArrayStream.toStream(stream)\n * .pipeTo(new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n}\nObject.defineProperties(parseUint8ArrayStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseUint8ArrayStreamToStream,\n 
},\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;AAwCgB,SAAA,qBAAA,CACd,QACA,OAC0C,EAAA;AAC1C,EAAM,MAAA,YAAA,GAAe,6BAA8B,CAAA,MAAA,EAAQ,OAAO,CAAA;AAClE,EAAA,OAAO,qCAAqC,YAAY,CAAA;AAC1D;AAkEA,MAAA,CAAO,iBAAiB,qBAAuB,EAAA;AAAA,EAC7C,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,QAAU,EAAA;AAAA,IACR,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA;AAAA;AAEX,CAAC,CAAA;;;;"}
+ {"version":3,"file":"parseUint8ArrayStream.js","sources":["../src/parseUint8ArrayStream.ts"],"sourcesContent":["import type {\n CSVRecord,\n ParseBinaryOptions,\n ParseOptions,\n} from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER } from \"./constants.ts\";\nimport { InternalEngineConfig } from \"./execution/InternalEngineConfig.ts\";\nimport { WorkerSession } from \"./execution/worker/helpers/WorkerSession.ts\";\nimport { executeWithWorkerStrategy } from \"./execution/worker/strategies/WorkerStrategySelector.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV to records.\n * This function is for parsing a binary stream.\n *\n * @category Middle-level API\n * @remarks\n * If you want to parse a string, use {@link parseStringStream}.\n * @param stream CSV string to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseUint8ArrayStream.toArray} function.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseUint8ArrayStream(stream)) {\n * console.log(record);\n * }\n * ```\n */\nexport async function* parseUint8ArrayStream<\n Header extends ReadonlyArray<string>,\n Delimiter extends string = DEFAULT_DELIMITER,\n Quotation extends string = '\"',\n>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n // Parse engine configuration\n const engineConfig = new InternalEngineConfig(options?.engine);\n\n // Note: Worker execution with ReadableStream requires TransferableStream support\n // which is not available in Safari. For now, always use main thread execution.\n // TODO: Implement stream-transfer strategy for browsers that support it\n if (engineConfig.hasWorker() && engineConfig.hasStreamTransfer()) {\n // Worker execution with stream-transfer strategy\n const session = engineConfig.workerPool\n ? 
await WorkerSession.create({\n workerPool: engineConfig.workerPool,\n workerURL: engineConfig.workerURL,\n })\n : null;\n\n try {\n yield* executeWithWorkerStrategy<CSVRecord<Header>>(\n stream,\n options as\n | ParseOptions<Header>\n | ParseBinaryOptions<Header>\n | undefined,\n session,\n engineConfig,\n );\n } finally {\n session?.[Symbol.dispose]();\n }\n } else {\n // Main thread execution (default for streams)\n const recordStream = parseUint8ArrayStreamToStream(stream, options);\n yield* convertStreamToAsyncIterableIterator(recordStream);\n }\n}\n\nexport declare namespace parseUint8ArrayStream {\n /**\n * Parse CSV binary to array of records,\n * ideal for smaller data sets.\n *\n * @returns Array of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseUint8ArrayStream.toArray(stream);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV binary to array of records.\n *\n * @returns Stream of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseUint8ArrayStream.toStream(stream)\n * .pipeTo(new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseUint8ArrayStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseUint8ArrayStreamToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;;AA+CA,gBAAuB,qBAAA,CAKrB,QACA,OAAA,EAC0C;AAE1C,EAAA,MAAM,YAAA,GAAe,IAAI,oBAAA,CAAqB,OAAA,EAAS,MAAM,CAAA;AAK7D,EAAA,IAAI,YAAA,CAAa,SAAA,EAAU,IAAK,YAAA,CAAa,mBAAkB,EAAG;AAEhE,IAAA,MAAM,OAAA,GAAU,YAAA,CAAa,UAAA,GACzB,MAAM,cAAc,MAAA,CAAO;AAAA,MACzB,YAAY,YAAA,CAAa,UAAA;AAAA,MACzB,WAAW,YAAA,CAAa;AAAA,KACzB,CAAA,GACD,IAAA;AAEJ,IAAA,IAAI;AACF,MAAA,OAAO,yBAAA;AAAA,QACL,MAAA;AAAA,QACA,OAAA;AAAA,QAIA,OAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF,CAAA,SAAE;AACA,MAAA,OAAA,GAAU,MAAA,CAAO,OAAO,CAAA,EAAE;AAAA,IAC5B;AAAA,EACF,CAAA,MAAO;AAEL,IAAA,MAAM,YAAA,GAAe,6BAAA,CAA8B,MAAA,EAAQ,OAAO,CAAA;AAClE,IAAA,OAAO,qCAAqC,YAAY,CAAA;AAAA,EAC1D;AACF;AAkEA,MAAA,CAAO,iBAAiB,qBAAA,EAAuB;AAAA,EAC7C,OAAA,EAAS;AAAA,IACP,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,QAAA,EAAU;AAAA,IACR,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA;AAEX,CAAC,CAAA;;;;"}
@@ -0,0 +1,2 @@
+ export * from './execution/worker/parseUint8ArrayStreamInWorker.node'
+ export {}
@@ -0,0 +1,2 @@
+ export * from './execution/worker/parseUint8ArrayStreamInWorker.web'
+ export {}
@@ -1,2 +1,3 @@
  import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
- export declare function parseUint8ArrayStreamToStream<Header extends readonly string[]>(stream: ReadableStream<Uint8Array>, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>>;
+ import { DEFAULT_DELIMITER } from './constants.ts';
+ export declare function parseUint8ArrayStreamToStream<Header extends readonly string[], Delimiter extends string = DEFAULT_DELIMITER, Quotation extends string = '"'>(stream: ReadableStream<Uint8Array>, options?: ParseBinaryOptions<Header, Delimiter, Quotation>): ReadableStream<CSVRecord<Header>>;
@@ -3,16 +3,22 @@ import { CSVRecordAssemblerTransformer } from './CSVRecordAssemblerTransformer.j
  import { pipeline } from './utils/pipeline.js';

  function parseUint8ArrayStreamToStream(stream, options) {
- const { charset, fatal, ignoreBOM, decomposition } = options ?? {};
- return decomposition ? pipeline(
+ const { charset, fatal, ignoreBOM, decompression } = options ?? {};
+ return decompression ? pipeline(
  stream,
- new DecompressionStream(decomposition),
- new TextDecoderStream(charset, { fatal, ignoreBOM }),
+ new DecompressionStream(decompression),
+ new TextDecoderStream(charset, {
+ fatal,
+ ignoreBOM
+ }),
  new CSVLexerTransformer(options),
  new CSVRecordAssemblerTransformer(options)
  ) : pipeline(
  stream,
- new TextDecoderStream(charset, { fatal, ignoreBOM }),
+ new TextDecoderStream(charset, {
+ fatal,
+ ignoreBOM
+ }),
  new CSVLexerTransformer(options),
  new CSVRecordAssemblerTransformer(options)
  );
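The hunk above renames the `decomposition` option to `decompression` and, when it is set, routes the byte stream through `DecompressionStream` before text decoding. A hedged usage sketch, assuming the fetched payload itself is gzip-compressed (not transport-level `Content-Encoding`); the URL is a placeholder:

```ts
import { parseUint8ArrayStream } from "web-csv-toolbox";

// Placeholder endpoint serving a gzip-compressed CSV file.
const response = await fetch("https://example.com/data.csv.gz");

for await (const record of parseUint8ArrayStream(response.body!, {
  // With decompression set, the stream is piped through
  // new DecompressionStream("gzip") before TextDecoderStream.
  decompression: "gzip",
  charset: "utf-8",
})) {
  console.log(record);
}
```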
@@ -1 +1 @@
- {"version":3,"file":"parseUint8ArrayStreamToStream.js","sources":["../src/parseUint8ArrayStreamToStream.ts"],"sourcesContent":["import { CSVLexerTransformer } from \"./CSVLexerTransformer.ts\";\nimport { CSVRecordAssemblerTransformer } from \"./CSVRecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseUint8ArrayStreamToStream<Header extends readonly string[]>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const { charset, fatal, ignoreBOM, decomposition } = options ?? {};\n return decomposition\n ? pipeline(\n stream,\n new DecompressionStream(decomposition),\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new CSVLexerTransformer(options),\n new CSVRecordAssemblerTransformer(options),\n )\n : pipeline(\n stream,\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new CSVLexerTransformer(options),\n new CSVRecordAssemblerTransformer(options),\n );\n}\n"],"names":[],"mappings":";;;;AAKgB,SAAA,6BAAA,CACd,QACA,OACmC,EAAA;AACnC,EAAA,MAAM,EAAE,OAAS,EAAA,KAAA,EAAO,WAAW,aAAc,EAAA,GAAI,WAAW,EAAC;AACjE,EAAA,OAAO,aACH,GAAA,QAAA;AAAA,IACE,MAAA;AAAA,IACA,IAAI,oBAAoB,aAAa,CAAA;AAAA,IACrC,IAAI,iBAAkB,CAAA,OAAA,EAAS,EAAE,KAAA,EAAO,WAAW,CAAA;AAAA,IACnD,IAAI,oBAAoB,OAAO,CAAA;AAAA,IAC/B,IAAI,8BAA8B,OAAO;AAAA,GAE3C,GAAA,QAAA;AAAA,IACE,MAAA;AAAA,IACA,IAAI,iBAAkB,CAAA,OAAA,EAAS,EAAE,KAAA,EAAO,WAAW,CAAA;AAAA,IACnD,IAAI,oBAAoB,OAAO,CAAA;AAAA,IAC/B,IAAI,8BAA8B,OAAO;AAAA,GAC3C;AACN;;;;"}
+ {"version":3,"file":"parseUint8ArrayStreamToStream.js","sources":["../src/parseUint8ArrayStreamToStream.ts"],"sourcesContent":["import { CSVLexerTransformer } from \"./CSVLexerTransformer.ts\";\nimport { CSVRecordAssemblerTransformer } from \"./CSVRecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"./constants.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseUint8ArrayStreamToStream<\n Header extends readonly string[],\n Delimiter extends string = DEFAULT_DELIMITER,\n Quotation extends string = '\"',\n>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header, Delimiter, Quotation>,\n): ReadableStream<CSVRecord<Header>> {\n const { charset, fatal, ignoreBOM, decompression } = options ?? {};\n return decompression\n ? pipeline(\n stream,\n new DecompressionStream(decompression) as unknown as TransformStream<\n Uint8Array,\n Uint8Array\n >,\n new TextDecoderStream(charset, {\n fatal,\n ignoreBOM,\n }) as unknown as TransformStream<Uint8Array, string>,\n new CSVLexerTransformer(options),\n new CSVRecordAssemblerTransformer(options),\n )\n : pipeline(\n stream,\n new TextDecoderStream(charset, {\n fatal,\n ignoreBOM,\n }) as unknown as TransformStream<Uint8Array, string>,\n new CSVLexerTransformer(options),\n new CSVRecordAssemblerTransformer(options),\n );\n}\n"],"names":[],"mappings":";;;;AAMO,SAAS,6BAAA,CAKd,QACA,OAAA,EACmC;AACnC,EAAA,MAAM,EAAE,OAAA,EAAS,KAAA,EAAO,WAAW,aAAA,EAAc,GAAI,WAAW,EAAC;AACjE,EAAA,OAAO,aAAA,GACH,QAAA;AAAA,IACE,MAAA;AAAA,IACA,IAAI,oBAAoB,aAAa,CAAA;AAAA,IAIrC,IAAI,kBAAkB,OAAA,EAAS;AAAA,MAC7B,KAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,IACD,IAAI,oBAAoB,OAAO,CAAA;AAAA,IAC/B,IAAI,8BAA8B,OAAO;AAAA,GAC3C,GACA,QAAA;AAAA,IACE,MAAA;AAAA,IACA,IAAI,kBAAkB,OAAA,EAAS;AAAA,MAC7B,KAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,IACD,IAAI,oBAAoB,OAAO,CAAA;AAAA,IAC/B,IAAI,8BAA8B,OAAO;AAAA,GAC3C;AACN;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"convertBinaryToString.js","sources":["../../src/utils/convertBinaryToString.ts"],"sourcesContent":["import type { BinaryOptions } from \"../common/types.ts\";\n\n/**\n * Default maximum binary size in bytes (100MB).\n */\nconst DEFAULT_MAX_BINARY_SIZE = 100 * 1024 * 1024;\n\n/**\n * Converts a binary string to a string.\n *\n * @param binary - The binary string to convert.\n * @param options - The options for parsing the binary string.\n * @returns The converted string.\n * @throws {RangeError} The given charset is not supported or binary size exceeds the limit.\n * @throws {TypeError} The encoded data was not valid.\n */\nexport function convertBinaryToString(\n binary: Uint8Array | ArrayBuffer,\n options: BinaryOptions,\n): string {\n const maxBinarySize = options?.maxBinarySize ?? DEFAULT_MAX_BINARY_SIZE;\n\n // Validate maxBinarySize\n if (\n !(\n Number.isFinite(maxBinarySize) ||\n maxBinarySize === Number.POSITIVE_INFINITY\n ) ||\n (Number.isFinite(maxBinarySize) && maxBinarySize < 0)\n ) {\n throw new RangeError(\n \"maxBinarySize must be a non-negative number or Number.POSITIVE_INFINITY\",\n );\n }\n\n // Check binary size\n if (Number.isFinite(maxBinarySize) && binary.byteLength > maxBinarySize) {\n throw new RangeError(\n `Binary size (${binary.byteLength} bytes) exceeded maximum allowed size of ${maxBinarySize} bytes`,\n );\n }\n\n return new TextDecoder(options?.charset, {\n ignoreBOM: options?.ignoreBOM,\n fatal: options?.fatal,\n }).decode(binary instanceof ArrayBuffer ? new Uint8Array(binary) : binary);\n}\n"],"names":[],"mappings":"AAKA,MAAM,uBAAA,GAA0B,MAAM,IAAO,GAAA,IAAA;AAW7B,SAAA,qBAAA,CACd,QACA,OACQ,EAAA;AACR,EAAM,MAAA,aAAA,GAAgB,SAAS,aAAiB,IAAA,uBAAA;AAGhD,EAAA,IACE,EACE,MAAA,CAAO,QAAS,CAAA,aAAa,CAC7B,IAAA,aAAA,KAAkB,MAAO,CAAA,iBAAA,CAAA,IAE1B,MAAO,CAAA,QAAA,CAAS,aAAa,CAAA,IAAK,gBAAgB,CACnD,EAAA;AACA,IAAA,MAAM,IAAI,UAAA;AAAA,MACR;AAAA,KACF;AAAA;AAIF,EAAA,IAAI,OAAO,QAAS,CAAA,aAAa,CAAK,IAAA,MAAA,CAAO,aAAa,aAAe,EAAA;AACvE,IAAA,MAAM,IAAI,UAAA;AAAA,MACR,CAAgB,aAAA,EAAA,MAAA,CAAO,UAAU,CAAA,yCAAA,EAA4C,aAAa,CAAA,MAAA;AAAA,KAC5F;AAAA;AAGF,EAAO,OAAA,IAAI,WAAY,CAAA,OAAA,EAAS,OAAS,EAAA;AAAA,IACvC,WAAW,OAAS,EAAA,SAAA;AAAA,IACpB,OAAO,OAAS,EAAA;AAAA,GACjB,EAAE,MAAO,CAAA,MAAA,YAAkB,cAAc,IAAI,UAAA,CAAW,MAAM,CAAA,GAAI,MAAM,CAAA;AAC3E;;;;"}
+ {"version":3,"file":"convertBinaryToString.js","sources":["../../src/utils/convertBinaryToString.ts"],"sourcesContent":["import type { BinaryOptions } from \"../common/types.ts\";\n\n/**\n * Default maximum binary size in bytes (100MB).\n */\nconst DEFAULT_MAX_BINARY_SIZE = 100 * 1024 * 1024;\n\n/**\n * Converts a binary string to a string.\n *\n * @param binary - The binary string to convert.\n * @param options - The options for parsing the binary string.\n * @returns The converted string.\n * @throws {RangeError} The given charset is not supported or binary size exceeds the limit.\n * @throws {TypeError} The encoded data was not valid.\n */\nexport function convertBinaryToString(\n binary: Uint8Array | ArrayBuffer,\n options: BinaryOptions,\n): string {\n const maxBinarySize = options?.maxBinarySize ?? DEFAULT_MAX_BINARY_SIZE;\n\n // Validate maxBinarySize\n if (\n !(\n Number.isFinite(maxBinarySize) ||\n maxBinarySize === Number.POSITIVE_INFINITY\n ) ||\n (Number.isFinite(maxBinarySize) && maxBinarySize < 0)\n ) {\n throw new RangeError(\n \"maxBinarySize must be a non-negative number or Number.POSITIVE_INFINITY\",\n );\n }\n\n // Check binary size\n if (Number.isFinite(maxBinarySize) && binary.byteLength > maxBinarySize) {\n throw new RangeError(\n `Binary size (${binary.byteLength} bytes) exceeded maximum allowed size of ${maxBinarySize} bytes`,\n );\n }\n\n return new TextDecoder(options?.charset, {\n ignoreBOM: options?.ignoreBOM,\n fatal: options?.fatal,\n }).decode(binary instanceof ArrayBuffer ? new Uint8Array(binary) : binary);\n}\n"],"names":[],"mappings":"AAKA,MAAM,uBAAA,GAA0B,MAAM,IAAA,GAAO,IAAA;AAWtC,SAAS,qBAAA,CACd,QACA,OAAA,EACQ;AACR,EAAA,MAAM,aAAA,GAAgB,SAAS,aAAA,IAAiB,uBAAA;AAGhD,EAAA,IACE,EACE,MAAA,CAAO,QAAA,CAAS,aAAa,CAAA,IAC7B,aAAA,KAAkB,MAAA,CAAO,iBAAA,CAAA,IAE1B,MAAA,CAAO,QAAA,CAAS,aAAa,CAAA,IAAK,gBAAgB,CAAA,EACnD;AACA,IAAA,MAAM,IAAI,UAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AAGA,EAAA,IAAI,OAAO,QAAA,CAAS,aAAa,CAAA,IAAK,MAAA,CAAO,aAAa,aAAA,EAAe;AACvE,IAAA,MAAM,IAAI,UAAA;AAAA,MACR,CAAA,aAAA,EAAgB,MAAA,CAAO,UAAU,CAAA,yCAAA,EAA4C,aAAa,CAAA,MAAA;AAAA,KAC5F;AAAA,EACF;AAEA,EAAA,OAAO,IAAI,WAAA,CAAY,OAAA,EAAS,OAAA,EAAS;AAAA,IACvC,WAAW,OAAA,EAAS,SAAA;AAAA,IACpB,OAAO,OAAA,EAAS;AAAA,GACjB,EAAE,MAAA,CAAO,MAAA,YAAkB,cAAc,IAAI,UAAA,CAAW,MAAM,CAAA,GAAI,MAAM,CAAA;AAC3E;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"convertIterableIteratorToAsync.js","sources":["../../src/utils/convertIterableIteratorToAsync.ts"],"sourcesContent":["export function convertIterableIteratorToAsync<T>(\n iterator: IterableIterator<T>,\n): AsyncIterableIterator<T> {\n return {\n async next() {\n return iterator.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n}\n"],"names":[],"mappings":"AAAO,SAAS,+BACd,QAC0B,EAAA;AAC1B,EAAO,OAAA;AAAA,IACL,MAAM,IAAO,GAAA;AACX,MAAA,OAAO,SAAS,IAAK,EAAA;AAAA,KACvB;AAAA,IACA,CAAC,MAAO,CAAA,aAAa,CAAI,GAAA;AACvB,MAAO,OAAA,IAAA;AAAA;AACT,GACF;AACF;;;;"}
+ {"version":3,"file":"convertIterableIteratorToAsync.js","sources":["../../src/utils/convertIterableIteratorToAsync.ts"],"sourcesContent":["export function convertIterableIteratorToAsync<T>(\n iterator: IterableIterator<T>,\n): AsyncIterableIterator<T> {\n return {\n async next() {\n return iterator.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n}\n"],"names":[],"mappings":"AAAO,SAAS,+BACd,QAAA,EAC0B;AAC1B,EAAA,OAAO;AAAA,IACL,MAAM,IAAA,GAAO;AACX,MAAA,OAAO,SAAS,IAAA,EAAK;AAAA,IACvB,CAAA;AAAA,IACA,CAAC,MAAA,CAAO,aAAa,CAAA,GAAI;AACvB,MAAA,OAAO,IAAA;AAAA,IACT;AAAA,GACF;AACF;;;;"}
@@ -2,7 +2,7 @@ function convertStreamToAsyncIterableIterator(stream) {
  if (Symbol.asyncIterator in stream && typeof stream[Symbol.asyncIterator] === "function") {
  return stream[Symbol.asyncIterator]();
  }
- return async function* () {
+ return (async function* () {
  const reader = stream.getReader();
  let completed = false;
  let errored = false;
@@ -27,7 +27,7 @@ function convertStreamToAsyncIterableIterator(stream) {
  }
  reader.releaseLock();
  }
- }();
+ })();
  }

  export { convertStreamToAsyncIterableIterator };
@@ -1 +1 @@
- {"version":3,"file":"convertStreamToAsyncIterableIterator.js","sources":["../../src/utils/convertStreamToAsyncIterableIterator.ts"],"sourcesContent":["/**\n * Converts a ReadableStream to an AsyncIterableIterator.\n *\n * This function preferentially uses the native async iteration support\n * and falls back to manual reader-based iteration for environments that\n * don't support it (primarily Safari as of 2025).\n *\n * @template T - The type of values in the stream\n * @param stream - The ReadableStream to convert\n * @returns An AsyncIterableIterator that yields values from the stream\n *\n * @example\n * ```ts\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue('chunk1');\n * controller.enqueue('chunk2');\n * controller.close();\n * }\n * });\n *\n * for await (const chunk of convertStreamToAsyncIterableIterator(stream)) {\n * console.log(chunk);\n * }\n * ```\n */\nexport function convertStreamToAsyncIterableIterator<T>(\n stream: ReadableStream<T>,\n): AsyncIterableIterator<T> {\n // Use native async iteration if available\n // Check both that the symbol exists and that it's a function\n if (\n Symbol.asyncIterator in stream &&\n typeof (stream as any)[Symbol.asyncIterator] === \"function\"\n ) {\n // ReadableStream is AsyncIterable in modern environments\n // Cast to AsyncIterableIterator since the native iterator is compatible\n return (stream as AsyncIterable<T>)[\n Symbol.asyncIterator\n ]() as AsyncIterableIterator<T>;\n }\n\n // TODO: Once Safari supports ReadableStream async iteration, this fallback\n // may no longer be necessary and this entire function could be removed in favor\n // of using ReadableStream directly as an AsyncIterable.\n // Track Safari support: https://bugs.webkit.org/show_bug.cgi?id=223619\n\n // Fallback for Safari\n return (async function* () {\n const reader = stream.getReader();\n let completed = false;\n let errored = false;\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n completed = true;\n break;\n }\n yield value;\n }\n } catch (error) {\n errored = true;\n // Cancel the stream on error to release underlying resources\n // and signal to the source that no more data is needed\n await reader.cancel(error).catch(() => {\n // Ignore cancel errors as we're already in an error state\n });\n throw error;\n } finally {\n // Cancel on early termination (e.g., break in for-await-of)\n if (!completed && !errored) {\n await reader.cancel().catch(() => {\n // Ignore cancel errors on early termination\n });\n }\n reader.releaseLock();\n }\n })();\n}\n"],"names":[],"mappings":"AA0BO,SAAS,qCACd,MAC0B,EAAA;AAG1B,EACE,IAAA,MAAA,CAAO,iBAAiB,MACxB,IAAA,OAAQ,OAAe,MAAO,CAAA,aAAa,MAAM,UACjD,EAAA;AAGA,IAAQ,OAAA,MAAA,CACN,MAAO,CAAA,aACT,CAAE,EAAA;AAAA;AASJ,EAAA,OAAQ,mBAAmB;AACzB,IAAM,MAAA,MAAA,GAAS,OAAO,SAAU,EAAA;AAChC,IAAA,IAAI,SAAY,GAAA,KAAA;AAChB,IAAA,IAAI,OAAU,GAAA,KAAA;AAEd,IAAI,IAAA;AACF,MAAA,OAAO,IAAM,EAAA;AACX,QAAA,MAAM,EAAE,IAAM,EAAA,KAAA,EAAU,GAAA,MAAM,OAAO,IAAK,EAAA;AAC1C,QAAA,IAAI,IAAM,EAAA;AACR,UAAY,SAAA,GAAA,IAAA;AACZ,UAAA;AAAA;AAEF,QAAM,MAAA,KAAA;AAAA;AACR,aACO,KAAO,EAAA;AACd,MAAU,OAAA,GAAA,IAAA;AAGV,MAAA,MAAM,MAAO,CAAA,MAAA,CAAO,KAAK,CAAA,CAAE,MAAM,MAAM;AAAA,OAEtC,CAAA;AACD,MAAM,MAAA,KAAA;AAAA,KACN,SAAA;AAEA,MAAI,IAAA,CAAC,SAAa,IAAA,CAAC,OAAS,EAAA;AAC1B,QAAA,MAAM,MAAO,CAAA,MAAA,EAAS,CAAA,KAAA,CAAM,MAAM;AAAA,SAEjC,CAAA;AAAA;AAEH,MAAA,MAAA,CAAO,WAAY,EAAA;AAAA;AACrB,GACC,EAAA;AACL;;;;"}
+ {"version":3,"file":"convertStreamToAsyncIterableIterator.js","sources":["../../src/utils/convertStreamToAsyncIterableIterator.ts"],"sourcesContent":["/**\n * Converts a ReadableStream to an AsyncIterableIterator.\n *\n * This function preferentially uses the native async iteration support\n * and falls back to manual reader-based iteration for environments that\n * don't support it (primarily Safari as of 2025).\n *\n * @template T - The type of values in the stream\n * @param stream - The ReadableStream to convert\n * @returns An AsyncIterableIterator that yields values from the stream\n *\n * @example\n * ```ts\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue('chunk1');\n * controller.enqueue('chunk2');\n * controller.close();\n * }\n * });\n *\n * for await (const chunk of convertStreamToAsyncIterableIterator(stream)) {\n * console.log(chunk);\n * }\n * ```\n */\nexport function convertStreamToAsyncIterableIterator<T>(\n stream: ReadableStream<T>,\n): AsyncIterableIterator<T> {\n // Use native async iteration if available\n // Check both that the symbol exists and that it's a function\n if (\n Symbol.asyncIterator in stream &&\n typeof (stream as any)[Symbol.asyncIterator] === \"function\"\n ) {\n // ReadableStream is AsyncIterable in modern environments\n // Cast to AsyncIterableIterator since the native iterator is compatible\n return (stream as AsyncIterable<T>)[\n Symbol.asyncIterator\n ]() as AsyncIterableIterator<T>;\n }\n\n // TODO: Once Safari supports ReadableStream async iteration, this fallback\n // may no longer be necessary and this entire function could be removed in favor\n // of using ReadableStream directly as an AsyncIterable.\n // Track Safari support: https://bugs.webkit.org/show_bug.cgi?id=223619\n\n // Fallback for Safari\n return (async function* () {\n const reader = stream.getReader();\n let completed = false;\n let errored = false;\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n completed = true;\n break;\n }\n yield value;\n }\n } catch (error) {\n errored = true;\n // Cancel the stream on error to release underlying resources\n // and signal to the source that no more data is needed\n await reader.cancel(error).catch(() => {\n // Ignore cancel errors as we're already in an error state\n });\n throw error;\n } finally {\n // Cancel on early termination (e.g., break in for-await-of)\n if (!completed && !errored) {\n await reader.cancel().catch(() => {\n // Ignore cancel errors on early termination\n });\n }\n reader.releaseLock();\n }\n })();\n}\n"],"names":[],"mappings":"AA0BO,SAAS,qCACd,MAAA,EAC0B;AAG1B,EAAA,IACE,MAAA,CAAO,iBAAiB,MAAA,IACxB,OAAQ,OAAe,MAAA,CAAO,aAAa,MAAM,UAAA,EACjD;AAGA,IAAA,OAAQ,MAAA,CACN,MAAA,CAAO,aACT,CAAA,EAAE;AAAA,EACJ;AAQA,EAAA,OAAA,CAAQ,mBAAmB;AACzB,IAAA,MAAM,MAAA,GAAS,OAAO,SAAA,EAAU;AAChC,IAAA,IAAI,SAAA,GAAY,KAAA;AAChB,IAAA,IAAI,OAAA,GAAU,KAAA;AAEd,IAAA,IAAI;AACF,MAAA,OAAO,IAAA,EAAM;AACX,QAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAC1C,QAAA,IAAI,IAAA,EAAM;AACR,UAAA,SAAA,GAAY,IAAA;AACZ,UAAA;AAAA,QACF;AACA,QAAA,MAAM,KAAA;AAAA,MACR;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,GAAU,IAAA;AAGV,MAAA,MAAM,MAAA,CAAO,MAAA,CAAO,KAAK,CAAA,CAAE,MAAM,MAAM;AAAA,MAEvC,CAAC,CAAA;AACD,MAAA,MAAM,KAAA;AAAA,IACR,CAAA,SAAE;AAEA,MAAA,IAAI,CAAC,SAAA,IAAa,CAAC,OAAA,EAAS;AAC1B,QAAA,MAAM,MAAA,CAAO,MAAA,EAAO,CAAE,KAAA,CAAM,MAAM;AAAA,QAElC,CAAC,CAAA;AAAA,MACH;AACA,MAAA,MAAA,CAAO,WAAA,EAAY;AAAA,IACrB;AAAA,EACF,CAAA,GAAG;AACL;;;;"}
@@ -1 +1 @@
- export declare function convertThisAsyncIterableIteratorToArray<O, T extends (...args: any[]) => AsyncGenerator<O>>(this: T, ...args: Parameters<T>): Promise<O[]>;
+ export declare function convertThisAsyncIterableIteratorToArray<O, T extends (...args: any[]) => AsyncIterableIterator<O>>(this: T, ...args: Parameters<T>): Promise<O[]>;
@@ -1 +1 @@
- {"version":3,"file":"convertThisAsyncIterableIteratorToArray.js","sources":["../../src/utils/convertThisAsyncIterableIteratorToArray.ts"],"sourcesContent":["export async function convertThisAsyncIterableIteratorToArray<\n O,\n T extends (...args: any[]) => AsyncGenerator<O>,\n>(this: T, ...args: Parameters<T>): Promise<O[]> {\n const rows: O[] = [];\n for await (const row of this(...args)) {\n rows.push(row);\n }\n return rows;\n}\n"],"names":[],"mappings":"AAAA,eAAsB,2CAGR,IAAmC,EAAA;AAC/C,EAAA,MAAM,OAAY,EAAC;AACnB,EAAA,WAAA,MAAiB,GAAO,IAAA,IAAA,CAAK,GAAG,IAAI,CAAG,EAAA;AACrC,IAAA,IAAA,CAAK,KAAK,GAAG,CAAA;AAAA;AAEf,EAAO,OAAA,IAAA;AACT;;;;"}
+ {"version":3,"file":"convertThisAsyncIterableIteratorToArray.js","sources":["../../src/utils/convertThisAsyncIterableIteratorToArray.ts"],"sourcesContent":["export async function convertThisAsyncIterableIteratorToArray<\n O,\n T extends (...args: any[]) => AsyncIterableIterator<O>,\n>(this: T, ...args: Parameters<T>): Promise<O[]> {\n const rows: O[] = [];\n for await (const row of this(...args)) {\n rows.push(row);\n }\n return rows;\n}\n"],"names":[],"mappings":"AAAA,eAAsB,2CAGR,IAAA,EAAmC;AAC/C,EAAA,MAAM,OAAY,EAAC;AACnB,EAAA,WAAA,MAAiB,GAAA,IAAO,IAAA,CAAK,GAAG,IAAI,CAAA,EAAG;AACrC,IAAA,IAAA,CAAK,KAAK,GAAG,CAAA;AAAA,EACf;AACA,EAAA,OAAO,IAAA;AACT;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"escapeRegExp.js","sources":["../../src/utils/escapeRegExp.ts"],"sourcesContent":["/**\n * Escape a string for use in a regular expression.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions#escaping Regular expressions#Escaping | MDN}\n * @param v string to escape\n * @returns escaped string\n */\nexport function escapeRegExp(v: string) {\n return v.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n}\n"],"names":[],"mappings":"AAOO,SAAS,aAAa,CAAW,EAAA;AACtC,EAAO,OAAA,CAAA,CAAE,OAAQ,CAAA,qBAAA,EAAuB,MAAM,CAAA;AAChD;;;;"}
+ {"version":3,"file":"escapeRegExp.js","sources":["../../src/utils/escapeRegExp.ts"],"sourcesContent":["/**\n * Escape a string for use in a regular expression.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions#escaping Regular expressions#Escaping | MDN}\n * @param v string to escape\n * @returns escaped string\n */\nexport function escapeRegExp(v: string) {\n return v.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n}\n"],"names":[],"mappings":"AAOO,SAAS,aAAa,CAAA,EAAW;AACtC,EAAA,OAAO,CAAA,CAAE,OAAA,CAAQ,qBAAA,EAAuB,MAAM,CAAA;AAChD;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"parseMime.js","sources":["../../src/utils/parseMime.ts"],"sourcesContent":["export interface ParseMimeResult {\n type: string;\n parameters: {\n [key: string]: string;\n };\n}\n\nexport function parseMime(contentType: string) {\n const [type, ...parameters] = contentType.split(\";\");\n const result: ParseMimeResult = {\n type: type.trim(),\n parameters: {},\n };\n for (const paramator of parameters) {\n const [key, value] = paramator.split(\"=\");\n result.parameters[key.trim()] = value.trim();\n }\n return result;\n}\n"],"names":[],"mappings":"AAOO,SAAS,UAAU,WAAqB,EAAA;AAC7C,EAAA,MAAM,CAAC,IAAM,EAAA,GAAG,UAAU,CAAI,GAAA,WAAA,CAAY,MAAM,GAAG,CAAA;AACnD,EAAA,MAAM,MAA0B,GAAA;AAAA,IAC9B,IAAA,EAAM,KAAK,IAAK,EAAA;AAAA,IAChB,YAAY;AAAC,GACf;AACA,EAAA,KAAA,MAAW,aAAa,UAAY,EAAA;AAClC,IAAA,MAAM,CAAC,GAAK,EAAA,KAAK,CAAI,GAAA,SAAA,CAAU,MAAM,GAAG,CAAA;AACxC,IAAA,MAAA,CAAO,WAAW,GAAI,CAAA,IAAA,EAAM,CAAA,GAAI,MAAM,IAAK,EAAA;AAAA;AAE7C,EAAO,OAAA,MAAA;AACT;;;;"}
+ {"version":3,"file":"parseMime.js","sources":["../../src/utils/parseMime.ts"],"sourcesContent":["export interface ParseMimeResult {\n type: string;\n parameters: {\n [key: string]: string;\n };\n}\n\nexport function parseMime(contentType: string) {\n const [type, ...parameters] = contentType.split(\";\");\n const result: ParseMimeResult = {\n type: type.trim(),\n parameters: {},\n };\n for (const paramator of parameters) {\n const [key, value] = paramator.split(\"=\");\n result.parameters[key.trim()] = value.trim();\n }\n return result;\n}\n"],"names":[],"mappings":"AAOO,SAAS,UAAU,WAAA,EAAqB;AAC7C,EAAA,MAAM,CAAC,IAAA,EAAM,GAAG,UAAU,CAAA,GAAI,WAAA,CAAY,MAAM,GAAG,CAAA;AACnD,EAAA,MAAM,MAAA,GAA0B;AAAA,IAC9B,IAAA,EAAM,KAAK,IAAA,EAAK;AAAA,IAChB,YAAY;AAAC,GACf;AACA,EAAA,KAAA,MAAW,aAAa,UAAA,EAAY;AAClC,IAAA,MAAM,CAAC,GAAA,EAAK,KAAK,CAAA,GAAI,SAAA,CAAU,MAAM,GAAG,CAAA;AACxC,IAAA,MAAA,CAAO,WAAW,GAAA,CAAI,IAAA,EAAM,CAAA,GAAI,MAAM,IAAA,EAAK;AAAA,EAC7C;AACA,EAAA,OAAO,MAAA;AACT;;;;"}
@@ -1 +1 @@
- {"version":3,"file":"pipeline.js","sources":["../../src/utils/pipeline.ts"],"sourcesContent":["export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n )\n .catch((error) => controller.error(error)))();\n },\n });\n}\n"],"names":["stream"],"mappings":"AAkBgB,SAAA,QAAA,CACd,WACG,YACgB,EAAA;AACnB,EAAA,OAAO,IAAI,cAAe,CAAA;AAAA,IACxB,KAAA,EAAO,CAAC,UAAe,KAAA;AACrB,MAAA,CAAC,MACC,YACG,CAAA,MAAA;AAAA,QACC,CAACA,OAAAA,EAAQ,WAAgBA,KAAAA,OAAAA,CAAO,YAAY,WAAW,CAAA;AAAA,QACvD;AAAA,OAED,CAAA,MAAA;AAAA,QACC,IAAI,cAAe,CAAA;AAAA,UACjB,KAAO,EAAA,CAAC,CAAM,KAAA,UAAA,CAAW,QAAQ,CAAC,CAAA;AAAA,UAClC,KAAA,EAAO,MAAM,UAAA,CAAW,KAAM;AAAA,SAC/B;AAAA,OACH,CACC,MAAM,CAAC,KAAA,KAAU,WAAW,KAAM,CAAA,KAAK,CAAC,CAAG,GAAA;AAAA;AAClD,GACD,CAAA;AACH;;;;"}
+ {"version":3,"file":"pipeline.js","sources":["../../src/utils/pipeline.ts"],"sourcesContent":["export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n )\n .catch((error) => controller.error(error)))();\n },\n });\n}\n"],"names":["stream"],"mappings":"AAkBO,SAAS,QAAA,CACd,WACG,YAAA,EACgB;AACnB,EAAA,OAAO,IAAI,cAAA,CAAe;AAAA,IACxB,KAAA,EAAO,CAAC,UAAA,KAAe;AACrB,MAAA,CAAC,MACC,YAAA,CACG,MAAA;AAAA,QACC,CAACA,OAAAA,EAAQ,WAAA,KAAgBA,OAAAA,CAAO,YAAY,WAAW,CAAA;AAAA,QACvD;AAAA,OACF,CACC,MAAA;AAAA,QACC,IAAI,cAAA,CAAe;AAAA,UACjB,KAAA,EAAO,CAAC,CAAA,KAAM,UAAA,CAAW,QAAQ,CAAC,CAAA;AAAA,UAClC,KAAA,EAAO,MAAM,UAAA,CAAW,KAAA;AAAM,SAC/B;AAAA,OACH,CACC,MAAM,CAAC,KAAA,KAAU,WAAW,KAAA,CAAM,KAAK,CAAC,CAAA,GAAG;AAAA,IAClD;AAAA,GACD,CAAA;AACH;;;;"}
@@ -13,3 +13,7 @@ export * from './parseStringToArraySyncWASM.ts';
  export * from './parseUint8ArrayStream.ts';
  export * from './CSVRecordAssembler.ts';
  export * from './CSVRecordAssemblerTransformer.ts';
+ export type { WorkerPool } from './execution/worker/helpers/WorkerPool.ts';
+ export { ReusableWorkerPool } from './execution/worker/helpers/ReusableWorkerPool.ts';
+ export * from './execution/worker/helpers/WorkerSession.ts';
+ export * from './execution/EnginePresets.ts';
@@ -12,4 +12,7 @@ export { parseStringToArraySyncWASM } from './parseStringToArraySyncWASM.js';
  export { parseUint8ArrayStream } from './parseUint8ArrayStream.js';
  export { CSVRecordAssembler } from './CSVRecordAssembler.js';
  export { CSVRecordAssemblerTransformer } from './CSVRecordAssemblerTransformer.js';
+ export { ReusableWorkerPool } from './execution/worker/helpers/ReusableWorkerPool.js';
+ export { WorkerSession } from './execution/worker/helpers/WorkerSession.js';
+ export { EnginePresets } from './execution/EnginePresets.js';
  //# sourceMappingURL=web-csv-toolbox.js.map
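The export hunks above promote the worker-pool and engine helpers to top-level exports. A minimal import sketch; their call signatures live in the newly added declaration files under `dist/execution` and are not reproduced in this excerpt:

```ts
// All three names are re-exported from the package root in this version.
import {
  EnginePresets,
  ReusableWorkerPool,
  WorkerSession,
} from "web-csv-toolbox";

// Usage is intentionally not shown here, since the helpers' APIs are defined
// in dist/execution/EnginePresets.d.ts and dist/execution/worker/helpers/*.d.ts.
console.log(typeof EnginePresets, typeof ReusableWorkerPool, typeof WorkerSession);
```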
@@ -1 +1 @@
- {"version":3,"file":"web-csv-toolbox.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;"}
+ {"version":3,"file":"web-csv-toolbox.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;"}
Binary file
@@ -0,0 +1 @@
+ export {}
@@ -0,0 +1 @@
+ export {}