web-csv-toolbox 0.11.0-next-3e76d727a5e0c4f1fbd537e0a89bed474495294b → 0.11.0-next-5d01c3998b1d65f9ecf06ae0cb3ec382001832c5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"web-csv-toolbox.umd.cjs","sources":["../src/common/constants.ts","../src/common/errors.ts","../src/constants.ts","../src/assertCommonOptions.ts","../src/utils/escapeRegExp.ts","../src/Lexer.ts","../src/LexerTransformer.ts","../src/commonParseErrorHandling.ts","../src/RecordAssembler.ts","../src/parseStringToArraySync.ts","../src/utils/convertBinaryToString.ts","../src/parseStringToIterableIterator.ts","../src/parseBinaryToIterableIterator.ts","../src/parseStringToStream.ts","../src/utils/convertThisAsyncIterableIteratorToArray.ts","../src/parseBinary.ts","../src/utils/convertIterableIteratorToAsync.ts","../src/getOptionsFromResponse.ts","../src/utils/parseMime.ts","../src/parseBinaryToArraySync.ts","../src/parseBinaryToStream.ts","../src/RecordAssemblerTransformer.ts","../src/utils/pipeline.ts","../src/parseUint8ArrayStreamToStream.ts","../src/utils/convertStreamToAsyncIterableIterator.ts","../src/parseUint8ArrayStream.ts","../src/parseResponse.ts","../src/parseString.ts","../src/parseStringStreamToStream.ts","../src/parseStringStream.ts","../src/parse.ts","../src/parseResponseToStream.ts","../src/loadWASM.ts","../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["/**\n * FiledDelimiter is a symbol for field delimiter of CSV.\n * @category Constants\n */\nexport const FieldDelimiter = Symbol.for(\"web-csv-toolbox.FieldDelimiter\");\n/**\n * RecordDelimiter is a symbol for record delimiter of CSV.\n * @category Constants\n */\nexport const RecordDelimiter = Symbol.for(\"web-csv-toolbox.RecordDelimiter\");\n/**\n * Field is a symbol for field of CSV.\n * @category Constants\n */\nexport const Field = Symbol.for(\"web-csv-toolbox.Field\");\n","import type { Position } from \"./types.js\";\n\n/**\n * Error class for invalid option errors.\n */\nexport class InvalidOptionError extends Error {\n constructor(message?: string, options?: ErrorOptions) {\n super(message, options);\n this.name = \"InvalidOptionError\";\n }\n}\n\n/**\n * Options for creating a parse error.\n */\nexport interface ParseErrorOptions extends ErrorOptions {\n /**\n * The position where the error occurred.\n */\n position?: Position;\n}\n\n/**\n * Error class for parse errors.\n *\n * @remarks\n * This error is thrown when a parsing error occurs.\n * {@link ParseError} is a subclass of {@link !SyntaxError}.\n *\n * This is in reference to the specification\n * that the error thrown when a parse error occurs in the {@link !JSON.parse} function is {@link !SyntaxError}.\n */\nexport class ParseError extends SyntaxError {\n /**\n * The position where the error occurred.\n */\n public position?: Position;\n\n constructor(message?: string, options?: ParseErrorOptions) {\n super(message, { cause: options?.cause });\n this.name = \"ParseError\";\n this.position = options?.position;\n }\n}\n","export const CR = \"\\r\";\nexport const CRLF = \"\\r\\n\";\nexport const LF = \"\\n\";\n\n/**\n * COMMA is a symbol for comma(,).\n */\nexport const COMMA = \",\";\n\n/**\n * DOUBLE_QUOTE is a symbol for double quote(\").\n */\nexport const DOUBLE_QUOTE = '\"';\n","import { InvalidOptionError } from \"./common/errors.ts\";\nimport type { CommonOptions } from \"./common/types.ts\";\nimport { CR, LF } from \"./constants.ts\";\n\n/**\n * Asserts that the provided value is a string and satisfies certain conditions.\n * @param value - The value to be checked.\n * @param name - The name of the option.\n * @throws {InvalidOptionError} If the value is empty, longer than 1 byte, or includes CR or LF.\n * @throws {TypeError} 
If the value is not a string.\n */\nfunction assertOptionValue(\n value: string,\n name: string,\n): asserts value is string {\n if (typeof value === \"string\") {\n switch (true) {\n case value.length === 0:\n throw new InvalidOptionError(`${name} must not be empty`);\n case value.length > 1:\n throw new InvalidOptionError(`${name} must be a single character`);\n case value === LF:\n case value === CR:\n throw new InvalidOptionError(`${name} must not include CR or LF`);\n default:\n break;\n }\n } else {\n throw new TypeError(`${name} must be a string`);\n }\n}\n\n/**\n * Asserts that the provided options object contains all the required properties.\n * Throws an error if any required property is missing\n * or if the delimiter and quotation length is not 1 byte character,\n * or if the delimiter is the same as the quotation.\n *\n * @example\n *\n * ```ts\n * assertCommonOptions({\n * quotation: '\"',\n * delimiter: ',',\n * });\n * ```\n *\n * @param options - The options object to be validated.\n * @throws {InvalidOptionError} If any required property is missing or if the delimiter is the same as the quotation.\n * @throws {TypeError} If any required property is not a string.\n */\nexport function assertCommonOptions(\n options: Required<CommonOptions>,\n): asserts options is Required<CommonOptions> {\n for (const name of [\"delimiter\", \"quotation\"] as const) {\n assertOptionValue(options[name], name);\n }\n if (options.delimiter === options.quotation) {\n throw new InvalidOptionError(\n \"delimiter must not be the same as quotation, use different characters\",\n );\n }\n}\n","/**\n * Escape a string for use in a regular expression.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions#escaping Regular expressions#Escaping | MDN}\n * @param v string to escape\n * @returns escaped string\n */\nexport function escapeRegExp(v: string) {\n return v.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n}\n","import { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { Field, FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n AbortSignalOptions,\n CommonOptions,\n Position,\n RecordDelimiterToken,\n Token,\n} from \"./common/types.ts\";\nimport { COMMA, CRLF, DOUBLE_QUOTE, LF } from \"./constants.ts\";\nimport { escapeRegExp } from \"./utils/escapeRegExp.ts\";\n\n/**\n * CSV Lexer.\n *\n * Lexter tokenizes CSV data into fields and records.\n */\nexport class Lexer {\n #delimiter: string;\n #quotation: string;\n #buffer = \"\";\n #flush = false;\n #matcher: RegExp;\n #fieldDelimiterLength: number;\n\n #cursor: Position = {\n line: 1,\n column: 1,\n offset: 0,\n };\n #rowNumber = 1;\n\n #signal?: AbortSignal;\n\n /**\n * Constructs a new Lexer instance.\n * @param options - The common options for the lexer.\n */\n constructor({\n delimiter = COMMA,\n quotation = DOUBLE_QUOTE,\n signal,\n }: CommonOptions & AbortSignalOptions = {}) {\n assertCommonOptions({ delimiter, quotation });\n this.#delimiter = delimiter;\n this.#quotation = quotation;\n this.#fieldDelimiterLength = delimiter.length;\n const d = escapeRegExp(delimiter);\n const q = escapeRegExp(quotation);\n this.#matcher = new RegExp(\n `^(?:(?!${q})(?!${d})(?![\\\\r\\\\n]))([\\\\S\\\\s\\\\uFEFF\\\\xA0]+?)(?=${q}|${d}|\\\\r|\\\\n|$)`,\n );\n if (signal) {\n this.#signal = signal;\n }\n }\n\n /**\n * Lexes the given chunk of CSV data.\n * @param chunk - The chunk of CSV data to be lexed.\n * 
@param buffering - Indicates whether the lexer is buffering or not.\n * @returns An iterable iterator of tokens.\n */\n public lex(chunk: string | null, buffering = false): IterableIterator<Token> {\n if (!buffering) {\n this.#flush = true;\n }\n if (typeof chunk === \"string\" && chunk.length !== 0) {\n this.#buffer += chunk;\n }\n\n return this.#tokens();\n }\n\n /**\n * Flushes the lexer and returns any remaining tokens.\n * @returns An array of tokens.\n */\n public flush(): Token[] {\n this.#flush = true;\n return [...this.#tokens()];\n }\n\n /**\n * Generates tokens from the buffered CSV data.\n * @yields Tokens from the buffered CSV data.\n */\n *#tokens(): Generator<Token> {\n if (this.#flush) {\n // Trim the last CRLF or LF\n if (this.#buffer.endsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(0, -2 /* -CRLF.length */);\n } else if (this.#buffer.endsWith(LF)) {\n this.#buffer = this.#buffer.slice(0, -1 /* -LF.length */);\n }\n }\n let token: Token | null;\n while ((token = this.#nextToken())) {\n yield token;\n }\n }\n\n /**\n * Retrieves the next token from the buffered CSV data.\n * @returns The next token or null if there are no more tokens.\n */\n #nextToken(): Token | null {\n this.#signal?.throwIfAborted();\n if (this.#buffer.length === 0) {\n return null;\n }\n // Buffer is Record Delimiter, defer to the next iteration.\n if (\n this.#flush === false &&\n (this.#buffer === CRLF || this.#buffer === LF)\n ) {\n return null;\n }\n\n // Check for CRLF\n if (this.#buffer.startsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(2);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 2; // CRLF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: CRLF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for LF\n if (this.#buffer.startsWith(LF)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 1; // LF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: LF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for Delimiter\n if (this.#buffer.startsWith(this.#delimiter)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += this.#fieldDelimiterLength;\n this.#cursor.offset += this.#fieldDelimiterLength;\n return {\n type: FieldDelimiter,\n value: this.#delimiter,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Check for Quoted String\n if (this.#buffer.startsWith(this.#quotation)) {\n /**\n * Extract Quoted field.\n *\n * The following code is equivalent to the following:\n *\n * If the next character is a quote:\n * - If the character after that is a quote, then append a quote to the value and skip two characters.\n * - Otherwise, return the quoted string.\n * Otherwise, append the character to the value and skip one character.\n *\n * ```plaintext\n * | `i` | `i + 1` | `i + 2` |\n * |------------|------------|----------|\n * | cur | next | | => Variable names\n * | #quotation | #quotation | | => Escaped quote\n * | #quotation | (EOF) | | => Closing quote\n * | #quotation | undefined | | => End of buffer\n * | undefined | | | => End of buffer\n * ```\n */\n let value = \"\";\n let offset 
= 1; // Skip the opening quote\n let column = 2; // Skip the opening quote\n let line = 0;\n\n // Define variables\n let cur: string = this.#buffer[offset];\n let next: string | undefined = this.#buffer[offset + 1];\n do {\n // If the current character is a quote, check the next characters for closing quotes.\n if (cur === this.#quotation) {\n // If the cur character is a quote and the next character is a quote,\n // then append a quote to the value and skip two characters.\n if (next === this.#quotation) {\n // Append a quote to the value and skip two characters.\n value += this.#quotation;\n offset += 2;\n cur = this.#buffer[offset];\n next = this.#buffer[offset + 1];\n\n // Update the diff\n column += 2;\n continue;\n }\n\n // If the cur character is a quote and the next character is undefined,\n // then return null.\n if (next === undefined && this.#flush === false) {\n return null;\n }\n\n // Otherwise, return the quoted string.\n // Update the buffer and return the token\n offset++;\n this.#buffer = this.#buffer.slice(offset);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += column;\n this.#cursor.offset += offset;\n this.#cursor.line += line;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Append the character to the value.\n value += cur;\n\n // Prepare for the next iteration\n if (cur === LF) {\n // If the current character is a LF,\n // then increment the line number and reset the column number.\n line++;\n column = 1;\n } else {\n // Otherwise, increment the column number and offset.\n column++;\n }\n\n offset++;\n cur = next;\n next = this.#buffer[offset + 1];\n } while (cur !== undefined);\n\n if (this.#flush) {\n throw new ParseError(\"Unexpected EOF while parsing quoted field.\", {\n position: { ...this.#cursor },\n });\n }\n return null;\n }\n\n // Check for Unquoted String\n const match = this.#matcher.exec(this.#buffer);\n if (match) {\n // If we're flushing and the match doesn't consume the entire buffer,\n // then return null\n if (this.#flush === false && match[0].length === this.#buffer.length) {\n return null;\n }\n const value = match[1];\n this.#buffer = this.#buffer.slice(value.length);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += value.length;\n this.#cursor.offset += value.length;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Otherwise, return null\n return null;\n }\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport type { CommonOptions, Token } from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeTo(new WritableStream({ write(tokens) {\n * for (const token of tokens) {\n * console.log(token);\n * }\n * }}));\n * // { type: Field, value: \"name\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: {...} }\n * // { type: Field, value: \"age\", location: {...} }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * // { type: Field, value: \"Alice\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: 
{...} }\n * // { type: Field, value: \"20\" }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * ```\n */\nexport class LexerTransformer extends TransformStream<string, Token[]> {\n public readonly lexer: Lexer;\n constructor(options: CommonOptions = {}) {\n super({\n transform: (chunk, controller) => {\n if (chunk.length !== 0) {\n try {\n controller.enqueue([...this.lexer.lex(chunk, true)]);\n } catch (error) {\n controller.error(error);\n }\n }\n },\n flush: (controller) => {\n try {\n controller.enqueue(this.lexer.flush());\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.lexer = new Lexer(options);\n }\n}\n","import { InvalidOptionError, ParseError } from \"./common/errors\";\n\n/**\n * Common error handling for parsing CSV data.\n *\n * @param error - The error to handle.\n * @throws {ParseError} When an error occurs while parsing the CSV data.\n * @throws {InvalidOptionError} When an invalid option is provided.\n */\n\nexport function commonParseErrorHandling(error: unknown): never {\n if (error instanceof ParseError || error instanceof InvalidOptionError) {\n throw error;\n }\n throw new ParseError(\"An error occurred while parsing the CSV data.\", {\n cause: error,\n });\n}\n","import { FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\nexport class RecordAssembler<Header extends ReadonlyArray<string>> {\n #fieldIndex = 0;\n #row: string[] = [];\n #header: Header | undefined;\n #dirty = false;\n #signal?: AbortSignal;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n if (options.header !== undefined && Array.isArray(options.header)) {\n this.#setHeader(options.header);\n }\n if (options.signal) {\n this.#signal = options.signal;\n }\n }\n\n public *assemble(\n tokens: Iterable<Token>,\n flush = true,\n ): IterableIterator<CSVRecord<Header>> {\n for (const token of tokens) {\n this.#signal?.throwIfAborted();\n switch (token.type) {\n case FieldDelimiter:\n this.#fieldIndex++;\n this.#dirty = true;\n break;\n case RecordDelimiter:\n if (this.#header === undefined) {\n this.#setHeader(this.#row as unknown as Header);\n } else {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header.map((header, index) => [\n header,\n this.#row.at(index),\n ]),\n ) as unknown as CSVRecord<Header>;\n } else {\n yield Object.fromEntries(\n this.#header.map((header) => [header, \"\"]),\n ) as CSVRecord<Header>;\n }\n }\n // Reset the row fields buffer.\n this.#fieldIndex = 0;\n this.#row = new Array(this.#header?.length).fill(\"\");\n this.#dirty = false;\n break;\n default:\n this.#dirty = true;\n this.#row[this.#fieldIndex] = token.value;\n break;\n }\n }\n\n if (flush) {\n yield* this.flush();\n }\n }\n\n public *flush(): Generator<CSVRecord<Header>> {\n if (this.#header !== undefined) {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header\n .filter((v) => v)\n .map((header, index) => [header, this.#row.at(index)]),\n ) as unknown as CSVRecord<Header>;\n }\n }\n }\n\n #setHeader(header: Header) {\n this.#header = header;\n if (this.#header.length === 0) {\n throw new ParseError(\"The header must not be empty.\");\n }\n if (new Set(this.#header).size !== this.#header.length) {\n throw new ParseError(\"The header must not contain duplicate fields.\");\n }\n }\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from 
\"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): CSVRecord<Header>[] {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return [...assembler.assemble(tokens)];\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import type { BinaryOptions } from \"../common/types.ts\";\n\n/**\n * Converts a binary string to a string.\n *\n * @param binary - The binary string to convert.\n * @param options - The options for parsing the binary string.\n * @returns The converted string.\n * @throws {RangeError} The given charset is not supported.\n * @throws {TypeError} The encoded data was not valid.\n */\nexport function convertBinaryToString(\n binary: Uint8Array | ArrayBuffer,\n options: BinaryOptions,\n): string {\n return new TextDecoder(options?.charset, {\n ignoreBOM: options?.ignoreBOM,\n fatal: options?.fatal,\n }).decode(binary instanceof ArrayBuffer ? new Uint8Array(binary) : binary);\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return assembler.assemble(tokens);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\n/**\n * Parses the given binary data into an iterable iterator of CSV records.\n *\n * @param binary - The binary data to parse.\n * @param options - The parse options.\n * @returns An iterable iterator of CSV records.\n * @throws {ParseError} When an error occurs while parsing the CSV data.\n */\nexport function parseBinaryToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): IterableIterator<CSVRecord<Header>> {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n return new ReadableStream({\n start(controller) {\n const tokens = lexer.lex(csv);\n for (const record of 
assembler.assemble(tokens)) {\n controller.enqueue(record);\n }\n controller.close();\n },\n });\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","export async function convertThisAsyncIterableIteratorToArray<\n O,\n T extends (...args: any[]) => AsyncGenerator<O>,\n>(this: T, ...args: Parameters<T>): Promise<O[]> {\n const rows: O[] = [];\n for await (const row of this(...args)) {\n rows.push(row);\n }\n return rows;\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseBinaryToArraySync } from \"./parseBinaryToArraySync.ts\";\nimport { parseBinaryToIterableIterator } from \"./parseBinaryToIterableIterator.ts\";\nimport { parseBinaryToStream } from \"./parseBinaryToStream.ts\";\nimport { convertIterableIteratorToAsync } from \"./utils/convertIterableIteratorToAsync.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse a binary from an {@link !Uint8Array}.\n *\n * @category Middle-level API\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Async iterable iterator of records.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * for await (const record of parseUint8Array(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseBinary<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const iterator = parseBinaryToIterableIterator(bytes, options);\n return convertIterableIteratorToAsync(iterator);\n}\n\nexport declare namespace parseBinary {\n /**\n * Parse a binary from an {@link !Uint8Array} to an array of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const records = await parseUint8Array.toArray(csv);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse a binary from an {@link !Uint8Array} to an array of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Array of records\n * @example\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const records = parseUint8Array.toArraySync(csv);\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n ): CSVRecord<Header>[];\n\n /**\n * Parse a binary from an {@link !Uint8Array} to an iterable iterator of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Async iterable iterator of records.\n * @example\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * for (const record of parseUint8Array.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array,\n options?: ParseBinaryOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n\n /**\n * Parse a 
binary from an {@link !Uint8Array} to a stream of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Stream of records.\n *\n * @example\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = parseUint8Array.toStream(csv);\n *\n * await stream.pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseBinary, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseBinaryToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseBinaryToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseBinaryToStream,\n },\n});\n","export function convertIterableIteratorToAsync<T>(\n iterator: IterableIterator<T>,\n): AsyncIterableIterator<T> {\n return {\n async next() {\n return iterator.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n}\n","import type { ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseMime } from \"./utils/parseMime.ts\";\n\n/**\n * Extracts the options from the response object.\n *\n * @param response - The response object from which to extract the options.\n * @param options - The options to merge with the extracted options.\n * @returns The options extracted from the response.\n * @throws {RangeError} - The content type is not supported.\n */\nexport function getOptionsFromResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options: ParseBinaryOptions<Header> = {},\n): ParseBinaryOptions<Header> {\n const { headers } = response;\n const contentType = headers.get(\"content-type\") ?? \"text/csv\";\n const mime = parseMime(contentType);\n if (mime.type !== \"text/csv\") {\n throw new RangeError(`Invalid mime type: \"${contentType}\"`);\n }\n const decomposition =\n (headers.get(\"content-encoding\") as CompressionFormat) ?? undefined;\n const charset = mime.parameters.charset ?? \"utf-8\";\n // TODO: Support header=present and header=absent\n // const header = mime.parameters.header ?? 
\"present\";\n return {\n decomposition,\n charset,\n ...options,\n };\n}\n","export interface ParseMimeResult {\n type: string;\n parameters: {\n [key: string]: string;\n };\n}\n\nexport function parseMime(contentType: string) {\n const [type, ...parameters] = contentType.split(\";\");\n const result: ParseMimeResult = {\n type: type.trim(),\n parameters: {},\n };\n for (const paramator of parameters) {\n const [key, value] = paramator.split(\"=\");\n result.parameters[key.trim()] = value.trim();\n }\n return result;\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\nexport function parseBinaryToArraySync<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): CSVRecord<Header>[] {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToArraySync(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\nexport function parseBinaryToStream<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): ReadableStream<CSVRecord<Header>> {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToStream(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n * @template Header The type of the header row.\n * @param options The options for the parser.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer())\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n *\n * @example Parse a CSV with headers by options\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer({ header: [\"name\", \"age\"] }))\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n */\nexport class RecordAssemblerTransformer<\n Header extends ReadonlyArray<string>,\n> extends 
TransformStream<Token[], CSVRecord<Header>> {\n public readonly assembler: RecordAssembler<Header>;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n super({\n transform: (tokens, controller) => {\n try {\n for (const token of this.assembler.assemble(tokens, false)) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n flush: (controller) => {\n try {\n for (const token of this.assembler.flush()) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.assembler = new RecordAssembler(options);\n }\n}\n","export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n )\n .catch((error) => controller.error(error)))();\n },\n });\n}\n","import { LexerTransformer } from \"./LexerTransformer.ts\";\nimport { RecordAssemblerTransformer } from \"./RecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseUint8ArrayStreamToStream<Header extends readonly string[]>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const { charset, fatal, ignoreBOM, decomposition } = options ?? {};\n return decomposition\n ? 
pipeline(\n stream,\n new DecompressionStream(decomposition),\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n )\n : pipeline(\n stream,\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n );\n}\n","export async function* convertStreamToAsyncIterableIterator<T>(\n stream: ReadableStream<T>,\n): AsyncIterableIterator<T> {\n const reader = stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n yield value;\n }\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV to records.\n * This function is for parsing a binary stream.\n *\n * @category Middle-level API\n * @remarks\n * If you want to parse a string, use {@link parseStringStream}.\n * @param stream CSV string to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseUint8ArrayStream.toArray} function.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseUint8ArrayStream(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseUint8ArrayStreamToStream(stream, options);\n return convertStreamToAsyncIterableIterator(recordStream);\n}\n\nexport declare namespace parseUint8ArrayStream {\n /**\n * Parse CSV binary to array of records,\n * ideal for smaller data sets.\n *\n * @returns Array of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseUint8ArrayStream.toArray(stream);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV binary to array of records.\n *\n * @returns Stream of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseUint8ArrayStream.toStream(stream)\n * .pipeTo(new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends 
ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n}\nObject.defineProperties(parseUint8ArrayStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseUint8ArrayStreamToStream,\n },\n});\n","import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseResponseToStream } from \"./parseResponseToStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse HTTP Response what contains CSV to records,\n * ideal for smaller data sets.\n *\n * @remarks\n * This function automatically treats response headers.\n *\n * - If `Content-Type` header is not set, it assumes `text/csv`.\n * - If `Content-Type` header is not `text/csv`, it throws an error.\n * - If `Content-Type` header has charset parameter, it uses it for decoding.\n * - If `Content-Encoding` header is set, it decompresses the response.\n * - Should there be any conflicting information between the header and the options, the option's value will take precedence.\n *\n * @category Middle-level API\n * @param response\n * @param options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseResponse.toArray} function.\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parseResponse(response)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new RangeError(\"Response body is null\");\n }\n return parseUint8ArrayStream(response.body, options_);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n\nexport declare namespace parseResponse {\n /**\n * Parse CSV Response to array of records.\n *\n * @returns Array of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parseResponse.toArray(response);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV Response to stream of records.\n *\n * @param response Response to parse\n * @returns Stream of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * await parseResponse.toStream(response)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends 
ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n}\n\nObject.defineProperties(parseResponse, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStreamSync: {\n enumerable: true,\n writable: false,\n value: parseResponseToStream,\n },\n});\n","import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n yield* parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream 
of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n","import { LexerTransformer } from \"./LexerTransformer.ts\";\nimport { RecordAssemblerTransformer } from \"./RecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseStringStreamToStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n return pipeline(\n stream,\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n );\n}\n","import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { parseStringStreamToStream } from \"./parseStringStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV string stream to records.\n *\n * @category Middle-level API\n * @param stream CSV string stream to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseStringStream.toArray} function.\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseStringStream(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parseStringStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseStringStreamToStream(stream, options);\n return convertStreamToAsyncIterableIterator(recordStream);\n}\n\nexport declare namespace parseStringStream {\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseStringStream.toArray(stream);\n * console.log(records);\n * // Prints:\n * // [ { 
name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseStringStream.toStream(stream)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseStringStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringStreamToStream,\n },\n});\n","import type {\n CSV,\n CSVBinary,\n CSVRecord,\n CSVString,\n ParseBinaryOptions,\n ParseOptions,\n} from \"./common/types.ts\";\nimport { parseBinary } from \"./parseBinary.ts\";\nimport { parseResponse } from \"./parseResponse.ts\";\nimport { parseString } from \"./parseString.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV to records.\n *\n * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.\n *\n *\n * @typeParam Header Header type like `['name', 'age']`.\n *\n * @param csv CSV string to parse.\n * @param options Parsing options for CSV string parsing.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parse.toArray} function.\n * @category High-level API\n *\n * @remarks\n * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},\n * {@link parseStringStream} and {@link parseResponse} are used internally.\n *\n * If you known the type of the CSV, it performs better to use them directly.\n *\n * | If you want to parse a... | Use... | Options... 
|\n * | -------------------------------------------- | ----------------------------- | -------------------------- |\n * | {@link !String} | {@link parseString} | {@link ParseOptions} |\n * | {@link !ReadableStream}<{@link !String}> | {@link parseStringStream} | {@link ParseOptions} |\n * | {@link !Uint8Array} \\| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |\n * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |\n * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files from streams\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * }\n * });\n *\n * for await (const record of parse(stream)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n *\n * @example Parsing CSV files with headers\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV has no header.\n * const csv = `Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv, { header: ['name', 'age'] })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files with different delimiters characters\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name\\tage\n * Alice\\t42\n * Bob\\t69`;\n *\n * for await (const record of parse(csv, { delimiter: '\\t' })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parse<Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\n/**\n * Parse CSV binary to records.\n *\n * @param csv CSV binary to parse.\n * @param options Parsing options for CSV binary parsing.\n *\n * @example Parsing CSV files from responses\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is not gzipped and encoded in utf-8.\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parse(response)) {\n * // ...\n * }\n * ```\n *\n * @example Parsing CSV files with options spcialized for binary\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is gzipped and encoded in shift-jis and has BOM.\n * const response = await fetch('https://example.com/data.csv.gz');\n *\n * for await (const record of parse(response, {\n * charset: 'shift-jis',\n * ignoreBOM: true,\n * decomposition: 'gzip',\n * })) {\n * // ...\n * }\n * ```\n */\nexport function parse<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport async function* parse<Header extends ReadonlyArray<string>>(\n csv: CSV,\n options?: ParseBinaryOptions<Header>,\n): 
AsyncIterableIterator<CSVRecord<Header>> {\n if (typeof csv === \"string\") {\n yield* parseString(csv, options);\n } else if (csv instanceof Uint8Array || csv instanceof ArrayBuffer) {\n yield* parseBinary(csv, options);\n } else if (csv instanceof ReadableStream) {\n const [branch1, branch2] = csv.tee();\n const reader1 = branch1.getReader();\n const { value: firstChunk } = await reader1.read();\n reader1.releaseLock();\n if (typeof firstChunk === \"string\") {\n yield* parseStringStream(branch2 as ReadableStream<string>, options);\n } else if (firstChunk instanceof Uint8Array) {\n yield* parseUint8ArrayStream(\n branch2 as ReadableStream<Uint8Array>,\n options,\n );\n }\n } else if (csv instanceof Response) {\n yield* parseResponse(csv, options);\n }\n}\n\nexport declare namespace parse {\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parse.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parse.toArray(response);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n}\n\nObject.defineProperties(parse, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n});\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\n\nexport function parseResponseToStream<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new RangeError(\"Response body is null\");\n }\n return parseUint8ArrayStreamToStream(response.body, options_);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import init, { type InitInput } from \"web-csv-toolbox-wasm\";\n\nimport dataURL from \"web-csv-toolbox-wasm/web_csv_toolbox_wasm_bg.wasm\";\n\n/**\n * Load WASM module.\n *\n * This must be called before calling WebAssembly functions.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n * const parsed = parseStringWASM(csv);\n * ```\n */\nexport async function loadWASM(input?: InitInput | Promise<InitInput>) {\n await init(input ?? 
dataURL);\n}\n","import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { InvalidOptionError } from \"./common/errors.ts\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport { COMMA, DOUBLE_QUOTE } from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n */\nexport function parseStringToArraySyncWASM<Header extends readonly string[]>(\n csv: string,\n options: CommonOptions = {},\n): CSVRecord<Header>[] {\n const { delimiter = COMMA, quotation = DOUBLE_QUOTE } = options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new InvalidOptionError(\n \"Invalid delimiter, must be a single character on WASM.\",\n );\n }\n if (quotation !== DOUBLE_QUOTE) {\n throw new InvalidOptionError(\n \"Invalid quotation, must be double quote on WASM.\",\n );\n }\n assertCommonOptions({ delimiter, quotation });\n const demiliterCode = delimiter.charCodeAt(0);\n return JSON.parse(parseStringToArraySync(csv, 
demiliterCode));\n}\n"],"names":["FieldDelimiter","Symbol","for","RecordDelimiter","Field","InvalidOptionError","Error","constructor","message","options","super","this","name","ParseError","SyntaxError","position","cause","CR","CRLF","LF","COMMA","DOUBLE_QUOTE","assertOptionValue","value","TypeError","length","assertCommonOptions","delimiter","quotation","escapeRegExp","v","replace","Lexer","buffer","flush","matcher","fieldDelimiterLength","cursor","line","column","offset","rowNumber","signal","d","q","RegExp","lex","chunk","buffering","tokens","token","endsWith","slice","nextToken","throwIfAborted","startsWith","start","type","location","end","cur","next","match","exec","LexerTransformer","TransformStream","lexer","transform","controller","enqueue","error","commonParseErrorHandling","RecordAssembler","fieldIndex","row","header","dirty","Array","isArray","setHeader","assemble","Object","fromEntries","map","index","at","fill","filter","Set","size","parseStringToArraySync","csv","assembler","convertBinaryToString","binary","TextDecoder","charset","ignoreBOM","fatal","decode","ArrayBuffer","Uint8Array","parseStringToIterableIterator","parseBinaryToIterableIterator","parseStringToStream","ReadableStream","record","close","async","convertThisAsyncIterableIteratorToArray","args","rows","push","parseBinary","bytes","iterator","asyncIterator","convertIterableIteratorToAsync","getOptionsFromResponse","response","headers","contentType","get","mime","parameters","split","result","trim","paramator","key","parseMime","RangeError","decomposition","defineProperties","toArray","enumerable","writable","internal.convertThisAsyncIterableIteratorToArray","toArraySync","toIterableIterator","toStream","RecordAssemblerTransformer","pipeline","stream","transformers","reduce","transformer","pipeThrough","pipeTo","WritableStream","write","catch","parseUint8ArrayStreamToStream","DecompressionStream","TextDecoderStream","convertStreamToAsyncIterableIterator","reader","getReader","done","read","parseUint8ArrayStream","parseResponse","options_","body","parseString","parseStringStreamToStream","parseStringStream","parse","branch1","branch2","tee","reader1","firstChunk","releaseLock","Response","toStreamSync","input","init","demiliterCode","charCodeAt","JSON"],"mappings":"2OAIa,MAAAA,EAAiBC,OAAOC,IAAI,kCAK5BC,EAAkBF,OAAOC,IAAI,mCAK7BE,EAAQH,OAAOC,IAAI,yBCTzB,MAAMG,UAA2BC,MACtC,WAAAC,CAAYC,EAAkBC,GAC5BC,MAAMF,EAASC,GACfE,KAAKC,KAAO,oBACd,EAuBK,MAAMC,UAAmBC,YAIvBC,SAEP,WAAAR,CAAYC,EAAkBC,GAC5BC,MAAMF,EAAS,CAAEQ,MAAOP,GAASO,QACjCL,KAAKC,KAAO,aACZD,KAAKI,SAAWN,GAASM,QAC3B,EC1CK,MAAME,EAAK,KACLC,EAAO,OACPC,EAAK,KAKLC,EAAQ,IAKRC,EAAe,ICD5B,SAASC,EACPC,EACAX,GAEI,GAAiB,iBAAVW,EAaT,MAAM,IAAIC,UAAU,GAAGZ,sBAZvB,QAAQ,GACN,KAAsB,IAAjBW,EAAME,OACT,MAAM,IAAIpB,EAAmB,GAAGO,uBAClC,KAAKW,EAAME,OAAS,EAClB,MAAM,IAAIpB,EAAmB,GAAGO,gCAClC,KAAKW,IAAUJ,EACf,KAAKI,IAAUN,EACb,MAAM,IAAIZ,EAAmB,GAAGO,+BAOxC,CAqBO,SAASc,EACdjB,GAEA,IAAA,MAAWG,IAAQ,CAAC,YAAa,aACbU,EAAAb,EAAQG,GAAOA,GAE/B,GAAAH,EAAQkB,YAAclB,EAAQmB,UAChC,MAAM,IAAIvB,EACR,wEAGN,CCvDO,SAASwB,EAAaC,GACpB,OAAAA,EAAEC,QAAQ,sBAAuB,OAC1C,CCSO,MAAMC,EACXL,GACAC,GACAK,GAAU,GACVC,IAAS,EACTC,GACAC,GAEAC,GAAoB,CAClBC,KAAM,EACNC,OAAQ,EACRC,OAAQ,GAEVC,GAAa,EAEbC,GAMA,WAAAnC,EAAYoB,UACVA,EAAYP,EAAAQ,UACZA,EAAYP,EAAAqB,OACZA,GACsC,IAClBhB,EAAA,CAAEC,YAAWC,cACjCjB,MAAKgB,EAAaA,EAClBhB,MAAKiB,EAAaA,EAClBjB,MAAKyB,EAAwBT,EAAUF,OACjC,MAAAkB,EAAId,EAAaF,GACjBiB,EAAIf,EAAaD,GACvBjB,MAAKwB,EAAW,IAAIU,OAClB,UAAUD,QAAQD,6CAA6CC,KAAKD,gBAElED,IACF/B,MAAK+B,EAAUA,EAEnB,CAQO,GAAAI,CAAIC,EAAsBC,GAAY,GAQ3C,OAPKA,
IACHrC,MAAKuB,GAAS,GAEK,iBAAVa,GAAuC,IAAjBA,EAAMtB,SACrCd,MAAKsB,GAAWc,GAGXpC,MAAKsC,GACd,CAMO,KAAAf,GAEL,OADAvB,MAAKuB,GAAS,EACP,IAAIvB,MAAKsC,IAClB,CAMA,GAACA,GASK,IAAAC,EACI,IATJvC,MAAKuB,IAEHvB,MAAKsB,EAAQkB,SAASjC,GACnBP,MAAAsB,EAAUtB,MAAKsB,EAAQmB,MAAM,GAAG,GAC5BzC,MAAKsB,EAAQkB,SAAShC,KAC1BR,MAAAsB,EAAUtB,MAAKsB,EAAQmB,MAAM,GAAG,KAIjCF,EAAQvC,MAAK0C,WACbH,CAEV,CAMA,EAAAG,GAEM,GADJ1C,MAAK+B,GAASY,iBACc,IAAxB3C,MAAKsB,EAAQR,OACR,OAAA,KAIP,IAAgB,IAAhBd,MAAKuB,IACJvB,MAAKsB,IAAYf,GAAQP,MAAKsB,IAAYd,GAEpC,OAAA,KAIT,GAAIR,MAAKsB,EAAQsB,WAAWrC,GAAO,CACjCP,MAAKsB,EAAUtB,MAAKsB,EAAQmB,MAAM,GAClC,MAAMI,EAAkB,IAAK7C,MAAK0B,GAClC1B,MAAK0B,EAAQC,OACb3B,MAAK0B,EAAQE,OAAS,EACtB5B,MAAK0B,EAAQG,QAAU,EAUhB,MAT6B,CAClCiB,KAAMtD,EACNoB,MAAOL,EACPwC,SAAU,CACRF,QACAG,IAAK,IAAKhD,MAAK0B,GACfI,UAAW9B,MAAK8B,KAItB,CAGA,GAAI9B,MAAKsB,EAAQsB,WAAWpC,GAAK,CAC/BR,MAAKsB,EAAUtB,MAAKsB,EAAQmB,MAAM,GAClC,MAAMI,EAAkB,IAAK7C,MAAK0B,GAClC1B,MAAK0B,EAAQC,OACb3B,MAAK0B,EAAQE,OAAS,EACtB5B,MAAK0B,EAAQG,QAAU,EAUhB,MAT6B,CAClCiB,KAAMtD,EACNoB,MAAOJ,EACPuC,SAAU,CACRF,QACAG,IAAK,IAAKhD,MAAK0B,GACfI,UAAW9B,MAAK8B,KAItB,CAGA,GAAI9B,MAAKsB,EAAQsB,WAAW5C,MAAKgB,GAAa,CAC5ChB,MAAKsB,EAAUtB,MAAKsB,EAAQmB,MAAM,GAClC,MAAMI,EAAkB,IAAK7C,MAAK0B,GAG3B,OAFF1B,MAAA0B,EAAQE,QAAU5B,MAAKyB,EACvBzB,MAAA0B,EAAQG,QAAU7B,MAAKyB,EACrB,CACLqB,KAAMzD,EACNuB,MAAOZ,MAAKgB,EACZ+B,SAAU,CACRF,QACAG,IAAK,IAAKhD,MAAK0B,GACfI,UAAW9B,MAAK8B,GAGtB,CAGA,GAAI9B,MAAKsB,EAAQsB,WAAW5C,MAAKiB,GAAa,CAqB5C,IAAIL,EAAQ,GACRiB,EAAS,EACTD,EAAS,EACTD,EAAO,EAGPsB,EAAcjD,MAAKsB,EAAQO,GAC3BqB,EAA2BlD,MAAKsB,EAAQO,EAAS,GAClD,EAAA,CAEG,GAAAoB,IAAQjD,MAAKiB,EAAY,CAGvB,GAAAiC,IAASlD,MAAKiB,EAAY,CAE5BL,GAASZ,MAAKiB,EACJY,GAAA,EACJoB,EAAAjD,MAAKsB,EAAQO,GACZqB,EAAAlD,MAAKsB,EAAQO,EAAS,GAGnBD,GAAA,EACV,QACF,CAIA,QAAa,IAATsB,IAAsC,IAAhBlD,MAAKuB,EACtB,OAAA,KAKTM,IACA7B,MAAKsB,EAAUtB,MAAKsB,EAAQmB,MAAMZ,GAClC,MAAMgB,EAAkB,IAAK7C,MAAK0B,GAI3B,OAHP1B,MAAK0B,EAAQE,QAAUA,EACvB5B,MAAK0B,EAAQG,QAAUA,EACvB7B,MAAK0B,EAAQC,MAAQA,EACd,CACLmB,KAAMrD,EACNmB,QACAmC,SAAU,CACRF,QACAG,IAAK,IAAKhD,MAAK0B,GACfI,UAAW9B,MAAK8B,GAGtB,CAGSlB,GAAAqC,EAGLA,IAAQzC,GAGVmB,IACSC,EAAA,GAGTA,IAGFC,IACMoB,EAAAC,EACCA,EAAAlD,MAAKsB,EAAQO,EAAS,EAAC,YACf,IAARoB,GAET,GAAIjD,MAAKuB,EACD,MAAA,IAAIrB,EAAW,6CAA8C,CACjEE,SAAU,IAAKJ,MAAK0B,KAGjB,OAAA,IACT,CAGA,MAAMyB,EAAQnD,MAAKwB,EAAS4B,KAAKpD,MAAKsB,GACtC,GAAI6B,EAAO,CAGL,IAAgB,IAAhBnD,MAAKuB,GAAoB4B,EAAM,GAAGrC,SAAWd,MAAKsB,EAAQR,OACrD,OAAA,KAEH,MAAAF,EAAQuC,EAAM,GACpBnD,MAAKsB,EAAUtB,MAAKsB,EAAQmB,MAAM7B,EAAME,QACxC,MAAM+B,EAAkB,IAAK7C,MAAK0B,GAG3B,OAFF1B,MAAA0B,EAAQE,QAAUhB,EAAME,OACxBd,MAAA0B,EAAQG,QAAUjB,EAAME,OACtB,CACLgC,KAAMrD,EACNmB,QACAmC,SAAU,CACRF,QACAG,IAAK,IAAKhD,MAAK0B,GACfI,UAAW9B,MAAK8B,GAGtB,CAGO,OAAA,IACT,EC3QK,MAAMuB,UAAyBC,gBACpBC,MAChB,WAAA3D,CAAYE,EAAyB,IAC7BC,MAAA,CACJyD,UAAW,CAACpB,EAAOqB,KACb,GAAiB,IAAjBrB,EAAMtB,OACJ,IACS2C,EAAAC,QAAQ,IAAI1D,KAAKuD,MAAMpB,IAAIC,GAAO,WACtCuB,GACPF,EAAWE,MAAMA,EACnB,CACF,EAEFpC,MAAQkC,IACF,IACFA,EAAWC,QAAQ1D,KAAKuD,MAAMhC,eACvBoC,GACPF,EAAWE,MAAMA,EACnB,KAGC3D,KAAAuD,MAAQ,IAAIlC,EAAMvB,EACzB,y6EC7CK,SAAS8D,EAAyBD,GACnC,GAAAA,aAAiBzD,GAAcyD,aAAiBjE,EAC5C,MAAAiE,EAEF,MAAA,IAAIzD,EAAW,gDAAiD,CACpEG,MAAOsD,GAEX,CCTO,MAAME,EACXC,GAAc,EACdC,GAAiB,GACjBC,GACAC,IAAS,EACTlC,GAEA,WAAAnC,CAAYE,EAA0C,SAC7B,IAAnBA,EAAQkE,QAAwBE,MAAMC,QAAQrE,EAAQkE,SACnDhE,MAAAoE,EAAWtE,EAAQkE,QAEtBlE,EAAQiC,SACV/B,MAAK+B,EAAUjC,EAAQiC,OAE3B,CAEA,SAAQsC,CACN/B,EACAf,GAAQ,GAER,IAAA,MAAWgB,KAASD,EAElB,OADAtC,MAAK+B,GAASY,iBACNJ,EAAMO,MACZ,KAAKzD,EACEW,MAAA8D,IACL9D,MAAKiE,GAAS,EACd,MACF,KAAKzE,OACkB,IAAjBQ,MAAKgE,EACFhE,MAAAoE,EAAWpE,MAAK+D,GAEjB
/D,MAAKiE,QACDK,OAAOC,YACXvE,MAAKgE,EAAQQ,KAAI,CAACR,EAAQS,IAAU,CAClCT,EACAhE,MAAK+D,EAAKW,GAAGD,aAIXH,OAAOC,YACXvE,MAAKgE,EAAQQ,KAAKR,GAAW,CAACA,EAAQ,OAK5ChE,MAAK8D,EAAc,EACd9D,MAAA+D,EAAO,IAAIG,MAAMlE,MAAKgE,GAASlD,QAAQ6D,KAAK,IACjD3E,MAAKiE,GAAS,EACd,MACF,QACEjE,MAAKiE,GAAS,EACdjE,MAAK+D,EAAK/D,MAAK8D,GAAevB,EAAM3B,MAKtCW,UACKvB,KAAKuB,QAEhB,CAEA,MAAQA,QACe,IAAjBvB,MAAKgE,GACHhE,MAAKiE,UACDK,OAAOC,YACXvE,MAAKgE,EACFY,QAAQzD,GAAMA,IACdqD,KAAI,CAACR,EAAQS,IAAU,CAACT,EAAQhE,MAAK+D,EAAKW,GAAGD,OAIxD,CAEA,EAAAL,CAAWJ,GAEL,GADJhE,MAAKgE,EAAUA,EACa,IAAxBhE,MAAKgE,EAAQlD,OACT,MAAA,IAAIZ,EAAW,iCAEnB,GAAA,IAAI2E,IAAI7E,MAAKgE,GAASc,OAAS9E,MAAKgE,EAAQlD,OACxC,MAAA,IAAIZ,EAAW,gDAEzB,ECpFc,SAAA6E,EACdC,EACAlF,GAEI,IACI,MAAAyD,EAAQ,IAAIlC,EAAMvB,GAClBmF,EAAY,IAAIpB,EAAgB/D,GAChCwC,EAASiB,EAAMpB,IAAI6C,GACzB,MAAO,IAAIC,EAAUZ,SAAS/B,UACvBqB,GACPC,EAAyBD,EAC3B,CACF,CCNgB,SAAAuB,EACdC,EACArF,GAEO,OAAA,IAAIsF,YAAYtF,GAASuF,QAAS,CACvCC,UAAWxF,GAASwF,UACpBC,MAAOzF,GAASyF,QACfC,OAAOL,aAAkBM,YAAc,IAAIC,WAAWP,GAAUA,EACrE,CCdgB,SAAAQ,EAGdX,EACAlF,GAEI,IACI,MAAAyD,EAAQ,IAAIlC,EAAMvB,GAClBmF,EAAY,IAAIpB,EAAgB/D,GAChCwC,EAASiB,EAAMpB,IAAI6C,GAClB,OAAAC,EAAUZ,SAAS/B,SACnBqB,GACPC,EAAyBD,EAC3B,CACF,CCNO,SAASiC,EAGdT,EACArF,EAAsC,IAElC,IAEK,OAAA6F,EADKT,EAAsBC,EAAQrF,GACAA,SACnC6D,GACPC,EAAyBD,EAC3B,CACF,CCpBgB,SAAAkC,EACdb,EACAlF,GAEI,IACI,MAAAyD,EAAQ,IAAIlC,EAAMvB,GAClBmF,EAAY,IAAIpB,EAAgB/D,GACtC,OAAO,IAAIgG,eAAe,CACxB,KAAAjD,CAAMY,GACE,MAAAnB,EAASiB,EAAMpB,IAAI6C,GACzB,IAAA,MAAWe,KAAUd,EAAUZ,SAAS/B,GACtCmB,EAAWC,QAAQqC,GAErBtC,EAAWuC,OACb,UAEKrC,GACPC,EAAyBD,EAC3B,CACF,CCxBAsC,eAAsBC,KAGRC,GACZ,MAAMC,EAAY,GAClB,UAAA,MAAiBrC,KAAO/D,QAAQmG,GAC9BC,EAAKC,KAAKtC,GAEL,OAAAqC,CACT,CCqBgB,SAAAE,EACdC,EACAzG,GAGA,OCnCK,SACL0G,GAEO,MAAA,CACLP,KAAa,SACJO,EAAStD,OAElB,CAAC5D,OAAOmH,iBACC,OAAAzG,IACT,EAEJ,CDwBS0G,CADUd,EAA8BW,EAAOzG,GAExD,CEzBO,SAAS6G,EACdC,EACA9G,EAAsC,IAEhC,MAAA+G,QAAEA,GAAYD,EACdE,EAAcD,EAAQE,IAAI,iBAAmB,WAC7CC,ECVD,SAAmBF,GACxB,MAAOhE,KAASmE,GAAcH,EAAYI,MAAM,KAC1CC,EAA0B,CAC9BrE,KAAMA,EAAKsE,OACXH,WAAY,CAAC,GAEf,IAAA,MAAWI,KAAaJ,EAAY,CAClC,MAAOK,EAAK1G,GAASyG,EAAUH,MAAM,KACrCC,EAAOF,WAAWK,EAAIF,QAAUxG,EAAMwG,MACxC,CACO,OAAAD,CACT,CDDeI,CAAUT,GACnB,GAAc,aAAdE,EAAKlE,KACP,MAAM,IAAI0E,WAAW,uBAAuBV,MAOvC,MAAA,CACLW,cALCZ,EAAQE,IAAI,0BAA6C,EAM1D1B,QALc2B,EAAKC,WAAW5B,SAAW,WAMtCvF,EAEP,CF8GAwE,OAAOoD,iBAAiBpB,EAAa,CACnCqB,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETC,YAAa,CACXH,YAAY,EACZC,UAAU,EACVjH,MIjJG,SACLuE,EACArF,EAAsC,IAElC,IAEK,OAAAiF,EADKG,EAAsBC,EAAQrF,GACPA,SAC5B6D,GACPC,EAAyBD,EAC3B,CACF,GJyIEqE,mBAAoB,CAClBJ,YAAY,EACZC,UAAU,EACVjH,MAAOgF,GAETqC,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MK3JG,SACLuE,EACArF,EAAsC,IAElC,IAEK,OAAA+F,EADKX,EAAsBC,EAAQrF,GACVA,SACzB6D,GACPC,EAAyBD,EAC3B,CACF,KCmCO,MAAMuE,UAEH5E,gBACQ2B,UAEhB,WAAArF,CAAYE,EAA0C,IAC9CC,MAAA,CACJyD,UAAW,CAAClB,EAAQmB,KACd,IACF,IAAA,MAAWlB,KAASvC,KAAKiF,UAAUZ,SAAS/B,GAAQ,GAClDmB,EAAWC,QAAQnB,SAEdoB,GACPF,EAAWE,MAAMA,EACnB,GAEFpC,MAAQkC,IACF,IACF,IAAA,MAAWlB,KAASvC,KAAKiF,UAAU1D,QACjCkC,EAAWC,QAAQnB,SAEdoB,GACPF,EAAWE,MAAMA,EACnB,KAGC3D,KAAAiF,UAAY,IAAIpB,EAAgB/D,EACvC,EC3Dc,SAAAqI,EACdC,KACGC,GAEH,OAAO,IAAIvC,eAAe,CACxBjD,MAAQY,IAEJ4E,EACGC,QACC,CAACF,EAAQG,IAAgBH,EAAOI,YAAYD,IAC5CH,GAEDK,OACC,IAAIC,eAAe,CACjBC,MAAQxH,GAAMsC,EAAWC,QAAQvC,GACjC6E,MAAO,IAAMvC,EAAWuC,WAG3B4C,OAAOjF,GAAUF,EAAWE,MAAMA,IAAS,GAGtD,CClCgB,SAAAkF,EACdT,EACAtI,GAEA,MAAMuF,QAAEA,EAASE,MAAAA,EAAAD,UAAOA,gBAAWmC,GAAkB3H,GAAW,GAChE,OAAO2H,EACHU,EACEC,EACA,IAAIU,oBAAoBrB,GACxB,IAAIsB,kBAAkB1D,EAAS,CAAEE,QAAOD,cACxC,IAAIjC,EAAiBvD,GACrB,IAAIoI,EAA2BpI,IAEjCqI,EACEC,EACA,IAAIW,kBAA
kB1D,EAAS,CAAEE,QAAOD,cACxC,IAAIjC,EAAiBvD,GACrB,IAAIoI,EAA2BpI,GAEvC,CCxBAmG,eAAuB+C,EACrBZ,GAEM,MAAAa,EAASb,EAAOc,YACtB,OAAa,CACX,MAAMC,KAAEA,EAAMvI,MAAAA,SAAgBqI,EAAOG,OACrC,GAAID,EAAM,YACJvI,CACR,CACF,CC+BgB,SAAAyI,EACdjB,EACAtI,GAGA,OAAOkJ,EADcH,EAA8BT,EAAQtI,GAE7D,CCPgB,SAAAwJ,EACd1C,EACA9G,GAEI,IACI,MAAAyJ,EAAW5C,EAAuBC,EAAU9G,GAC9C,GAAkB,OAAlB8G,EAAS4C,KACL,MAAA,IAAIhC,WAAW,yBAEhB,OAAA6B,EAAsBzC,EAAS4C,KAAMD,SACrC5F,GACPC,EAAyBD,EAC3B,CACF,CCnBuBsC,eAAAwD,EACrBzE,EACAlF,GAEI,UACK6F,EAA8BX,EAAKlF,SACnC6D,GACPC,EAAyBD,EAC3B,CACF,CCrCgB,SAAA+F,EACdtB,EACAtI,GAEO,OAAAqI,EACLC,EACA,IAAI/E,EAAiBvD,GACrB,IAAIoI,EAA2BpI,GAEnC,CCyBgB,SAAA6J,EACdvB,EACAtI,GAGA,OAAOkJ,EADcU,EAA0BtB,EAAQtI,GAEzD,CCuHuBmG,eAAA2D,EACrB5E,EACAlF,GAEI,GAAe,iBAARkF,QACFyE,EAAYzE,EAAKlF,QACf,GAAAkF,aAAeU,YAAcV,aAAeS,kBAC9Ca,EAAYtB,EAAKlF,QAC1B,GAAWkF,aAAec,eAAgB,CACxC,MAAO+D,EAASC,GAAW9E,EAAI+E,MACzBC,EAAUH,EAAQX,aAChBtI,MAAOqJ,SAAqBD,EAAQZ,OAC5CY,EAAQE,cACkB,iBAAfD,QACFN,EAAkBG,EAAmChK,GACnDmK,aAAsBvE,mBACxB2D,EACLS,EACAhK,GAEJ,MACSkF,aAAemF,iBACjBb,EAActE,EAAKlF,GAE9B,CL5EAwE,OAAOoD,iBAAiB2B,EAAuB,CAC7C1B,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETG,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MAAOiI,KCdXvE,OAAOoD,iBAAiB4B,EAAe,CACrC3B,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETsC,aAAc,CACZxC,YAAY,EACZC,UAAU,EACVjH,MK/GY,SACdgG,EACA9G,GAEI,IACI,MAAAyJ,EAAW5C,EAAuBC,EAAU9G,GAC9C,GAAkB,OAAlB8G,EAAS4C,KACL,MAAA,IAAIhC,WAAW,yBAEhB,OAAAqB,EAA8BjC,EAAS4C,KAAMD,SAC7C5F,GACPC,EAAyBD,EAC3B,CACF,KJiIAW,OAAOoD,iBAAiB+B,EAAa,CACnC9B,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETC,YAAa,CACXH,YAAY,EACZC,UAAU,EACVjH,MAAOmE,GAETiD,mBAAoB,CAClBJ,YAAY,EACZC,UAAU,EACVjH,MAAO+E,GAETsC,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MAAOiF,KElDXvB,OAAOoD,iBAAiBiC,EAAmB,CACzChC,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETG,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MAAO8I,KC8GXpF,OAAOoD,iBAAiBkC,EAAO,CAC7BjC,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,2JE3NX7B,eAA+BoE,SACvBC,EAAKD,+mzGACb,iHCoBO,SACLrF,EACAlF,EAAyB,IAEzB,MAAMkB,UAAEA,EAAYP,EAAOQ,UAAAA,EAAYP,GAAiBZ,EACxD,GAAyB,iBAAdkB,GAA+C,IAArBA,EAAUF,OAC7C,MAAM,IAAIpB,EACR,0DAGJ,GAAIuB,IAAcP,EAChB,MAAM,IAAIhB,EACR,oDAGgBqB,EAAA,CAAEC,YAAWC,cAC3B,MAAAsJ,EAAgBvJ,EAAUwJ,WAAW,GAC3C,OAAOC,KAAKb,MAAM7E,EAAuBC,EAAKuF,GAChD"}
1
+ {"version":3,"file":"web-csv-toolbox.umd.cjs","sources":["../src/common/constants.ts","../src/common/errors.ts","../src/constants.ts","../src/assertCommonOptions.ts","../src/utils/escapeRegExp.ts","../src/Lexer.ts","../src/LexerTransformer.ts","../src/commonParseErrorHandling.ts","../src/RecordAssembler.ts","../src/parseStringToArraySync.ts","../src/utils/convertBinaryToString.ts","../src/parseStringToIterableIterator.ts","../src/parseBinaryToIterableIterator.ts","../src/parseStringToStream.ts","../src/utils/convertThisAsyncIterableIteratorToArray.ts","../src/parseBinary.ts","../src/utils/convertIterableIteratorToAsync.ts","../src/getOptionsFromResponse.ts","../src/utils/parseMime.ts","../src/parseBinaryToArraySync.ts","../src/parseBinaryToStream.ts","../src/RecordAssemblerTransformer.ts","../src/utils/pipeline.ts","../src/parseUint8ArrayStreamToStream.ts","../src/utils/convertStreamToAsyncIterableIterator.ts","../src/parseUint8ArrayStream.ts","../src/parseResponse.ts","../src/parseString.ts","../src/parseStringStreamToStream.ts","../src/parseStringStream.ts","../src/parse.ts","../src/parseResponseToStream.ts","../src/loadWASM.ts","../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["/**\n * FiledDelimiter is a symbol for field delimiter of CSV.\n * @category Constants\n */\nexport const FieldDelimiter = Symbol.for(\"web-csv-toolbox.FieldDelimiter\");\n/**\n * RecordDelimiter is a symbol for record delimiter of CSV.\n * @category Constants\n */\nexport const RecordDelimiter = Symbol.for(\"web-csv-toolbox.RecordDelimiter\");\n/**\n * Field is a symbol for field of CSV.\n * @category Constants\n */\nexport const Field = Symbol.for(\"web-csv-toolbox.Field\");\n","import type { Position } from \"./types.js\";\n\n/**\n * Options for creating a parse error.\n */\nexport interface ParseErrorOptions extends ErrorOptions {\n /**\n * The position where the error occurred.\n */\n position?: Position;\n}\n\n/**\n * Error class for parse errors.\n *\n * @remarks\n * This error is thrown when a parsing error occurs.\n * {@link ParseError} is a subclass of {@link !SyntaxError}.\n *\n * This is in reference to the specification\n * that the error thrown when a parse error occurs in the {@link !JSON.parse} function is {@link !SyntaxError}.\n */\nexport class ParseError extends SyntaxError {\n /**\n * The position where the error occurred.\n */\n public position?: Position;\n\n constructor(message?: string, options?: ParseErrorOptions) {\n super(message, { cause: options?.cause });\n this.name = \"ParseError\";\n this.position = options?.position;\n }\n}\n","export const CR = \"\\r\";\nexport const CRLF = \"\\r\\n\";\nexport const LF = \"\\n\";\n\n/**\n * COMMA is a symbol for comma(,).\n */\nexport const COMMA = \",\";\n\n/**\n * DOUBLE_QUOTE is a symbol for double quote(\").\n */\nexport const DOUBLE_QUOTE = '\"';\n","import type { CommonOptions } from \"./common/types.ts\";\nimport { CR, LF } from \"./constants.ts\";\n\n/**\n * Asserts that the provided value is a string and satisfies certain conditions.\n * @param value - The value to be checked.\n * @param name - The name of the option.\n * @throws {RangeError} If the value is empty, longer than 1 byte, or includes CR or LF.\n * @throws {TypeError} If the value is not a string.\n */\nfunction assertOptionValue(\n value: string,\n name: string,\n): asserts value is string {\n if (typeof value === \"string\") {\n switch (true) {\n case value.length === 0:\n throw new RangeError(`${name} must not be empty`);\n case value.length > 1:\n throw new 
RangeError(`${name} must be a single character`);\n case value === LF:\n case value === CR:\n throw new RangeError(`${name} must not include CR or LF`);\n default:\n break;\n }\n } else {\n throw new TypeError(`${name} must be a string`);\n }\n}\n\n/**\n * Asserts that the provided options object contains all the required properties.\n * Throws an error if any required property is missing\n * or if the delimiter and quotation length is not 1 byte character,\n * or if the delimiter is the same as the quotation.\n *\n * @example\n *\n * ```ts\n * assertCommonOptions({\n * quotation: '\"',\n * delimiter: ',',\n * });\n * ```\n *\n * @param options - The options object to be validated.\n * @throws {RangeError} If any required property is missing or if the delimiter is the same as the quotation.\n * @throws {TypeError} If any required property is not a string.\n */\nexport function assertCommonOptions(\n options: Required<CommonOptions>,\n): asserts options is Required<CommonOptions> {\n for (const name of [\"delimiter\", \"quotation\"] as const) {\n assertOptionValue(options[name], name);\n }\n if (options.delimiter === options.quotation) {\n throw new RangeError(\n \"delimiter must not be the same as quotation, use different characters\",\n );\n }\n}\n","/**\n * Escape a string for use in a regular expression.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions#escaping Regular expressions#Escaping | MDN}\n * @param v string to escape\n * @returns escaped string\n */\nexport function escapeRegExp(v: string) {\n return v.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n}\n","import { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { Field, FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n AbortSignalOptions,\n CommonOptions,\n Position,\n RecordDelimiterToken,\n Token,\n} from \"./common/types.ts\";\nimport { COMMA, CRLF, DOUBLE_QUOTE, LF } from \"./constants.ts\";\nimport { escapeRegExp } from \"./utils/escapeRegExp.ts\";\n\n/**\n * CSV Lexer.\n *\n * Lexter tokenizes CSV data into fields and records.\n */\nexport class Lexer {\n #delimiter: string;\n #quotation: string;\n #buffer = \"\";\n #flush = false;\n #matcher: RegExp;\n #fieldDelimiterLength: number;\n\n #cursor: Position = {\n line: 1,\n column: 1,\n offset: 0,\n };\n #rowNumber = 1;\n\n #signal?: AbortSignal;\n\n /**\n * Constructs a new Lexer instance.\n * @param options - The common options for the lexer.\n */\n constructor({\n delimiter = COMMA,\n quotation = DOUBLE_QUOTE,\n signal,\n }: CommonOptions & AbortSignalOptions = {}) {\n assertCommonOptions({ delimiter, quotation });\n this.#delimiter = delimiter;\n this.#quotation = quotation;\n this.#fieldDelimiterLength = delimiter.length;\n const d = escapeRegExp(delimiter);\n const q = escapeRegExp(quotation);\n this.#matcher = new RegExp(\n `^(?:(?!${q})(?!${d})(?![\\\\r\\\\n]))([\\\\S\\\\s\\\\uFEFF\\\\xA0]+?)(?=${q}|${d}|\\\\r|\\\\n|$)`,\n );\n if (signal) {\n this.#signal = signal;\n }\n }\n\n /**\n * Lexes the given chunk of CSV data.\n * @param chunk - The chunk of CSV data to be lexed.\n * @param buffering - Indicates whether the lexer is buffering or not.\n * @returns An iterable iterator of tokens.\n */\n public lex(chunk: string | null, buffering = false): IterableIterator<Token> {\n if (!buffering) {\n this.#flush = true;\n }\n if (typeof chunk === \"string\" && chunk.length !== 0) {\n this.#buffer += chunk;\n }\n\n return 
this.#tokens();\n }\n\n /**\n * Flushes the lexer and returns any remaining tokens.\n * @returns An array of tokens.\n */\n public flush(): Token[] {\n this.#flush = true;\n return [...this.#tokens()];\n }\n\n /**\n * Generates tokens from the buffered CSV data.\n * @yields Tokens from the buffered CSV data.\n */\n *#tokens(): Generator<Token> {\n if (this.#flush) {\n // Trim the last CRLF or LF\n if (this.#buffer.endsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(0, -2 /* -CRLF.length */);\n } else if (this.#buffer.endsWith(LF)) {\n this.#buffer = this.#buffer.slice(0, -1 /* -LF.length */);\n }\n }\n let token: Token | null;\n while ((token = this.#nextToken())) {\n yield token;\n }\n }\n\n /**\n * Retrieves the next token from the buffered CSV data.\n * @returns The next token or null if there are no more tokens.\n */\n #nextToken(): Token | null {\n this.#signal?.throwIfAborted();\n if (this.#buffer.length === 0) {\n return null;\n }\n // Buffer is Record Delimiter, defer to the next iteration.\n if (\n this.#flush === false &&\n (this.#buffer === CRLF || this.#buffer === LF)\n ) {\n return null;\n }\n\n // Check for CRLF\n if (this.#buffer.startsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(2);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 2; // CRLF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: CRLF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for LF\n if (this.#buffer.startsWith(LF)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 1; // LF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: LF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for Delimiter\n if (this.#buffer.startsWith(this.#delimiter)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += this.#fieldDelimiterLength;\n this.#cursor.offset += this.#fieldDelimiterLength;\n return {\n type: FieldDelimiter,\n value: this.#delimiter,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Check for Quoted String\n if (this.#buffer.startsWith(this.#quotation)) {\n /**\n * Extract Quoted field.\n *\n * The following code is equivalent to the following:\n *\n * If the next character is a quote:\n * - If the character after that is a quote, then append a quote to the value and skip two characters.\n * - Otherwise, return the quoted string.\n * Otherwise, append the character to the value and skip one character.\n *\n * ```plaintext\n * | `i` | `i + 1` | `i + 2` |\n * |------------|------------|----------|\n * | cur | next | | => Variable names\n * | #quotation | #quotation | | => Escaped quote\n * | #quotation | (EOF) | | => Closing quote\n * | #quotation | undefined | | => End of buffer\n * | undefined | | | => End of buffer\n * ```\n */\n let value = \"\";\n let offset = 1; // Skip the opening quote\n let column = 2; // Skip the opening quote\n let line = 0;\n\n // Define variables\n let cur: string = this.#buffer[offset];\n let next: string | undefined = this.#buffer[offset + 1];\n do {\n // If the current character is a quote, check the next characters for closing quotes.\n if (cur === this.#quotation) 
{\n // If the cur character is a quote and the next character is a quote,\n // then append a quote to the value and skip two characters.\n if (next === this.#quotation) {\n // Append a quote to the value and skip two characters.\n value += this.#quotation;\n offset += 2;\n cur = this.#buffer[offset];\n next = this.#buffer[offset + 1];\n\n // Update the diff\n column += 2;\n continue;\n }\n\n // If the cur character is a quote and the next character is undefined,\n // then return null.\n if (next === undefined && this.#flush === false) {\n return null;\n }\n\n // Otherwise, return the quoted string.\n // Update the buffer and return the token\n offset++;\n this.#buffer = this.#buffer.slice(offset);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += column;\n this.#cursor.offset += offset;\n this.#cursor.line += line;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Append the character to the value.\n value += cur;\n\n // Prepare for the next iteration\n if (cur === LF) {\n // If the current character is a LF,\n // then increment the line number and reset the column number.\n line++;\n column = 1;\n } else {\n // Otherwise, increment the column number and offset.\n column++;\n }\n\n offset++;\n cur = next;\n next = this.#buffer[offset + 1];\n } while (cur !== undefined);\n\n if (this.#flush) {\n throw new ParseError(\"Unexpected EOF while parsing quoted field.\", {\n position: { ...this.#cursor },\n });\n }\n return null;\n }\n\n // Check for Unquoted String\n const match = this.#matcher.exec(this.#buffer);\n if (match) {\n // If we're flushing and the match doesn't consume the entire buffer,\n // then return null\n if (this.#flush === false && match[0].length === this.#buffer.length) {\n return null;\n }\n const value = match[1];\n this.#buffer = this.#buffer.slice(value.length);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += value.length;\n this.#cursor.offset += value.length;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Otherwise, return null\n return null;\n }\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport type { CommonOptions, Token } from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeTo(new WritableStream({ write(tokens) {\n * for (const token of tokens) {\n * console.log(token);\n * }\n * }}));\n * // { type: Field, value: \"name\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: {...} }\n * // { type: Field, value: \"age\", location: {...} }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * // { type: Field, value: \"Alice\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: {...} }\n * // { type: Field, value: \"20\" }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * ```\n */\nexport class LexerTransformer extends TransformStream<string, Token[]> {\n public readonly lexer: Lexer;\n constructor(options: CommonOptions = {}) {\n super({\n transform: (chunk, controller) => {\n if (chunk.length 
!== 0) {\n try {\n controller.enqueue([...this.lexer.lex(chunk, true)]);\n } catch (error) {\n controller.error(error);\n }\n }\n },\n flush: (controller) => {\n try {\n controller.enqueue(this.lexer.flush());\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.lexer = new Lexer(options);\n }\n}\n","import { ParseError } from \"./common/errors\";\n\n/**\n * Common error handling for parsing CSV data.\n *\n * @param error - The error to handle.\n * @throws {ParseError} When an error occurs while parsing the CSV data.\n * @throws {RangeError} When an invalid option is provided.\n * @throws {TypeError} When an invalid option is provided.\n */\n\nexport function commonParseErrorHandling(error: unknown): never {\n if (\n error instanceof ParseError ||\n error instanceof RangeError ||\n error instanceof TypeError\n ) {\n throw error;\n }\n throw new ParseError(\"An error occurred while parsing the CSV data.\", {\n cause: error,\n });\n}\n","import { FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\nexport class RecordAssembler<Header extends ReadonlyArray<string>> {\n #fieldIndex = 0;\n #row: string[] = [];\n #header: Header | undefined;\n #dirty = false;\n #signal?: AbortSignal;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n if (options.header !== undefined && Array.isArray(options.header)) {\n this.#setHeader(options.header);\n }\n if (options.signal) {\n this.#signal = options.signal;\n }\n }\n\n public *assemble(\n tokens: Iterable<Token>,\n flush = true,\n ): IterableIterator<CSVRecord<Header>> {\n for (const token of tokens) {\n this.#signal?.throwIfAborted();\n switch (token.type) {\n case FieldDelimiter:\n this.#fieldIndex++;\n this.#dirty = true;\n break;\n case RecordDelimiter:\n if (this.#header === undefined) {\n this.#setHeader(this.#row as unknown as Header);\n } else {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header.map((header, index) => [\n header,\n this.#row.at(index),\n ]),\n ) as unknown as CSVRecord<Header>;\n } else {\n yield Object.fromEntries(\n this.#header.map((header) => [header, \"\"]),\n ) as CSVRecord<Header>;\n }\n }\n // Reset the row fields buffer.\n this.#fieldIndex = 0;\n this.#row = new Array(this.#header?.length).fill(\"\");\n this.#dirty = false;\n break;\n default:\n this.#dirty = true;\n this.#row[this.#fieldIndex] = token.value;\n break;\n }\n }\n\n if (flush) {\n yield* this.flush();\n }\n }\n\n public *flush(): Generator<CSVRecord<Header>> {\n if (this.#header !== undefined) {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header\n .filter((v) => v)\n .map((header, index) => [header, this.#row.at(index)]),\n ) as unknown as CSVRecord<Header>;\n }\n }\n }\n\n #setHeader(header: Header) {\n this.#header = header;\n if (this.#header.length === 0) {\n throw new ParseError(\"The header must not be empty.\");\n }\n if (new Set(this.#header).size !== this.#header.length) {\n throw new ParseError(\"The header must not contain duplicate fields.\");\n }\n }\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): 
CSVRecord<Header>[] {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return [...assembler.assemble(tokens)];\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import type { BinaryOptions } from \"../common/types.ts\";\n\n/**\n * Converts a binary string to a string.\n *\n * @param binary - The binary string to convert.\n * @param options - The options for parsing the binary string.\n * @returns The converted string.\n * @throws {RangeError} The given charset is not supported.\n * @throws {TypeError} The encoded data was not valid.\n */\nexport function convertBinaryToString(\n binary: Uint8Array | ArrayBuffer,\n options: BinaryOptions,\n): string {\n return new TextDecoder(options?.charset, {\n ignoreBOM: options?.ignoreBOM,\n fatal: options?.fatal,\n }).decode(binary instanceof ArrayBuffer ? new Uint8Array(binary) : binary);\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return assembler.assemble(tokens);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\n/**\n * Parses the given binary data into an iterable iterator of CSV records.\n *\n * @param binary - The binary data to parse.\n * @param options - The parse options.\n * @returns An iterable iterator of CSV records.\n * @throws {ParseError} When an error occurs while parsing the CSV data.\n */\nexport function parseBinaryToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): IterableIterator<CSVRecord<Header>> {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n return new ReadableStream({\n start(controller) {\n const tokens = lexer.lex(csv);\n for (const record of assembler.assemble(tokens)) {\n controller.enqueue(record);\n }\n controller.close();\n },\n });\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","export async function convertThisAsyncIterableIteratorToArray<\n O,\n T extends (...args: any[]) => AsyncGenerator<O>,\n>(this: T, ...args: Parameters<T>): 
Promise<O[]> {\n const rows: O[] = [];\n for await (const row of this(...args)) {\n rows.push(row);\n }\n return rows;\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseBinaryToArraySync } from \"./parseBinaryToArraySync.ts\";\nimport { parseBinaryToIterableIterator } from \"./parseBinaryToIterableIterator.ts\";\nimport { parseBinaryToStream } from \"./parseBinaryToStream.ts\";\nimport { convertIterableIteratorToAsync } from \"./utils/convertIterableIteratorToAsync.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse a binary from an {@link !Uint8Array}.\n *\n * @category Middle-level API\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Async iterable iterator of records.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * for await (const record of parseUint8Array(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseBinary<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const iterator = parseBinaryToIterableIterator(bytes, options);\n return convertIterableIteratorToAsync(iterator);\n}\n\nexport declare namespace parseBinary {\n /**\n * Parse a binary from an {@link !Uint8Array} to an array of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const records = await parseUint8Array.toArray(csv);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse a binary from an {@link !Uint8Array} to an array of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Array of records\n * @example\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const records = parseUint8Array.toArraySync(csv);\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n ): CSVRecord<Header>[];\n\n /**\n * Parse a binary from an {@link !Uint8Array} to an iterable iterator of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Async iterable iterator of records.\n * @example\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * for (const record of parseUint8Array.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array,\n options?: ParseBinaryOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n\n /**\n * Parse a binary from an {@link !Uint8Array} to a stream of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Stream of records.\n *\n * @example\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * 
const stream = parseUint8Array.toStream(csv);\n *\n * await stream.pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseBinary, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseBinaryToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseBinaryToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseBinaryToStream,\n },\n});\n","export function convertIterableIteratorToAsync<T>(\n iterator: IterableIterator<T>,\n): AsyncIterableIterator<T> {\n return {\n async next() {\n return iterator.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n}\n","import type { ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseMime } from \"./utils/parseMime.ts\";\n\n/**\n * Extracts the options from the response object.\n *\n * @param response - The response object from which to extract the options.\n * @param options - The options to merge with the extracted options.\n * @returns The options extracted from the response.\n * @throws {RangeError} - The content type is not supported.\n */\nexport function getOptionsFromResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options: ParseBinaryOptions<Header> = {},\n): ParseBinaryOptions<Header> {\n const { headers } = response;\n const contentType = headers.get(\"content-type\") ?? \"text/csv\";\n const mime = parseMime(contentType);\n if (mime.type !== \"text/csv\") {\n throw new RangeError(`Invalid mime type: \"${contentType}\"`);\n }\n const decomposition =\n (headers.get(\"content-encoding\") as CompressionFormat) ?? undefined;\n const charset = mime.parameters.charset ?? \"utf-8\";\n // TODO: Support header=present and header=absent\n // const header = mime.parameters.header ?? 
\"present\";\n return {\n decomposition,\n charset,\n ...options,\n };\n}\n","export interface ParseMimeResult {\n type: string;\n parameters: {\n [key: string]: string;\n };\n}\n\nexport function parseMime(contentType: string) {\n const [type, ...parameters] = contentType.split(\";\");\n const result: ParseMimeResult = {\n type: type.trim(),\n parameters: {},\n };\n for (const paramator of parameters) {\n const [key, value] = paramator.split(\"=\");\n result.parameters[key.trim()] = value.trim();\n }\n return result;\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\nexport function parseBinaryToArraySync<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): CSVRecord<Header>[] {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToArraySync(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\nexport function parseBinaryToStream<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): ReadableStream<CSVRecord<Header>> {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToStream(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n * @template Header The type of the header row.\n * @param options The options for the parser.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer())\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n *\n * @example Parse a CSV with headers by options\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer({ header: [\"name\", \"age\"] }))\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n */\nexport class RecordAssemblerTransformer<\n Header extends ReadonlyArray<string>,\n> extends 
TransformStream<Token[], CSVRecord<Header>> {\n public readonly assembler: RecordAssembler<Header>;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n super({\n transform: (tokens, controller) => {\n try {\n for (const token of this.assembler.assemble(tokens, false)) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n flush: (controller) => {\n try {\n for (const token of this.assembler.flush()) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.assembler = new RecordAssembler(options);\n }\n}\n","export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n )\n .catch((error) => controller.error(error)))();\n },\n });\n}\n","import { LexerTransformer } from \"./LexerTransformer.ts\";\nimport { RecordAssemblerTransformer } from \"./RecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseUint8ArrayStreamToStream<Header extends readonly string[]>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const { charset, fatal, ignoreBOM, decomposition } = options ?? {};\n return decomposition\n ? 
pipeline(\n stream,\n new DecompressionStream(decomposition),\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n )\n : pipeline(\n stream,\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n );\n}\n","export async function* convertStreamToAsyncIterableIterator<T>(\n stream: ReadableStream<T>,\n): AsyncIterableIterator<T> {\n const reader = stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n yield value;\n }\n}\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV to records.\n * This function is for parsing a binary stream.\n *\n * @category Middle-level API\n * @remarks\n * If you want to parse a string, use {@link parseStringStream}.\n * @param stream CSV string to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseUint8ArrayStream.toArray} function.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseUint8ArrayStream(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseUint8ArrayStreamToStream(stream, options);\n return convertStreamToAsyncIterableIterator(recordStream);\n}\n\nexport declare namespace parseUint8ArrayStream {\n /**\n * Parse CSV binary to array of records,\n * ideal for smaller data sets.\n *\n * @returns Array of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseUint8ArrayStream.toArray(stream);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV binary to array of records.\n *\n * @returns Stream of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseUint8ArrayStream.toStream(stream)\n * .pipeTo(new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends 
ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n}\nObject.defineProperties(parseUint8ArrayStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseUint8ArrayStreamToStream,\n },\n});\n","import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseResponseToStream } from \"./parseResponseToStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse HTTP Response what contains CSV to records,\n * ideal for smaller data sets.\n *\n * @remarks\n * This function automatically treats response headers.\n *\n * - If `Content-Type` header is not set, it assumes `text/csv`.\n * - If `Content-Type` header is not `text/csv`, it throws an error.\n * - If `Content-Type` header has charset parameter, it uses it for decoding.\n * - If `Content-Encoding` header is set, it decompresses the response.\n * - Should there be any conflicting information between the header and the options, the option's value will take precedence.\n *\n * @category Middle-level API\n * @param response\n * @param options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseResponse.toArray} function.\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parseResponse(response)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new RangeError(\"Response body is null\");\n }\n return parseUint8ArrayStream(response.body, options_);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n\nexport declare namespace parseResponse {\n /**\n * Parse CSV Response to array of records.\n *\n * @returns Array of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parseResponse.toArray(response);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV Response to stream of records.\n *\n * @param response Response to parse\n * @returns Stream of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * await parseResponse.toStream(response)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends 
ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n}\n\nObject.defineProperties(parseResponse, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStreamSync: {\n enumerable: true,\n writable: false,\n value: parseResponseToStream,\n },\n});\n","import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n yield* parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream 
of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n","import { LexerTransformer } from \"./LexerTransformer.ts\";\nimport { RecordAssemblerTransformer } from \"./RecordAssemblerTransformer.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseStringStreamToStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n return pipeline(\n stream,\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n );\n}\n","import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { parseStringStreamToStream } from \"./parseStringStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV string stream to records.\n *\n * @category Middle-level API\n * @param stream CSV string stream to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseStringStream.toArray} function.\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseStringStream(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parseStringStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseStringStreamToStream(stream, options);\n return convertStreamToAsyncIterableIterator(recordStream);\n}\n\nexport declare namespace parseStringStream {\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseStringStream.toArray(stream);\n * console.log(records);\n * // Prints:\n * // [ { 
name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseStringStream.toStream(stream)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseStringStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringStreamToStream,\n },\n});\n","import type {\n CSV,\n CSVBinary,\n CSVRecord,\n CSVString,\n ParseBinaryOptions,\n ParseOptions,\n} from \"./common/types.ts\";\nimport { parseBinary } from \"./parseBinary.ts\";\nimport { parseResponse } from \"./parseResponse.ts\";\nimport { parseString } from \"./parseString.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV to records.\n *\n * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.\n *\n *\n * @typeParam Header Header type like `['name', 'age']`.\n *\n * @param csv CSV string to parse.\n * @param options Parsing options for CSV string parsing.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parse.toArray} function.\n * @category High-level API\n *\n * @remarks\n * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},\n * {@link parseStringStream} and {@link parseResponse} are used internally.\n *\n * If you known the type of the CSV, it performs better to use them directly.\n *\n * | If you want to parse a... | Use... | Options... 
|\n * | -------------------------------------------- | ----------------------------- | -------------------------- |\n * | {@link !String} | {@link parseString} | {@link ParseOptions} |\n * | {@link !ReadableStream}<{@link !String}> | {@link parseStringStream} | {@link ParseOptions} |\n * | {@link !Uint8Array} \\| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |\n * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |\n * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files from streams\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * }\n * });\n *\n * for await (const record of parse(stream)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n *\n * @example Parsing CSV files with headers\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV has no header.\n * const csv = `Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv, { header: ['name', 'age'] })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files with different delimiters characters\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name\\tage\n * Alice\\t42\n * Bob\\t69`;\n *\n * for await (const record of parse(csv, { delimiter: '\\t' })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parse<Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\n/**\n * Parse CSV binary to records.\n *\n * @param csv CSV binary to parse.\n * @param options Parsing options for CSV binary parsing.\n *\n * @example Parsing CSV files from responses\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is not gzipped and encoded in utf-8.\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parse(response)) {\n * // ...\n * }\n * ```\n *\n * @example Parsing CSV files with options spcialized for binary\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is gzipped and encoded in shift-jis and has BOM.\n * const response = await fetch('https://example.com/data.csv.gz');\n *\n * for await (const record of parse(response, {\n * charset: 'shift-jis',\n * ignoreBOM: true,\n * decomposition: 'gzip',\n * })) {\n * // ...\n * }\n * ```\n */\nexport function parse<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport async function* parse<Header extends ReadonlyArray<string>>(\n csv: CSV,\n options?: ParseBinaryOptions<Header>,\n): 
AsyncIterableIterator<CSVRecord<Header>> {\n if (typeof csv === \"string\") {\n yield* parseString(csv, options);\n } else if (csv instanceof Uint8Array || csv instanceof ArrayBuffer) {\n yield* parseBinary(csv, options);\n } else if (csv instanceof ReadableStream) {\n const [branch1, branch2] = csv.tee();\n const reader1 = branch1.getReader();\n const { value: firstChunk } = await reader1.read();\n reader1.releaseLock();\n if (typeof firstChunk === \"string\") {\n yield* parseStringStream(branch2 as ReadableStream<string>, options);\n } else if (firstChunk instanceof Uint8Array) {\n yield* parseUint8ArrayStream(\n branch2 as ReadableStream<Uint8Array>,\n options,\n );\n }\n } else if (csv instanceof Response) {\n yield* parseResponse(csv, options);\n }\n}\n\nexport declare namespace parse {\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parse.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parse.toArray(response);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n}\n\nObject.defineProperties(parse, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n});\n","import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\n\nexport function parseResponseToStream<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new RangeError(\"Response body is null\");\n }\n return parseUint8ArrayStreamToStream(response.body, options_);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n","import init, { type InitInput } from \"web-csv-toolbox-wasm\";\n\nimport dataURL from \"web-csv-toolbox-wasm/web_csv_toolbox_wasm_bg.wasm\";\n\n/**\n * Load WASM module.\n *\n * This must be called before calling WebAssembly functions.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n * const parsed = parseStringWASM(csv);\n * ```\n */\nexport async function loadWASM(input?: InitInput | Promise<InitInput>) {\n await init(input ?? 
dataURL);\n}\n","import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport { COMMA, DOUBLE_QUOTE } from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n * @throws {RangeError | TypeError} - If provided options are invalid.\n */\nexport function parseStringToArraySyncWASM<Header extends readonly string[]>(\n csv: string,\n options: CommonOptions = {},\n): CSVRecord<Header>[] {\n const { delimiter = COMMA, quotation = DOUBLE_QUOTE } = options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new RangeError(\n \"Invalid delimiter, must be a single character on WASM.\",\n );\n }\n if (quotation !== DOUBLE_QUOTE) {\n throw new RangeError(\"Invalid quotation, must be double quote on WASM.\");\n }\n assertCommonOptions({ delimiter, quotation });\n const demiliterCode = delimiter.charCodeAt(0);\n return JSON.parse(parseStringToArraySync(csv, 
demiliterCode));\n}\n"],"names":["FieldDelimiter","Symbol","for","RecordDelimiter","Field","ParseError","SyntaxError","position","constructor","message","options","super","cause","this","name","CR","CRLF","LF","COMMA","DOUBLE_QUOTE","assertOptionValue","value","TypeError","length","RangeError","assertCommonOptions","delimiter","quotation","escapeRegExp","v","replace","Lexer","buffer","flush","matcher","fieldDelimiterLength","cursor","line","column","offset","rowNumber","signal","d","q","RegExp","lex","chunk","buffering","tokens","token","endsWith","slice","nextToken","throwIfAborted","startsWith","start","type","location","end","cur","next","match","exec","LexerTransformer","TransformStream","lexer","transform","controller","enqueue","error","commonParseErrorHandling","RecordAssembler","fieldIndex","row","header","dirty","Array","isArray","setHeader","assemble","Object","fromEntries","map","index","at","fill","filter","Set","size","parseStringToArraySync","csv","assembler","convertBinaryToString","binary","TextDecoder","charset","ignoreBOM","fatal","decode","ArrayBuffer","Uint8Array","parseStringToIterableIterator","parseBinaryToIterableIterator","parseStringToStream","ReadableStream","record","close","async","convertThisAsyncIterableIteratorToArray","args","rows","push","parseBinary","bytes","iterator","asyncIterator","convertIterableIteratorToAsync","getOptionsFromResponse","response","headers","contentType","get","mime","parameters","split","result","trim","paramator","key","parseMime","decomposition","defineProperties","toArray","enumerable","writable","internal.convertThisAsyncIterableIteratorToArray","toArraySync","toIterableIterator","toStream","RecordAssemblerTransformer","pipeline","stream","transformers","reduce","transformer","pipeThrough","pipeTo","WritableStream","write","catch","parseUint8ArrayStreamToStream","DecompressionStream","TextDecoderStream","convertStreamToAsyncIterableIterator","reader","getReader","done","read","parseUint8ArrayStream","parseResponse","options_","body","parseString","parseStringStreamToStream","parseStringStream","parse","branch1","branch2","tee","reader1","firstChunk","releaseLock","Response","toStreamSync","input","init","demiliterCode","charCodeAt","JSON"],"mappings":"2OAIa,MAAAA,EAAiBC,OAAOC,IAAI,kCAK5BC,EAAkBF,OAAOC,IAAI,mCAK7BE,EAAQH,OAAOC,IAAI,yBCQzB,MAAMG,UAAmBC,YAIvBC,SAEP,WAAAC,CAAYC,EAAkBC,GAC5BC,MAAMF,EAAS,CAAEG,MAAOF,GAASE,QACjCC,KAAKC,KAAO,aACZD,KAAKN,SAAWG,GAASH,QAC3B,EChCK,MAAMQ,EAAK,KACLC,EAAO,OACPC,EAAK,KAKLC,EAAQ,IAKRC,EAAe,ICF5B,SAASC,EACPC,EACAP,GAEI,GAAiB,iBAAVO,EAaT,MAAM,IAAIC,UAAU,GAAGR,sBAZvB,QAAQ,GACN,KAAsB,IAAjBO,EAAME,OACT,MAAM,IAAIC,WAAW,GAAGV,uBAC1B,KAAKO,EAAME,OAAS,EAClB,MAAM,IAAIC,WAAW,GAAGV,gCAC1B,KAAKO,IAAUJ,EACf,KAAKI,IAAUN,EACb,MAAM,IAAIS,WAAW,GAAGV,+BAOhC,CAqBO,SAASW,EACdf,GAEA,IAAA,MAAWI,IAAQ,CAAC,YAAa,aACbM,EAAAV,EAAQI,GAAOA,GAE/B,GAAAJ,EAAQgB,YAAchB,EAAQiB,UAChC,MAAM,IAAIH,WACR,wEAGN,CCtDO,SAASI,EAAaC,GACpB,OAAAA,EAAEC,QAAQ,sBAAuB,OAC1C,CCSO,MAAMC,EACXL,GACAC,GACAK,GAAU,GACVC,IAAS,EACTC,GACAC,GAEAC,GAAoB,CAClBC,KAAM,EACNC,OAAQ,EACRC,OAAQ,GAEVC,GAAa,EAEbC,GAMA,WAAAjC,EAAYkB,UACVA,EAAYR,EAAAS,UACZA,EAAYR,EAAAsB,OACZA,GACsC,IAClBhB,EAAA,CAAEC,YAAWC,cACjCd,MAAKa,EAAaA,EAClBb,MAAKc,EAAaA,EAClBd,MAAKsB,EAAwBT,EAAUH,OACjC,MAAAmB,EAAId,EAAaF,GACjBiB,EAAIf,EAAaD,GACvBd,MAAKqB,EAAW,IAAIU,OAClB,UAAUD,QAAQD,6CAA6CC,KAAKD,gBAElED,IACF5B,MAAK4B,EAAUA,EAEnB,CAQO,GAAAI,CAAIC,EAAsBC,GAAY,GAQ3C,OAPKA,IACHlC,MAAKoB,GAAS,GAEK,iBAAVa,GAAuC,IAAjBA,EAAMvB,SACrCV,MAAKmB,GAAWc,GAGXjC,MAAKmC,GACd,CAMO,KAAAf,GAEL,OADApB,MAAKoB,GAAS,EACP
,IAAIpB,MAAKmC,IAClB,CAMA,GAACA,GASK,IAAAC,EACI,IATJpC,MAAKoB,IAEHpB,MAAKmB,EAAQkB,SAASlC,GACnBH,MAAAmB,EAAUnB,MAAKmB,EAAQmB,MAAM,GAAG,GAC5BtC,MAAKmB,EAAQkB,SAASjC,KAC1BJ,MAAAmB,EAAUnB,MAAKmB,EAAQmB,MAAM,GAAG,KAIjCF,EAAQpC,MAAKuC,WACbH,CAEV,CAMA,EAAAG,GAEM,GADJvC,MAAK4B,GAASY,iBACc,IAAxBxC,MAAKmB,EAAQT,OACR,OAAA,KAIP,IAAgB,IAAhBV,MAAKoB,IACJpB,MAAKmB,IAAYhB,GAAQH,MAAKmB,IAAYf,GAEpC,OAAA,KAIT,GAAIJ,MAAKmB,EAAQsB,WAAWtC,GAAO,CACjCH,MAAKmB,EAAUnB,MAAKmB,EAAQmB,MAAM,GAClC,MAAMI,EAAkB,IAAK1C,MAAKuB,GAClCvB,MAAKuB,EAAQC,OACbxB,MAAKuB,EAAQE,OAAS,EACtBzB,MAAKuB,EAAQG,QAAU,EAUhB,MAT6B,CAClCiB,KAAMrD,EACNkB,MAAOL,EACPyC,SAAU,CACRF,QACAG,IAAK,IAAK7C,MAAKuB,GACfI,UAAW3B,MAAK2B,KAItB,CAGA,GAAI3B,MAAKmB,EAAQsB,WAAWrC,GAAK,CAC/BJ,MAAKmB,EAAUnB,MAAKmB,EAAQmB,MAAM,GAClC,MAAMI,EAAkB,IAAK1C,MAAKuB,GAClCvB,MAAKuB,EAAQC,OACbxB,MAAKuB,EAAQE,OAAS,EACtBzB,MAAKuB,EAAQG,QAAU,EAUhB,MAT6B,CAClCiB,KAAMrD,EACNkB,MAAOJ,EACPwC,SAAU,CACRF,QACAG,IAAK,IAAK7C,MAAKuB,GACfI,UAAW3B,MAAK2B,KAItB,CAGA,GAAI3B,MAAKmB,EAAQsB,WAAWzC,MAAKa,GAAa,CAC5Cb,MAAKmB,EAAUnB,MAAKmB,EAAQmB,MAAM,GAClC,MAAMI,EAAkB,IAAK1C,MAAKuB,GAG3B,OAFFvB,MAAAuB,EAAQE,QAAUzB,MAAKsB,EACvBtB,MAAAuB,EAAQG,QAAU1B,MAAKsB,EACrB,CACLqB,KAAMxD,EACNqB,MAAOR,MAAKa,EACZ+B,SAAU,CACRF,QACAG,IAAK,IAAK7C,MAAKuB,GACfI,UAAW3B,MAAK2B,GAGtB,CAGA,GAAI3B,MAAKmB,EAAQsB,WAAWzC,MAAKc,GAAa,CAqB5C,IAAIN,EAAQ,GACRkB,EAAS,EACTD,EAAS,EACTD,EAAO,EAGPsB,EAAc9C,MAAKmB,EAAQO,GAC3BqB,EAA2B/C,MAAKmB,EAAQO,EAAS,GAClD,EAAA,CAEG,GAAAoB,IAAQ9C,MAAKc,EAAY,CAGvB,GAAAiC,IAAS/C,MAAKc,EAAY,CAE5BN,GAASR,MAAKc,EACJY,GAAA,EACJoB,EAAA9C,MAAKmB,EAAQO,GACZqB,EAAA/C,MAAKmB,EAAQO,EAAS,GAGnBD,GAAA,EACV,QACF,CAIA,QAAa,IAATsB,IAAsC,IAAhB/C,MAAKoB,EACtB,OAAA,KAKTM,IACA1B,MAAKmB,EAAUnB,MAAKmB,EAAQmB,MAAMZ,GAClC,MAAMgB,EAAkB,IAAK1C,MAAKuB,GAI3B,OAHPvB,MAAKuB,EAAQE,QAAUA,EACvBzB,MAAKuB,EAAQG,QAAUA,EACvB1B,MAAKuB,EAAQC,MAAQA,EACd,CACLmB,KAAMpD,EACNiB,QACAoC,SAAU,CACRF,QACAG,IAAK,IAAK7C,MAAKuB,GACfI,UAAW3B,MAAK2B,GAGtB,CAGSnB,GAAAsC,EAGLA,IAAQ1C,GAGVoB,IACSC,EAAA,GAGTA,IAGFC,IACMoB,EAAAC,EACCA,EAAA/C,MAAKmB,EAAQO,EAAS,EAAC,YACf,IAARoB,GAET,GAAI9C,MAAKoB,EACD,MAAA,IAAI5B,EAAW,6CAA8C,CACjEE,SAAU,IAAKM,MAAKuB,KAGjB,OAAA,IACT,CAGA,MAAMyB,EAAQhD,MAAKqB,EAAS4B,KAAKjD,MAAKmB,GACtC,GAAI6B,EAAO,CAGL,IAAgB,IAAhBhD,MAAKoB,GAAoB4B,EAAM,GAAGtC,SAAWV,MAAKmB,EAAQT,OACrD,OAAA,KAEH,MAAAF,EAAQwC,EAAM,GACpBhD,MAAKmB,EAAUnB,MAAKmB,EAAQmB,MAAM9B,EAAME,QACxC,MAAMgC,EAAkB,IAAK1C,MAAKuB,GAG3B,OAFFvB,MAAAuB,EAAQE,QAAUjB,EAAME,OACxBV,MAAAuB,EAAQG,QAAUlB,EAAME,OACtB,CACLiC,KAAMpD,EACNiB,QACAoC,SAAU,CACRF,QACAG,IAAK,IAAK7C,MAAKuB,GACfI,UAAW3B,MAAK2B,GAGtB,CAGO,OAAA,IACT,EC3QK,MAAMuB,UAAyBC,gBACpBC,MAChB,WAAAzD,CAAYE,EAAyB,IAC7BC,MAAA,CACJuD,UAAW,CAACpB,EAAOqB,KACb,GAAiB,IAAjBrB,EAAMvB,OACJ,IACS4C,EAAAC,QAAQ,IAAIvD,KAAKoD,MAAMpB,IAAIC,GAAO,WACtCuB,GACPF,EAAWE,MAAMA,EACnB,CACF,EAEFpC,MAAQkC,IACF,IACFA,EAAWC,QAAQvD,KAAKoD,MAAMhC,eACvBoC,GACPF,EAAWE,MAAMA,EACnB,KAGCxD,KAAAoD,MAAQ,IAAIlC,EAAMrB,EACzB,y6EC5CK,SAAS4D,EAAyBD,GACvC,GACEA,aAAiBhE,GACjBgE,aAAiB7C,YACjB6C,aAAiB/C,UAEX,MAAA+C,EAEF,MAAA,IAAIhE,EAAW,gDAAiD,CACpEO,MAAOyD,GAEX,CCdO,MAAME,EACXC,GAAc,EACdC,GAAiB,GACjBC,GACAC,IAAS,EACTlC,GAEA,WAAAjC,CAAYE,EAA0C,SAC7B,IAAnBA,EAAQgE,QAAwBE,MAAMC,QAAQnE,EAAQgE,SACnD7D,MAAAiE,EAAWpE,EAAQgE,QAEtBhE,EAAQ+B,SACV5B,MAAK4B,EAAU/B,EAAQ+B,OAE3B,CAEA,SAAQsC,CACN/B,EACAf,GAAQ,GAER,IAAA,MAAWgB,KAASD,EAElB,OADAnC,MAAK4B,GAASY,iBACNJ,EAAMO,MACZ,KAAKxD,EACEa,MAAA2D,IACL3D,MAAK8D,GAAS,EACd,MACF,KAAKxE,OACkB,IAAjBU,MAAK6D,EACF7D,MAAAiE,EAAWjE,MAAK4D,GAEjB5D,MAAK8D,QACDK,OAAOC,YACXpE,MAAK6D,EAAQQ,KAAI,CAACR,EAAQS,IAAU,CAClCT,EACA7D,MAAK4D,EAAKW,GAAGD,aAIXH,OAAOC,YACXpE,MAA
K6D,EAAQQ,KAAKR,GAAW,CAACA,EAAQ,OAK5C7D,MAAK2D,EAAc,EACd3D,MAAA4D,EAAO,IAAIG,MAAM/D,MAAK6D,GAASnD,QAAQ8D,KAAK,IACjDxE,MAAK8D,GAAS,EACd,MACF,QACE9D,MAAK8D,GAAS,EACd9D,MAAK4D,EAAK5D,MAAK2D,GAAevB,EAAM5B,MAKtCY,UACKpB,KAAKoB,QAEhB,CAEA,MAAQA,QACe,IAAjBpB,MAAK6D,GACH7D,MAAK8D,UACDK,OAAOC,YACXpE,MAAK6D,EACFY,QAAQzD,GAAMA,IACdqD,KAAI,CAACR,EAAQS,IAAU,CAACT,EAAQ7D,MAAK4D,EAAKW,GAAGD,OAIxD,CAEA,EAAAL,CAAWJ,GAEL,GADJ7D,MAAK6D,EAAUA,EACa,IAAxB7D,MAAK6D,EAAQnD,OACT,MAAA,IAAIlB,EAAW,iCAEnB,GAAA,IAAIkF,IAAI1E,MAAK6D,GAASc,OAAS3E,MAAK6D,EAAQnD,OACxC,MAAA,IAAIlB,EAAW,gDAEzB,ECpFc,SAAAoF,EACdC,EACAhF,GAEI,IACI,MAAAuD,EAAQ,IAAIlC,EAAMrB,GAClBiF,EAAY,IAAIpB,EAAgB7D,GAChCsC,EAASiB,EAAMpB,IAAI6C,GACzB,MAAO,IAAIC,EAAUZ,SAAS/B,UACvBqB,GACPC,EAAyBD,EAC3B,CACF,CCNgB,SAAAuB,EACdC,EACAnF,GAEO,OAAA,IAAIoF,YAAYpF,GAASqF,QAAS,CACvCC,UAAWtF,GAASsF,UACpBC,MAAOvF,GAASuF,QACfC,OAAOL,aAAkBM,YAAc,IAAIC,WAAWP,GAAUA,EACrE,CCdgB,SAAAQ,EAGdX,EACAhF,GAEI,IACI,MAAAuD,EAAQ,IAAIlC,EAAMrB,GAClBiF,EAAY,IAAIpB,EAAgB7D,GAChCsC,EAASiB,EAAMpB,IAAI6C,GAClB,OAAAC,EAAUZ,SAAS/B,SACnBqB,GACPC,EAAyBD,EAC3B,CACF,CCNO,SAASiC,EAGdT,EACAnF,EAAsC,IAElC,IAEK,OAAA2F,EADKT,EAAsBC,EAAQnF,GACAA,SACnC2D,GACPC,EAAyBD,EAC3B,CACF,CCpBgB,SAAAkC,EACdb,EACAhF,GAEI,IACI,MAAAuD,EAAQ,IAAIlC,EAAMrB,GAClBiF,EAAY,IAAIpB,EAAgB7D,GACtC,OAAO,IAAI8F,eAAe,CACxB,KAAAjD,CAAMY,GACE,MAAAnB,EAASiB,EAAMpB,IAAI6C,GACzB,IAAA,MAAWe,KAAUd,EAAUZ,SAAS/B,GACtCmB,EAAWC,QAAQqC,GAErBtC,EAAWuC,OACb,UAEKrC,GACPC,EAAyBD,EAC3B,CACF,CCxBAsC,eAAsBC,KAGRC,GACZ,MAAMC,EAAY,GAClB,UAAA,MAAiBrC,KAAO5D,QAAQgG,GAC9BC,EAAKC,KAAKtC,GAEL,OAAAqC,CACT,CCqBgB,SAAAE,EACdC,EACAvG,GAGA,OCnCK,SACLwG,GAEO,MAAA,CACLP,KAAa,SACJO,EAAStD,OAElB,CAAC3D,OAAOkH,iBACC,OAAAtG,IACT,EAEJ,CDwBSuG,CADUd,EAA8BW,EAAOvG,GAExD,CEzBO,SAAS2G,EACdC,EACA5G,EAAsC,IAEhC,MAAA6G,QAAEA,GAAYD,EACdE,EAAcD,EAAQE,IAAI,iBAAmB,WAC7CC,ECVD,SAAmBF,GACxB,MAAOhE,KAASmE,GAAcH,EAAYI,MAAM,KAC1CC,EAA0B,CAC9BrE,KAAMA,EAAKsE,OACXH,WAAY,CAAC,GAEf,IAAA,MAAWI,KAAaJ,EAAY,CAClC,MAAOK,EAAK3G,GAAS0G,EAAUH,MAAM,KACrCC,EAAOF,WAAWK,EAAIF,QAAUzG,EAAMyG,MACxC,CACO,OAAAD,CACT,CDDeI,CAAUT,GACnB,GAAc,aAAdE,EAAKlE,KACP,MAAM,IAAIhC,WAAW,uBAAuBgG,MAOvC,MAAA,CACLU,cALCX,EAAQE,IAAI,0BAA6C,EAM1D1B,QALc2B,EAAKC,WAAW5B,SAAW,WAMtCrF,EAEP,CF8GAsE,OAAOmD,iBAAiBnB,EAAa,CACnCoB,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETC,YAAa,CACXH,YAAY,EACZC,UAAU,EACVjH,MIjJG,SACLwE,EACAnF,EAAsC,IAElC,IAEK,OAAA+E,EADKG,EAAsBC,EAAQnF,GACPA,SAC5B2D,GACPC,EAAyBD,EAC3B,CACF,GJyIEoE,mBAAoB,CAClBJ,YAAY,EACZC,UAAU,EACVjH,MAAOiF,GAEToC,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MK3JG,SACLwE,EACAnF,EAAsC,IAElC,IAEK,OAAA6F,EADKX,EAAsBC,EAAQnF,GACVA,SACzB2D,GACPC,EAAyBD,EAC3B,CACF,KCmCO,MAAMsE,UAEH3E,gBACQ2B,UAEhB,WAAAnF,CAAYE,EAA0C,IAC9CC,MAAA,CACJuD,UAAW,CAAClB,EAAQmB,KACd,IACF,IAAA,MAAWlB,KAASpC,KAAK8E,UAAUZ,SAAS/B,GAAQ,GAClDmB,EAAWC,QAAQnB,SAEdoB,GACPF,EAAWE,MAAMA,EACnB,GAEFpC,MAAQkC,IACF,IACF,IAAA,MAAWlB,KAASpC,KAAK8E,UAAU1D,QACjCkC,EAAWC,QAAQnB,SAEdoB,GACPF,EAAWE,MAAMA,EACnB,KAGCxD,KAAA8E,UAAY,IAAIpB,EAAgB7D,EACvC,EC3Dc,SAAAkI,EACdC,KACGC,GAEH,OAAO,IAAItC,eAAe,CACxBjD,MAAQY,IAEJ2E,EACGC,QACC,CAACF,EAAQG,IAAgBH,EAAOI,YAAYD,IAC5CH,GAEDK,OACC,IAAIC,eAAe,CACjBC,MAAQvH,GAAMsC,EAAWC,QAAQvC,GACjC6E,MAAO,IAAMvC,EAAWuC,WAG3B2C,OAAOhF,GAAUF,EAAWE,MAAMA,IAAS,GAGtD,CClCgB,SAAAiF,EACdT,EACAnI,GAEA,MAAMqF,QAAEA,EAASE,MAAAA,EAAAD,UAAOA,gBAAWkC,GAAkBxH,GAAW,GAChE,OAAOwH,EACHU,EACEC,EACA,IAAIU,oBAAoBrB,GACxB,IAAIsB,kBAAkBzD,EAAS,CAAEE,QAAOD,cACxC,IAAIjC,EAAiBrD,GACrB,IAAIiI,EAA2BjI,IAEjCkI,EACEC,EACA,IAAIW,kBAAkBzD,EAAS,CAAEE,QAAOD,cACxC,IAAIjC,EAAiBrD,GACrB,IAAIiI,EAA2BjI,GAEvC,CCxBAiG,eAAuB8C,EACrBZ,GAEM,MAAAa,EAASb,EAAOc,
YACtB,OAAa,CACX,MAAMC,KAAEA,EAAMvI,MAAAA,SAAgBqI,EAAOG,OACrC,GAAID,EAAM,YACJvI,CACR,CACF,CC+BgB,SAAAyI,EACdjB,EACAnI,GAGA,OAAO+I,EADcH,EAA8BT,EAAQnI,GAE7D,CCPgB,SAAAqJ,EACdzC,EACA5G,GAEI,IACI,MAAAsJ,EAAW3C,EAAuBC,EAAU5G,GAC9C,GAAkB,OAAlB4G,EAAS2C,KACL,MAAA,IAAIzI,WAAW,yBAEhB,OAAAsI,EAAsBxC,EAAS2C,KAAMD,SACrC3F,GACPC,EAAyBD,EAC3B,CACF,CCnBuBsC,eAAAuD,EACrBxE,EACAhF,GAEI,UACK2F,EAA8BX,EAAKhF,SACnC2D,GACPC,EAAyBD,EAC3B,CACF,CCrCgB,SAAA8F,EACdtB,EACAnI,GAEO,OAAAkI,EACLC,EACA,IAAI9E,EAAiBrD,GACrB,IAAIiI,EAA2BjI,GAEnC,CCyBgB,SAAA0J,EACdvB,EACAnI,GAGA,OAAO+I,EADcU,EAA0BtB,EAAQnI,GAEzD,CCuHuBiG,eAAA0D,EACrB3E,EACAhF,GAEI,GAAe,iBAARgF,QACFwE,EAAYxE,EAAKhF,QACf,GAAAgF,aAAeU,YAAcV,aAAeS,kBAC9Ca,EAAYtB,EAAKhF,QAC1B,GAAWgF,aAAec,eAAgB,CACxC,MAAO8D,EAASC,GAAW7E,EAAI8E,MACzBC,EAAUH,EAAQX,aAChBtI,MAAOqJ,SAAqBD,EAAQZ,OAC5CY,EAAQE,cACkB,iBAAfD,QACFN,EAAkBG,EAAmC7J,GACnDgK,aAAsBtE,mBACxB0D,EACLS,EACA7J,GAEJ,MACSgF,aAAekF,iBACjBb,EAAcrE,EAAKhF,GAE9B,CL5EAsE,OAAOmD,iBAAiB2B,EAAuB,CAC7C1B,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETG,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MAAOiI,KCdXtE,OAAOmD,iBAAiB4B,EAAe,CACrC3B,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETsC,aAAc,CACZxC,YAAY,EACZC,UAAU,EACVjH,MK/GY,SACdiG,EACA5G,GAEI,IACI,MAAAsJ,EAAW3C,EAAuBC,EAAU5G,GAC9C,GAAkB,OAAlB4G,EAAS2C,KACL,MAAA,IAAIzI,WAAW,yBAEhB,OAAA8H,EAA8BhC,EAAS2C,KAAMD,SAC7C3F,GACPC,EAAyBD,EAC3B,CACF,KJiIAW,OAAOmD,iBAAiB+B,EAAa,CACnC9B,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETC,YAAa,CACXH,YAAY,EACZC,UAAU,EACVjH,MAAOoE,GAETgD,mBAAoB,CAClBJ,YAAY,EACZC,UAAU,EACVjH,MAAOgF,GAETqC,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MAAOkF,KElDXvB,OAAOmD,iBAAiBiC,EAAmB,CACzChC,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,GAETG,SAAU,CACRL,YAAY,EACZC,UAAU,EACVjH,MAAO8I,KC8GXnF,OAAOmD,iBAAiBkC,EAAO,CAC7BjC,QAAS,CACPC,YAAY,EACZC,UAAU,EACVjH,MAAOkH,oIE3NX5B,eAA+BmE,SACvBC,EAAKD,+97GACb,iHCoBO,SACLpF,EACAhF,EAAyB,IAEzB,MAAMgB,UAAEA,EAAYR,EAAOS,UAAAA,EAAYR,GAAiBT,EACxD,GAAyB,iBAAdgB,GAA+C,IAArBA,EAAUH,OAC7C,MAAM,IAAIC,WACR,0DAGJ,GAAIG,IAAcR,EACV,MAAA,IAAIK,WAAW,oDAEHC,EAAA,CAAEC,YAAWC,cAC3B,MAAAqJ,EAAgBtJ,EAAUuJ,WAAW,GAC3C,OAAOC,KAAKb,MAAM5E,EAAuBC,EAAKsF,GAChD"}
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "web-csv-toolbox",
- "version": "0.11.0-next-3e76d727a5e0c4f1fbd537e0a89bed474495294b",
+ "version": "0.11.0-next-5d01c3998b1d65f9ecf06ae0cb3ec382001832c5",
  "description": "A CSV Toolbox utilizing Web Standard APIs.",
  "type": "module",
  "main": "dist/cjs/web-csv-toolbox.cjs",