web-csv-toolbox 0.5.0 → 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -4,9 +4,12 @@
  [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
  [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](http://makeapullrequest.com)
  ![node version](https://img.shields.io/node/v/web-csv-toolbox)
+
+ ![npm package minimized gzipped size](https://img.shields.io/bundlejs/size/web-csv-toolbox)
+ ![GitHub code size in bytes](https://img.shields.io/github/languages/code-size/kamiazya/web-csv-toolbox)
  ![npm](https://img.shields.io/npm/dm/web-csv-toolbox)

- # `🌐 web-csv-toolbox 💽`
+ # `🌐 web-csv-toolbox 🧰`

  A CSV Toolbox utilizing Web Standard APIs.

@@ -14,14 +17,13 @@ A CSV Toolbox utilizing Web Standard APIs.

  [![GitHub](https://img.shields.io/badge/-GitHub-181717?logo=GitHub&style=flat)](https://github.com/kamiazya/web-csv-toolbox)
  [![npm](https://img.shields.io/badge/-npm-CB3837?logo=npm&style=flat)](https://www.npmjs.com/package/web-csv-toolbox)
- [![yarn](https://img.shields.io/badge/-yarn-ffffff?logo=Yarn&style=flat)](https://yarnpkg.com/package/web-csv-toolbox)
  [![API Reference](https://img.shields.io/badge/-API%20Refarence-3178C6?logo=TypeScript&style=flat&logoColor=fff)](https://kamiazya.github.io/web-csv-toolbox/)
  [![Sponsor](https://img.shields.io/badge/-GitHub%20Sponsor-fff?logo=GitHub%20Sponsors&style=flat)](https://github.com/sponsors/kamiazya)


  [![format: Biome](https://img.shields.io/badge/format%20with-Biome-F7B911?logo=biome&style=flat)](https://biomejs.dev/)
  [![test: Vitest](https://img.shields.io/badge/tested%20with-Vitest-6E9F18?logo=vitest&style=flat)](https://vitest.dev/)
- [![build: Rollup](https://img.shields.io/badge/build%20with-Rollup-EC4A3F?logo=rollup.js&style=flat)](https://rollupjs.org/)
+ [![build: Vite](https://img.shields.io/badge/build%20with-Vite-646CFF?logo=vite&style=flat)](https://rollupjs.org/)

  </div>

@@ -93,7 +95,7 @@ Bob,69`;

  ```html
  <script type="module">
- import { parse } from 'https://unpkg.com/web-csv-toolbox/lib/index.js';
+ import { parse } from 'https://unpkg.com/web-csv-toolbox?module';

  const csv = `name,age
  Alice,42
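
For reference, the updated CDN usage reads roughly as follows when reconstructed from this hunk (the `Bob,69` row comes from the hunk header, and the loop mirrors the examples embedded in the package's own docs). This is an illustration, not part of the diff: the code is the body of the README's `<script type="module">` tag, and the `?module` query asks unpkg to serve the package's ES module build.

```ts
// Reconstructed for reference from the README hunk above (not part of the diff).
// Runs inside a <script type="module"> tag; ?module tells unpkg to serve the ESM build.
import { parse } from 'https://unpkg.com/web-csv-toolbox?module';

const csv = `name,age
Alice,42
Bob,69`;

for await (const record of parse(csv)) {
  console.log(record);
}
// Prints:
// { name: 'Alice', age: '42' }
// { name: 'Bob', age: '69' }
```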
@@ -0,0 +1,2 @@
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=Symbol.for("web-csv-toolbox.FieldDelimiter"),t=Symbol.for("web-csv-toolbox.RecordDelimiter"),r=Symbol.for("web-csv-toolbox.Field"),i="\r\n",n="\n";function s(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}class o{#e;#t;#r;#i;#n;#s="";#o=!1;constructor({delimiter:e=",",quotation:t='"'}={}){!function(e){if("string"==typeof e.quotation&&0===e.quotation.length)throw new Error("quotation must not be empty");if("string"==typeof e.delimiter&&0===e.delimiter.length)throw new Error("delimiter must not be empty");if(e.quotation.includes(n)||e.quotation.includes("\r"))throw new Error("quotation must not include CR or LF");if(e.delimiter.includes(n)||e.delimiter.includes("\r"))throw new Error("delimiter must not include CR or LF");if(e.delimiter.includes(e.quotation)||e.quotation.includes(e.delimiter))throw new Error("delimiter and quotation must not include each other as a substring")}({delimiter:e,quotation:t}),this.#e=e,this.#t=e.length,this.#r=t,this.#i=t.length;const r=s(e),i=s(t);this.#n=new RegExp(`^(?:(?!${i})(?!${r})(?![\\r\\n]))([\\S\\s\\uFEFF\\xA0]+?)(?=${i}|${r}|\\r|\\n|$)`)}lex(e,t=!1){return t||(this.#o=!0),"string"==typeof e&&0!==e.length&&(this.#s+=e),this.#a()}flush(){return this.#o=!0,[...this.#a()]}*#a(){this.#o&&(this.#s.endsWith(i)?this.#s=this.#s.slice(0,-2):this.#s.endsWith(n)&&(this.#s=this.#s.slice(0,-1)));let r=null;for(let i;i=this.#u();)switch(i){case e:case t:r&&(yield r,r=null),yield i;break;default:r?r.value+=i.value:r=i}r&&(yield r)}#u(){if(0===this.#s.length)return null;if(!1===this.#o&&(this.#s===i||this.#s===n))return null;if(this.#s.startsWith(i))return this.#s=this.#s.slice(2),t;if(this.#s.startsWith(n))return this.#s=this.#s.slice(1),t;if(this.#s.startsWith(this.#e))return this.#s=this.#s.slice(this.#t),e;if(this.#s.startsWith(this.#r))return!1===this.#o&&this.#s.endsWith(this.#r)?null:this.#l();const s=this.#n.exec(this.#s);return s?!1===this.#o&&s[0].length===this.#s.length?null:(this.#s=this.#s.slice(s[0].length),{type:r,value:s[0]}):null}#l(){let e=this.#i,t="";for(;e<this.#s.length;)if(this.#s.slice(e,e+this.#i)!==this.#r||this.#s.slice(e+this.#i,e+2*this.#i)!==this.#r){if(this.#s.slice(e,e+this.#i)===this.#r)return!1===this.#o&&e+this.#i<this.#s.length&&this.#s.slice(e+this.#i,this.#t)!==this.#e&&this.#s.slice(e+this.#i,e+this.#i+2)!==i&&this.#s.slice(e+this.#i,e+this.#i+1)!==n?null:(this.#s=this.#s.slice(e+this.#i),{type:r,value:t});t+=this.#s[e],e++}else t+=this.#r,e+=2*this.#i;return null}}class a extends TransformStream{constructor(e={}){const t=new o(e);super({transform:(e,r)=>{0!==e.length&&r.enqueue([...t.lex(e,!0)])},flush:e=>{e.enqueue(t.flush())}})}}class u{#f=0;#h=[];#c;#d=!1;constructor(e={}){void 0!==e.header&&Array.isArray(e.header)&&this.#b(e.header)}*assemble(r,i=!0){for(const n of r)switch(n){case e:this.#f++,this.#d=!0;break;case t:void 0===this.#c?this.#b(this.#h):this.#d?yield Object.fromEntries(this.#c.map(((e,t)=>[e,this.#h.at(t)]))):yield Object.fromEntries(this.#c.map((e=>[e,""]))),this.#f=0,this.#h=new Array(this.#c?.length).fill(""),this.#d=!1;break;default:this.#d=!0,this.#h[this.#f]=n.value}i&&(yield*this.flush())}*flush(){void 0!==this.#c&&this.#d&&(yield Object.fromEntries(this.#c.filter((e=>e)).map(((e,t)=>[e,this.#h.at(t)]))))}#b(e){if(this.#c=e,0===this.#c.length)throw new Error("The header must not be empty.");if(new Set(this.#c).size!==this.#c.length)throw new Error("The header must not contain duplicate fields.")}}class l extends 
TransformStream{constructor(e={}){const t=new u(e);super({transform:(e,r)=>{for(const i of t.assemble(e,!1))r.enqueue(i)},flush:e=>{for(const r of t.flush())e.enqueue(r)}})}}function f(e,t){const r=new o(t),i=new u(t),n=r.lex(e);return[...i.assemble(n)]}function h(e,t){const r=new o(t),i=new u(t),n=r.lex(e);return i.assemble(n)}function c(e,t){const r=new o(t),i=new u(t);return new ReadableStream({start(t){const n=r.lex(e);for(const e of i.assemble(n))t.enqueue(e);t.close()}})}async function d(...e){const t=[];for await(const r of this(...e))t.push(r);return t}async function*b(e,t){yield*h(e,t)}var m,y,w,p,g,x;function q(e,t){return new TextDecoder(t?.charset,{ignoreBOM:t?.ignoreBOM,fatal:t?.fatal}).decode(e instanceof ArrayBuffer?new Uint8Array(e):e)}function v(e,t={}){return f(q(e,t),t)}function S(e,t={}){return h(q(e,t),t)}function O(e,t={}){return c(q(e,t),t)}function L(e,t){return function(e){return{async next(){const t=e.next();return Promise.resolve(t)},[Symbol.asyncIterator](){return this}}}(S(e,t))}function j(e,...t){return new ReadableStream({start:r=>{t.reduce(((e,t)=>e.pipeThrough(t)),e).pipeTo(new WritableStream({write:e=>r.enqueue(e),close:()=>r.close()}))}})}function A(e,t){const{charset:r,fatal:i,ignoreBOM:n,decomposition:s}=t??{};return s?j(e,new DecompressionStream(s),new TextDecoderStream(r,{fatal:i,ignoreBOM:n}),new a(t),new l(t)):j(e,new TextDecoderStream(r,{fatal:i,ignoreBOM:n}),new a(t),new l(t))}async function*P(e){const t=e.getReader();for(;;){const{done:e,value:r}=await t.read();if(e)break;yield r}}function E(e,t){return P(A(e,t))}function R(e,t){return j(e,new a(t),new l(t))}function T(e,t){return P(R(e,t))}function k(e,t={}){const{headers:r}=e,i=r.get("content-type")??"text/csv",n=function(e){const[t,...r]=e.split(";"),i={type:t.trim(),parameters:{}};for(const n of r){const[e,t]=n.split("=");i.parameters[e.trim()]=t.trim()}return i}(i);if("text/csv"!==n.type)throw new Error(`Invalid mime type: ${i}`);return{decomposition:r.get("content-encoding")??void 0,charset:n.parameters.charset??"utf-8",...t}}function I(e,t){const r=k(e,t);if(null===e.body)throw new Error("Response body is null");return A(e.body,r)}function F(e,t){const r=k(e,t);if(null===e.body)throw new Error("Response body is null");return E(e.body,r)}async function*B(e,t){if("string"==typeof e)yield*b(e,t);else if(e instanceof Uint8Array||e instanceof ArrayBuffer)yield*L(e,t);else if(e instanceof ReadableStream){const[r,i]=e.tee(),n=r.getReader(),{value:s}=await n.read();n.releaseLock(),"string"==typeof s?yield*T(i,t):s instanceof Uint8Array&&(yield*E(i,t))}else e instanceof 
Response&&(yield*F(e,t))}m=b||(b={}),Object.defineProperty(m,"toArray",{enumerable:!0,writable:!1,value:d}),Object.defineProperty(m,"toArraySync",{enumerable:!0,writable:!1,value:f}),Object.defineProperty(m,"toIterableIterator",{enumerable:!0,writable:!1,value:h}),Object.defineProperty(m,"toStream",{enumerable:!0,writable:!1,value:c}),y=L||(L={}),Object.defineProperty(y,"toArray",{enumerable:!0,writable:!1,value:d}),Object.defineProperty(y,"toArraySync",{enumerable:!0,writable:!1,value:v}),Object.defineProperty(y,"toIterableIterator",{enumerable:!0,writable:!1,value:S}),Object.defineProperty(y,"toStream",{enumerable:!0,writable:!1,value:O}),w=E||(E={}),Object.defineProperty(w,"toArray",{enumerable:!0,writable:!1,value:d}),Object.defineProperty(w,"toStream",{enumerable:!0,writable:!1,value:A}),p=T||(T={}),Object.defineProperty(p,"toArray",{enumerable:!0,writable:!1,value:d}),Object.defineProperty(p,"toStream",{enumerable:!0,writable:!1,value:R}),g=F||(F={}),Object.defineProperty(g,"toArray",{enumerable:!0,writable:!1,value:d}),Object.defineProperty(g,"toStream",{enumerable:!0,writable:!1,value:I}),x=B||(B={}),Object.defineProperty(x,"toArray",{enumerable:!0,writable:!1,value:d}),exports.Field=r,exports.FieldDelimiter=e,exports.LexerTransformer=a,exports.RecordAssemblerTransformer=l,exports.RecordDelimiter=t,exports.parse=B,exports.parseBinary=L,exports.parseResponse=F,exports.parseString=b,exports.parseStringStream=T,exports.parseUint8ArrayStream=E;
+ //# sourceMappingURL=web-csv-toolbox.cjs.map
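
The file added in this hunk, identified by its trailing `sourceMappingURL` comment as `web-csv-toolbox.cjs`, is a new CommonJS build. Its minified body assigns the public API onto `exports`: `parse`, `parseString`, `parseBinary`, `parseStringStream`, `parseUint8ArrayStream`, `parseResponse`, `LexerTransformer`, `RecordAssemblerTransformer`, and the `Field`, `FieldDelimiter`, and `RecordDelimiter` symbols. A minimal consumption sketch, assuming the package's `main`/`exports` fields resolve `require('web-csv-toolbox')` to this bundle:

```ts
// Minimal sketch of using the CJS bundle from a Node CommonJS context.
// Assumes require('web-csv-toolbox') resolves to the web-csv-toolbox.cjs file added above.
const { parse, parseString } = require('web-csv-toolbox');

const csv = `name,age
Alice,42
Bob,69`;

(async () => {
  // parse() is the high-level API exported by the bundle (async iterable of records).
  for await (const record of parse(csv)) {
    console.log(record);
  }
  // parseString.toArray() is one of the namespace helpers the bundle attaches
  // via Object.defineProperty, as visible in the minified code.
  console.log(await parseString.toArray(csv));
})();
```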
@@ -0,0 +1 @@
+ {"version":3,"file":"web-csv-toolbox.cjs","sources":["../src/lib/common/constants.ts","../src/lib/internal/constants.ts","../src/lib/internal/utils/escapeRegExp.ts","../src/lib/internal/Lexer.ts","../src/lib/internal/assertCommonOptions.ts","../src/lib/transformers/LexerTransformer.ts","../src/lib/internal/RecordAssembler.ts","../src/lib/transformers/RecordAssemblerTransformer.ts","../src/lib/internal/parseStringToArraySync.ts","../src/lib/internal/parseStringToIterableIterator.ts","../src/lib/internal/parseStringToStream.ts","../src/lib/internal/utils/toArray.ts","../src/lib/parseString.ts","../src/lib/parseBinary.ts","../src/lib/parseUint8ArrayStream.ts","../src/lib/parseStringStream.ts","../src/lib/parseResponse.ts","../src/lib/parse.ts","../src/lib/internal/convertBinaryToString.ts","../src/lib/internal/parseBinaryToArraySync.ts","../src/lib/internal/parseBinaryToIterableIterator.ts","../src/lib/internal/parseBinaryToStream.ts","../src/lib/internal/utils/iterableIteratorToAsync.ts","../src/lib/internal/utils/pipeline.ts","../src/lib/internal/parseUint8ArrayStreamToStream.ts","../src/lib/internal/utils/streamToAsyncIterableIterator.ts","../src/lib/internal/parseStringStreamToStream.ts","../src/lib/internal/getOptionsFromResponse.ts","../src/lib/internal/parseMime.ts","../src/lib/internal/parseResponseToStream.ts"],"sourcesContent":["/**\n * FiledDelimiter is a symbol for field delimiter of CSV.\n * @category Constants\n */\nexport const FieldDelimiter = Symbol.for(\"web-csv-toolbox.FieldDelimiter\");\n/**\n * RecordDelimiter is a symbol for record delimiter of CSV.\n * @category Constants\n */\nexport const RecordDelimiter = Symbol.for(\"web-csv-toolbox.RecordDelimiter\");\n/**\n * Field is a symbol for field of CSV.\n * @category Constants\n */\nexport const Field = Symbol.for(\"web-csv-toolbox.Field\");\n","export const CR = \"\\r\";\nexport const CRLF = \"\\r\\n\";\nexport const LF = \"\\n\";\n\n/**\n * COMMA is a symbol for comma(,).\n */\nexport const COMMA = \",\";\n\n/**\n * DOUBLE_QUOTE is a symbol for double quote(\").\n */\nexport const DOUBLE_QUOTE = '\"';\n","/**\n * Escape a string for use in a regular expression.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions#escaping Regular expressions#Escaping | MDN}\n * @param v string to escape\n * @returns escaped string\n */\nexport function escapeRegExp(v: string) {\n return v.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n}\n","import { Field, FieldDelimiter, RecordDelimiter } from \"../common/constants.ts\";\nimport { CommonOptions, Token } from \"../common/types.ts\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { COMMA, CRLF, DOUBLE_QUOTE, LF } from \"./constants.ts\";\nimport { escapeRegExp } from \"./utils/escapeRegExp.ts\";\n\nexport class Lexer {\n #delimiter: string;\n #delimiterLength: number;\n #quotation: string;\n #quotationLength: number;\n #matcher: RegExp;\n #buffer = \"\";\n #flush = false;\n\n constructor({\n delimiter = COMMA,\n quotation = DOUBLE_QUOTE,\n }: CommonOptions = {}) {\n assertCommonOptions({ delimiter, quotation });\n this.#delimiter = delimiter;\n this.#delimiterLength = delimiter.length;\n this.#quotation = quotation;\n this.#quotationLength = quotation.length;\n\n const d = escapeRegExp(delimiter);\n const q = escapeRegExp(quotation);\n this.#matcher = new RegExp(\n `^(?:(?!${q})(?!${d})(?![\\\\r\\\\n]))([\\\\S\\\\s\\\\uFEFF\\\\xA0]+?)(?=${q}|${d}|\\\\r|\\\\n|$)`,\n );\n }\n\n public lex(chunk: string | null, 
buffering = false): IterableIterator<Token> {\n if (!buffering) {\n this.#flush = true;\n }\n if (typeof chunk === \"string\" && chunk.length !== 0) {\n this.#buffer += chunk;\n }\n\n return this.#tokens();\n }\n\n public flush(): Token[] {\n this.#flush = true;\n return [...this.#tokens()];\n }\n\n *#tokens(): Generator<Token> {\n if (this.#flush) {\n // Trim the last CRLF or LF\n if (this.#buffer.endsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(0, -CRLF.length);\n } else if (this.#buffer.endsWith(LF)) {\n this.#buffer = this.#buffer.slice(0, -LF.length);\n }\n }\n let currentField: Token | null = null;\n for (let token: Token | null; (token = this.#nextToken()); ) {\n switch (token) {\n case FieldDelimiter:\n if (currentField) {\n yield currentField;\n currentField = null;\n }\n yield token;\n break;\n case RecordDelimiter:\n if (currentField) {\n yield currentField;\n currentField = null;\n }\n yield token;\n break;\n default:\n if (currentField) {\n currentField.value += token.value;\n } else {\n currentField = token;\n }\n break;\n }\n }\n if (currentField) {\n yield currentField;\n }\n }\n\n #nextToken(): Token | null {\n if (this.#buffer.length === 0) {\n return null;\n }\n // Buffer is Record Delimiter, defer to the next iteration.\n if (\n this.#flush === false &&\n (this.#buffer === CRLF || this.#buffer === LF)\n ) {\n return null;\n }\n\n // Check for CRLF\n if (this.#buffer.startsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(2);\n return RecordDelimiter;\n }\n\n // Check for LF\n if (this.#buffer.startsWith(LF)) {\n this.#buffer = this.#buffer.slice(1);\n return RecordDelimiter;\n }\n\n // Check for Delimiter\n if (this.#buffer.startsWith(this.#delimiter)) {\n this.#buffer = this.#buffer.slice(this.#delimiterLength);\n return FieldDelimiter;\n }\n\n // Check for Quoted String\n if (this.#buffer.startsWith(this.#quotation)) {\n // If not flushing and the buffer doesn't end with a quote, then return null.\n if (this.#flush === false && this.#buffer.endsWith(this.#quotation)) {\n return null;\n }\n return this.#extractQuotedString();\n }\n\n // Check for Unquoted String\n const match = this.#matcher.exec(this.#buffer);\n if (match) {\n // If we're flushing and the match doesn't consume the entire buffer,\n // then return null\n if (this.#flush === false && match[0].length === this.#buffer.length) {\n return null;\n }\n this.#buffer = this.#buffer.slice(match[0].length);\n return { type: Field, value: match[0] };\n }\n\n // Otherwise, return null\n return null;\n }\n\n #extractQuotedString(): Token | null {\n let end = this.#quotationLength; // Skip the opening quote\n let value = \"\";\n\n while (end < this.#buffer.length) {\n // Escaped quote\n if (\n this.#buffer.slice(end, end + this.#quotationLength) ===\n this.#quotation &&\n this.#buffer.slice(\n end + this.#quotationLength,\n end + this.#quotationLength * 2,\n ) === this.#quotation\n ) {\n value += this.#quotation;\n end += this.#quotationLength * 2;\n continue;\n }\n\n // Closing quote\n if (\n this.#buffer.slice(end, end + this.#quotationLength) === this.#quotation\n ) {\n // If flushing and the buffer doesn't end with a quote, then return null\n if (\n this.#flush === false &&\n end + this.#quotationLength < this.#buffer.length &&\n this.#buffer.slice(\n end + this.#quotationLength,\n this.#delimiterLength,\n ) !== this.#delimiter &&\n this.#buffer.slice(\n end + this.#quotationLength,\n end + this.#quotationLength + 2 /** CRLF.length */,\n ) !== CRLF &&\n this.#buffer.slice(\n end + this.#quotationLength,\n end 
+ this.#quotationLength + 1 /** LF.length */,\n ) !== LF\n ) {\n return null;\n }\n\n // Otherwise, return the quoted string\n this.#buffer = this.#buffer.slice(end + this.#quotationLength);\n return { type: Field, value };\n }\n\n value += this.#buffer[end];\n end++;\n }\n\n // If we get here, we've reached the end of the buffer\n return null;\n }\n}\n","import { CommonOptions } from \"../common/types.ts\";\nimport { CR, LF } from \"./constants.ts\";\n\n/**\n * Assert that the options are valid.\n *\n * @param options The options to assert.\n */\nexport function assertCommonOptions(options: Required<CommonOptions>): void {\n if (typeof options.quotation === \"string\" && options.quotation.length === 0) {\n throw new Error(\"quotation must not be empty\");\n }\n if (typeof options.delimiter === \"string\" && options.delimiter.length === 0) {\n throw new Error(\"delimiter must not be empty\");\n }\n if (options.quotation.includes(LF) || options.quotation.includes(CR)) {\n throw new Error(\"quotation must not include CR or LF\");\n }\n if (options.delimiter.includes(LF) || options.delimiter.includes(CR)) {\n throw new Error(\"delimiter must not include CR or LF\");\n }\n if (\n options.delimiter.includes(options.quotation) ||\n options.quotation.includes(options.delimiter)\n ) {\n throw new Error(\n \"delimiter and quotation must not include each other as a substring\",\n );\n }\n}\n","import { CommonOptions, Token } from \"../common/types.ts\";\nimport { Lexer } from \"../internal/Lexer.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeTo(new WritableStream({ write(tokens) {\n * for (const token of tokens) {\n * console.log(token);\n * }\n * }}));\n * // { type: Field, value: \"name\" }\n * // FieldDelimiter\n * // { type: Field, value: \"age\" }\n * // RecordDelimiter\n * // { type: Field, value: \"Alice\" }\n * // FieldDelimiter\n * // { type: Field, value: \"20\" }\n * // RecordDelimiter\n * ```\n */\nexport class LexerTransformer extends TransformStream<string, Token[]> {\n constructor(options: CommonOptions = {}) {\n const lexer = new Lexer(options);\n super({\n transform: (chunk, controller) => {\n if (chunk.length !== 0) {\n controller.enqueue([...lexer.lex(chunk, true)]);\n }\n },\n flush: (controller) => {\n controller.enqueue(lexer.flush());\n },\n });\n }\n}\n","import { FieldDelimiter, RecordDelimiter } from \"../common/constants.ts\";\nimport { CSVRecord, RecordAssemblerOptions, Token } from \"../common/types.ts\";\n\nexport class RecordAssembler<Header extends ReadonlyArray<string>> {\n #fieldIndex = 0;\n #row: string[] = [];\n #header: Header | undefined;\n #dirty = false;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n if (options.header !== undefined && Array.isArray(options.header)) {\n this.#setHeader(options.header);\n }\n }\n\n public *assemble(\n tokens: Iterable<Token>,\n flush = true,\n ): IterableIterator<CSVRecord<Header>> {\n for (const token of tokens) {\n switch (token) {\n case FieldDelimiter:\n this.#fieldIndex++;\n this.#dirty = true;\n break;\n case RecordDelimiter:\n if (this.#header === undefined) {\n this.#setHeader(this.#row as unknown as Header);\n } else {\n if (this.#dirty) {\n 
yield Object.fromEntries(\n this.#header.map((header, index) => [\n header,\n this.#row.at(index),\n ]),\n ) as unknown as CSVRecord<Header>;\n } else {\n yield Object.fromEntries(\n this.#header.map((header) => [header, \"\"]),\n ) as CSVRecord<Header>;\n }\n }\n // Reset the row fields buffer.\n this.#fieldIndex = 0;\n this.#row = new Array(this.#header?.length).fill(\"\");\n this.#dirty = false;\n break;\n default:\n this.#dirty = true;\n this.#row[this.#fieldIndex] = token.value;\n break;\n }\n }\n\n if (flush) {\n yield* this.flush();\n }\n }\n\n public *flush(): Generator<CSVRecord<Header>> {\n if (this.#header !== undefined) {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header\n .filter((v) => v)\n .map((header, index) => [header, this.#row.at(index)]),\n ) as unknown as CSVRecord<Header>;\n }\n }\n }\n\n #setHeader(header: Header) {\n this.#header = header;\n if (this.#header.length === 0) {\n throw new Error(\"The header must not be empty.\");\n }\n if (new Set(this.#header).size !== this.#header.length) {\n throw new Error(\"The header must not contain duplicate fields.\");\n }\n }\n}\n","import { CSVRecord, RecordAssemblerOptions, Token } from \"../common/types.ts\";\nimport { RecordAssembler } from \"../internal/RecordAssembler.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n * @template Header The type of the header row.\n * @param options The options for the parser.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer())\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n *\n * @example Parse a CSV with headers by options\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer({ header: [\"name\", \"age\"] }))\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n */\nexport class RecordAssemblerTransformer<\n Header extends ReadonlyArray<string>,\n> extends TransformStream<Token[], CSVRecord<Header>> {\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n const assembler = new RecordAssembler(options);\n super({\n transform: (tokens, controller) => {\n for (const token of assembler.assemble(tokens, false)) {\n controller.enqueue(token);\n }\n },\n flush: (controller) => {\n for (const token of assembler.flush()) {\n controller.enqueue(token);\n }\n },\n });\n }\n}\n","import { CSVRecord, ParseOptions } from \"../common/types.ts\";\nimport { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\n\nexport function parseStringToArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): CSVRecord<Header>[] {\n const 
lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return [...assembler.assemble(tokens)];\n}\n","import { CSVRecord, ParseOptions } from \"../common/types.ts\";\nimport { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\n\nexport function parseStringToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return assembler.assemble(tokens);\n}\n","import { CSVRecord, ParseOptions } from \"../common/types.ts\";\nimport { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\n\nexport function parseStringToStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n return new ReadableStream({\n start(controller) {\n const tokens = lexer.lex(csv);\n for (const record of assembler.assemble(tokens)) {\n controller.enqueue(record);\n }\n controller.close();\n },\n });\n}\n","export async function toArray<\n O,\n T extends (...args: any[]) => AsyncGenerator<O>,\n>(this: T, ...args: Parameters<T>): Promise<O[]> {\n const rows: O[] = [];\n for await (const row of this(...args)) {\n rows.push(row);\n }\n return rows;\n}\n","import { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { parseStringToArraySync } from \"./internal/parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./internal/parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./internal/parseStringToStream.ts\";\nimport * as internal from \"./internal/utils/toArray.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. 
See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n yield* parseStringToIterableIterator(csv, options);\n}\nexport namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export declare function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n Object.defineProperty(parseString, \"toArray\", {\n enumerable: true,\n writable: false,\n value: internal.toArray,\n });\n\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export declare function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n Object.defineProperty(parseString, \"toArraySync\", {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n });\n\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export declare function toIterableIterator<\n Header extends ReadonlyArray<string>,\n >(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n Object.defineProperty(parseString, \"toIterableIterator\", {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n });\n\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export declare function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n Object.defineProperty(parseString, \"toStream\", {\n enumerable: 
true,\n writable: false,\n value: parseStringToStream,\n });\n}\n","import { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseBinaryToArraySync } from \"./internal/parseBinaryToArraySync.ts\";\nimport { parseBinaryToIterableIterator } from \"./internal/parseBinaryToIterableIterator.ts\";\nimport { parseBinaryToStream } from \"./internal/parseBinaryToStream.ts\";\nimport { iterableIteratorToAsync } from \"./internal/utils/iterableIteratorToAsync.ts\";\nimport * as internal from \"./internal/utils/toArray.ts\";\n\n/**\n * Parse a binary from an {@link !Uint8Array}.\n *\n * @category Middle-level API\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Async iterable iterator of records.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * for await (const record of parseUint8Array(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseBinary<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const iterator = parseBinaryToIterableIterator(bytes, options);\n return iterableIteratorToAsync(iterator);\n}\n\nexport namespace parseBinary {\n /**\n * Parse a binary from an {@link !Uint8Array} to an array of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const records = await parseUint8Array.toArray(csv);\n * ```\n */\n export declare function toArray<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n Object.defineProperty(parseBinary, \"toArray\", {\n enumerable: true,\n writable: false,\n value: internal.toArray,\n });\n\n /**\n * Parse a binary from an {@link !Uint8Array} to an array of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Array of records\n * @example\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const records = parseUint8Array.toArraySync(csv);\n * ```\n */\n export declare function toArraySync<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array | ArrayBuffer,\n options?: ParseBinaryOptions<Header>,\n ): CSVRecord<Header>[];\n Object.defineProperty(parseBinary, \"toArraySync\", {\n enumerable: true,\n writable: false,\n value: parseBinaryToArraySync,\n });\n\n /**\n * Parse a binary from an {@link !Uint8Array} to an iterable iterator of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Async iterable iterator of records.\n * @example\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * for (const record of parseUint8Array.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * ```\n */\n export declare function toIterableIterator<\n Header extends ReadonlyArray<string>,\n >(\n bytes: Uint8Array,\n options?: ParseBinaryOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n Object.defineProperty(parseBinary, \"toIterableIterator\", {\n enumerable: true,\n writable: false,\n value: 
parseBinaryToIterableIterator,\n });\n\n /**\n * Parse a binary from an {@link !Uint8Array} to a stream of records.\n *\n * @param bytes CSV bytes to parse.\n * @param options Parsing options\n * @returns Stream of records.\n *\n * @example\n *\n * ```ts\n * import { parseUint8Array } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = parseUint8Array.toStream(csv);\n *\n * await stream.pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export declare function toStream<Header extends ReadonlyArray<string>>(\n bytes: Uint8Array,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n Object.defineProperty(parseBinary, \"toStream\", {\n enumerable: true,\n writable: false,\n value: parseBinaryToStream,\n });\n}\n","import { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./internal/parseUint8ArrayStreamToStream.ts\";\nimport { streamToAsyncIterableIterator } from \"./internal/utils/streamToAsyncIterableIterator.ts\";\nimport * as internal from \"./internal/utils/toArray.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\n\n/**\n * Parse CSV to records.\n * This function is for parsing a binary stream.\n *\n * @category Middle-level API\n * @remarks\n * If you want to parse a string, use {@link parseStringStream}.\n * @param stream CSV string to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseUint8ArrayStream.toArray} function.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseUint8ArrayStream(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseUint8ArrayStreamToStream(stream, options);\n return streamToAsyncIterableIterator(recordStream);\n}\n\nexport namespace parseUint8ArrayStream {\n /**\n * Parse CSV binary to array of records,\n * ideal for smaller data sets.\n *\n * @returns Array of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseUint8ArrayStream.toArray(stream);\n * console.log(records);\n * ```\n */\n export declare function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n Object.defineProperty(parseUint8ArrayStream, \"toArray\", {\n enumerable: true,\n writable: false,\n value: internal.toArray,\n });\n\n /**\n * Parse CSV binary to array of records.\n *\n * @returns Stream of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * 
const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseUint8ArrayStream.toStream(stream)\n * .pipeTo(new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export declare function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n Object.defineProperty(parseUint8ArrayStream, \"toStream\", {\n enumerable: true,\n writable: false,\n value: parseUint8ArrayStreamToStream,\n });\n}\n","import { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { parseStringStreamToStream } from \"./internal/parseStringStreamToStream.ts\";\nimport { streamToAsyncIterableIterator } from \"./internal/utils/streamToAsyncIterableIterator.ts\";\nimport * as internal from \"./internal/utils/toArray.ts\";\n\n/**\n * Parse CSV string stream to records.\n *\n * @category Middle-level API\n * @param stream CSV string stream to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseStringStream.toArray} function.\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseStringStream(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parseStringStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseStringStreamToStream(stream, options);\n return streamToAsyncIterableIterator(recordStream);\n}\n\nexport namespace parseStringStream {\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseStringStream.toArray(stream);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export declare function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n Object.defineProperty(parseStringStream, \"toArray\", {\n enumerable: true,\n writable: false,\n value: internal.toArray,\n });\n\n /**\n * Parse CSV string stream to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseStringStream } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseStringStream.toStream(stream)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export declare function toStream<Header extends 
ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n Object.defineProperty(parseStringStream, \"toStream\", {\n enumerable: true,\n writable: false,\n value: parseStringStreamToStream,\n });\n}\n","import { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { getOptionsFromResponse } from \"./internal/getOptionsFromResponse.ts\";\nimport { parseResponseToStream } from \"./internal/parseResponseToStream.ts\";\nimport * as internal from \"./internal/utils/toArray.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\n\n/**\n * Parse HTTP Response what contains CSV to records,\n * ideal for smaller data sets.\n *\n * @remarks\n * This function automatically treats response headers.\n *\n * - If `Content-Type` header is not set, it assumes `text/csv`.\n * - If `Content-Type` header is not `text/csv`, it throws an error.\n * - If `Content-Type` header has charset parameter, it uses it for decoding.\n * - If `Content-Encoding` header is set, it decompresses the response.\n * - Should there be any conflicting information between the header and the options, the option's value will take precedence.\n *\n * @category Middle-level API\n * @param response\n * @param options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseResponse.toArray} function.\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parseResponse(response)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new Error(\"Response body is null\");\n }\n return parseUint8ArrayStream(response.body, options_);\n}\n\nexport namespace parseResponse {\n /**\n * Parse CSV Response to array of records.\n *\n * @returns Array of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parseResponse.toArray(response);\n * console.log(records);\n * ```\n */\n export declare function toArray<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n Object.defineProperty(parseResponse, \"toArray\", {\n enumerable: true,\n writable: false,\n value: internal.toArray,\n });\n\n /**\n * Parse CSV Response to stream of records.\n *\n * @param response Response to parse\n * @returns Stream of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * await parseResponse.toStream(response)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export declare function toStream<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n Object.defineProperty(parseResponse, \"toStream\", {\n 
enumerable: true,\n writable: false,\n value: parseResponseToStream,\n });\n}\n","import {\n CSV,\n CSVBinary,\n CSVRecord,\n CSVString,\n ParseBinaryOptions,\n ParseOptions,\n} from \"./common/types.ts\";\nimport * as internal from \"./internal/utils/toArray.ts\";\nimport { parseBinary } from \"./parseBinary.ts\";\nimport { parseResponse } from \"./parseResponse.ts\";\nimport { parseString } from \"./parseString.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\n\n/**\n * Parse CSV to records.\n *\n * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.\n *\n *\n * @typeParam Header Header type like `['name', 'age']`.\n *\n * @param csv CSV string to parse.\n * @param options Parsing options for CSV string parsing.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parse.toArray} function.\n * @category High-level API\n *\n * @remarks\n * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},\n * {@link parseStringStream} and {@link parseResponse} are used internally.\n *\n * If you known the type of the CSV, it performs better to use them directly.\n *\n * | If you want to parse a... | Use... | Options... |\n * | -------------------------------------------- | ----------------------------- | -------------------------- |\n * | {@link !String} | {@link parseString} | {@link ParseOptions} |\n * | {@link !ReadableStream}<{@link !String}> | {@link parseStringStream} | {@link ParseOptions} |\n * | {@link !Uint8Array} \\| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |\n * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |\n * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |\n *\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files from streams\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * }\n * });\n *\n * for await (const record of parse(stream)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n *\n * @example Parsing CSV files with headers\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV has no header.\n * const csv = `Alice,42\n * Bob,69`;\n *\n * for await (const record of parse(csv, { header: ['name', 'age'] })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n *\n * @example Parsing CSV files with different delimiters characters\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name\\tage\n * Alice\\t42\n * Bob\\t69`;\n *\n * for await (const record of parse(csv, { delimiter: '\\t' })) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport function parse<Header extends 
ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\n/**\n * Parse CSV binary to records.\n *\n * @param csv CSV binary to parse.\n * @param options Parsing options for CSV binary parsing.\n *\n * @example Parsing CSV files from responses\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is not gzipped and encoded in utf-8.\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parse(response)) {\n * // ...\n * }\n * ```\n *\n * @example Parsing CSV files with options spcialized for binary\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * // This CSV data is gzipped and encoded in shift-jis and has BOM.\n * const response = await fetch('https://example.com/data.csv.gz');\n *\n * for await (const record of parse(response, {\n * charset: 'shift-jis',\n * ignoreBOM: true,\n * decomposition: 'gzip',\n * })) {\n * // ...\n * }\n * ```\n */\nexport function parse<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>>;\nexport async function* parse<Header extends ReadonlyArray<string>>(\n csv: CSV,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n if (typeof csv === \"string\") {\n yield* parseString(csv, options);\n } else if (csv instanceof Uint8Array || csv instanceof ArrayBuffer) {\n yield* parseBinary(csv, options);\n } else if (csv instanceof ReadableStream) {\n const [branch1, branch2] = csv.tee();\n const reader1 = branch1.getReader();\n const { value: firstChunk } = await reader1.read();\n reader1.releaseLock();\n if (typeof firstChunk === \"string\") {\n yield* parseStringStream(branch2 as ReadableStream<string>, options);\n } else if (firstChunk instanceof Uint8Array) {\n yield* parseUint8ArrayStream(\n branch2 as ReadableStream<Uint8Array>,\n options,\n );\n }\n } else if (csv instanceof Response) {\n yield* parseResponse(csv, options);\n }\n}\n\nexport namespace parse {\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parse.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export declare function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVString,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to array of records,\n * ideal for smaller data sets.\n *\n * @example Parse a CSV as array of records\n *\n * ```ts\n * import { parse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parse.toArray(response);\n * console.log(records);\n * ```\n */\n export declare function toArray<Header extends ReadonlyArray<string>>(\n csv: CSVBinary,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n Object.defineProperty(parse, \"toArray\", {\n enumerable: true,\n writable: false,\n value: internal.toArray,\n });\n}\n","import { ParseBinaryOptions } from \"../common/types.ts\";\n\nexport function convertBinaryToString<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header>,\n): string {\n 
return new TextDecoder(options?.charset, {\n ignoreBOM: options?.ignoreBOM,\n fatal: options?.fatal,\n }).decode(binary instanceof ArrayBuffer ? new Uint8Array(binary) : binary);\n}\n","import { CSVRecord, ParseBinaryOptions } from \"../common/types.ts\";\nimport { convertBinaryToString } from \"./convertBinaryToString.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\n\nexport function parseBinaryToArraySync<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): CSVRecord<Header>[] {\n const csv = convertBinaryToString(binary, options);\n return parseStringToArraySync(csv, options);\n}\n","import { CSVRecord, ParseBinaryOptions } from \"../common/types.ts\";\nimport { convertBinaryToString } from \"./convertBinaryToString.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\n\nexport function parseBinaryToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): IterableIterator<CSVRecord<Header>> {\n const csv = convertBinaryToString(binary, options);\n return parseStringToIterableIterator(csv, options);\n}\n","import { CSVRecord, ParseBinaryOptions } from \"../common/types.ts\";\nimport { convertBinaryToString } from \"./convertBinaryToString.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\n\nexport function parseBinaryToStream<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): ReadableStream<CSVRecord<Header>> {\n const csv = convertBinaryToString(binary, options);\n return parseStringToStream(csv, options);\n}\n","export function iterableIteratorToAsync<T>(\n iterator: IterableIterator<T>,\n): AsyncIterableIterator<T> {\n return {\n async next() {\n const result = iterator.next();\n return Promise.resolve(result);\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n}\n","export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n ))();\n },\n });\n}\n","import { CSVRecord, ParseBinaryOptions } from \"../common/types.ts\";\nimport { LexerTransformer } from \"../transformers/LexerTransformer.ts\";\nimport { RecordAssemblerTransformer } from \"../transformers/RecordAssemblerTransformer.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseUint8ArrayStreamToStream<Header extends readonly string[]>(\n stream: ReadableStream<Uint8Array>,\n options?: 
ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const { charset, fatal, ignoreBOM, decomposition } = options ?? {};\n return decomposition\n ? pipeline(\n stream,\n new DecompressionStream(decomposition),\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n )\n : pipeline(\n stream,\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n );\n}\n","export async function* streamToAsyncIterableIterator<T>(\n stream: ReadableStream<T>,\n): AsyncIterableIterator<T> {\n const reader = stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n yield value;\n }\n}\n","import { CSVRecord, ParseOptions } from \"../common/types.ts\";\nimport { LexerTransformer } from \"../transformers/LexerTransformer.ts\";\nimport { RecordAssemblerTransformer } from \"../transformers/RecordAssemblerTransformer.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseStringStreamToStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<string>,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n return pipeline(\n stream,\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n );\n}\n","import { ParseBinaryOptions } from \"../common/types.ts\";\nimport { parseMime } from \"./parseMime.ts\";\n\nexport function getOptionsFromResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options: ParseBinaryOptions<Header> = {},\n): ParseBinaryOptions<Header> {\n const { headers } = response;\n const contentType = headers.get(\"content-type\") ?? \"text/csv\";\n const mime = parseMime(contentType);\n if (mime.type !== \"text/csv\") {\n throw new Error(`Invalid mime type: ${contentType}`);\n }\n const decomposition =\n (headers.get(\"content-encoding\") as CompressionFormat) ?? undefined;\n const charset = mime.parameters.charset ?? \"utf-8\";\n // TODO: Support header=present and header=absent\n // const header = mime.parameters.header ?? 
\"present\";\n return {\n decomposition,\n charset,\n ...options,\n };\n}\n","export interface Result {\n type: string;\n parameters: {\n [key: string]: string;\n };\n}\n\nexport function parseMime(contentType: string) {\n const [type, ...parameters] = contentType.split(\";\");\n const result: Result = {\n type: type.trim(),\n parameters: {},\n };\n for (const paramator of parameters) {\n const [key, value] = paramator.split(\"=\");\n result.parameters[key.trim()] = value.trim();\n }\n return result;\n}\n","import { CSVRecord, ParseBinaryOptions } from \"../common/types.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\n\nexport function parseResponseToStream<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new Error(\"Response body is null\");\n }\n return parseUint8ArrayStreamToStream(response.body, options_);\n}\n"],"names":["FieldDelimiter","Symbol","for","RecordDelimiter","Field","CRLF","LF","escapeRegExp","v","replace","Lexer","delimiter","delimiterLength","quotation","quotationLength","matcher","buffer","flush","constructor","options","length","Error","includes","assertCommonOptions","this","d","q","RegExp","lex","chunk","buffering","tokens","endsWith","slice","currentField","token","nextToken","value","startsWith","extractQuotedString","match","exec","type","end","LexerTransformer","TransformStream","lexer","super","transform","controller","enqueue","RecordAssembler","fieldIndex","row","header","dirty","Array","isArray","setHeader","assemble","Object","fromEntries","map","index","at","fill","filter","Set","size","RecordAssemblerTransformer","assembler","parseStringToArraySync","csv","parseStringToIterableIterator","parseStringToStream","ReadableStream","start","record","close","async","toArray","args","rows","push","parseString","parseBinary","parseUint8ArrayStream","parseStringStream","parseResponse","parse","convertBinaryToString","binary","TextDecoder","charset","ignoreBOM","fatal","decode","ArrayBuffer","Uint8Array","parseBinaryToArraySync","parseBinaryToIterableIterator","parseBinaryToStream","bytes","iterator","next","result","Promise","resolve","asyncIterator","iterableIteratorToAsync","pipeline","stream","transformers","reduce","transformer","pipeThrough","pipeTo","WritableStream","write","parseUint8ArrayStreamToStream","decomposition","DecompressionStream","TextDecoderStream","streamToAsyncIterableIterator","reader","getReader","done","read","parseStringStreamToStream","getOptionsFromResponse","response","headers","contentType","get","mime","parameters","split","trim","paramator","key","parseMime","parseResponseToStream","options_","body","branch1","branch2","tee","reader1","firstChunk","releaseLock","Response","defineProperty","enumerable","writable","internal.toArray"],"mappings":"gFAIa,MAAAA,EAAiBC,OAAOC,IAAI,kCAK5BC,EAAkBF,OAAOC,IAAI,mCAK7BE,EAAQH,OAAOC,IAAI,yBCbnBG,EAAO,OACPC,EAAK,KCKX,SAASC,EAAaC,GACpB,OAAAA,EAAEC,QAAQ,sBAAuB,OAC1C,CCHO,MAAMC,EACXC,GACAC,GACAC,GACAC,GACAC,GACAC,GAAU,GACVC,IAAS,EAET,WAAAC,EAAYP,UACVA,EFTiB,IESLE,UACZA,EFLwB,KEMP,KCVd,SAA6BM,GAClC,GAAiC,iBAAtBA,EAAQN,WAAuD,IAA7BM,EAAQN,UAAUO,OACvD,MAAA,IAAIC,MAAM,+BAElB,GAAiC,iBAAtBF,EAAQR,WAAuD,IAA7BQ,EAAQR,UAAUS,OACvD,MAAA,IAAIC,MAAM,+BAEd,GAAAF,EAAQN,UAAUS,SAAShB,IAAOa,EAAQN,UAAUS,SHfxC,MGgBR,MAA
// (The remainder of this source map, the "names" identifier list and the base64 VLQ "mappings" string, is machine-generated data with no human-readable content and is omitted here.)