web-csv-toolbox 0.9.0 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. package/dist/cjs/Lexer.cjs +1 -1
  2. package/dist/cjs/Lexer.cjs.map +1 -1
  3. package/dist/cjs/LexerTransformer.cjs +1 -1
  4. package/dist/cjs/LexerTransformer.cjs.map +1 -1
  5. package/dist/cjs/RecordAssembler.cjs +1 -1
  6. package/dist/cjs/RecordAssembler.cjs.map +1 -1
  7. package/dist/cjs/RecordAssemblerTransformer.cjs +1 -1
  8. package/dist/cjs/RecordAssemblerTransformer.cjs.map +1 -1
  9. package/dist/cjs/_virtual/web_csv_toolbox_wasm_bg.wasm.cjs +1 -1
  10. package/dist/cjs/assertCommonOptions.cjs +1 -1
  11. package/dist/cjs/assertCommonOptions.cjs.map +1 -1
  12. package/dist/cjs/common/errors.cjs +2 -0
  13. package/dist/cjs/common/errors.cjs.map +1 -0
  14. package/dist/cjs/commonParseErrorHandling.cjs +2 -0
  15. package/dist/cjs/commonParseErrorHandling.cjs.map +1 -0
  16. package/dist/cjs/getOptionsFromResponse.cjs +1 -1
  17. package/dist/cjs/getOptionsFromResponse.cjs.map +1 -1
  18. package/dist/cjs/parseBinaryToArraySync.cjs +1 -1
  19. package/dist/cjs/parseBinaryToArraySync.cjs.map +1 -1
  20. package/dist/cjs/parseBinaryToIterableIterator.cjs +1 -1
  21. package/dist/cjs/parseBinaryToIterableIterator.cjs.map +1 -1
  22. package/dist/cjs/parseBinaryToStream.cjs +1 -1
  23. package/dist/cjs/parseBinaryToStream.cjs.map +1 -1
  24. package/dist/cjs/parseResponse.cjs +1 -1
  25. package/dist/cjs/parseResponse.cjs.map +1 -1
  26. package/dist/cjs/parseResponseToStream.cjs +1 -1
  27. package/dist/cjs/parseResponseToStream.cjs.map +1 -1
  28. package/dist/cjs/parseString.cjs +1 -1
  29. package/dist/cjs/parseString.cjs.map +1 -1
  30. package/dist/cjs/parseStringToArraySync.cjs +1 -1
  31. package/dist/cjs/parseStringToArraySync.cjs.map +1 -1
  32. package/dist/cjs/parseStringToArraySyncWASM.cjs +1 -1
  33. package/dist/cjs/parseStringToArraySyncWASM.cjs.map +1 -1
  34. package/dist/cjs/parseStringToIterableIterator.cjs +1 -1
  35. package/dist/cjs/parseStringToIterableIterator.cjs.map +1 -1
  36. package/dist/cjs/parseStringToStream.cjs +1 -1
  37. package/dist/cjs/parseStringToStream.cjs.map +1 -1
  38. package/dist/cjs/utils/convertBinaryToString.cjs.map +1 -0
  39. package/dist/cjs/utils/pipeline.cjs +1 -1
  40. package/dist/cjs/utils/pipeline.cjs.map +1 -1
  41. package/dist/cjs/web-csv-toolbox.cjs +1 -1
  42. package/dist/es/Lexer.js +6 -0
  43. package/dist/es/Lexer.js.map +1 -1
  44. package/dist/es/LexerTransformer.js +12 -3
  45. package/dist/es/LexerTransformer.js.map +1 -1
  46. package/dist/es/RecordAssembler.js +3 -2
  47. package/dist/es/RecordAssembler.js.map +1 -1
  48. package/dist/es/RecordAssemblerTransformer.js +14 -5
  49. package/dist/es/RecordAssemblerTransformer.js.map +1 -1
  50. package/dist/es/_virtual/web_csv_toolbox_wasm_bg.wasm.js +1 -1
  51. package/dist/es/assertCommonOptions.js +8 -7
  52. package/dist/es/assertCommonOptions.js.map +1 -1
  53. package/dist/es/common/errors.js +20 -0
  54. package/dist/es/common/errors.js.map +1 -0
  55. package/dist/es/commonParseErrorHandling.js +13 -0
  56. package/dist/es/commonParseErrorHandling.js.map +1 -0
  57. package/dist/es/getOptionsFromResponse.js +1 -1
  58. package/dist/es/getOptionsFromResponse.js.map +1 -1
  59. package/dist/es/parseBinaryToArraySync.js +8 -3
  60. package/dist/es/parseBinaryToArraySync.js.map +1 -1
  61. package/dist/es/parseBinaryToIterableIterator.js +8 -3
  62. package/dist/es/parseBinaryToIterableIterator.js.map +1 -1
  63. package/dist/es/parseBinaryToStream.js +8 -3
  64. package/dist/es/parseBinaryToStream.js.map +1 -1
  65. package/dist/es/parseResponse.js +9 -4
  66. package/dist/es/parseResponse.js.map +1 -1
  67. package/dist/es/parseResponseToStream.js +9 -4
  68. package/dist/es/parseResponseToStream.js.map +1 -1
  69. package/dist/es/parseString.js +6 -1
  70. package/dist/es/parseString.js.map +1 -1
  71. package/dist/es/parseStringToArraySync.js +9 -4
  72. package/dist/es/parseStringToArraySync.js.map +1 -1
  73. package/dist/es/parseStringToArraySyncWASM.js +9 -2
  74. package/dist/es/parseStringToArraySyncWASM.js.map +1 -1
  75. package/dist/es/parseStringToIterableIterator.js +9 -4
  76. package/dist/es/parseStringToIterableIterator.js.map +1 -1
  77. package/dist/es/parseStringToStream.js +15 -10
  78. package/dist/es/parseStringToStream.js.map +1 -1
  79. package/dist/es/utils/convertBinaryToString.js.map +1 -0
  80. package/dist/es/utils/pipeline.js +1 -1
  81. package/dist/es/utils/pipeline.js.map +1 -1
  82. package/dist/es/web-csv-toolbox.js +7 -6
  83. package/dist/es/web-csv-toolbox.js.map +1 -1
  84. package/dist/types/Lexer.d.ts +2 -1
  85. package/dist/types/LexerTransformer.d.ts +11 -8
  86. package/dist/types/LexerTransformer.test.d.ts +1 -0
  87. package/dist/types/RecordAssembler.d.ts +2 -1
  88. package/dist/types/RecordAssemblerTransformer.d.ts +4 -1
  89. package/dist/types/assertCommonOptions.d.ts +4 -2
  90. package/dist/types/common/errors.d.ts +34 -0
  91. package/dist/types/common/types.d.ts +2 -1
  92. package/dist/types/commonParseErrorHandling.d.ts +8 -0
  93. package/dist/types/escapeField.d.ts +2 -1
  94. package/dist/types/getOptionsFromResponse.d.ts +10 -1
  95. package/dist/types/loadWASM.d.ts +2 -1
  96. package/dist/types/loadWASM.web.d.ts +2 -1
  97. package/dist/types/parse.d.ts +2 -1
  98. package/dist/types/parseBinary.d.ts +2 -1
  99. package/dist/types/parseBinaryToArraySync.d.ts +2 -1
  100. package/dist/types/parseBinaryToArraySync.test.d.ts +1 -0
  101. package/dist/types/parseBinaryToIterableIterator.d.ts +10 -1
  102. package/dist/types/parseBinaryToStream.d.ts +2 -1
  103. package/dist/types/parseResponse.d.ts +2 -1
  104. package/dist/types/parseResponseToStream.d.ts +2 -1
  105. package/dist/types/parseString.d.ts +2 -1
  106. package/dist/types/parseStringStream.d.ts +2 -1
  107. package/dist/types/parseStringStreamToStream.d.ts +2 -1
  108. package/dist/types/parseStringToArraySync.d.ts +2 -1
  109. package/dist/types/parseStringToArraySyncWASM.d.ts +2 -1
  110. package/dist/types/parseStringToIterableIterator.d.ts +2 -1
  111. package/dist/types/parseStringToStream.d.ts +2 -1
  112. package/dist/types/parseUint8ArrayStream.d.ts +2 -1
  113. package/dist/types/parseUint8ArrayStreamToStream.d.ts +2 -1
  114. package/dist/types/utils/convertBinaryToString.d.ts +12 -0
  115. package/dist/types/web-csv-toolbox.d.ts +13 -12
  116. package/dist/web-csv-toolbox.umd.cjs +1 -1
  117. package/dist/web-csv-toolbox.umd.cjs.map +1 -1
  118. package/dist/web_csv_toolbox_wasm_bg.wasm +0 -0
  119. package/package.json +7 -3
  120. package/dist/cjs/convertBinaryToString.cjs.map +0 -1
  121. package/dist/es/convertBinaryToString.js.map +0 -1
  122. package/dist/types/convertBinaryToString.d.ts +0 -2
  123. /package/dist/cjs/{convertBinaryToString.cjs → utils/convertBinaryToString.cjs} +0 -0
  124. /package/dist/es/{convertBinaryToString.js → utils/convertBinaryToString.js} +0 -0
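The common thread across these files is a new `common/errors` module (`dist/types/common/errors.d.ts`, +34 lines) plus a shared `commonParseErrorHandling` helper, which the hunks below wire into the lexer, the record assembler, and both transform streams. The call sites construct the error as `new ParseError(message, { position })`, so a rough sketch of its shape might look like the following; this is inferred from usage in the hunks, not copied from the actual declaration:

```ts
// Hypothetical sketch of ParseError, inferred from the call sites in the hunks below.
interface Position {
  line: number;
  column: number;
  offset: number;
}

class ParseError extends SyntaxError {
  position?: Position;

  constructor(message?: string, options?: { position?: Position }) {
    super(message);
    this.name = "ParseError";
    this.position = options?.position;
  }
}
```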
package/dist/cjs/Lexer.cjs
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const t=require("./assertCommonOptions.cjs"),e=require("./common/constants.cjs"),s=require("./constants.cjs"),r=require("./utils/escapeRegExp.cjs");exports.Lexer=class{#t;#e;#s="";#r=!1;#i;#u;#o={line:1,column:1,offset:0};#h=1;constructor({delimiter:e=s.COMMA,quotation:i=s.DOUBLE_QUOTE}={}){t.assertCommonOptions({delimiter:e,quotation:i}),this.#t=e,this.#e=i,this.#u=e.length;const u=r.escapeRegExp(e),o=r.escapeRegExp(i);this.#i=new RegExp(`^(?:(?!${o})(?!${u})(?![\\r\\n]))([\\S\\s\\uFEFF\\xA0]+?)(?=${o}|${u}|\\r|\\n|$)`)}lex(t,e=!1){return e||(this.#r=!0),"string"==typeof t&&0!==t.length&&(this.#s+=t),this.#f()}flush(){return this.#r=!0,[...this.#f()]}*#f(){let t;for(this.#r&&(this.#s.endsWith(s.CRLF)?this.#s=this.#s.slice(0,-2):this.#s.endsWith(s.LF)&&(this.#s=this.#s.slice(0,-1)));t=this.#n();)yield t}#n(){if(0===this.#s.length)return null;if(!1===this.#r&&(this.#s===s.CRLF||this.#s===s.LF))return null;if(this.#s.startsWith(s.CRLF)){this.#s=this.#s.slice(2);const t={...this.#o};this.#o.line++,this.#o.column=1,this.#o.offset+=2;return{type:e.RecordDelimiter,value:s.CRLF,location:{start:t,end:{...this.#o},rowNumber:this.#h++}}}if(this.#s.startsWith(s.LF)){this.#s=this.#s.slice(1);const t={...this.#o};this.#o.line++,this.#o.column=1,this.#o.offset+=1;return{type:e.RecordDelimiter,value:s.LF,location:{start:t,end:{...this.#o},rowNumber:this.#h++}}}if(this.#s.startsWith(this.#t)){this.#s=this.#s.slice(1);const t={...this.#o};return this.#o.column+=this.#u,this.#o.offset+=this.#u,{type:e.FieldDelimiter,value:this.#t,location:{start:t,end:{...this.#o},rowNumber:this.#h}}}if(this.#s.startsWith(this.#e)){let t="",r=1,i=2,u=0,o=this.#s[r],h=this.#s[r+1];do{if(o===this.#e){if(h===this.#e){t+=this.#e,r+=2,o=this.#s[r],h=this.#s[r+1],i+=2;continue}if(void 0===h&&!1===this.#r)return null;r++,this.#s=this.#s.slice(r);const s={...this.#o};return this.#o.column+=i,this.#o.offset+=r,this.#o.line+=u,{type:e.Field,value:t,location:{start:s,end:{...this.#o},rowNumber:this.#h}}}t+=o,o===s.LF?(u++,i=1):i++,r++,o=h,h=this.#s[r+1]}while(void 0!==o);return null}const t=this.#i.exec(this.#s);if(t){if(!1===this.#r&&t[0].length===this.#s.length)return null;const s=t[1];this.#s=this.#s.slice(s.length);const r={...this.#o};return this.#o.column+=s.length,this.#o.offset+=s.length,{type:e.Field,value:s,location:{start:r,end:{...this.#o},rowNumber:this.#h}}}return null}};
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const t=require("./assertCommonOptions.cjs"),e=require("./common/constants.cjs"),r=require("./common/errors.cjs"),s=require("./constants.cjs"),i=require("./utils/escapeRegExp.cjs");exports.Lexer=class{#t;#e;#r="";#s=!1;#i;#u;#o={line:1,column:1,offset:0};#h=1;constructor({delimiter:e=s.COMMA,quotation:r=s.DOUBLE_QUOTE}={}){t.assertCommonOptions({delimiter:e,quotation:r}),this.#t=e,this.#e=r,this.#u=e.length;const u=i.escapeRegExp(e),o=i.escapeRegExp(r);this.#i=new RegExp(`^(?:(?!${o})(?!${u})(?![\\r\\n]))([\\S\\s\\uFEFF\\xA0]+?)(?=${o}|${u}|\\r|\\n|$)`)}lex(t,e=!1){return e||(this.#s=!0),"string"==typeof t&&0!==t.length&&(this.#r+=t),this.#f()}flush(){return this.#s=!0,[...this.#f()]}*#f(){let t;for(this.#s&&(this.#r.endsWith(s.CRLF)?this.#r=this.#r.slice(0,-2):this.#r.endsWith(s.LF)&&(this.#r=this.#r.slice(0,-1)));t=this.#n();)yield t}#n(){if(0===this.#r.length)return null;if(!1===this.#s&&(this.#r===s.CRLF||this.#r===s.LF))return null;if(this.#r.startsWith(s.CRLF)){this.#r=this.#r.slice(2);const t={...this.#o};this.#o.line++,this.#o.column=1,this.#o.offset+=2;return{type:e.RecordDelimiter,value:s.CRLF,location:{start:t,end:{...this.#o},rowNumber:this.#h++}}}if(this.#r.startsWith(s.LF)){this.#r=this.#r.slice(1);const t={...this.#o};this.#o.line++,this.#o.column=1,this.#o.offset+=1;return{type:e.RecordDelimiter,value:s.LF,location:{start:t,end:{...this.#o},rowNumber:this.#h++}}}if(this.#r.startsWith(this.#t)){this.#r=this.#r.slice(1);const t={...this.#o};return this.#o.column+=this.#u,this.#o.offset+=this.#u,{type:e.FieldDelimiter,value:this.#t,location:{start:t,end:{...this.#o},rowNumber:this.#h}}}if(this.#r.startsWith(this.#e)){let t="",i=1,u=2,o=0,h=this.#r[i],f=this.#r[i+1];do{if(h===this.#e){if(f===this.#e){t+=this.#e,i+=2,h=this.#r[i],f=this.#r[i+1],u+=2;continue}if(void 0===f&&!1===this.#s)return null;i++,this.#r=this.#r.slice(i);const r={...this.#o};return this.#o.column+=u,this.#o.offset+=i,this.#o.line+=o,{type:e.Field,value:t,location:{start:r,end:{...this.#o},rowNumber:this.#h}}}t+=h,h===s.LF?(o++,u=1):u++,i++,h=f,f=this.#r[i+1]}while(void 0!==h);if(this.#s)throw new r.ParseError("Unexpected EOF while parsing quoted field.",{position:{...this.#o}});return null}const t=this.#i.exec(this.#r);if(t){if(!1===this.#s&&t[0].length===this.#r.length)return null;const r=t[1];this.#r=this.#r.slice(r.length);const s={...this.#o};return this.#o.column+=r.length,this.#o.offset+=r.length,{type:e.Field,value:r,location:{start:s,end:{...this.#o},rowNumber:this.#h}}}return null}};
  //# sourceMappingURL=Lexer.cjs.map
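The behavioral change in `Lexer.cjs` sits at the end of the quoted-field scan: where 0.9.0 silently returned `null` on an unterminated quoted field (the old source even carried a `TODO` about raising an exception there), 0.10.x throws a `ParseError` once the lexer is flushing. A minimal sketch, assuming `Lexer` and `ParseError` are exported from the package root:

```ts
import { Lexer, ParseError } from "web-csv-toolbox";

const lexer = new Lexer();
try {
  // buffering defaults to false, so end-of-input rules apply immediately,
  // and the token generator throws when it reaches the unterminated quote.
  for (const token of lexer.lex('"unclosed')) {
    console.log(token);
  }
} catch (error) {
  if (error instanceof ParseError) {
    console.error(error.message); // "Unexpected EOF while parsing quoted field."
  }
}
```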
package/dist/cjs/Lexer.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"Lexer.cjs","sources":["../../src/Lexer.ts"],"sourcesContent":["import { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { Field, FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport type {\n CommonOptions,\n Position,\n RecordDelimiterToken,\n Token,\n} from \"./common/types.ts\";\nimport { COMMA, CRLF, DOUBLE_QUOTE, LF } from \"./constants.ts\";\nimport { escapeRegExp } from \"./utils/escapeRegExp.ts\";\n\n/**\n * CSV Lexer.\n *\n * Lexter tokenizes CSV data into fields and records.\n */\nexport class Lexer {\n #delimiter: string;\n #quotation: string;\n #buffer = \"\";\n #flush = false;\n #matcher: RegExp;\n #fieldDelimiterLength: number;\n\n #cursor: Position = {\n line: 1,\n column: 1,\n offset: 0,\n };\n #rowNumber = 1;\n\n /**\n * Constructs a new Lexer instance.\n * @param options - The common options for the lexer.\n */\n constructor({\n delimiter = COMMA,\n quotation = DOUBLE_QUOTE,\n }: CommonOptions = {}) {\n assertCommonOptions({ delimiter, quotation });\n this.#delimiter = delimiter;\n this.#quotation = quotation;\n this.#fieldDelimiterLength = delimiter.length;\n const d = escapeRegExp(delimiter);\n const q = escapeRegExp(quotation);\n this.#matcher = new RegExp(\n `^(?:(?!${q})(?!${d})(?![\\\\r\\\\n]))([\\\\S\\\\s\\\\uFEFF\\\\xA0]+?)(?=${q}|${d}|\\\\r|\\\\n|$)`,\n );\n }\n\n /**\n * Lexes the given chunk of CSV data.\n * @param chunk - The chunk of CSV data to be lexed.\n * @param buffering - Indicates whether the lexer is buffering or not.\n * @returns An iterable iterator of tokens.\n */\n public lex(chunk: string | null, buffering = false): IterableIterator<Token> {\n if (!buffering) {\n this.#flush = true;\n }\n if (typeof chunk === \"string\" && chunk.length !== 0) {\n this.#buffer += chunk;\n }\n\n return this.#tokens();\n }\n\n /**\n * Flushes the lexer and returns any remaining tokens.\n * @returns An array of tokens.\n */\n public flush(): Token[] {\n this.#flush = true;\n return [...this.#tokens()];\n }\n\n /**\n * Generates tokens from the buffered CSV data.\n * @yields Tokens from the buffered CSV data.\n */\n *#tokens(): Generator<Token> {\n if (this.#flush) {\n // Trim the last CRLF or LF\n if (this.#buffer.endsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(0, -2 /* -CRLF.length */);\n } else if (this.#buffer.endsWith(LF)) {\n this.#buffer = this.#buffer.slice(0, -1 /* -LF.length */);\n }\n }\n let token: Token | null;\n while ((token = this.#nextToken())) {\n yield token;\n }\n }\n\n /**\n * Retrieves the next token from the buffered CSV data.\n * @returns The next token or null if there are no more tokens.\n */\n #nextToken(): Token | null {\n if (this.#buffer.length === 0) {\n return null;\n }\n // Buffer is Record Delimiter, defer to the next iteration.\n if (\n this.#flush === false &&\n (this.#buffer === CRLF || this.#buffer === LF)\n ) {\n return null;\n }\n\n // Check for CRLF\n if (this.#buffer.startsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(2);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 2; // CRLF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: CRLF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for LF\n if (this.#buffer.startsWith(LF)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n 
this.#cursor.offset += 1; // LF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: LF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for Delimiter\n if (this.#buffer.startsWith(this.#delimiter)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += this.#fieldDelimiterLength;\n this.#cursor.offset += this.#fieldDelimiterLength;\n return {\n type: FieldDelimiter,\n value: this.#delimiter,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Check for Quoted String\n if (this.#buffer.startsWith(this.#quotation)) {\n /**\n * Extract Quoted field.\n *\n * The following code is equivalent to the following:\n *\n * If the next character is a quote:\n * - If the character after that is a quote, then append a quote to the value and skip two characters.\n * - Otherwise, return the quoted string.\n * Otherwise, append the character to the value and skip one character.\n *\n * ```plaintext\n * | `i` | `i + 1` | `i + 2` |\n * |------------|------------|----------|\n * | cur | next | | => Variable names\n * | #quotation | #quotation | | => Escaped quote\n * | #quotation | (EOF) | | => Closing quote\n * | #quotation | undefined | | => End of buffer\n * | undefined | | | => End of buffer\n * ```\n */\n let value = \"\";\n let offset = 1; // Skip the opening quote\n let column = 2; // Skip the opening quote\n let line = 0;\n\n // Define variables\n let cur: string = this.#buffer[offset];\n let next: string | undefined = this.#buffer[offset + 1];\n do {\n // If the current character is a quote, check the next characters for closing quotes.\n if (cur === this.#quotation) {\n // If the cur character is a quote and the next character is a quote,\n // then append a quote to the value and skip two characters.\n if (next === this.#quotation) {\n // Append a quote to the value and skip two characters.\n value += this.#quotation;\n offset += 2;\n cur = this.#buffer[offset];\n next = this.#buffer[offset + 1];\n\n // Update the diff\n column += 2;\n continue;\n }\n\n // If the cur character is a quote and the next character is undefined,\n // then return null.\n if (next === undefined && this.#flush === false) {\n return null;\n }\n\n // Otherwise, return the quoted string.\n // Update the buffer and return the token\n offset++;\n this.#buffer = this.#buffer.slice(offset);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += column;\n this.#cursor.offset += offset;\n this.#cursor.line += line;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n // return this.#field(value, { column, offset, line });\n }\n\n // Append the character to the value.\n value += cur;\n\n // Prepare for the next iteration\n if (cur === LF) {\n // If the current character is a LF,\n // then increment the line number and reset the column number.\n line++;\n column = 1;\n } else {\n // Otherwise, increment the column number and offset.\n column++;\n }\n\n offset++;\n cur = next;\n next = this.#buffer[offset + 1];\n } while (cur !== undefined);\n\n // If we get here, we've reached the end of the buffer\n return null;\n // TODO: If flash is true, the buffer is exiting unquoted and an exception should be raised.\n }\n\n // Check for Unquoted String\n const match = this.#matcher.exec(this.#buffer);\n if (match) {\n // If we're flushing and 
the match doesn't consume the entire buffer,\n // then return null\n if (this.#flush === false && match[0].length === this.#buffer.length) {\n return null;\n }\n const value = match[1];\n this.#buffer = this.#buffer.slice(value.length);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += value.length;\n this.#cursor.offset += value.length;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Otherwise, return null\n return null;\n }\n}\n"],"names":["delimiter","quotation","buffer","flush","matcher","fieldDelimiterLength","cursor","line","column","offset","rowNumber","constructor","COMMA","DOUBLE_QUOTE","assertCommonOptions","this","length","d","escapeRegExp","q","RegExp","lex","chunk","buffering","tokens","token","endsWith","CRLF","slice","LF","nextToken","startsWith","start","type","RecordDelimiter","value","location","end","FieldDelimiter","cur","next","Field","match","exec"],"mappings":"kPAgBO,MACLA,GACAC,GACAC,GAAU,GACVC,IAAS,EACTC,GACAC,GAEAC,GAAoB,CAClBC,KAAM,EACNC,OAAQ,EACRC,OAAQ,GAEVC,GAAa,EAMb,WAAAC,EAAYX,UACVA,EAAYY,EAAAA,MAAAX,UACZA,EAAYY,EAAAA,cACK,IACGC,EAAAA,oBAAA,CAAEd,YAAWC,cACjCc,MAAKf,EAAaA,EAClBe,MAAKd,EAAaA,EAClBc,MAAKV,EAAwBL,EAAUgB,OACjC,MAAAC,EAAIC,eAAalB,GACjBmB,EAAID,eAAajB,GACvBc,MAAKX,EAAW,IAAIgB,OAClB,UAAUD,QAAQF,6CAA6CE,KAAKF,eAExE,CAQO,GAAAI,CAAIC,EAAsBC,GAAY,GAQ3C,OAPKA,IACHR,MAAKZ,GAAS,GAEK,iBAAVmB,GAAuC,IAAjBA,EAAMN,SACrCD,MAAKb,GAAWoB,GAGXP,MAAKS,GACd,CAMO,KAAArB,GAEL,OADAY,MAAKZ,GAAS,EACP,IAAIY,MAAKS,IAClB,CAMA,GAACA,GASK,IAAAC,EACI,IATJV,MAAKZ,IAEHY,MAAKb,EAAQwB,SAASC,EAAIA,MACvBZ,MAAAb,EAAUa,MAAKb,EAAQ0B,MAAM,GAAG,GAC5Bb,MAAKb,EAAQwB,SAASG,EAAEA,MAC5Bd,MAAAb,EAAUa,MAAKb,EAAQ0B,MAAM,GAAG,KAIjCH,EAAQV,MAAKe,WACbL,CAEV,CAMA,EAAAK,GACM,GAAwB,IAAxBf,MAAKb,EAAQc,OACR,OAAA,KAIP,IAAgB,IAAhBD,MAAKZ,IACJY,MAAKb,IAAYyB,EAAAA,MAAQZ,MAAKb,IAAY2B,EAAAA,IAEpC,OAAA,KAIT,GAAId,MAAKb,EAAQ6B,WAAWJ,EAAIA,MAAG,CACjCZ,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAM,GAClC,MAAMI,EAAkB,IAAKjB,MAAKT,GAClCS,MAAKT,EAAQC,OACbQ,MAAKT,EAAQE,OAAS,EACtBO,MAAKT,EAAQG,QAAU,EAUhB,MAT6B,CAClCwB,KAAMC,EAAAA,gBACNC,MAAOR,EAAAA,KACPS,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,KAItB,CAGA,GAAIK,MAAKb,EAAQ6B,WAAWF,EAAEA,IAAG,CAC/Bd,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAM,GAClC,MAAMI,EAAkB,IAAKjB,MAAKT,GAClCS,MAAKT,EAAQC,OACbQ,MAAKT,EAAQE,OAAS,EACtBO,MAAKT,EAAQG,QAAU,EAUhB,MAT6B,CAClCwB,KAAMC,EAAAA,gBACNC,MAAON,EAAAA,GACPO,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,KAItB,CAGA,GAAIK,MAAKb,EAAQ6B,WAAWhB,MAAKf,GAAa,CAC5Ce,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAM,GAClC,MAAMI,EAAkB,IAAKjB,MAAKT,GAG3B,OAFFS,MAAAT,EAAQE,QAAUO,MAAKV,EACvBU,MAAAT,EAAQG,QAAUM,MAAKV,EACrB,CACL4B,KAAMK,EAAAA,eACNH,MAAOpB,MAAKf,EACZoC,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,GAGtB,CAGA,GAAIK,MAAKb,EAAQ6B,WAAWhB,MAAKd,GAAa,CAqB5C,IAAIkC,EAAQ,GACR1B,EAAS,EACTD,EAAS,EACTD,EAAO,EAGPgC,EAAcxB,MAAKb,EAAQO,GAC3B+B,EAA2BzB,MAAKb,EAAQO,EAAS,GAClD,EAAA,CAEG,GAAA8B,IAAQxB,MAAKd,EAAY,CAGvB,GAAAuC,IAASzB,MAAKd,EAAY,CAE5BkC,GAASpB,MAAKd,EACJQ,GAAA,EACJ8B,EAAAxB,MAAKb,EAAQO,GACZ+B,EAAAzB,MAAKb,EAAQO,EAAS,GAGnBD,GAAA,EACV,QACF,CAIA,QAAa,IAATgC,IAAsC,IAAhBzB,MAAKZ,EACtB,OAAA,KAKTM,IACAM,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAMnB,GAClC,MAAMuB,EAAkB,IAAKjB,MAAKT,GAI3B,OAHPS,MAAKT,EAAQE,QAAUA,EACvBO,MAAKT,EAAQG,QAAUA,EACvBM,MAAKT,EAAQC,MAAQA,EACd,CACL0B,KAAMQ,EAAAA,MACNN,QACAC,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,GAItB,CAGSyB,GAAAI,EAGLA,IAAQV,EAAAA,IAGVtB,IACSC,EAAA,GAGTA,IAGFC,IACM8B,EAAAC,EACCA,EAAAzB,MAAKb,EAAQO,EAAS,EAAC,YACf,IAAR8B,GAGF,OAA
A,IAET,CAGA,MAAMG,EAAQ3B,MAAKX,EAASuC,KAAK5B,MAAKb,GACtC,GAAIwC,EAAO,CAGL,IAAgB,IAAhB3B,MAAKZ,GAAoBuC,EAAM,GAAG1B,SAAWD,MAAKb,EAAQc,OACrD,OAAA,KAEH,MAAAmB,EAAQO,EAAM,GACpB3B,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAMO,EAAMnB,QACxC,MAAMgB,EAAkB,IAAKjB,MAAKT,GAG3B,OAFFS,MAAAT,EAAQE,QAAU2B,EAAMnB,OACxBD,MAAAT,EAAQG,QAAU0B,EAAMnB,OACtB,CACLiB,KAAMQ,EAAAA,MACNN,QACAC,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,GAGtB,CAGO,OAAA,IACT"}
+ {"version":3,"file":"Lexer.cjs","sources":["../../src/Lexer.ts"],"sourcesContent":["import { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { Field, FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n CommonOptions,\n Position,\n RecordDelimiterToken,\n Token,\n} from \"./common/types.ts\";\nimport { COMMA, CRLF, DOUBLE_QUOTE, LF } from \"./constants.ts\";\nimport { escapeRegExp } from \"./utils/escapeRegExp.ts\";\n\n/**\n * CSV Lexer.\n *\n * Lexter tokenizes CSV data into fields and records.\n */\nexport class Lexer {\n #delimiter: string;\n #quotation: string;\n #buffer = \"\";\n #flush = false;\n #matcher: RegExp;\n #fieldDelimiterLength: number;\n\n #cursor: Position = {\n line: 1,\n column: 1,\n offset: 0,\n };\n #rowNumber = 1;\n\n /**\n * Constructs a new Lexer instance.\n * @param options - The common options for the lexer.\n */\n constructor({\n delimiter = COMMA,\n quotation = DOUBLE_QUOTE,\n }: CommonOptions = {}) {\n assertCommonOptions({ delimiter, quotation });\n this.#delimiter = delimiter;\n this.#quotation = quotation;\n this.#fieldDelimiterLength = delimiter.length;\n const d = escapeRegExp(delimiter);\n const q = escapeRegExp(quotation);\n this.#matcher = new RegExp(\n `^(?:(?!${q})(?!${d})(?![\\\\r\\\\n]))([\\\\S\\\\s\\\\uFEFF\\\\xA0]+?)(?=${q}|${d}|\\\\r|\\\\n|$)`,\n );\n }\n\n /**\n * Lexes the given chunk of CSV data.\n * @param chunk - The chunk of CSV data to be lexed.\n * @param buffering - Indicates whether the lexer is buffering or not.\n * @returns An iterable iterator of tokens.\n */\n public lex(chunk: string | null, buffering = false): IterableIterator<Token> {\n if (!buffering) {\n this.#flush = true;\n }\n if (typeof chunk === \"string\" && chunk.length !== 0) {\n this.#buffer += chunk;\n }\n\n return this.#tokens();\n }\n\n /**\n * Flushes the lexer and returns any remaining tokens.\n * @returns An array of tokens.\n */\n public flush(): Token[] {\n this.#flush = true;\n return [...this.#tokens()];\n }\n\n /**\n * Generates tokens from the buffered CSV data.\n * @yields Tokens from the buffered CSV data.\n */\n *#tokens(): Generator<Token> {\n if (this.#flush) {\n // Trim the last CRLF or LF\n if (this.#buffer.endsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(0, -2 /* -CRLF.length */);\n } else if (this.#buffer.endsWith(LF)) {\n this.#buffer = this.#buffer.slice(0, -1 /* -LF.length */);\n }\n }\n let token: Token | null;\n while ((token = this.#nextToken())) {\n yield token;\n }\n }\n\n /**\n * Retrieves the next token from the buffered CSV data.\n * @returns The next token or null if there are no more tokens.\n */\n #nextToken(): Token | null {\n if (this.#buffer.length === 0) {\n return null;\n }\n // Buffer is Record Delimiter, defer to the next iteration.\n if (\n this.#flush === false &&\n (this.#buffer === CRLF || this.#buffer === LF)\n ) {\n return null;\n }\n\n // Check for CRLF\n if (this.#buffer.startsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(2);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 2; // CRLF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: CRLF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for LF\n if (this.#buffer.startsWith(LF)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n 
this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 1; // LF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: LF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for Delimiter\n if (this.#buffer.startsWith(this.#delimiter)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += this.#fieldDelimiterLength;\n this.#cursor.offset += this.#fieldDelimiterLength;\n return {\n type: FieldDelimiter,\n value: this.#delimiter,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Check for Quoted String\n if (this.#buffer.startsWith(this.#quotation)) {\n /**\n * Extract Quoted field.\n *\n * The following code is equivalent to the following:\n *\n * If the next character is a quote:\n * - If the character after that is a quote, then append a quote to the value and skip two characters.\n * - Otherwise, return the quoted string.\n * Otherwise, append the character to the value and skip one character.\n *\n * ```plaintext\n * | `i` | `i + 1` | `i + 2` |\n * |------------|------------|----------|\n * | cur | next | | => Variable names\n * | #quotation | #quotation | | => Escaped quote\n * | #quotation | (EOF) | | => Closing quote\n * | #quotation | undefined | | => End of buffer\n * | undefined | | | => End of buffer\n * ```\n */\n let value = \"\";\n let offset = 1; // Skip the opening quote\n let column = 2; // Skip the opening quote\n let line = 0;\n\n // Define variables\n let cur: string = this.#buffer[offset];\n let next: string | undefined = this.#buffer[offset + 1];\n do {\n // If the current character is a quote, check the next characters for closing quotes.\n if (cur === this.#quotation) {\n // If the cur character is a quote and the next character is a quote,\n // then append a quote to the value and skip two characters.\n if (next === this.#quotation) {\n // Append a quote to the value and skip two characters.\n value += this.#quotation;\n offset += 2;\n cur = this.#buffer[offset];\n next = this.#buffer[offset + 1];\n\n // Update the diff\n column += 2;\n continue;\n }\n\n // If the cur character is a quote and the next character is undefined,\n // then return null.\n if (next === undefined && this.#flush === false) {\n return null;\n }\n\n // Otherwise, return the quoted string.\n // Update the buffer and return the token\n offset++;\n this.#buffer = this.#buffer.slice(offset);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += column;\n this.#cursor.offset += offset;\n this.#cursor.line += line;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Append the character to the value.\n value += cur;\n\n // Prepare for the next iteration\n if (cur === LF) {\n // If the current character is a LF,\n // then increment the line number and reset the column number.\n line++;\n column = 1;\n } else {\n // Otherwise, increment the column number and offset.\n column++;\n }\n\n offset++;\n cur = next;\n next = this.#buffer[offset + 1];\n } while (cur !== undefined);\n\n if (this.#flush) {\n throw new ParseError(\"Unexpected EOF while parsing quoted field.\", {\n position: { ...this.#cursor },\n });\n }\n return null;\n }\n\n // Check for Unquoted String\n const match = this.#matcher.exec(this.#buffer);\n if (match) {\n // If we're flushing and the match doesn't 
consume the entire buffer,\n // then return null\n if (this.#flush === false && match[0].length === this.#buffer.length) {\n return null;\n }\n const value = match[1];\n this.#buffer = this.#buffer.slice(value.length);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += value.length;\n this.#cursor.offset += value.length;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Otherwise, return null\n return null;\n }\n}\n"],"names":["delimiter","quotation","buffer","flush","matcher","fieldDelimiterLength","cursor","line","column","offset","rowNumber","constructor","COMMA","DOUBLE_QUOTE","assertCommonOptions","this","length","d","escapeRegExp","q","RegExp","lex","chunk","buffering","tokens","token","endsWith","CRLF","slice","LF","nextToken","startsWith","start","type","RecordDelimiter","value","location","end","FieldDelimiter","cur","next","Field","ParseError","position","match","exec"],"mappings":"mRAiBO,MACLA,GACAC,GACAC,GAAU,GACVC,IAAS,EACTC,GACAC,GAEAC,GAAoB,CAClBC,KAAM,EACNC,OAAQ,EACRC,OAAQ,GAEVC,GAAa,EAMb,WAAAC,EAAYX,UACVA,EAAYY,EAAAA,MAAAX,UACZA,EAAYY,EAAAA,cACK,IACGC,EAAAA,oBAAA,CAAEd,YAAWC,cACjCc,MAAKf,EAAaA,EAClBe,MAAKd,EAAaA,EAClBc,MAAKV,EAAwBL,EAAUgB,OACjC,MAAAC,EAAIC,eAAalB,GACjBmB,EAAID,eAAajB,GACvBc,MAAKX,EAAW,IAAIgB,OAClB,UAAUD,QAAQF,6CAA6CE,KAAKF,eAExE,CAQO,GAAAI,CAAIC,EAAsBC,GAAY,GAQ3C,OAPKA,IACHR,MAAKZ,GAAS,GAEK,iBAAVmB,GAAuC,IAAjBA,EAAMN,SACrCD,MAAKb,GAAWoB,GAGXP,MAAKS,GACd,CAMO,KAAArB,GAEL,OADAY,MAAKZ,GAAS,EACP,IAAIY,MAAKS,IAClB,CAMA,GAACA,GASK,IAAAC,EACI,IATJV,MAAKZ,IAEHY,MAAKb,EAAQwB,SAASC,EAAIA,MACvBZ,MAAAb,EAAUa,MAAKb,EAAQ0B,MAAM,GAAG,GAC5Bb,MAAKb,EAAQwB,SAASG,EAAEA,MAC5Bd,MAAAb,EAAUa,MAAKb,EAAQ0B,MAAM,GAAG,KAIjCH,EAAQV,MAAKe,WACbL,CAEV,CAMA,EAAAK,GACM,GAAwB,IAAxBf,MAAKb,EAAQc,OACR,OAAA,KAIP,IAAgB,IAAhBD,MAAKZ,IACJY,MAAKb,IAAYyB,EAAAA,MAAQZ,MAAKb,IAAY2B,EAAAA,IAEpC,OAAA,KAIT,GAAId,MAAKb,EAAQ6B,WAAWJ,EAAIA,MAAG,CACjCZ,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAM,GAClC,MAAMI,EAAkB,IAAKjB,MAAKT,GAClCS,MAAKT,EAAQC,OACbQ,MAAKT,EAAQE,OAAS,EACtBO,MAAKT,EAAQG,QAAU,EAUhB,MAT6B,CAClCwB,KAAMC,EAAAA,gBACNC,MAAOR,EAAAA,KACPS,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,KAItB,CAGA,GAAIK,MAAKb,EAAQ6B,WAAWF,EAAEA,IAAG,CAC/Bd,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAM,GAClC,MAAMI,EAAkB,IAAKjB,MAAKT,GAClCS,MAAKT,EAAQC,OACbQ,MAAKT,EAAQE,OAAS,EACtBO,MAAKT,EAAQG,QAAU,EAUhB,MAT6B,CAClCwB,KAAMC,EAAAA,gBACNC,MAAON,EAAAA,GACPO,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,KAItB,CAGA,GAAIK,MAAKb,EAAQ6B,WAAWhB,MAAKf,GAAa,CAC5Ce,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAM,GAClC,MAAMI,EAAkB,IAAKjB,MAAKT,GAG3B,OAFFS,MAAAT,EAAQE,QAAUO,MAAKV,EACvBU,MAAAT,EAAQG,QAAUM,MAAKV,EACrB,CACL4B,KAAMK,EAAAA,eACNH,MAAOpB,MAAKf,EACZoC,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,GAGtB,CAGA,GAAIK,MAAKb,EAAQ6B,WAAWhB,MAAKd,GAAa,CAqB5C,IAAIkC,EAAQ,GACR1B,EAAS,EACTD,EAAS,EACTD,EAAO,EAGPgC,EAAcxB,MAAKb,EAAQO,GAC3B+B,EAA2BzB,MAAKb,EAAQO,EAAS,GAClD,EAAA,CAEG,GAAA8B,IAAQxB,MAAKd,EAAY,CAGvB,GAAAuC,IAASzB,MAAKd,EAAY,CAE5BkC,GAASpB,MAAKd,EACJQ,GAAA,EACJ8B,EAAAxB,MAAKb,EAAQO,GACZ+B,EAAAzB,MAAKb,EAAQO,EAAS,GAGnBD,GAAA,EACV,QACF,CAIA,QAAa,IAATgC,IAAsC,IAAhBzB,MAAKZ,EACtB,OAAA,KAKTM,IACAM,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAMnB,GAClC,MAAMuB,EAAkB,IAAKjB,MAAKT,GAI3B,OAHPS,MAAKT,EAAQE,QAAUA,EACvBO,MAAKT,EAAQG,QAAUA,EACvBM,MAAKT,EAAQC,MAAQA,EACd,CACL0B,KAAMQ,EAAAA,MACNN,QACAC,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,GAGtB,CAGSyB,GAAAI,EAGLA,IAAQV,EAAAA,IAGVtB,IACSC,EAAA,GAGTA,IAGFC,IACM8B,EAAAC,EACCA,EAAAzB,MAAKb,EAAQO,EAAS,EAAC,YACf,IAAR8B,GA
ET,GAAIxB,MAAKZ,EACD,MAAA,IAAIuC,aAAW,6CAA8C,CACjEC,SAAU,IAAK5B,MAAKT,KAGjB,OAAA,IACT,CAGA,MAAMsC,EAAQ7B,MAAKX,EAASyC,KAAK9B,MAAKb,GACtC,GAAI0C,EAAO,CAGL,IAAgB,IAAhB7B,MAAKZ,GAAoByC,EAAM,GAAG5B,SAAWD,MAAKb,EAAQc,OACrD,OAAA,KAEH,MAAAmB,EAAQS,EAAM,GACpB7B,MAAKb,EAAUa,MAAKb,EAAQ0B,MAAMO,EAAMnB,QACxC,MAAMgB,EAAkB,IAAKjB,MAAKT,GAG3B,OAFFS,MAAAT,EAAQE,QAAU2B,EAAMnB,OACxBD,MAAAT,EAAQG,QAAU0B,EAAMnB,OACtB,CACLiB,KAAMQ,EAAAA,MACNN,QACAC,SAAU,CACRJ,QACAK,IAAK,IAAKtB,MAAKT,GACfI,UAAWK,MAAKL,GAGtB,CAGO,OAAA,IACT"}
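The `sourcesContent` above also documents the quoted-field state table: a doubled quotation character inside a quoted field is an escape, while a quote followed by anything else closes the field. For reference, a sketch of the tokens this yields (again assuming `Lexer` is exported from the package root; the actual tokens also carry `location` metadata omitted here):

```ts
import { Lexer } from "web-csv-toolbox";

const lexer = new Lexer();
for (const token of lexer.lex('"a""b",c')) {
  console.log(token.value);
}
// Three tokens: a Field with value `a"b` (the `""` collapsed to `"`),
// a FieldDelimiter `,`, and a Field `c`.
```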
package/dist/cjs/LexerTransformer.cjs
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs");class r extends TransformStream{constructor(r={}){const s=new e.Lexer(r);super({transform:(e,r)=>{0!==e.length&&r.enqueue([...s.lex(e,!0)])},flush:e=>{e.enqueue(s.flush())}})}}exports.LexerTransformer=r;
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs");class r extends TransformStream{lexer;constructor(r={}){super({transform:(e,r)=>{if(0!==e.length)try{r.enqueue([...this.lexer.lex(e,!0)])}catch(t){r.error(t)}},flush:e=>{try{e.enqueue(this.lexer.flush())}catch(r){e.error(r)}}}),this.lexer=new e.Lexer(r)}}exports.LexerTransformer=r;
  //# sourceMappingURL=LexerTransformer.cjs.map
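`LexerTransformer` changes in two ways: the `Lexer` instance is now a public `lexer` property assigned after `super()` (safe, since the `transform`/`flush` callbacks only run once construction has finished), and both callbacks wrap their work in `try`/`catch`, forwarding failures via `controller.error()` instead of letting them escape synchronously. In practice, a lexing failure now surfaces as a rejected stream promise. A sketch, assuming `LexerTransformer` is exported from the package root:

```ts
import { LexerTransformer } from "web-csv-toolbox";

new ReadableStream<string>({
  start(controller) {
    controller.enqueue('"unterminated'); // quoted field that never closes
    controller.close();
  },
})
  .pipeThrough(new LexerTransformer())
  .pipeTo(new WritableStream({ write() {} }))
  .catch((error) => {
    // The ParseError thrown inside flush() is routed through controller.error().
    console.error("stream failed:", error);
  });
```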
package/dist/cjs/LexerTransformer.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"LexerTransformer.cjs","sources":["../../src/LexerTransformer.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport type { CommonOptions, Token } from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeTo(new WritableStream({ write(tokens) {\n * for (const token of tokens) {\n * console.log(token);\n * }\n * }}));\n * // { type: Field, value: \"name\" }\n * // FieldDelimiter\n * // { type: Field, value: \"age\" }\n * // RecordDelimiter\n * // { type: Field, value: \"Alice\" }\n * // FieldDelimiter\n * // { type: Field, value: \"20\" }\n * // RecordDelimiter\n * ```\n */\nexport class LexerTransformer extends TransformStream<string, Token[]> {\n constructor(options: CommonOptions = {}) {\n const lexer = new Lexer(options);\n super({\n transform: (chunk, controller) => {\n if (chunk.length !== 0) {\n controller.enqueue([...lexer.lex(chunk, true)]);\n }\n },\n flush: (controller) => {\n controller.enqueue(lexer.flush());\n },\n });\n }\n}\n"],"names":["LexerTransformer","TransformStream","constructor","options","lexer","Lexer","super","transform","chunk","controller","length","enqueue","lex","flush"],"mappings":"+GAiCO,MAAMA,UAAyBC,gBACpC,WAAAC,CAAYC,EAAyB,IAC7B,MAAAC,EAAQ,IAAIC,QAAMF,GAClBG,MAAA,CACJC,UAAW,CAACC,EAAOC,KACI,IAAjBD,EAAME,QACGD,EAAAE,QAAQ,IAAIP,EAAMQ,IAAIJ,GAAO,IAC1C,EAEFK,MAAQJ,IACKA,EAAAE,QAAQP,EAAMS,QAAO,GAGtC"}
+ {"version":3,"file":"LexerTransformer.cjs","sources":["../../src/LexerTransformer.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport type { CommonOptions, Token } from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeTo(new WritableStream({ write(tokens) {\n * for (const token of tokens) {\n * console.log(token);\n * }\n * }}));\n * // { type: Field, value: \"name\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: {...} }\n * // { type: Field, value: \"age\", location: {...} }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * // { type: Field, value: \"Alice\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: {...} }\n * // { type: Field, value: \"20\" }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * ```\n */\nexport class LexerTransformer extends TransformStream<string, Token[]> {\n public readonly lexer: Lexer;\n constructor(options: CommonOptions = {}) {\n super({\n transform: (chunk, controller) => {\n if (chunk.length !== 0) {\n try {\n controller.enqueue([...this.lexer.lex(chunk, true)]);\n } catch (error) {\n controller.error(error);\n }\n }\n },\n flush: (controller) => {\n try {\n controller.enqueue(this.lexer.flush());\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.lexer = new Lexer(options);\n }\n}\n"],"names":["LexerTransformer","TransformStream","lexer","constructor","options","super","transform","chunk","controller","length","enqueue","this","lex","error","flush","Lexer"],"mappings":"+GAiCO,MAAMA,UAAyBC,gBACpBC,MAChB,WAAAC,CAAYC,EAAyB,IAC7BC,MAAA,CACJC,UAAW,CAACC,EAAOC,KACb,GAAiB,IAAjBD,EAAME,OACJ,IACSD,EAAAE,QAAQ,IAAIC,KAAKT,MAAMU,IAAIL,GAAO,WACtCM,GACPL,EAAWK,MAAMA,EACnB,CACF,EAEFC,MAAQN,IACF,IACFA,EAAWE,QAAQC,KAAKT,MAAMY,eACvBD,GACPL,EAAWK,MAAMA,EACnB,KAGCF,KAAAT,MAAQ,IAAIa,EAAAA,MAAMX,EACzB"}
package/dist/cjs/RecordAssembler.cjs
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./common/constants.cjs");exports.RecordAssembler=class{#e=0;#t=[];#r;#i=!1;constructor(e={}){void 0!==e.header&&Array.isArray(e.header)&&this.#s(e.header)}*assemble(t,r=!0){for(const i of t)switch(i.type){case e.FieldDelimiter:this.#e++,this.#i=!0;break;case e.RecordDelimiter:void 0===this.#r?this.#s(this.#t):this.#i?yield Object.fromEntries(this.#r.map(((e,t)=>[e,this.#t.at(t)]))):yield Object.fromEntries(this.#r.map((e=>[e,""]))),this.#e=0,this.#t=new Array(this.#r?.length).fill(""),this.#i=!1;break;default:this.#i=!0,this.#t[this.#e]=i.value}r&&(yield*this.flush())}*flush(){void 0!==this.#r&&this.#i&&(yield Object.fromEntries(this.#r.filter((e=>e)).map(((e,t)=>[e,this.#t.at(t)]))))}#s(e){if(this.#r=e,0===this.#r.length)throw new Error("The header must not be empty.");if(new Set(this.#r).size!==this.#r.length)throw new Error("The header must not contain duplicate fields.")}};
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./common/constants.cjs"),r=require("./common/errors.cjs");exports.RecordAssembler=class{#e=0;#r=[];#t;#i=!1;constructor(e={}){void 0!==e.header&&Array.isArray(e.header)&&this.#s(e.header)}*assemble(r,t=!0){for(const i of r)switch(i.type){case e.FieldDelimiter:this.#e++,this.#i=!0;break;case e.RecordDelimiter:void 0===this.#t?this.#s(this.#r):this.#i?yield Object.fromEntries(this.#t.map(((e,r)=>[e,this.#r.at(r)]))):yield Object.fromEntries(this.#t.map((e=>[e,""]))),this.#e=0,this.#r=new Array(this.#t?.length).fill(""),this.#i=!1;break;default:this.#i=!0,this.#r[this.#e]=i.value}t&&(yield*this.flush())}*flush(){void 0!==this.#t&&this.#i&&(yield Object.fromEntries(this.#t.filter((e=>e)).map(((e,r)=>[e,this.#r.at(r)]))))}#s(e){if(this.#t=e,0===this.#t.length)throw new r.ParseError("The header must not be empty.");if(new Set(this.#t).size!==this.#t.length)throw new r.ParseError("The header must not contain duplicate fields.")}};
  //# sourceMappingURL=RecordAssembler.cjs.map
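`RecordAssembler` keeps the same header validation but upgrades both throws from a plain `Error` to the typed `ParseError`, so callers can distinguish parse failures from programming errors. A sketch, assuming `RecordAssembler` and `ParseError` are exported from the package root:

```ts
import { ParseError, RecordAssembler } from "web-csv-toolbox";

try {
  new RecordAssembler({ header: ["name", "name"] });
} catch (error) {
  console.log(error instanceof ParseError); // true (0.9.0 threw a plain Error)
  if (error instanceof ParseError) {
    console.log(error.message); // "The header must not contain duplicate fields."
  }
}
```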
package/dist/cjs/RecordAssembler.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"RecordAssembler.cjs","sources":["../../src/RecordAssembler.ts"],"sourcesContent":["import { FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\nexport class RecordAssembler<Header extends ReadonlyArray<string>> {\n #fieldIndex = 0;\n #row: string[] = [];\n #header: Header | undefined;\n #dirty = false;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n if (options.header !== undefined && Array.isArray(options.header)) {\n this.#setHeader(options.header);\n }\n }\n\n public *assemble(\n tokens: Iterable<Token>,\n flush = true,\n ): IterableIterator<CSVRecord<Header>> {\n for (const token of tokens) {\n switch (token.type) {\n case FieldDelimiter:\n this.#fieldIndex++;\n this.#dirty = true;\n break;\n case RecordDelimiter:\n if (this.#header === undefined) {\n this.#setHeader(this.#row as unknown as Header);\n } else {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header.map((header, index) => [\n header,\n this.#row.at(index),\n ]),\n ) as unknown as CSVRecord<Header>;\n } else {\n yield Object.fromEntries(\n this.#header.map((header) => [header, \"\"]),\n ) as CSVRecord<Header>;\n }\n }\n // Reset the row fields buffer.\n this.#fieldIndex = 0;\n this.#row = new Array(this.#header?.length).fill(\"\");\n this.#dirty = false;\n break;\n default:\n this.#dirty = true;\n this.#row[this.#fieldIndex] = token.value;\n break;\n }\n }\n\n if (flush) {\n yield* this.flush();\n }\n }\n\n public *flush(): Generator<CSVRecord<Header>> {\n if (this.#header !== undefined) {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header\n .filter((v) => v)\n .map((header, index) => [header, this.#row.at(index)]),\n ) as unknown as CSVRecord<Header>;\n }\n }\n }\n\n #setHeader(header: Header) {\n this.#header = header;\n if (this.#header.length === 0) {\n throw new Error(\"The header must not be empty.\");\n }\n if (new Set(this.#header).size !== this.#header.length) {\n throw new Error(\"The header must not contain duplicate fields.\");\n }\n }\n}\n"],"names":["fieldIndex","row","header","dirty","constructor","options","Array","isArray","this","setHeader","assemble","tokens","flush","token","type","FieldDelimiter","RecordDelimiter","Object","fromEntries","map","index","at","length","fill","value","filter","v","Error","Set","size"],"mappings":"kJAOO,MACLA,GAAc,EACdC,GAAiB,GACjBC,GACAC,IAAS,EAET,WAAAC,CAAYC,EAA0C,SAC7B,IAAnBA,EAAQH,QAAwBI,MAAMC,QAAQF,EAAQH,SACnDM,MAAAC,EAAWJ,EAAQH,OAE5B,CAEA,SAAQQ,CACNC,EACAC,GAAQ,GAER,IAAA,MAAWC,KAASF,EAClB,OAAQE,EAAMC,MACZ,KAAKC,EAAAA,eACEP,MAAAR,IACLQ,MAAKL,GAAS,EACd,MACF,KAAKa,EAAAA,qBACkB,IAAjBR,MAAKN,EACFM,MAAAC,EAAWD,MAAKP,GAEjBO,MAAKL,QACDc,OAAOC,YACXV,MAAKN,EAAQiB,KAAI,CAACjB,EAAQkB,IAAU,CAClClB,EACAM,MAAKP,EAAKoB,GAAGD,aAIXH,OAAOC,YACXV,MAAKN,EAAQiB,KAAKjB,GAAW,CAACA,EAAQ,OAK5CM,MAAKR,EAAc,EACdQ,MAAAP,EAAO,IAAIK,MAAME,MAAKN,GAASoB,QAAQC,KAAK,IACjDf,MAAKL,GAAS,EACd,MACF,QACEK,MAAKL,GAAS,EACdK,MAAKP,EAAKO,MAAKR,GAAea,EAAMW,MAKtCZ,UACKJ,KAAKI,QAEhB,CAEA,MAAQA,QACe,IAAjBJ,MAAKN,GACHM,MAAKL,UACDc,OAAOC,YACXV,MAAKN,EACFuB,QAAQC,GAAMA,IACdP,KAAI,CAACjB,EAAQkB,IAAU,CAAClB,EAAQM,MAAKP,EAAKoB,GAAGD,OAIxD,CAEA,EAAAX,CAAWP,GAEL,GADJM,MAAKN,EAAUA,EACa,IAAxBM,MAAKN,EAAQoB,OACT,MAAA,IAAIK,MAAM,iCAEd,GAAA,IAAIC,IAAIpB,MAAKN,GAAS2B,OAASrB,MAAKN,EAAQoB,OACxC,MAAA,IAAIK,MAAM,gDAEpB"}
+ {"version":3,"file":"RecordAssembler.cjs","sources":["../../src/RecordAssembler.ts"],"sourcesContent":["import { FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\nexport class RecordAssembler<Header extends ReadonlyArray<string>> {\n #fieldIndex = 0;\n #row: string[] = [];\n #header: Header | undefined;\n #dirty = false;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n if (options.header !== undefined && Array.isArray(options.header)) {\n this.#setHeader(options.header);\n }\n }\n\n public *assemble(\n tokens: Iterable<Token>,\n flush = true,\n ): IterableIterator<CSVRecord<Header>> {\n for (const token of tokens) {\n switch (token.type) {\n case FieldDelimiter:\n this.#fieldIndex++;\n this.#dirty = true;\n break;\n case RecordDelimiter:\n if (this.#header === undefined) {\n this.#setHeader(this.#row as unknown as Header);\n } else {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header.map((header, index) => [\n header,\n this.#row.at(index),\n ]),\n ) as unknown as CSVRecord<Header>;\n } else {\n yield Object.fromEntries(\n this.#header.map((header) => [header, \"\"]),\n ) as CSVRecord<Header>;\n }\n }\n // Reset the row fields buffer.\n this.#fieldIndex = 0;\n this.#row = new Array(this.#header?.length).fill(\"\");\n this.#dirty = false;\n break;\n default:\n this.#dirty = true;\n this.#row[this.#fieldIndex] = token.value;\n break;\n }\n }\n\n if (flush) {\n yield* this.flush();\n }\n }\n\n public *flush(): Generator<CSVRecord<Header>> {\n if (this.#header !== undefined) {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header\n .filter((v) => v)\n .map((header, index) => [header, this.#row.at(index)]),\n ) as unknown as CSVRecord<Header>;\n }\n }\n }\n\n #setHeader(header: Header) {\n this.#header = header;\n if (this.#header.length === 0) {\n throw new ParseError(\"The header must not be empty.\");\n }\n if (new Set(this.#header).size !== this.#header.length) {\n throw new ParseError(\"The header must not contain duplicate fields.\");\n }\n }\n}\n"],"names":["fieldIndex","row","header","dirty","constructor","options","Array","isArray","this","setHeader","assemble","tokens","flush","token","type","FieldDelimiter","RecordDelimiter","Object","fromEntries","map","index","at","length","fill","value","filter","v","ParseError","Set","size"],"mappings":"mLAQO,MACLA,GAAc,EACdC,GAAiB,GACjBC,GACAC,IAAS,EAET,WAAAC,CAAYC,EAA0C,SAC7B,IAAnBA,EAAQH,QAAwBI,MAAMC,QAAQF,EAAQH,SACnDM,MAAAC,EAAWJ,EAAQH,OAE5B,CAEA,SAAQQ,CACNC,EACAC,GAAQ,GAER,IAAA,MAAWC,KAASF,EAClB,OAAQE,EAAMC,MACZ,KAAKC,EAAAA,eACEP,MAAAR,IACLQ,MAAKL,GAAS,EACd,MACF,KAAKa,EAAAA,qBACkB,IAAjBR,MAAKN,EACFM,MAAAC,EAAWD,MAAKP,GAEjBO,MAAKL,QACDc,OAAOC,YACXV,MAAKN,EAAQiB,KAAI,CAACjB,EAAQkB,IAAU,CAClClB,EACAM,MAAKP,EAAKoB,GAAGD,aAIXH,OAAOC,YACXV,MAAKN,EAAQiB,KAAKjB,GAAW,CAACA,EAAQ,OAK5CM,MAAKR,EAAc,EACdQ,MAAAP,EAAO,IAAIK,MAAME,MAAKN,GAASoB,QAAQC,KAAK,IACjDf,MAAKL,GAAS,EACd,MACF,QACEK,MAAKL,GAAS,EACdK,MAAKP,EAAKO,MAAKR,GAAea,EAAMW,MAKtCZ,UACKJ,KAAKI,QAEhB,CAEA,MAAQA,QACe,IAAjBJ,MAAKN,GACHM,MAAKL,UACDc,OAAOC,YACXV,MAAKN,EACFuB,QAAQC,GAAMA,IACdP,KAAI,CAACjB,EAAQkB,IAAU,CAAClB,EAAQM,MAAKP,EAAKoB,GAAGD,OAIxD,CAEA,EAAAX,CAAWP,GAEL,GADJM,MAAKN,EAAUA,EACa,IAAxBM,MAAKN,EAAQoB,OACT,MAAA,IAAIK,EAAAA,WAAW,iCAEnB,GAAA,IAAIC,IAAIpB,MAAKN,GAAS2B,OAASrB,MAAKN,EAAQoB,OACxC,MAAA,IAAIK,EAAAA,WAAW,gDAEzB"}
package/dist/cjs/RecordAssemblerTransformer.cjs
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./RecordAssembler.cjs");class r extends TransformStream{constructor(r={}){const s=new e.RecordAssembler(r);super({transform:(e,r)=>{for(const o of s.assemble(e,!1))r.enqueue(o)},flush:e=>{for(const r of s.flush())e.enqueue(r)}})}}exports.RecordAssemblerTransformer=r;
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./RecordAssembler.cjs");class r extends TransformStream{assembler;constructor(r={}){super({transform:(e,r)=>{try{for(const s of this.assembler.assemble(e,!1))r.enqueue(s)}catch(s){r.error(s)}},flush:e=>{try{for(const r of this.assembler.flush())e.enqueue(r)}catch(r){e.error(r)}}}),this.assembler=new e.RecordAssembler(r)}}exports.RecordAssemblerTransformer=r;
  //# sourceMappingURL=RecordAssemblerTransformer.cjs.map
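`RecordAssemblerTransformer` gets the same treatment as `LexerTransformer`: a public `assembler` property assigned after `super()`, with `try`/`catch` around both `transform` and `flush`. The happy path from the JSDoc in the hunk is unchanged:

```ts
import { LexerTransformer, RecordAssemblerTransformer } from "web-csv-toolbox";

new ReadableStream<string>({
  start(controller) {
    controller.enqueue("name,age\r\n");
    controller.enqueue("Alice,20\r\n");
    controller.close();
  },
})
  .pipeThrough(new LexerTransformer())
  .pipeThrough(new RecordAssemblerTransformer())
  .pipeTo(new WritableStream({ write(record) { console.log(record); } }));
// { name: "Alice", age: "20" }
```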
package/dist/cjs/RecordAssemblerTransformer.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"RecordAssemblerTransformer.cjs","sources":["../../src/RecordAssemblerTransformer.ts"],"sourcesContent":["import { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n * @template Header The type of the header row.\n * @param options The options for the parser.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer())\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n *\n * @example Parse a CSV with headers by options\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer({ header: [\"name\", \"age\"] }))\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n */\nexport class RecordAssemblerTransformer<\n Header extends ReadonlyArray<string>,\n> extends TransformStream<Token[], CSVRecord<Header>> {\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n const assembler = new RecordAssembler(options);\n super({\n transform: (tokens, controller) => {\n for (const token of assembler.assemble(tokens, false)) {\n controller.enqueue(token);\n }\n },\n flush: (controller) => {\n for (const token of assembler.flush()) {\n controller.enqueue(token);\n }\n },\n });\n }\n}\n"],"names":["RecordAssemblerTransformer","TransformStream","constructor","options","assembler","RecordAssembler","super","transform","tokens","controller","token","assemble","enqueue","flush"],"mappings":"yHAkDO,MAAMA,UAEHC,gBACR,WAAAC,CAAYC,EAA0C,IAC9C,MAAAC,EAAY,IAAIC,kBAAgBF,GAChCG,MAAA,CACJC,UAAW,CAACC,EAAQC,KAClB,IAAA,MAAWC,KAASN,EAAUO,SAASH,GAAQ,GAC7CC,EAAWG,QAAQF,EACrB,EAEFG,MAAQJ,IACK,IAAA,MAAAC,KAASN,EAAUS,QAC5BJ,EAAWG,QAAQF,EACrB,GAGN"}
+ {"version":3,"file":"RecordAssemblerTransformer.cjs","sources":["../../src/RecordAssemblerTransformer.ts"],"sourcesContent":["import { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n * @template Header The type of the header row.\n * @param options The options for the parser.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer())\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n *\n * @example Parse a CSV with headers by options\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer({ header: [\"name\", \"age\"] }))\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n */\nexport class RecordAssemblerTransformer<\n Header extends ReadonlyArray<string>,\n> extends TransformStream<Token[], CSVRecord<Header>> {\n public readonly assembler: RecordAssembler<Header>;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n super({\n transform: (tokens, controller) => {\n try {\n for (const token of this.assembler.assemble(tokens, false)) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n flush: (controller) => {\n try {\n for (const token of this.assembler.flush()) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.assembler = new RecordAssembler(options);\n }\n}\n"],"names":["RecordAssemblerTransformer","TransformStream","assembler","constructor","options","super","transform","tokens","controller","token","this","assemble","enqueue","error","flush","RecordAssembler"],"mappings":"yHAkDO,MAAMA,UAEHC,gBACQC,UAEhB,WAAAC,CAAYC,EAA0C,IAC9CC,MAAA,CACJC,UAAW,CAACC,EAAQC,KACd,IACF,IAAA,MAAWC,KAASC,KAAKR,UAAUS,SAASJ,GAAQ,GAClDC,EAAWI,QAAQH,SAEdI,GACPL,EAAWK,MAAMA,EACnB,GAEFC,MAAQN,IACF,IACF,IAAA,MAAWC,KAASC,KAAKR,UAAUY,QACjCN,EAAWI,QAAQH,SAEdI,GACPL,EAAWK,MAAMA,EACnB,KAGCH,KAAAR,UAAY,IAAIa,EAAAA,gBAAgBX,EACvC"}
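With both transformers forwarding errors through `controller.error()`, validation failures that used to throw synchronously out of `transform` now reject the pipeline promise instead. A sketch of that error path, under the same export assumptions as above:

```ts
import { LexerTransformer, RecordAssemblerTransformer } from "web-csv-toolbox";

new ReadableStream<string>({
  start(controller) {
    controller.enqueue("name,name\r\nAlice,20\r\n"); // duplicate header fields
    controller.close();
  },
})
  .pipeThrough(new LexerTransformer())
  .pipeThrough(new RecordAssemblerTransformer())
  .pipeTo(new WritableStream({ write() {} }))
  .catch((error) => {
    console.error(error); // ParseError: The header must not contain duplicate fields.
  });
```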