web-csv-toolbox 0.9.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/Lexer.cjs +1 -1
- package/dist/cjs/Lexer.cjs.map +1 -1
- package/dist/cjs/LexerTransformer.cjs +1 -1
- package/dist/cjs/LexerTransformer.cjs.map +1 -1
- package/dist/cjs/RecordAssembler.cjs +1 -1
- package/dist/cjs/RecordAssembler.cjs.map +1 -1
- package/dist/cjs/RecordAssemblerTransformer.cjs +1 -1
- package/dist/cjs/RecordAssemblerTransformer.cjs.map +1 -1
- package/dist/cjs/_virtual/web_csv_toolbox_wasm_bg.wasm.cjs +1 -1
- package/dist/cjs/assertCommonOptions.cjs +1 -1
- package/dist/cjs/assertCommonOptions.cjs.map +1 -1
- package/dist/cjs/common/errors.cjs +2 -0
- package/dist/cjs/common/errors.cjs.map +1 -0
- package/dist/cjs/commonParseErrorHandling.cjs +2 -0
- package/dist/cjs/commonParseErrorHandling.cjs.map +1 -0
- package/dist/cjs/getOptionsFromResponse.cjs +1 -1
- package/dist/cjs/getOptionsFromResponse.cjs.map +1 -1
- package/dist/cjs/parseBinaryToArraySync.cjs +1 -1
- package/dist/cjs/parseBinaryToArraySync.cjs.map +1 -1
- package/dist/cjs/parseBinaryToIterableIterator.cjs +1 -1
- package/dist/cjs/parseBinaryToIterableIterator.cjs.map +1 -1
- package/dist/cjs/parseBinaryToStream.cjs +1 -1
- package/dist/cjs/parseBinaryToStream.cjs.map +1 -1
- package/dist/cjs/parseResponse.cjs +1 -1
- package/dist/cjs/parseResponse.cjs.map +1 -1
- package/dist/cjs/parseResponseToStream.cjs +1 -1
- package/dist/cjs/parseResponseToStream.cjs.map +1 -1
- package/dist/cjs/parseString.cjs +1 -1
- package/dist/cjs/parseString.cjs.map +1 -1
- package/dist/cjs/parseStringToArraySync.cjs +1 -1
- package/dist/cjs/parseStringToArraySync.cjs.map +1 -1
- package/dist/cjs/parseStringToArraySyncWASM.cjs +1 -1
- package/dist/cjs/parseStringToArraySyncWASM.cjs.map +1 -1
- package/dist/cjs/parseStringToIterableIterator.cjs +1 -1
- package/dist/cjs/parseStringToIterableIterator.cjs.map +1 -1
- package/dist/cjs/parseStringToStream.cjs +1 -1
- package/dist/cjs/parseStringToStream.cjs.map +1 -1
- package/dist/cjs/utils/convertBinaryToString.cjs.map +1 -0
- package/dist/cjs/utils/pipeline.cjs +1 -1
- package/dist/cjs/utils/pipeline.cjs.map +1 -1
- package/dist/cjs/web-csv-toolbox.cjs +1 -1
- package/dist/es/Lexer.js +6 -0
- package/dist/es/Lexer.js.map +1 -1
- package/dist/es/LexerTransformer.js +12 -3
- package/dist/es/LexerTransformer.js.map +1 -1
- package/dist/es/RecordAssembler.js +3 -2
- package/dist/es/RecordAssembler.js.map +1 -1
- package/dist/es/RecordAssemblerTransformer.js +14 -5
- package/dist/es/RecordAssemblerTransformer.js.map +1 -1
- package/dist/es/_virtual/web_csv_toolbox_wasm_bg.wasm.js +1 -1
- package/dist/es/assertCommonOptions.js +8 -7
- package/dist/es/assertCommonOptions.js.map +1 -1
- package/dist/es/common/errors.js +20 -0
- package/dist/es/common/errors.js.map +1 -0
- package/dist/es/commonParseErrorHandling.js +13 -0
- package/dist/es/commonParseErrorHandling.js.map +1 -0
- package/dist/es/getOptionsFromResponse.js +1 -1
- package/dist/es/getOptionsFromResponse.js.map +1 -1
- package/dist/es/parseBinaryToArraySync.js +8 -3
- package/dist/es/parseBinaryToArraySync.js.map +1 -1
- package/dist/es/parseBinaryToIterableIterator.js +8 -3
- package/dist/es/parseBinaryToIterableIterator.js.map +1 -1
- package/dist/es/parseBinaryToStream.js +8 -3
- package/dist/es/parseBinaryToStream.js.map +1 -1
- package/dist/es/parseResponse.js +9 -4
- package/dist/es/parseResponse.js.map +1 -1
- package/dist/es/parseResponseToStream.js +9 -4
- package/dist/es/parseResponseToStream.js.map +1 -1
- package/dist/es/parseString.js +6 -1
- package/dist/es/parseString.js.map +1 -1
- package/dist/es/parseStringToArraySync.js +9 -4
- package/dist/es/parseStringToArraySync.js.map +1 -1
- package/dist/es/parseStringToArraySyncWASM.js +9 -2
- package/dist/es/parseStringToArraySyncWASM.js.map +1 -1
- package/dist/es/parseStringToIterableIterator.js +9 -4
- package/dist/es/parseStringToIterableIterator.js.map +1 -1
- package/dist/es/parseStringToStream.js +15 -10
- package/dist/es/parseStringToStream.js.map +1 -1
- package/dist/es/utils/convertBinaryToString.js.map +1 -0
- package/dist/es/utils/pipeline.js +1 -1
- package/dist/es/utils/pipeline.js.map +1 -1
- package/dist/es/web-csv-toolbox.js +6 -6
- package/dist/types/LexerTransformer.d.ts +9 -7
- package/dist/types/LexerTransformer.test.d.ts +1 -0
- package/dist/types/RecordAssemblerTransformer.d.ts +2 -0
- package/dist/types/assertCommonOptions.d.ts +2 -1
- package/dist/types/common/errors.d.ts +26 -0
- package/dist/types/commonParseErrorHandling.d.ts +8 -0
- package/dist/types/getOptionsFromResponse.d.ts +8 -0
- package/dist/types/parseBinaryToArraySync.test.d.ts +1 -0
- package/dist/types/parseBinaryToIterableIterator.d.ts +8 -0
- package/dist/types/utils/convertBinaryToString.d.ts +11 -0
- package/dist/types/web-csv-toolbox.d.ts +6 -6
- package/dist/web-csv-toolbox.umd.cjs +1 -1
- package/dist/web-csv-toolbox.umd.cjs.map +1 -1
- package/dist/web_csv_toolbox_wasm_bg.wasm +0 -0
- package/package.json +3 -1
- package/dist/cjs/convertBinaryToString.cjs.map +0 -1
- package/dist/es/convertBinaryToString.js.map +0 -1
- package/dist/types/convertBinaryToString.d.ts +0 -2
- /package/dist/cjs/{convertBinaryToString.cjs → utils/convertBinaryToString.cjs} +0 -0
- /package/dist/es/{convertBinaryToString.js → utils/convertBinaryToString.js} +0 -0
package/dist/cjs/getOptionsFromResponse.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"getOptionsFromResponse.cjs","sources":["../../src/getOptionsFromResponse.ts"],"sourcesContent":["import type { ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseMime } from \"./utils/parseMime.ts\";\n\nexport function getOptionsFromResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options: ParseBinaryOptions<Header> = {},\n): ParseBinaryOptions<Header> {\n const { headers } = response;\n const contentType = headers.get(\"content-type\") ?? \"text/csv\";\n const mime = parseMime(contentType);\n if (mime.type !== \"text/csv\") {\n throw new
+
{"version":3,"file":"getOptionsFromResponse.cjs","sources":["../../src/getOptionsFromResponse.ts"],"sourcesContent":["import type { ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseMime } from \"./utils/parseMime.ts\";\n\n/**\n * Extracts the options from the response object.\n *\n * @param response - The response object from which to extract the options.\n * @param options - The options to merge with the extracted options.\n * @returns The options extracted from the response.\n * @throws {RangeError} - The content type is not supported.\n */\nexport function getOptionsFromResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options: ParseBinaryOptions<Header> = {},\n): ParseBinaryOptions<Header> {\n const { headers } = response;\n const contentType = headers.get(\"content-type\") ?? \"text/csv\";\n const mime = parseMime(contentType);\n if (mime.type !== \"text/csv\") {\n throw new RangeError(`Invalid mime type: \"${contentType}\"`);\n }\n const decomposition =\n (headers.get(\"content-encoding\") as CompressionFormat) ?? undefined;\n const charset = mime.parameters.charset ?? \"utf-8\";\n // TODO: Support header=present and header=absent\n // const header = mime.parameters.header ?? \"present\";\n return {\n decomposition,\n charset,\n ...options,\n };\n}\n"],"names":["response","options","headers","contentType","get","mime","parseMime","type","RangeError","decomposition","charset","parameters"],"mappings":"wJAWO,SACLA,EACAC,EAAsC,IAEhC,MAAAC,QAAEA,GAAYF,EACdG,EAAcD,EAAQE,IAAI,iBAAmB,WAC7CC,EAAOC,YAAUH,GACnB,GAAc,aAAdE,EAAKE,KACP,MAAM,IAAIC,WAAW,uBAAuBL,MAOvC,MAAA,CACLM,cALCP,EAAQE,IAAI,0BAA6C,EAM1DM,QALcL,EAAKM,WAAWD,SAAW,WAMtCT,EAEP"}
package/dist/cjs/parseBinaryToArraySync.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./commonParseErrorHandling.cjs"),n=require("./parseStringToArraySync.cjs"),e=require("./utils/convertBinaryToString.cjs");exports.parseBinaryToArraySync=function(o,t={}){try{const r=e.convertBinaryToString(o,t);return n.parseStringToArraySync(r,t)}catch(c){r.commonParseErrorHandling(c)}};
//# sourceMappingURL=parseBinaryToArraySync.cjs.map
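This is the first of several entry points whose body is now wrapped in try/catch with the new `commonParseErrorHandling` helper. A short sketch of the same behavior through the package root, assuming `parseBinary` (exported per the `web-csv-toolbox.cjs` hunk further down) behaves like the other async-iterator entry points; the CSV bytes are made up:

```ts
import { parseBinary } from "web-csv-toolbox";

const binary = new TextEncoder().encode("name,age\nAlice,42\nBob,69");

try {
  for await (const record of parseBinary(binary)) {
    console.log(record); // e.g. { name: 'Alice', age: '42' }
  }
} catch (error) {
  // In 0.10.0, parse failures are routed through commonParseErrorHandling
  // (see the try/catch added in this hunk) rather than thrown raw.
  console.error(error);
}
```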
package/dist/cjs/parseBinaryToArraySync.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseBinaryToArraySync.cjs","sources":["../../src/parseBinaryToArraySync.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport {
+
{"version":3,"file":"parseBinaryToArraySync.cjs","sources":["../../src/parseBinaryToArraySync.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\nexport function parseBinaryToArraySync<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): CSVRecord<Header>[] {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToArraySync(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":["binary","options","csv","convertBinaryToString","parseStringToArraySync","error","commonParseErrorHandling"],"mappings":"0PAKO,SACLA,EACAC,EAAsC,IAElC,IACI,MAAAC,EAAMC,EAAAA,sBAAsBH,EAAQC,GACnC,OAAAG,EAAAA,uBAAuBF,EAAKD,SAC5BI,GACPC,EAAAA,yBAAyBD,EAC3B,CACF"}
package/dist/cjs/parseBinaryToIterableIterator.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./commonParseErrorHandling.cjs"),e=require("./parseStringToIterableIterator.cjs"),t=require("./utils/convertBinaryToString.cjs");exports.parseBinaryToIterableIterator=function(o,n={}){try{const r=t.convertBinaryToString(o,n);return e.parseStringToIterableIterator(r,n)}catch(a){r.commonParseErrorHandling(a)}};
//# sourceMappingURL=parseBinaryToIterableIterator.cjs.map
package/dist/cjs/parseBinaryToIterableIterator.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseBinaryToIterableIterator.cjs","sources":["../../src/parseBinaryToIterableIterator.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport {
+
{"version":3,"file":"parseBinaryToIterableIterator.cjs","sources":["../../src/parseBinaryToIterableIterator.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\n/**\n * Parses the given binary data into an iterable iterator of CSV records.\n *\n * @param binary - The binary data to parse.\n * @param options - The parse options.\n * @returns An iterable iterator of CSV records.\n * @throws {ParseError} When an error occurs while parsing the CSV data.\n */\nexport function parseBinaryToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): IterableIterator<CSVRecord<Header>> {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":["binary","options","csv","convertBinaryToString","parseStringToIterableIterator","error","commonParseErrorHandling"],"mappings":"wQAaO,SAGLA,EACAC,EAAsC,IAElC,IACI,MAAAC,EAAMC,EAAAA,sBAAsBH,EAAQC,GACnC,OAAAG,EAAAA,8BAA8BF,EAAKD,SACnCI,GACPC,EAAAA,yBAAyBD,EAC3B,CACF"}
package/dist/cjs/parseBinaryToStream.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./commonParseErrorHandling.cjs"),e=require("./parseStringToStream.cjs"),t=require("./utils/convertBinaryToString.cjs");exports.parseBinaryToStream=function(n,o={}){try{const r=t.convertBinaryToString(n,o);return e.parseStringToStream(r,o)}catch(i){r.commonParseErrorHandling(i)}};
//# sourceMappingURL=parseBinaryToStream.cjs.map
package/dist/cjs/parseBinaryToStream.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseBinaryToStream.cjs","sources":["../../src/parseBinaryToStream.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport {
+
{"version":3,"file":"parseBinaryToStream.cjs","sources":["../../src/parseBinaryToStream.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport { convertBinaryToString } from \"./utils/convertBinaryToString.ts\";\n\nexport function parseBinaryToStream<Header extends ReadonlyArray<string>>(\n binary: Uint8Array | ArrayBuffer,\n options: ParseBinaryOptions<Header> = {},\n): ReadableStream<CSVRecord<Header>> {\n try {\n const csv = convertBinaryToString(binary, options);\n return parseStringToStream(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":["binary","options","csv","convertBinaryToString","parseStringToStream","error","commonParseErrorHandling"],"mappings":"oPAKO,SACLA,EACAC,EAAsC,IAElC,IACI,MAAAC,EAAMC,EAAAA,sBAAsBH,EAAQC,GACnC,OAAAG,EAAAA,oBAAoBF,EAAKD,SACzBI,GACPC,EAAAA,yBAAyBD,EAC3B,CACF"}
package/dist/cjs/parseResponse.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./commonParseErrorHandling.cjs"),r=require("./getOptionsFromResponse.cjs"),o=require("./parseResponseToStream.cjs"),t=require("./parseUint8ArrayStream.cjs"),s=require("./utils/convertThisAsyncIterableIteratorToArray.cjs");function n(o,s){try{const e=r.getOptionsFromResponse(o,s);if(null===o.body)throw new RangeError("Response body is null");return t.parseUint8ArrayStream(o.body,e)}catch(n){e.commonParseErrorHandling(n)}}Object.defineProperties(n,{toArray:{enumerable:!0,writable:!1,value:s.convertThisAsyncIterableIteratorToArray},toStreamSync:{enumerable:!0,writable:!1,value:o.parseResponseToStream}}),exports.parseResponse=n;
//# sourceMappingURL=parseResponse.cjs.map
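Besides routing errors through `commonParseErrorHandling`, `parseResponse` (and `parseResponseToStream` below) now rejects a `Response` whose `body` is `null` with `RangeError("Response body is null")`. A sketch of that path; the body-less `Response` is constructed locally for illustration, and the exact error surfaced depends on `commonParseErrorHandling`, which this diff does not show:

```ts
import { parseResponse } from "web-csv-toolbox";

// A Response constructed with a null body has `response.body === null`.
const empty = new Response(null, { headers: { "content-type": "text/csv" } });

try {
  parseResponse(empty);
} catch (error) {
  console.error(error); // Guard added above: RangeError("Response body is null")
}
```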
package/dist/cjs/parseResponse.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseResponse.cjs","sources":["../../src/parseResponse.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseResponseToStream } from \"./parseResponseToStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse HTTP Response what contains CSV to records,\n * ideal for smaller data sets.\n *\n * @remarks\n * This function automatically treats response headers.\n *\n * - If `Content-Type` header is not set, it assumes `text/csv`.\n * - If `Content-Type` header is not `text/csv`, it throws an error.\n * - If `Content-Type` header has charset parameter, it uses it for decoding.\n * - If `Content-Encoding` header is set, it decompresses the response.\n * - Should there be any conflicting information between the header and the options, the option's value will take precedence.\n *\n * @category Middle-level API\n * @param response\n * @param options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseResponse.toArray} function.\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parseResponse(response)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const options_ = getOptionsFromResponse(response, options);\n
+
{"version":3,"file":"parseResponse.cjs","sources":["../../src/parseResponse.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseResponseToStream } from \"./parseResponseToStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse HTTP Response what contains CSV to records,\n * ideal for smaller data sets.\n *\n * @remarks\n * This function automatically treats response headers.\n *\n * - If `Content-Type` header is not set, it assumes `text/csv`.\n * - If `Content-Type` header is not `text/csv`, it throws an error.\n * - If `Content-Type` header has charset parameter, it uses it for decoding.\n * - If `Content-Encoding` header is set, it decompresses the response.\n * - Should there be any conflicting information between the header and the options, the option's value will take precedence.\n *\n * @category Middle-level API\n * @param response\n * @param options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseResponse.toArray} function.\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * for await (const record of parseResponse(response)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseResponse<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new RangeError(\"Response body is null\");\n }\n return parseUint8ArrayStream(response.body, options_);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n\nexport declare namespace parseResponse {\n /**\n * Parse CSV Response to array of records.\n *\n * @returns Array of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * const records = await parseResponse.toArray(response);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV Response to stream of records.\n *\n * @param response Response to parse\n * @returns Stream of records\n *\n * @example Parsing CSV Response\n *\n * ```ts\n * import { parseResponse } from 'web-csv-toolbox';\n *\n * const response = await fetch('https://example.com/data.csv');\n *\n * await parseResponse.toStream(response)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n}\n\nObject.defineProperties(parseResponse, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStreamSync: {\n enumerable: true,\n writable: 
false,\n value: parseResponseToStream,\n },\n});\n"],"names":["parseResponse","response","options","options_","getOptionsFromResponse","body","RangeError","parseUint8ArrayStream","error","commonParseErrorHandling","Object","defineProperties","toArray","enumerable","writable","value","internal.convertThisAsyncIterableIteratorToArray","convertThisAsyncIterableIteratorToArray","toStreamSync","parseResponseToStream"],"mappings":"+TAuCgB,SAAAA,EACdC,EACAC,GAEI,IACI,MAAAC,EAAWC,EAAAA,uBAAuBH,EAAUC,GAC9C,GAAkB,OAAlBD,EAASI,KACL,MAAA,IAAIC,WAAW,yBAEhB,OAAAC,wBAAsBN,EAASI,KAAMF,SACrCK,GACPC,EAAAA,yBAAyBD,EAC3B,CACF,CAuDAE,OAAOC,iBAAiBX,EAAe,CACrCY,QAAS,CACPC,YAAY,EACZC,UAAU,EACVC,MAAOC,EAASC,yCAElBC,aAAc,CACZL,YAAY,EACZC,UAAU,EACVC,MAAOI,EAAAA"}
package/dist/cjs/parseResponseToStream.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./getOptionsFromResponse.cjs"),
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./commonParseErrorHandling.cjs"),e=require("./getOptionsFromResponse.cjs"),o=require("./parseUint8ArrayStreamToStream.cjs");exports.parseResponseToStream=function(t,n){try{const r=e.getOptionsFromResponse(t,n);if(null===t.body)throw new RangeError("Response body is null");return o.parseUint8ArrayStreamToStream(t.body,r)}catch(s){r.commonParseErrorHandling(s)}};
//# sourceMappingURL=parseResponseToStream.cjs.map
package/dist/cjs/parseResponseToStream.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseResponseToStream.cjs","sources":["../../src/parseResponseToStream.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\n\nexport function parseResponseToStream<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const options_ = getOptionsFromResponse(response, options);\n
+
{"version":3,"file":"parseResponseToStream.cjs","sources":["../../src/parseResponseToStream.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { getOptionsFromResponse } from \"./getOptionsFromResponse.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\n\nexport function parseResponseToStream<Header extends ReadonlyArray<string>>(\n response: Response,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const options_ = getOptionsFromResponse(response, options);\n if (response.body === null) {\n throw new RangeError(\"Response body is null\");\n }\n return parseUint8ArrayStreamToStream(response.body, options_);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":["response","options","options_","getOptionsFromResponse","body","RangeError","parseUint8ArrayStreamToStream","error","commonParseErrorHandling"],"mappings":"2PAKgB,SACdA,EACAC,GAEI,IACI,MAAAC,EAAWC,EAAAA,uBAAuBH,EAAUC,GAC9C,GAAkB,OAAlBD,EAASI,KACL,MAAA,IAAIC,WAAW,yBAEhB,OAAAC,gCAA8BN,EAASI,KAAMF,SAC7CK,GACPC,EAAAA,yBAAyBD,EAC3B,CACF"}
package/dist/cjs/parseString.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./commonParseErrorHandling.cjs"),e=require("./parseStringToArraySync.cjs"),t=require("./parseStringToIterableIterator.cjs"),a=require("./parseStringToStream.cjs"),o=require("./utils/convertThisAsyncIterableIteratorToArray.cjs");async function*n(e,a){try{yield*t.parseStringToIterableIterator(e,a)}catch(o){r.commonParseErrorHandling(o)}}Object.defineProperties(n,{toArray:{enumerable:!0,writable:!1,value:o.convertThisAsyncIterableIteratorToArray},toArraySync:{enumerable:!0,writable:!1,value:e.parseStringToArraySync},toIterableIterator:{enumerable:!0,writable:!1,value:t.parseStringToIterableIterator},toStream:{enumerable:!0,writable:!1,value:a.parseStringToStream}}),exports.parseString=n;
//# sourceMappingURL=parseString.cjs.map
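`parseString` keeps its async-generator signature but now wraps the delegated iteration in try/catch and hands failures to `commonParseErrorHandling`. A sketch with a deliberately malformed input (the unterminated quote is illustrative; the concrete error class comes from the new `common/errors` module, whose contents are not shown here):

```ts
import { parseString } from "web-csv-toolbox";

// The quoted field is never closed, so flushing the lexer fails.
const malformed = 'name,age\n"Alice,42';

try {
  for await (const record of parseString(malformed)) {
    console.log(record);
  }
} catch (error) {
  // Surfaced via commonParseErrorHandling instead of leaking lexer state.
  console.error(error);
}
```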
package/dist/cjs/parseString.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseString.cjs","sources":["../../src/parseString.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n yield* parseStringToIterableIterator(csv, options);\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends 
ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n"],"names":["async","parseString","csv","options","parseStringToIterableIterator","Object","defineProperties","toArray","enumerable","writable","value","internal.convertThisAsyncIterableIteratorToArray","convertThisAsyncIterableIteratorToArray","toArraySync","parseStringToArraySync","toIterableIterator","toStream","parseStringToStream"],"mappings":"
+
{"version":3,"file":"parseString.cjs","sources":["../../src/parseString.ts"],"sourcesContent":["import type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { parseStringToArraySync } from \"./parseStringToArraySync.ts\";\nimport { parseStringToIterableIterator } from \"./parseStringToIterableIterator.ts\";\nimport { parseStringToStream } from \"./parseStringToStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV string to records.\n *\n * @category Middle-level API\n * @param csv CSV string to parse\n * @param options Parsing options. See {@link ParseOptions}.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseString.toArray} function.\n * @example Parsing CSV files from strings\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for await (const record of parseString(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\nexport async function* parseString<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n try {\n yield* parseStringToIterableIterator(csv, options);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\nexport declare namespace parseString {\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = await parseString.toArray(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV string to records.\n *\n * @returns Array of records\n *\n * @example\n *\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * const records = parseString.toArraySync(csv);\n * console.log(records);\n * // Prints:\n * // [ { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' } ]\n * ```\n */\n export function toArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): CSVRecord<Header>[];\n /**\n * Parse CSV string to records.\n *\n * @returns Async iterable iterator of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * for (const record of parseString.toIterableIterator(csv)) {\n * console.log(record);\n * }\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toIterableIterator<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): IterableIterator<CSVRecord<Header>>;\n /**\n * Parse CSV string to records.\n *\n * @returns Readable stream of records\n *\n * @example\n * ```ts\n * import { parseString } from 'web-csv-toolbox';\n *\n * const csv = `name,age\n * Alice,42\n * Bob,69`;\n *\n * await parseString.toStream(csv)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n 
* // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\nObject.defineProperties(parseString, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toArraySync: {\n enumerable: true,\n writable: false,\n value: parseStringToArraySync,\n },\n toIterableIterator: {\n enumerable: true,\n writable: false,\n value: parseStringToIterableIterator,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseStringToStream,\n },\n});\n"],"names":["async","parseString","csv","options","parseStringToIterableIterator","error","commonParseErrorHandling","Object","defineProperties","toArray","enumerable","writable","value","internal.convertThisAsyncIterableIteratorToArray","convertThisAsyncIterableIteratorToArray","toArraySync","parseStringToArraySync","toIterableIterator","toStream","parseStringToStream"],"mappings":"qUAiCuBA,eAAAC,EACrBC,EACAC,GAEI,UACKC,EAAAA,8BAA8BF,EAAKC,SACnCE,GACPC,EAAAA,yBAAyBD,EAC3B,CACF,CAyGAE,OAAOC,iBAAiBP,EAAa,CACnCQ,QAAS,CACPC,YAAY,EACZC,UAAU,EACVC,MAAOC,EAASC,yCAElBC,YAAa,CACXL,YAAY,EACZC,UAAU,EACVC,MAAOI,EAAAA,wBAETC,mBAAoB,CAClBP,YAAY,EACZC,UAAU,EACVC,MAAOR,EAAAA,+BAETc,SAAU,CACRR,YAAY,EACZC,UAAU,EACVC,MAAOO,EAAAA"}
package/dist/cjs/parseStringToArraySync.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs"),r=require("./RecordAssembler.cjs");exports.parseStringToArraySync=function(s,
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs"),r=require("./RecordAssembler.cjs"),o=require("./commonParseErrorHandling.cjs");exports.parseStringToArraySync=function(s,n){try{const o=new e.Lexer(n),c=new r.RecordAssembler(n),t=o.lex(s);return[...c.assemble(t)]}catch(c){o.commonParseErrorHandling(c)}};
//# sourceMappingURL=parseStringToArraySync.cjs.map
package/dist/cjs/parseStringToArraySync.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseStringToArraySync.cjs","sources":["../../src/parseStringToArraySync.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\n\nexport function parseStringToArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): CSVRecord<Header>[] {\n const lexer = new Lexer(options);\n
+
{"version":3,"file":"parseStringToArraySync.cjs","sources":["../../src/parseStringToArraySync.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): CSVRecord<Header>[] {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return [...assembler.assemble(tokens)];\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":["csv","options","lexer","Lexer","assembler","RecordAssembler","tokens","lex","assemble","error","commonParseErrorHandling"],"mappings":"6NAKgB,SACdA,EACAC,GAEI,IACI,MAAAC,EAAQ,IAAIC,QAAMF,GAClBG,EAAY,IAAIC,kBAAgBJ,GAChCK,EAASJ,EAAMK,IAAIP,GACzB,MAAO,IAAII,EAAUI,SAASF,UACvBG,GACPC,EAAAA,yBAAyBD,EAC3B,CACF"}
package/dist/cjs/parseStringToArraySyncWASM.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./_virtual/_web-csv-toolbox-wasm.cjs"),t=require("./constants.cjs");exports.parseStringToArraySyncWASM=function(
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./_virtual/_web-csv-toolbox-wasm.cjs"),t=require("./assertCommonOptions.cjs"),o=require("./common/errors.cjs"),e=require("./constants.cjs");exports.parseStringToArraySyncWASM=function(n,i={}){const{delimiter:s=e.COMMA,quotation:a=e.DOUBLE_QUOTE}=i;if("string"!=typeof s||1!==s.length)throw new o.InvalidOptionError("Invalid delimiter, must be a single character on WASM.");if(a!==e.DOUBLE_QUOTE)throw new o.InvalidOptionError("Invalid quotation, must be double quote on WASM.");t.assertCommonOptions({delimiter:s,quotation:a});const c=s.charCodeAt(0);return JSON.parse(r.parseStringToArraySync(n,c))};
//# sourceMappingURL=parseStringToArraySyncWASM.cjs.map
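The WASM path now reports its restrictions with the new `InvalidOptionError` (the previous error type is truncated in the removed line above). A sketch of tripping the single-character delimiter check; per the JSDoc embedded above, `loadWASM()` must be called first:

```ts
import { loadWASM, parseStringToArraySyncWASM } from "web-csv-toolbox";

await loadWASM();

try {
  // Multi-character delimiters are rejected on the WASM path.
  parseStringToArraySyncWASM("a||b\n1||2", { delimiter: "||" });
} catch (error) {
  // Per this hunk: InvalidOptionError("Invalid delimiter, must be a single character on WASM.")
  console.error(error);
}
```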
package/dist/cjs/parseStringToArraySyncWASM.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseStringToArraySyncWASM.cjs","sources":["../../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport { COMMA, DOUBLE_QUOTE } from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n */\nexport function parseStringToArraySyncWASM<Header extends readonly string[]>(\n csv: string,\n options: CommonOptions = {},\n): CSVRecord<Header>[] {\n const { delimiter = COMMA, quotation = DOUBLE_QUOTE } = options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new
+
{"version":3,"file":"parseStringToArraySyncWASM.cjs","sources":["../../src/parseStringToArraySyncWASM.ts"],"sourcesContent":["import { parseStringToArraySync } from \"web-csv-toolbox-wasm\";\nimport { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { InvalidOptionError } from \"./common/errors.ts\";\nimport type { CSVRecord, CommonOptions } from \"./common/types.ts\";\nimport { COMMA, DOUBLE_QUOTE } from \"./constants.ts\";\nimport type { loadWASM } from \"./loadWASM.ts\";\n\n/**\n * Parse CSV string to record of arrays.\n *\n * @param csv CSV string\n * @param options Parse options\n * @returns Record of arrays\n *\n * @remarks\n * This function uses WebAssembly to parse CSV string.\n * Before calling this function, you must call {@link loadWASM} function.\n *\n * This function only supports UTF-8 string.\n * If you pass a string that is not UTF-8, like UTF-16, it throws an error.\n * This function only supports double quote as quotation.\n * So, `options.quotation` must be `\"` (double quote). Otherwise, it throws an error.\n *\n * And this function only supports single character as delimiter.\n * So, `options.delimiter` must be a single character. Otherwise, it throws an error.\n *\n * @example\n *\n * ```ts\n * import { loadWASM, parseStringWASM } from \"web-csv-toolbox\";\n *\n * await loadWASM();\n *\n * const csv = \"a,b,c\\n1,2,3\";\n *\n * const result = parseStringToArraySyncWASM(csv);\n * console.log(result);\n * // Prints:\n * // [{ a: \"1\", b: \"2\", c: \"3\" }]\n * ```\n * @beta\n */\nexport function parseStringToArraySyncWASM<Header extends readonly string[]>(\n csv: string,\n options: CommonOptions = {},\n): CSVRecord<Header>[] {\n const { delimiter = COMMA, quotation = DOUBLE_QUOTE } = options;\n if (typeof delimiter !== \"string\" || delimiter.length !== 1) {\n throw new InvalidOptionError(\n \"Invalid delimiter, must be a single character on WASM.\",\n );\n }\n if (quotation !== DOUBLE_QUOTE) {\n throw new InvalidOptionError(\n \"Invalid quotation, must be double quote on WASM.\",\n );\n }\n assertCommonOptions({ delimiter, quotation });\n const demiliterCode = delimiter.charCodeAt(0);\n return JSON.parse(parseStringToArraySync(csv, demiliterCode));\n}\n"],"names":["csv","options","delimiter","COMMA","quotation","DOUBLE_QUOTE","length","InvalidOptionError","assertCommonOptions","demiliterCode","charCodeAt","JSON","parse","parseStringToArraySync"],"mappings":"gRA0CO,SACLA,EACAC,EAAyB,IAEzB,MAAMC,UAAEA,EAAYC,EAAOA,MAAAC,UAAAA,EAAYC,gBAAiBJ,EACxD,GAAyB,iBAAdC,GAA+C,IAArBA,EAAUI,OAC7C,MAAM,IAAIC,EAAAA,mBACR,0DAGJ,GAAIH,IAAcC,EAAAA,aAChB,MAAM,IAAIE,EAAAA,mBACR,oDAGgBC,EAAAA,oBAAA,CAAEN,YAAWE,cAC3B,MAAAK,EAAgBP,EAAUQ,WAAW,GAC3C,OAAOC,KAAKC,MAAMC,EAAAA,uBAAuBb,EAAKS,GAChD"}
package/dist/cjs/parseStringToIterableIterator.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs"),r=require("./RecordAssembler.cjs");exports.parseStringToIterableIterator=function(t
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs"),r=require("./RecordAssembler.cjs"),o=require("./commonParseErrorHandling.cjs");exports.parseStringToIterableIterator=function(s,t){try{const o=new e.Lexer(t),n=new r.RecordAssembler(t),c=o.lex(s);return n.assemble(c)}catch(n){o.commonParseErrorHandling(n)}};
//# sourceMappingURL=parseStringToIterableIterator.cjs.map
package/dist/cjs/parseStringToIterableIterator.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseStringToIterableIterator.cjs","sources":["../../src/parseStringToIterableIterator.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\n\nexport function parseStringToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n const lexer = new Lexer(options);\n
+
{"version":3,"file":"parseStringToIterableIterator.cjs","sources":["../../src/parseStringToIterableIterator.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return assembler.assemble(tokens);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":["csv","options","lexer","Lexer","assembler","RecordAssembler","tokens","lex","assemble","error","commonParseErrorHandling"],"mappings":"oOAKgB,SAGdA,EACAC,GAEI,IACI,MAAAC,EAAQ,IAAIC,QAAMF,GAClBG,EAAY,IAAIC,kBAAgBJ,GAChCK,EAASJ,EAAMK,IAAIP,GAClB,OAAAI,EAAUI,SAASF,SACnBG,GACPC,EAAAA,yBAAyBD,EAC3B,CACF"}
package/dist/cjs/parseStringToStream.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs"),r=require("./RecordAssembler.cjs");exports.parseStringToStream=function(s,t){const o=new e.Lexer(t),n=new r.RecordAssembler(t);return new ReadableStream({start(e){const r=o.lex(s);for(const
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./Lexer.cjs"),r=require("./RecordAssembler.cjs"),o=require("./commonParseErrorHandling.cjs");exports.parseStringToStream=function(s,t){try{const o=new e.Lexer(t),n=new r.RecordAssembler(t);return new ReadableStream({start(e){const r=o.lex(s);for(const o of n.assemble(r))e.enqueue(o);e.close()}})}catch(n){o.commonParseErrorHandling(n)}};
//# sourceMappingURL=parseStringToStream.cjs.map
package/dist/cjs/parseStringToStream.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseStringToStream.cjs","sources":["../../src/parseStringToStream.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\n\nexport function parseStringToStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const lexer = new Lexer(options);\n
+
{"version":3,"file":"parseStringToStream.cjs","sources":["../../src/parseStringToStream.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type { CSVRecord, ParseOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\n\nexport function parseStringToStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n try {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n return new ReadableStream({\n start(controller) {\n const tokens = lexer.lex(csv);\n for (const record of assembler.assemble(tokens)) {\n controller.enqueue(record);\n }\n controller.close();\n },\n });\n } catch (error) {\n commonParseErrorHandling(error);\n }\n}\n"],"names":["csv","options","lexer","Lexer","assembler","RecordAssembler","ReadableStream","start","controller","tokens","lex","record","assemble","enqueue","close","error","commonParseErrorHandling"],"mappings":"0NAKgB,SACdA,EACAC,GAEI,IACI,MAAAC,EAAQ,IAAIC,QAAMF,GAClBG,EAAY,IAAIC,kBAAgBJ,GACtC,OAAO,IAAIK,eAAe,CACxB,KAAAC,CAAMC,GACE,MAAAC,EAASP,EAAMQ,IAAIV,GACzB,IAAA,MAAWW,KAAUP,EAAUQ,SAASH,GACtCD,EAAWK,QAAQF,GAErBH,EAAWM,OACb,UAEKC,GACPC,EAAAA,yBAAyBD,EAC3B,CACF"}
package/dist/cjs/utils/convertBinaryToString.cjs.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"convertBinaryToString.cjs","sources":["../../../src/utils/convertBinaryToString.ts"],"sourcesContent":["import type { BinaryOptions } from \"../common/types.ts\";\n\n/**\n * Converts a binary string to a string.\n *\n * @param binary - The binary string to convert.\n * @param options - The options for parsing the binary string.\n * @returns The converted string.\n * @throws {RangeError} The given charset is not supported.\n * @throws {TypeError} The encoded data was not valid.\n */\nexport function convertBinaryToString(\n binary: Uint8Array | ArrayBuffer,\n options: BinaryOptions,\n): string {\n return new TextDecoder(options?.charset, {\n ignoreBOM: options?.ignoreBOM,\n fatal: options?.fatal,\n }).decode(binary instanceof ArrayBuffer ? new Uint8Array(binary) : binary);\n}\n"],"names":["binary","options","TextDecoder","charset","ignoreBOM","fatal","decode","ArrayBuffer","Uint8Array"],"mappings":"8GAWgB,SACdA,EACAC,GAEO,OAAA,IAAIC,YAAYD,GAASE,QAAS,CACvCC,UAAWH,GAASG,UACpBC,MAAOJ,GAASI,QACfC,OAAON,aAAkBO,YAAc,IAAIC,WAAWR,GAAUA,EACrE"}
package/dist/cjs/utils/pipeline.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"}),exports.pipeline=function(e,...
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"}),exports.pipeline=function(e,...r){return new ReadableStream({start:t=>{r.reduce(((e,r)=>e.pipeThrough(r)),e).pipeTo(new WritableStream({write:e=>t.enqueue(e),close:()=>t.close()})).catch((e=>t.error(e)))}})};
//# sourceMappingURL=pipeline.cjs.map
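`pipeline` gains a `.catch` that forwards any failure of the internal `pipeTo` chain into the outer stream via `controller.error(...)`, so a throwing transformer now rejects the reader instead of leaving it pending. A standalone sketch mirroring the source embedded in the map below (the helper itself is internal to the package):

```ts
// Mirrors src/utils/pipeline.ts from the updated source map (internal helper).
function pipeline<I, O>(
  stream: ReadableStream<I>,
  ...transformers: TransformStream[]
): ReadableStream<O> {
  return new ReadableStream<O>({
    start: (controller) => {
      transformers
        .reduce<ReadableStream>((s, t) => s.pipeThrough(t), stream)
        .pipeTo(
          new WritableStream({
            write: (v) => controller.enqueue(v),
            close: () => controller.close(),
          }),
        )
        // New in 0.10.0: propagate pipeline failures to the outer stream.
        .catch((error) => controller.error(error));
    },
  });
}
```

A reader of the returned stream now observes the original failure as a rejection from `read()` rather than a stream that never settles.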
package/dist/cjs/utils/pipeline.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"pipeline.cjs","sources":["../../../src/utils/pipeline.ts"],"sourcesContent":["export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n ))();\n },\n });\n}\n"],"names":["stream","transformers","ReadableStream","start","controller","reduce","transformer","pipeThrough","pipeTo","WritableStream","write","v","enqueue","close"],"mappings":"iGAkBgB,SACdA,KACGC,GAEH,OAAO,IAAIC,eAAe,CACxBC,MAAQC,IAEJH,EACGI,QACC,CAACL,EAAQM,IAAgBN,EAAOO,YAAYD,IAC5CN,GAEDQ,OACC,IAAIC,eAAe,CACjBC,MAAQC,GAAMP,EAAWQ,QAAQD,GACjCE,MAAO,IAAMT,EAAWS,
+
{"version":3,"file":"pipeline.cjs","sources":["../../../src/utils/pipeline.ts"],"sourcesContent":["export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n )\n .catch((error) => controller.error(error)))();\n },\n });\n}\n"],"names":["stream","transformers","ReadableStream","start","controller","reduce","transformer","pipeThrough","pipeTo","WritableStream","write","v","enqueue","close","catch","error"],"mappings":"iGAkBgB,SACdA,KACGC,GAEH,OAAO,IAAIC,eAAe,CACxBC,MAAQC,IAEJH,EACGI,QACC,CAACL,EAAQM,IAAgBN,EAAOO,YAAYD,IAC5CN,GAEDQ,OACC,IAAIC,eAAe,CACjBC,MAAQC,GAAMP,EAAWQ,QAAQD,GACjCE,MAAO,IAAMT,EAAWS,WAG3BC,OAAOC,GAAUX,EAAWW,MAAMA,IAAS,GAGtD"}
package/dist/cjs/web-csv-toolbox.cjs
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./common/constants.cjs"),e=require("./LexerTransformer.cjs"),s=require("./
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const r=require("./common/constants.cjs"),e=require("./LexerTransformer.cjs"),s=require("./loadWASM.cjs"),a=require("./parse.cjs"),t=require("./parseBinary.cjs"),o=require("./parseResponse.cjs"),i=require("./parseString.cjs"),p=require("./parseStringStream.cjs"),n=require("./parseStringToArraySyncWASM.cjs"),S=require("./parseUint8ArrayStream.cjs"),c=require("./RecordAssemblerTransformer.cjs");exports.Field=r.Field,exports.FieldDelimiter=r.FieldDelimiter,exports.RecordDelimiter=r.RecordDelimiter,exports.LexerTransformer=e.LexerTransformer,exports.loadWASM=s.loadWASM,exports.parse=a.parse,exports.parseBinary=t.parseBinary,exports.parseResponse=o.parseResponse,exports.parseString=i.parseString,exports.parseStringStream=p.parseStringStream,exports.parseStringToArraySyncWASM=n.parseStringToArraySyncWASM,exports.parseUint8ArrayStream=S.parseUint8ArrayStream,exports.RecordAssemblerTransformer=c.RecordAssemblerTransformer;
//# sourceMappingURL=web-csv-toolbox.cjs.map
package/dist/es/Lexer.js
CHANGED
@@ -1,5 +1,6 @@
 import { assertCommonOptions } from './assertCommonOptions.js';
 import { RecordDelimiter, FieldDelimiter, Field } from './common/constants.js';
+import { ParseError } from './common/errors.js';
 import { COMMA, DOUBLE_QUOTE, CRLF, LF } from './constants.js';
 import { escapeRegExp } from './utils/escapeRegExp.js';

@@ -189,6 +190,11 @@ class Lexer {
         cur = next;
         next = this.#buffer[offset + 1];
       } while (cur !== void 0);
+      if (this.#flush) {
+        throw new ParseError("Unexpected EOF while parsing quoted field.", {
+          position: { ...this.#cursor }
+        });
+      }
       return null;
     }
     const match = this.#matcher.exec(this.#buffer);
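This is the behavioral core of 0.10.0: when the lexer is flushing and reaches the end of input inside a quoted field, it now throws the new `ParseError` with the current cursor position, where 0.9.0 silently returned `null`. A sketch of triggering it from the synchronous string API (whether `ParseError` is re-exported from the package root is not visible in this diff, so the catch just logs):

```ts
import { parseString } from "web-csv-toolbox";

try {
  // The closing quote never arrives, so the flush path hits the new check.
  parseString.toArraySync('name,age\n"Alice,42');
} catch (error) {
  // Per the added lines: ParseError("Unexpected EOF while parsing quoted field.")
  // carrying a `position` copied from the lexer cursor.
  console.error(error);
}
```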
package/dist/es/Lexer.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"Lexer.js","sources":["../../src/Lexer.ts"],"sourcesContent":["import { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { Field, FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport type {\n CommonOptions,\n Position,\n RecordDelimiterToken,\n Token,\n} from \"./common/types.ts\";\nimport { COMMA, CRLF, DOUBLE_QUOTE, LF } from \"./constants.ts\";\nimport { escapeRegExp } from \"./utils/escapeRegExp.ts\";\n\n/**\n * CSV Lexer.\n *\n * Lexter tokenizes CSV data into fields and records.\n */\nexport class Lexer {\n #delimiter: string;\n #quotation: string;\n #buffer = \"\";\n #flush = false;\n #matcher: RegExp;\n #fieldDelimiterLength: number;\n\n #cursor: Position = {\n line: 1,\n column: 1,\n offset: 0,\n };\n #rowNumber = 1;\n\n /**\n * Constructs a new Lexer instance.\n * @param options - The common options for the lexer.\n */\n constructor({\n delimiter = COMMA,\n quotation = DOUBLE_QUOTE,\n }: CommonOptions = {}) {\n assertCommonOptions({ delimiter, quotation });\n this.#delimiter = delimiter;\n this.#quotation = quotation;\n this.#fieldDelimiterLength = delimiter.length;\n const d = escapeRegExp(delimiter);\n const q = escapeRegExp(quotation);\n this.#matcher = new RegExp(\n `^(?:(?!${q})(?!${d})(?![\\\\r\\\\n]))([\\\\S\\\\s\\\\uFEFF\\\\xA0]+?)(?=${q}|${d}|\\\\r|\\\\n|$)`,\n );\n }\n\n /**\n * Lexes the given chunk of CSV data.\n * @param chunk - The chunk of CSV data to be lexed.\n * @param buffering - Indicates whether the lexer is buffering or not.\n * @returns An iterable iterator of tokens.\n */\n public lex(chunk: string | null, buffering = false): IterableIterator<Token> {\n if (!buffering) {\n this.#flush = true;\n }\n if (typeof chunk === \"string\" && chunk.length !== 0) {\n this.#buffer += chunk;\n }\n\n return this.#tokens();\n }\n\n /**\n * Flushes the lexer and returns any remaining tokens.\n * @returns An array of tokens.\n */\n public flush(): Token[] {\n this.#flush = true;\n return [...this.#tokens()];\n }\n\n /**\n * Generates tokens from the buffered CSV data.\n * @yields Tokens from the buffered CSV data.\n */\n *#tokens(): Generator<Token> {\n if (this.#flush) {\n // Trim the last CRLF or LF\n if (this.#buffer.endsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(0, -2 /* -CRLF.length */);\n } else if (this.#buffer.endsWith(LF)) {\n this.#buffer = this.#buffer.slice(0, -1 /* -LF.length */);\n }\n }\n let token: Token | null;\n while ((token = this.#nextToken())) {\n yield token;\n }\n }\n\n /**\n * Retrieves the next token from the buffered CSV data.\n * @returns The next token or null if there are no more tokens.\n */\n #nextToken(): Token | null {\n if (this.#buffer.length === 0) {\n return null;\n }\n // Buffer is Record Delimiter, defer to the next iteration.\n if (\n this.#flush === false &&\n (this.#buffer === CRLF || this.#buffer === LF)\n ) {\n return null;\n }\n\n // Check for CRLF\n if (this.#buffer.startsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(2);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 2; // CRLF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: CRLF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for LF\n if (this.#buffer.startsWith(LF)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset 
+= 1; // LF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: LF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for Delimiter\n if (this.#buffer.startsWith(this.#delimiter)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += this.#fieldDelimiterLength;\n this.#cursor.offset += this.#fieldDelimiterLength;\n return {\n type: FieldDelimiter,\n value: this.#delimiter,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Check for Quoted String\n if (this.#buffer.startsWith(this.#quotation)) {\n /**\n * Extract Quoted field.\n *\n * The following code is equivalent to the following:\n *\n * If the next character is a quote:\n * - If the character after that is a quote, then append a quote to the value and skip two characters.\n * - Otherwise, return the quoted string.\n * Otherwise, append the character to the value and skip one character.\n *\n * ```plaintext\n * | `i` | `i + 1` | `i + 2` |\n * |------------|------------|----------|\n * | cur | next | | => Variable names\n * | #quotation | #quotation | | => Escaped quote\n * | #quotation | (EOF) | | => Closing quote\n * | #quotation | undefined | | => End of buffer\n * | undefined | | | => End of buffer\n * ```\n */\n let value = \"\";\n let offset = 1; // Skip the opening quote\n let column = 2; // Skip the opening quote\n let line = 0;\n\n // Define variables\n let cur: string = this.#buffer[offset];\n let next: string | undefined = this.#buffer[offset + 1];\n do {\n // If the current character is a quote, check the next characters for closing quotes.\n if (cur === this.#quotation) {\n // If the cur character is a quote and the next character is a quote,\n // then append a quote to the value and skip two characters.\n if (next === this.#quotation) {\n // Append a quote to the value and skip two characters.\n value += this.#quotation;\n offset += 2;\n cur = this.#buffer[offset];\n next = this.#buffer[offset + 1];\n\n // Update the diff\n column += 2;\n continue;\n }\n\n // If the cur character is a quote and the next character is undefined,\n // then return null.\n if (next === undefined && this.#flush === false) {\n return null;\n }\n\n // Otherwise, return the quoted string.\n // Update the buffer and return the token\n offset++;\n this.#buffer = this.#buffer.slice(offset);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += column;\n this.#cursor.offset += offset;\n this.#cursor.line += line;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n // return this.#field(value, { column, offset, line });\n }\n\n // Append the character to the value.\n value += cur;\n\n // Prepare for the next iteration\n if (cur === LF) {\n // If the current character is a LF,\n // then increment the line number and reset the column number.\n line++;\n column = 1;\n } else {\n // Otherwise, increment the column number and offset.\n column++;\n }\n\n offset++;\n cur = next;\n next = this.#buffer[offset + 1];\n } while (cur !== undefined);\n\n // If we get here, we've reached the end of the buffer\n return null;\n // TODO: If flash is true, the buffer is exiting unquoted and an exception should be raised.\n }\n\n // Check for Unquoted String\n const match = this.#matcher.exec(this.#buffer);\n if (match) {\n // If we're flushing and the match doesn't 
consume the entire buffer,\n // then return null\n if (this.#flush === false && match[0].length === this.#buffer.length) {\n return null;\n }\n const value = match[1];\n this.#buffer = this.#buffer.slice(value.length);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += value.length;\n this.#cursor.offset += value.length;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Otherwise, return null\n return null;\n }\n}\n"],"names":[],"mappings":";;;;;AAgBO,MAAM,KAAM,CAAA;AAAA,EACjB,UAAA,CAAA;AAAA,EACA,UAAA,CAAA;AAAA,EACA,OAAU,GAAA,EAAA,CAAA;AAAA,EACV,MAAS,GAAA,KAAA,CAAA;AAAA,EACT,QAAA,CAAA;AAAA,EACA,qBAAA,CAAA;AAAA,EAEA,OAAoB,GAAA;AAAA,IAClB,IAAM,EAAA,CAAA;AAAA,IACN,MAAQ,EAAA,CAAA;AAAA,IACR,MAAQ,EAAA,CAAA;AAAA,GACV,CAAA;AAAA,EACA,UAAa,GAAA,CAAA,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMb,WAAY,CAAA;AAAA,IACV,SAAY,GAAA,KAAA;AAAA,IACZ,SAAY,GAAA,YAAA;AAAA,GACd,GAAmB,EAAI,EAAA;AACrB,IAAoB,mBAAA,CAAA,EAAE,SAAW,EAAA,SAAA,EAAW,CAAA,CAAA;AAC5C,IAAA,IAAA,CAAK,UAAa,GAAA,SAAA,CAAA;AAClB,IAAA,IAAA,CAAK,UAAa,GAAA,SAAA,CAAA;AAClB,IAAA,IAAA,CAAK,wBAAwB,SAAU,CAAA,MAAA,CAAA;AACvC,IAAM,MAAA,CAAA,GAAI,aAAa,SAAS,CAAA,CAAA;AAChC,IAAM,MAAA,CAAA,GAAI,aAAa,SAAS,CAAA,CAAA;AAChC,IAAA,IAAA,CAAK,WAAW,IAAI,MAAA;AAAA,MAClB,UAAU,CAAC,CAAA,IAAA,EAAO,CAAC,CAA4C,yCAAA,EAAA,CAAC,IAAI,CAAC,CAAA,WAAA,CAAA;AAAA,KACvE,CAAA;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,GAAA,CAAI,KAAsB,EAAA,SAAA,GAAY,KAAgC,EAAA;AAC3E,IAAA,IAAI,CAAC,SAAW,EAAA;AACd,MAAA,IAAA,CAAK,MAAS,GAAA,IAAA,CAAA;AAAA,KAChB;AACA,IAAA,IAAI,OAAO,KAAA,KAAU,QAAY,IAAA,KAAA,CAAM,WAAW,CAAG,EAAA;AACnD,MAAA,IAAA,CAAK,OAAW,IAAA,KAAA,CAAA;AAAA,KAClB;AAEA,IAAA,OAAO,KAAK,OAAQ,EAAA,CAAA;AAAA,GACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,KAAiB,GAAA;AACtB,IAAA,IAAA,CAAK,MAAS,GAAA,IAAA,CAAA;AACd,IAAA,OAAO,CAAC,GAAG,IAAK,CAAA,OAAA,EAAS,CAAA,CAAA;AAAA,GAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,CAAC,OAA4B,GAAA;AAC3B,IAAA,IAAI,KAAK,MAAQ,EAAA;AAEf,MAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,QAAS,CAAA,IAAI,CAAG,EAAA;AAC/B,QAAK,IAAA,CAAA,OAAA,GAAU,KAAK,OAAQ,CAAA,KAAA;AAAA,UAAM,CAAA;AAAA,UAAG,CAAA,CAAA;AAAA;AAAA,SAAqB,CAAA;AAAA,OACjD,MAAA,IAAA,IAAA,CAAK,OAAQ,CAAA,QAAA,CAAS,EAAE,CAAG,EAAA;AACpC,QAAK,IAAA,CAAA,OAAA,GAAU,KAAK,OAAQ,CAAA,KAAA;AAAA,UAAM,CAAA;AAAA,UAAG,CAAA,CAAA;AAAA;AAAA,SAAmB,CAAA;AAAA,OAC1D;AAAA,KACF;AACA,IAAI,IAAA,KAAA,CAAA;AACJ,IAAQ,OAAA,KAAA,GAAQ,IAAK,CAAA,UAAA,EAAe,EAAA;AAClC,MAAM,MAAA,KAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAA2B,GAAA;AACzB,IAAI,IAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,KAAW,CAAG,EAAA;AAC7B,MAAO,OAAA,IAAA,CAAA;AAAA,KACT;AAEA,IACE,IAAA,IAAA,CAAK,WAAW,KACf,KAAA,IAAA,CAAK,YAAY,IAAQ,IAAA,IAAA,CAAK,YAAY,EAC3C,CAAA,EAAA;AACA,MAAO,OAAA,IAAA,CAAA;AAAA,KACT;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,IAAI,CAAG,EAAA;AACjC,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAA,CAAA;AACnC,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAA,IAAA,CAAK,OAAQ,CAAA,IAAA,EAAA,CAAA;AACb,MAAA,IAAA,CAAK,QAAQ,MAAS,GAAA,CAAA,CAAA;AACtB,MAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,CAAA,CAAA;AACvB,MAAA,MAAM,KAA8B,GAAA;AAAA,QAClC,IAAM,EAAA,eAAA;AAAA,QACN,KAAO,EAAA,IAAA;AAAA,QACP,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA,EAAA;AAAA,SAClB;AAAA,OACF,CAAA;AACA,MAAO,OAAA,KAAA,CAAA;AAAA,KACT;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,EAAE,CAAG,EAAA;AAC/B,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAA,CAAA;AACnC,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAA,IAAA,CAAK,OAAQ,CAAA,IAAA,EAAA,CAAA;AACb,MAAA
,IAAA,CAAK,QAAQ,MAAS,GAAA,CAAA,CAAA;AACtB,MAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,CAAA,CAAA;AACvB,MAAA,MAAM,KAA8B,GAAA;AAAA,QAClC,IAAM,EAAA,eAAA;AAAA,QACN,KAAO,EAAA,EAAA;AAAA,QACP,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA,EAAA;AAAA,SAClB;AAAA,OACF,CAAA;AACA,MAAO,OAAA,KAAA,CAAA;AAAA,KACT;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,IAAA,CAAK,UAAU,CAAG,EAAA;AAC5C,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAA,CAAA;AACnC,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,IAAK,CAAA,qBAAA,CAAA;AAC5B,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,IAAK,CAAA,qBAAA,CAAA;AAC5B,MAAO,OAAA;AAAA,QACL,IAAM,EAAA,cAAA;AAAA,QACN,OAAO,IAAK,CAAA,UAAA;AAAA,QACZ,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA;AAAA,SAClB;AAAA,OACF,CAAA;AAAA,KACF;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,IAAA,CAAK,UAAU,CAAG,EAAA;AAqB5C,MAAA,IAAI,KAAQ,GAAA,EAAA,CAAA;AACZ,MAAA,IAAI,MAAS,GAAA,CAAA,CAAA;AACb,MAAA,IAAI,MAAS,GAAA,CAAA,CAAA;AACb,MAAA,IAAI,IAAO,GAAA,CAAA,CAAA;AAGX,MAAI,IAAA,GAAA,GAAc,IAAK,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAA;AACrC,MAAA,IAAI,IAA2B,GAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AACtD,MAAG,GAAA;AAED,QAAI,IAAA,GAAA,KAAQ,KAAK,UAAY,EAAA;AAG3B,UAAI,IAAA,IAAA,KAAS,KAAK,UAAY,EAAA;AAE5B,YAAA,KAAA,IAAS,IAAK,CAAA,UAAA,CAAA;AACd,YAAU,MAAA,IAAA,CAAA,CAAA;AACV,YAAM,GAAA,GAAA,IAAA,CAAK,QAAQ,MAAM,CAAA,CAAA;AACzB,YAAO,IAAA,GAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AAG9B,YAAU,MAAA,IAAA,CAAA,CAAA;AACV,YAAA,SAAA;AAAA,WACF;AAIA,UAAA,IAAI,IAAS,KAAA,KAAA,CAAA,IAAa,IAAK,CAAA,MAAA,KAAW,KAAO,EAAA;AAC/C,YAAO,OAAA,IAAA,CAAA;AAAA,WACT;AAIA,UAAA,MAAA,EAAA,CAAA;AACA,UAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,MAAM,CAAA,CAAA;AACxC,UAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,UAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,MAAA,CAAA;AACvB,UAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,MAAA,CAAA;AACvB,UAAA,IAAA,CAAK,QAAQ,IAAQ,IAAA,IAAA,CAAA;AACrB,UAAO,OAAA;AAAA,YACL,IAAM,EAAA,KAAA;AAAA,YACN,KAAA;AAAA,YACA,QAAU,EAAA;AAAA,cACR,KAAA;AAAA,cACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,cACvB,WAAW,IAAK,CAAA,UAAA;AAAA,aAClB;AAAA,WACF,CAAA;AAAA,SAEF;AAGA,QAAS,KAAA,IAAA,GAAA,CAAA;AAGT,QAAA,IAAI,QAAQ,EAAI,EAAA;AAGd,UAAA,IAAA,EAAA,CAAA;AACA,UAAS,MAAA,GAAA,CAAA,CAAA;AAAA,SACJ,MAAA;AAEL,UAAA,MAAA,EAAA,CAAA;AAAA,SACF;AAEA,QAAA,MAAA,EAAA,CAAA;AACA,QAAM,GAAA,GAAA,IAAA,CAAA;AACN,QAAO,IAAA,GAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AAAA,eACvB,GAAQ,KAAA,KAAA,CAAA,EAAA;AAGjB,MAAO,OAAA,IAAA,CAAA;AAAA,KAET;AAGA,IAAA,MAAM,KAAQ,GAAA,IAAA,CAAK,QAAS,CAAA,IAAA,CAAK,KAAK,OAAO,CAAA,CAAA;AAC7C,IAAA,IAAI,KAAO,EAAA;AAGT,MAAI,IAAA,IAAA,CAAK,WAAW,KAAS,IAAA,KAAA,CAAM,CAAC,CAAE,CAAA,MAAA,KAAW,IAAK,CAAA,OAAA,CAAQ,MAAQ,EAAA;AACpE,QAAO,OAAA,IAAA,CAAA;AAAA,OACT;AACA,MAAM,MAAA,KAAA,GAAQ,MAAM,CAAC,CAAA,CAAA;AACrB,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,MAAM,MAAM,CAAA,CAAA;AAC9C,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,KAAM,CAAA,MAAA,CAAA;AAC7B,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,KAAM,CAAA,MAAA,CAAA;AAC7B,MAAO,OAAA;AAAA,QACL,IAAM,EAAA,KAAA;AAAA,QACN,KAAA;AAAA,QACA,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA;AAAA,SAClB;AAAA,OACF,CAAA;AAAA,KACF;AAGA,IAAO,OAAA,IAAA,CAAA;AAAA,GACT;AACF;;;;"}
+
{"version":3,"file":"Lexer.js","sources":["../../src/Lexer.ts"],"sourcesContent":["import { assertCommonOptions } from \"./assertCommonOptions.ts\";\nimport { Field, FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n CommonOptions,\n Position,\n RecordDelimiterToken,\n Token,\n} from \"./common/types.ts\";\nimport { COMMA, CRLF, DOUBLE_QUOTE, LF } from \"./constants.ts\";\nimport { escapeRegExp } from \"./utils/escapeRegExp.ts\";\n\n/**\n * CSV Lexer.\n *\n * Lexter tokenizes CSV data into fields and records.\n */\nexport class Lexer {\n #delimiter: string;\n #quotation: string;\n #buffer = \"\";\n #flush = false;\n #matcher: RegExp;\n #fieldDelimiterLength: number;\n\n #cursor: Position = {\n line: 1,\n column: 1,\n offset: 0,\n };\n #rowNumber = 1;\n\n /**\n * Constructs a new Lexer instance.\n * @param options - The common options for the lexer.\n */\n constructor({\n delimiter = COMMA,\n quotation = DOUBLE_QUOTE,\n }: CommonOptions = {}) {\n assertCommonOptions({ delimiter, quotation });\n this.#delimiter = delimiter;\n this.#quotation = quotation;\n this.#fieldDelimiterLength = delimiter.length;\n const d = escapeRegExp(delimiter);\n const q = escapeRegExp(quotation);\n this.#matcher = new RegExp(\n `^(?:(?!${q})(?!${d})(?![\\\\r\\\\n]))([\\\\S\\\\s\\\\uFEFF\\\\xA0]+?)(?=${q}|${d}|\\\\r|\\\\n|$)`,\n );\n }\n\n /**\n * Lexes the given chunk of CSV data.\n * @param chunk - The chunk of CSV data to be lexed.\n * @param buffering - Indicates whether the lexer is buffering or not.\n * @returns An iterable iterator of tokens.\n */\n public lex(chunk: string | null, buffering = false): IterableIterator<Token> {\n if (!buffering) {\n this.#flush = true;\n }\n if (typeof chunk === \"string\" && chunk.length !== 0) {\n this.#buffer += chunk;\n }\n\n return this.#tokens();\n }\n\n /**\n * Flushes the lexer and returns any remaining tokens.\n * @returns An array of tokens.\n */\n public flush(): Token[] {\n this.#flush = true;\n return [...this.#tokens()];\n }\n\n /**\n * Generates tokens from the buffered CSV data.\n * @yields Tokens from the buffered CSV data.\n */\n *#tokens(): Generator<Token> {\n if (this.#flush) {\n // Trim the last CRLF or LF\n if (this.#buffer.endsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(0, -2 /* -CRLF.length */);\n } else if (this.#buffer.endsWith(LF)) {\n this.#buffer = this.#buffer.slice(0, -1 /* -LF.length */);\n }\n }\n let token: Token | null;\n while ((token = this.#nextToken())) {\n yield token;\n }\n }\n\n /**\n * Retrieves the next token from the buffered CSV data.\n * @returns The next token or null if there are no more tokens.\n */\n #nextToken(): Token | null {\n if (this.#buffer.length === 0) {\n return null;\n }\n // Buffer is Record Delimiter, defer to the next iteration.\n if (\n this.#flush === false &&\n (this.#buffer === CRLF || this.#buffer === LF)\n ) {\n return null;\n }\n\n // Check for CRLF\n if (this.#buffer.startsWith(CRLF)) {\n this.#buffer = this.#buffer.slice(2);\n const start: Position = { ...this.#cursor };\n this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 2; // CRLF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: CRLF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for LF\n if (this.#buffer.startsWith(LF)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n 
this.#cursor.line++;\n this.#cursor.column = 1;\n this.#cursor.offset += 1; // LF.length\n const token: RecordDelimiterToken = {\n type: RecordDelimiter,\n value: LF,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber++,\n },\n };\n return token;\n }\n\n // Check for Delimiter\n if (this.#buffer.startsWith(this.#delimiter)) {\n this.#buffer = this.#buffer.slice(1);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += this.#fieldDelimiterLength;\n this.#cursor.offset += this.#fieldDelimiterLength;\n return {\n type: FieldDelimiter,\n value: this.#delimiter,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Check for Quoted String\n if (this.#buffer.startsWith(this.#quotation)) {\n /**\n * Extract Quoted field.\n *\n * The following code is equivalent to the following:\n *\n * If the next character is a quote:\n * - If the character after that is a quote, then append a quote to the value and skip two characters.\n * - Otherwise, return the quoted string.\n * Otherwise, append the character to the value and skip one character.\n *\n * ```plaintext\n * | `i` | `i + 1` | `i + 2` |\n * |------------|------------|----------|\n * | cur | next | | => Variable names\n * | #quotation | #quotation | | => Escaped quote\n * | #quotation | (EOF) | | => Closing quote\n * | #quotation | undefined | | => End of buffer\n * | undefined | | | => End of buffer\n * ```\n */\n let value = \"\";\n let offset = 1; // Skip the opening quote\n let column = 2; // Skip the opening quote\n let line = 0;\n\n // Define variables\n let cur: string = this.#buffer[offset];\n let next: string | undefined = this.#buffer[offset + 1];\n do {\n // If the current character is a quote, check the next characters for closing quotes.\n if (cur === this.#quotation) {\n // If the cur character is a quote and the next character is a quote,\n // then append a quote to the value and skip two characters.\n if (next === this.#quotation) {\n // Append a quote to the value and skip two characters.\n value += this.#quotation;\n offset += 2;\n cur = this.#buffer[offset];\n next = this.#buffer[offset + 1];\n\n // Update the diff\n column += 2;\n continue;\n }\n\n // If the cur character is a quote and the next character is undefined,\n // then return null.\n if (next === undefined && this.#flush === false) {\n return null;\n }\n\n // Otherwise, return the quoted string.\n // Update the buffer and return the token\n offset++;\n this.#buffer = this.#buffer.slice(offset);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += column;\n this.#cursor.offset += offset;\n this.#cursor.line += line;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Append the character to the value.\n value += cur;\n\n // Prepare for the next iteration\n if (cur === LF) {\n // If the current character is a LF,\n // then increment the line number and reset the column number.\n line++;\n column = 1;\n } else {\n // Otherwise, increment the column number and offset.\n column++;\n }\n\n offset++;\n cur = next;\n next = this.#buffer[offset + 1];\n } while (cur !== undefined);\n\n if (this.#flush) {\n throw new ParseError(\"Unexpected EOF while parsing quoted field.\", {\n position: { ...this.#cursor },\n });\n }\n return null;\n }\n\n // Check for Unquoted String\n const match = this.#matcher.exec(this.#buffer);\n if (match) {\n // If we're flushing and the match doesn't 
consume the entire buffer,\n // then return null\n if (this.#flush === false && match[0].length === this.#buffer.length) {\n return null;\n }\n const value = match[1];\n this.#buffer = this.#buffer.slice(value.length);\n const start: Position = { ...this.#cursor };\n this.#cursor.column += value.length;\n this.#cursor.offset += value.length;\n return {\n type: Field,\n value,\n location: {\n start,\n end: { ...this.#cursor },\n rowNumber: this.#rowNumber,\n },\n };\n }\n\n // Otherwise, return null\n return null;\n }\n}\n"],"names":[],"mappings":";;;;;;AAiBO,MAAM,KAAM,CAAA;AAAA,EACjB,UAAA,CAAA;AAAA,EACA,UAAA,CAAA;AAAA,EACA,OAAU,GAAA,EAAA,CAAA;AAAA,EACV,MAAS,GAAA,KAAA,CAAA;AAAA,EACT,QAAA,CAAA;AAAA,EACA,qBAAA,CAAA;AAAA,EAEA,OAAoB,GAAA;AAAA,IAClB,IAAM,EAAA,CAAA;AAAA,IACN,MAAQ,EAAA,CAAA;AAAA,IACR,MAAQ,EAAA,CAAA;AAAA,GACV,CAAA;AAAA,EACA,UAAa,GAAA,CAAA,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMb,WAAY,CAAA;AAAA,IACV,SAAY,GAAA,KAAA;AAAA,IACZ,SAAY,GAAA,YAAA;AAAA,GACd,GAAmB,EAAI,EAAA;AACrB,IAAoB,mBAAA,CAAA,EAAE,SAAW,EAAA,SAAA,EAAW,CAAA,CAAA;AAC5C,IAAA,IAAA,CAAK,UAAa,GAAA,SAAA,CAAA;AAClB,IAAA,IAAA,CAAK,UAAa,GAAA,SAAA,CAAA;AAClB,IAAA,IAAA,CAAK,wBAAwB,SAAU,CAAA,MAAA,CAAA;AACvC,IAAM,MAAA,CAAA,GAAI,aAAa,SAAS,CAAA,CAAA;AAChC,IAAM,MAAA,CAAA,GAAI,aAAa,SAAS,CAAA,CAAA;AAChC,IAAA,IAAA,CAAK,WAAW,IAAI,MAAA;AAAA,MAClB,UAAU,CAAC,CAAA,IAAA,EAAO,CAAC,CAA4C,yCAAA,EAAA,CAAC,IAAI,CAAC,CAAA,WAAA,CAAA;AAAA,KACvE,CAAA;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,GAAA,CAAI,KAAsB,EAAA,SAAA,GAAY,KAAgC,EAAA;AAC3E,IAAA,IAAI,CAAC,SAAW,EAAA;AACd,MAAA,IAAA,CAAK,MAAS,GAAA,IAAA,CAAA;AAAA,KAChB;AACA,IAAA,IAAI,OAAO,KAAA,KAAU,QAAY,IAAA,KAAA,CAAM,WAAW,CAAG,EAAA;AACnD,MAAA,IAAA,CAAK,OAAW,IAAA,KAAA,CAAA;AAAA,KAClB;AAEA,IAAA,OAAO,KAAK,OAAQ,EAAA,CAAA;AAAA,GACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,KAAiB,GAAA;AACtB,IAAA,IAAA,CAAK,MAAS,GAAA,IAAA,CAAA;AACd,IAAA,OAAO,CAAC,GAAG,IAAK,CAAA,OAAA,EAAS,CAAA,CAAA;AAAA,GAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,CAAC,OAA4B,GAAA;AAC3B,IAAA,IAAI,KAAK,MAAQ,EAAA;AAEf,MAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,QAAS,CAAA,IAAI,CAAG,EAAA;AAC/B,QAAK,IAAA,CAAA,OAAA,GAAU,KAAK,OAAQ,CAAA,KAAA;AAAA,UAAM,CAAA;AAAA,UAAG,CAAA,CAAA;AAAA;AAAA,SAAqB,CAAA;AAAA,OACjD,MAAA,IAAA,IAAA,CAAK,OAAQ,CAAA,QAAA,CAAS,EAAE,CAAG,EAAA;AACpC,QAAK,IAAA,CAAA,OAAA,GAAU,KAAK,OAAQ,CAAA,KAAA;AAAA,UAAM,CAAA;AAAA,UAAG,CAAA,CAAA;AAAA;AAAA,SAAmB,CAAA;AAAA,OAC1D;AAAA,KACF;AACA,IAAI,IAAA,KAAA,CAAA;AACJ,IAAQ,OAAA,KAAA,GAAQ,IAAK,CAAA,UAAA,EAAe,EAAA;AAClC,MAAM,MAAA,KAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAA2B,GAAA;AACzB,IAAI,IAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,KAAW,CAAG,EAAA;AAC7B,MAAO,OAAA,IAAA,CAAA;AAAA,KACT;AAEA,IACE,IAAA,IAAA,CAAK,WAAW,KACf,KAAA,IAAA,CAAK,YAAY,IAAQ,IAAA,IAAA,CAAK,YAAY,EAC3C,CAAA,EAAA;AACA,MAAO,OAAA,IAAA,CAAA;AAAA,KACT;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,IAAI,CAAG,EAAA;AACjC,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAA,CAAA;AACnC,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAA,IAAA,CAAK,OAAQ,CAAA,IAAA,EAAA,CAAA;AACb,MAAA,IAAA,CAAK,QAAQ,MAAS,GAAA,CAAA,CAAA;AACtB,MAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,CAAA,CAAA;AACvB,MAAA,MAAM,KAA8B,GAAA;AAAA,QAClC,IAAM,EAAA,eAAA;AAAA,QACN,KAAO,EAAA,IAAA;AAAA,QACP,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA,EAAA;AAAA,SAClB;AAAA,OACF,CAAA;AACA,MAAO,OAAA,KAAA,CAAA;AAAA,KACT;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,EAAE,CAAG,EAAA;AAC/B,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAA,CAAA;AACnC,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAA,IAAA,CAAK,OAAQ,CAAA,IAAA,EAAA,CAAA;AACb,MAA
A,IAAA,CAAK,QAAQ,MAAS,GAAA,CAAA,CAAA;AACtB,MAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,CAAA,CAAA;AACvB,MAAA,MAAM,KAA8B,GAAA;AAAA,QAClC,IAAM,EAAA,eAAA;AAAA,QACN,KAAO,EAAA,EAAA;AAAA,QACP,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA,EAAA;AAAA,SAClB;AAAA,OACF,CAAA;AACA,MAAO,OAAA,KAAA,CAAA;AAAA,KACT;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,IAAA,CAAK,UAAU,CAAG,EAAA;AAC5C,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAA,CAAA;AACnC,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,IAAK,CAAA,qBAAA,CAAA;AAC5B,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,IAAK,CAAA,qBAAA,CAAA;AAC5B,MAAO,OAAA;AAAA,QACL,IAAM,EAAA,cAAA;AAAA,QACN,OAAO,IAAK,CAAA,UAAA;AAAA,QACZ,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA;AAAA,SAClB;AAAA,OACF,CAAA;AAAA,KACF;AAGA,IAAA,IAAI,IAAK,CAAA,OAAA,CAAQ,UAAW,CAAA,IAAA,CAAK,UAAU,CAAG,EAAA;AAqB5C,MAAA,IAAI,KAAQ,GAAA,EAAA,CAAA;AACZ,MAAA,IAAI,MAAS,GAAA,CAAA,CAAA;AACb,MAAA,IAAI,MAAS,GAAA,CAAA,CAAA;AACb,MAAA,IAAI,IAAO,GAAA,CAAA,CAAA;AAGX,MAAI,IAAA,GAAA,GAAc,IAAK,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAA;AACrC,MAAA,IAAI,IAA2B,GAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AACtD,MAAG,GAAA;AAED,QAAI,IAAA,GAAA,KAAQ,KAAK,UAAY,EAAA;AAG3B,UAAI,IAAA,IAAA,KAAS,KAAK,UAAY,EAAA;AAE5B,YAAA,KAAA,IAAS,IAAK,CAAA,UAAA,CAAA;AACd,YAAU,MAAA,IAAA,CAAA,CAAA;AACV,YAAM,GAAA,GAAA,IAAA,CAAK,QAAQ,MAAM,CAAA,CAAA;AACzB,YAAO,IAAA,GAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AAG9B,YAAU,MAAA,IAAA,CAAA,CAAA;AACV,YAAA,SAAA;AAAA,WACF;AAIA,UAAA,IAAI,IAAS,KAAA,KAAA,CAAA,IAAa,IAAK,CAAA,MAAA,KAAW,KAAO,EAAA;AAC/C,YAAO,OAAA,IAAA,CAAA;AAAA,WACT;AAIA,UAAA,MAAA,EAAA,CAAA;AACA,UAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,MAAM,CAAA,CAAA;AACxC,UAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,UAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,MAAA,CAAA;AACvB,UAAA,IAAA,CAAK,QAAQ,MAAU,IAAA,MAAA,CAAA;AACvB,UAAA,IAAA,CAAK,QAAQ,IAAQ,IAAA,IAAA,CAAA;AACrB,UAAO,OAAA;AAAA,YACL,IAAM,EAAA,KAAA;AAAA,YACN,KAAA;AAAA,YACA,QAAU,EAAA;AAAA,cACR,KAAA;AAAA,cACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,cACvB,WAAW,IAAK,CAAA,UAAA;AAAA,aAClB;AAAA,WACF,CAAA;AAAA,SACF;AAGA,QAAS,KAAA,IAAA,GAAA,CAAA;AAGT,QAAA,IAAI,QAAQ,EAAI,EAAA;AAGd,UAAA,IAAA,EAAA,CAAA;AACA,UAAS,MAAA,GAAA,CAAA,CAAA;AAAA,SACJ,MAAA;AAEL,UAAA,MAAA,EAAA,CAAA;AAAA,SACF;AAEA,QAAA,MAAA,EAAA,CAAA;AACA,QAAM,GAAA,GAAA,IAAA,CAAA;AACN,QAAO,IAAA,GAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AAAA,eACvB,GAAQ,KAAA,KAAA,CAAA,EAAA;AAEjB,MAAA,IAAI,KAAK,MAAQ,EAAA;AACf,QAAM,MAAA,IAAI,WAAW,4CAA8C,EAAA;AAAA,UACjE,QAAU,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,SAC7B,CAAA,CAAA;AAAA,OACH;AACA,MAAO,OAAA,IAAA,CAAA;AAAA,KACT;AAGA,IAAA,MAAM,KAAQ,GAAA,IAAA,CAAK,QAAS,CAAA,IAAA,CAAK,KAAK,OAAO,CAAA,CAAA;AAC7C,IAAA,IAAI,KAAO,EAAA;AAGT,MAAI,IAAA,IAAA,CAAK,WAAW,KAAS,IAAA,KAAA,CAAM,CAAC,CAAE,CAAA,MAAA,KAAW,IAAK,CAAA,OAAA,CAAQ,MAAQ,EAAA;AACpE,QAAO,OAAA,IAAA,CAAA;AAAA,OACT;AACA,MAAM,MAAA,KAAA,GAAQ,MAAM,CAAC,CAAA,CAAA;AACrB,MAAA,IAAA,CAAK,OAAU,GAAA,IAAA,CAAK,OAAQ,CAAA,KAAA,CAAM,MAAM,MAAM,CAAA,CAAA;AAC9C,MAAA,MAAM,KAAkB,GAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA,CAAA;AAC1C,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,KAAM,CAAA,MAAA,CAAA;AAC7B,MAAK,IAAA,CAAA,OAAA,CAAQ,UAAU,KAAM,CAAA,MAAA,CAAA;AAC7B,MAAO,OAAA;AAAA,QACL,IAAM,EAAA,KAAA;AAAA,QACN,KAAA;AAAA,QACA,QAAU,EAAA;AAAA,UACR,KAAA;AAAA,UACA,GAAK,EAAA,EAAE,GAAG,IAAA,CAAK,OAAQ,EAAA;AAAA,UACvB,WAAW,IAAK,CAAA,UAAA;AAAA,SAClB;AAAA,OACF,CAAA;AAAA,KACF;AAGA,IAAO,OAAA,IAAA,CAAA;AAAA,GACT;AACF;;;;"}
@@ -1,18 +1,27 @@
 import { Lexer } from './Lexer.js';
 
 class LexerTransformer extends TransformStream {
+  lexer;
   constructor(options = {}) {
-    const lexer = new Lexer(options);
     super({
       transform: (chunk, controller) => {
         if (chunk.length !== 0) {
-          controller.enqueue([...lexer.lex(chunk, true)]);
+          try {
+            controller.enqueue([...this.lexer.lex(chunk, true)]);
+          } catch (error) {
+            controller.error(error);
+          }
         }
       },
       flush: (controller) => {
-        controller.enqueue(lexer.flush());
+        try {
+          controller.enqueue(this.lexer.flush());
+        } catch (error) {
+          controller.error(error);
+        }
       }
     });
+    this.lexer = new Lexer(options);
   }
 }
 
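With the lexer stored on a public `lexer` property and both lex() and flush() wrapped in try/catch, a tokenization failure is now reported through controller.error() and fails the stream. A sketch of how that surfaces to a consumer, assuming LexerTransformer is part of the published low-level API:

```ts
// Sketch only: assumes LexerTransformer is importable from the package root.
import { LexerTransformer } from "web-csv-toolbox";

const source = new ReadableStream<string>({
  start(controller) {
    controller.enqueue('name,age\r\n"Alice,20\r\n'); // the quote is never closed
    controller.close();
  },
});

source
  .pipeThrough(new LexerTransformer())
  .pipeTo(new WritableStream({ write(tokens) { console.log(tokens); } }))
  .catch((error) => {
    // The ParseError thrown while flushing the lexer is forwarded with
    // controller.error(), so the failure shows up as a rejected pipeTo() promise.
    console.error("lexing failed:", error);
  });
```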
@@ -1 +1 @@
-
{"version":3,"file":"LexerTransformer.js","sources":["../../src/LexerTransformer.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport type { CommonOptions, Token } from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeTo(new WritableStream({ write(tokens) {\n * for (const token of tokens) {\n * console.log(token);\n * }\n * }}));\n * // { type: Field, value: \"name\" }\n * // FieldDelimiter\n * // { type: Field, value: \"age\" }\n * // RecordDelimiter\n * // { type: Field, value: \"Alice\" }\n * // FieldDelimiter\n * // { type: Field, value: \"20\" }\n * // RecordDelimiter\n * ```\n */\nexport class LexerTransformer extends TransformStream<string, Token[]> {\n constructor(options: CommonOptions = {}) {\n
+
{"version":3,"file":"LexerTransformer.js","sources":["../../src/LexerTransformer.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport type { CommonOptions, Token } from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeTo(new WritableStream({ write(tokens) {\n * for (const token of tokens) {\n * console.log(token);\n * }\n * }}));\n * // { type: Field, value: \"name\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: {...} }\n * // { type: Field, value: \"age\", location: {...} }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * // { type: Field, value: \"Alice\", location: {...} }\n * // { type: FieldDelimiter, value: \",\", location: {...} }\n * // { type: Field, value: \"20\" }\n * // { type: RecordDelimiter, value: \"\\r\\n\", location: {...} }\n * ```\n */\nexport class LexerTransformer extends TransformStream<string, Token[]> {\n public readonly lexer: Lexer;\n constructor(options: CommonOptions = {}) {\n super({\n transform: (chunk, controller) => {\n if (chunk.length !== 0) {\n try {\n controller.enqueue([...this.lexer.lex(chunk, true)]);\n } catch (error) {\n controller.error(error);\n }\n }\n },\n flush: (controller) => {\n try {\n controller.enqueue(this.lexer.flush());\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.lexer = new Lexer(options);\n }\n}\n"],"names":[],"mappings":";;AAiCO,MAAM,yBAAyB,eAAiC,CAAA;AAAA,EACrD,KAAA,CAAA;AAAA,EAChB,WAAA,CAAY,OAAyB,GAAA,EAAI,EAAA;AACvC,IAAM,KAAA,CAAA;AAAA,MACJ,SAAA,EAAW,CAAC,KAAA,EAAO,UAAe,KAAA;AAChC,QAAI,IAAA,KAAA,CAAM,WAAW,CAAG,EAAA;AACtB,UAAI,IAAA;AACF,YAAW,UAAA,CAAA,OAAA,CAAQ,CAAC,GAAG,IAAA,CAAK,MAAM,GAAI,CAAA,KAAA,EAAO,IAAI,CAAC,CAAC,CAAA,CAAA;AAAA,mBAC5C,KAAO,EAAA;AACd,YAAA,UAAA,CAAW,MAAM,KAAK,CAAA,CAAA;AAAA,WACxB;AAAA,SACF;AAAA,OACF;AAAA,MACA,KAAA,EAAO,CAAC,UAAe,KAAA;AACrB,QAAI,IAAA;AACF,UAAA,UAAA,CAAW,OAAQ,CAAA,IAAA,CAAK,KAAM,CAAA,KAAA,EAAO,CAAA,CAAA;AAAA,iBAC9B,KAAO,EAAA;AACd,UAAA,UAAA,CAAW,MAAM,KAAK,CAAA,CAAA;AAAA,SACxB;AAAA,OACF;AAAA,KACD,CAAA,CAAA;AACD,IAAK,IAAA,CAAA,KAAA,GAAQ,IAAI,KAAA,CAAM,OAAO,CAAA,CAAA;AAAA,GAChC;AACF;;;;"}
@@ -1,4 +1,5 @@
 import { RecordDelimiter, FieldDelimiter } from './common/constants.js';
+import { ParseError } from './common/errors.js';
 
 class RecordAssembler {
   #fieldIndex = 0;
@@ -60,10 +61,10 @@ class RecordAssembler {
   #setHeader(header) {
     this.#header = header;
     if (this.#header.length === 0) {
-      throw new
+      throw new ParseError("The header must not be empty.");
     }
     if (new Set(this.#header).size !== this.#header.length) {
-      throw new
+      throw new ParseError("The header must not contain duplicate fields.");
     }
   }
 }
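Header validation in RecordAssembler now throws the new ParseError for an empty header or for duplicate header fields. A small sketch, assuming RecordAssembler and ParseError are exposed from the package root:

```ts
// Sketch only: assumes RecordAssembler and ParseError are publicly exported.
import { RecordAssembler, ParseError } from "web-csv-toolbox";

try {
  // Header validation runs in the constructor when a header option is given.
  new RecordAssembler({ header: ["name", "name"] });
} catch (error) {
  console.error(error instanceof ParseError, (error as Error).message);
  // true "The header must not contain duplicate fields."
}
```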
@@ -1 +1 @@
-
{"version":3,"file":"RecordAssembler.js","sources":["../../src/RecordAssembler.ts"],"sourcesContent":["import { FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\nexport class RecordAssembler<Header extends ReadonlyArray<string>> {\n #fieldIndex = 0;\n #row: string[] = [];\n #header: Header | undefined;\n #dirty = false;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n if (options.header !== undefined && Array.isArray(options.header)) {\n this.#setHeader(options.header);\n }\n }\n\n public *assemble(\n tokens: Iterable<Token>,\n flush = true,\n ): IterableIterator<CSVRecord<Header>> {\n for (const token of tokens) {\n switch (token.type) {\n case FieldDelimiter:\n this.#fieldIndex++;\n this.#dirty = true;\n break;\n case RecordDelimiter:\n if (this.#header === undefined) {\n this.#setHeader(this.#row as unknown as Header);\n } else {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header.map((header, index) => [\n header,\n this.#row.at(index),\n ]),\n ) as unknown as CSVRecord<Header>;\n } else {\n yield Object.fromEntries(\n this.#header.map((header) => [header, \"\"]),\n ) as CSVRecord<Header>;\n }\n }\n // Reset the row fields buffer.\n this.#fieldIndex = 0;\n this.#row = new Array(this.#header?.length).fill(\"\");\n this.#dirty = false;\n break;\n default:\n this.#dirty = true;\n this.#row[this.#fieldIndex] = token.value;\n break;\n }\n }\n\n if (flush) {\n yield* this.flush();\n }\n }\n\n public *flush(): Generator<CSVRecord<Header>> {\n if (this.#header !== undefined) {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header\n .filter((v) => v)\n .map((header, index) => [header, this.#row.at(index)]),\n ) as unknown as CSVRecord<Header>;\n }\n }\n }\n\n #setHeader(header: Header) {\n this.#header = header;\n if (this.#header.length === 0) {\n throw new
+
{"version":3,"file":"RecordAssembler.js","sources":["../../src/RecordAssembler.ts"],"sourcesContent":["import { FieldDelimiter, RecordDelimiter } from \"./common/constants.ts\";\nimport { ParseError } from \"./common/errors.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\nexport class RecordAssembler<Header extends ReadonlyArray<string>> {\n #fieldIndex = 0;\n #row: string[] = [];\n #header: Header | undefined;\n #dirty = false;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n if (options.header !== undefined && Array.isArray(options.header)) {\n this.#setHeader(options.header);\n }\n }\n\n public *assemble(\n tokens: Iterable<Token>,\n flush = true,\n ): IterableIterator<CSVRecord<Header>> {\n for (const token of tokens) {\n switch (token.type) {\n case FieldDelimiter:\n this.#fieldIndex++;\n this.#dirty = true;\n break;\n case RecordDelimiter:\n if (this.#header === undefined) {\n this.#setHeader(this.#row as unknown as Header);\n } else {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header.map((header, index) => [\n header,\n this.#row.at(index),\n ]),\n ) as unknown as CSVRecord<Header>;\n } else {\n yield Object.fromEntries(\n this.#header.map((header) => [header, \"\"]),\n ) as CSVRecord<Header>;\n }\n }\n // Reset the row fields buffer.\n this.#fieldIndex = 0;\n this.#row = new Array(this.#header?.length).fill(\"\");\n this.#dirty = false;\n break;\n default:\n this.#dirty = true;\n this.#row[this.#fieldIndex] = token.value;\n break;\n }\n }\n\n if (flush) {\n yield* this.flush();\n }\n }\n\n public *flush(): Generator<CSVRecord<Header>> {\n if (this.#header !== undefined) {\n if (this.#dirty) {\n yield Object.fromEntries(\n this.#header\n .filter((v) => v)\n .map((header, index) => [header, this.#row.at(index)]),\n ) as unknown as CSVRecord<Header>;\n }\n }\n }\n\n #setHeader(header: Header) {\n this.#header = header;\n if (this.#header.length === 0) {\n throw new ParseError(\"The header must not be empty.\");\n }\n if (new Set(this.#header).size !== this.#header.length) {\n throw new ParseError(\"The header must not contain duplicate fields.\");\n }\n 
}\n}\n"],"names":[],"mappings":";;;AAQO,MAAM,eAAsD,CAAA;AAAA,EACjE,WAAc,GAAA,CAAA,CAAA;AAAA,EACd,OAAiB,EAAC,CAAA;AAAA,EAClB,OAAA,CAAA;AAAA,EACA,MAAS,GAAA,KAAA,CAAA;AAAA,EAET,WAAA,CAAY,OAA0C,GAAA,EAAI,EAAA;AACxD,IAAA,IAAI,QAAQ,MAAW,KAAA,KAAA,CAAA,IAAa,MAAM,OAAQ,CAAA,OAAA,CAAQ,MAAM,CAAG,EAAA;AACjE,MAAK,IAAA,CAAA,UAAA,CAAW,QAAQ,MAAM,CAAA,CAAA;AAAA,KAChC;AAAA,GACF;AAAA,EAEA,CAAQ,QAAA,CACN,MACA,EAAA,KAAA,GAAQ,IAC6B,EAAA;AACrC,IAAA,KAAA,MAAW,SAAS,MAAQ,EAAA;AAC1B,MAAA,QAAQ,MAAM,IAAM;AAAA,QAClB,KAAK,cAAA;AACH,UAAK,IAAA,CAAA,WAAA,EAAA,CAAA;AACL,UAAA,IAAA,CAAK,MAAS,GAAA,IAAA,CAAA;AACd,UAAA,MAAA;AAAA,QACF,KAAK,eAAA;AACH,UAAI,IAAA,IAAA,CAAK,YAAY,KAAW,CAAA,EAAA;AAC9B,YAAK,IAAA,CAAA,UAAA,CAAW,KAAK,IAAyB,CAAA,CAAA;AAAA,WACzC,MAAA;AACL,YAAA,IAAI,KAAK,MAAQ,EAAA;AACf,cAAA,MAAM,MAAO,CAAA,WAAA;AAAA,gBACX,IAAK,CAAA,OAAA,CAAQ,GAAI,CAAA,CAAC,QAAQ,KAAU,KAAA;AAAA,kBAClC,MAAA;AAAA,kBACA,IAAA,CAAK,IAAK,CAAA,EAAA,CAAG,KAAK,CAAA;AAAA,iBACnB,CAAA;AAAA,eACH,CAAA;AAAA,aACK,MAAA;AACL,cAAA,MAAM,MAAO,CAAA,WAAA;AAAA,gBACX,IAAA,CAAK,QAAQ,GAAI,CAAA,CAAC,WAAW,CAAC,MAAA,EAAQ,EAAE,CAAC,CAAA;AAAA,eAC3C,CAAA;AAAA,aACF;AAAA,WACF;AAEA,UAAA,IAAA,CAAK,WAAc,GAAA,CAAA,CAAA;AACnB,UAAK,IAAA,CAAA,IAAA,GAAO,IAAI,KAAM,CAAA,IAAA,CAAK,SAAS,MAAM,CAAA,CAAE,KAAK,EAAE,CAAA,CAAA;AACnD,UAAA,IAAA,CAAK,MAAS,GAAA,KAAA,CAAA;AACd,UAAA,MAAA;AAAA,QACF;AACE,UAAA,IAAA,CAAK,MAAS,GAAA,IAAA,CAAA;AACd,UAAA,IAAA,CAAK,IAAK,CAAA,IAAA,CAAK,WAAW,CAAA,GAAI,KAAM,CAAA,KAAA,CAAA;AACpC,UAAA,MAAA;AAAA,OACJ;AAAA,KACF;AAEA,IAAA,IAAI,KAAO,EAAA;AACT,MAAA,OAAO,KAAK,KAAM,EAAA,CAAA;AAAA,KACpB;AAAA,GACF;AAAA,EAEA,CAAQ,KAAsC,GAAA;AAC5C,IAAI,IAAA,IAAA,CAAK,YAAY,KAAW,CAAA,EAAA;AAC9B,MAAA,IAAI,KAAK,MAAQ,EAAA;AACf,QAAA,MAAM,MAAO,CAAA,WAAA;AAAA,UACX,KAAK,OACF,CAAA,MAAA,CAAO,CAAC,CAAM,KAAA,CAAC,EACf,GAAI,CAAA,CAAC,MAAQ,EAAA,KAAA,KAAU,CAAC,MAAQ,EAAA,IAAA,CAAK,KAAK,EAAG,CAAA,KAAK,CAAC,CAAC,CAAA;AAAA,SACzD,CAAA;AAAA,OACF;AAAA,KACF;AAAA,GACF;AAAA,EAEA,WAAW,MAAgB,EAAA;AACzB,IAAA,IAAA,CAAK,OAAU,GAAA,MAAA,CAAA;AACf,IAAI,IAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,KAAW,CAAG,EAAA;AAC7B,MAAM,MAAA,IAAI,WAAW,+BAA+B,CAAA,CAAA;AAAA,KACtD;AACA,IAAI,IAAA,IAAI,IAAI,IAAK,CAAA,OAAO,EAAE,IAAS,KAAA,IAAA,CAAK,QAAQ,MAAQ,EAAA;AACtD,MAAM,MAAA,IAAI,WAAW,+CAA+C,CAAA,CAAA;AAAA,KACtE;AAAA,GACF;AACF;;;;"}
@@ -1,20 +1,29 @@
 import { RecordAssembler } from './RecordAssembler.js';
 
 class RecordAssemblerTransformer extends TransformStream {
+  assembler;
   constructor(options = {}) {
-    const assembler = new RecordAssembler(options);
     super({
       transform: (tokens, controller) => {
-        for (const token of assembler.assemble(tokens, false)) {
-          controller.enqueue(token);
+        try {
+          for (const token of this.assembler.assemble(tokens, false)) {
+            controller.enqueue(token);
+          }
+        } catch (error) {
+          controller.error(error);
         }
       },
       flush: (controller) => {
-        for (const token of assembler.flush()) {
-          controller.enqueue(token);
+        try {
+          for (const token of this.assembler.flush()) {
+            controller.enqueue(token);
+          }
+        } catch (error) {
+          controller.error(error);
        }
       }
     });
+    this.assembler = new RecordAssembler(options);
   }
 }
 
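RecordAssemblerTransformer gets the same treatment: the assembler moves to a public `assembler` property assigned after super(), and failures from assemble() and flush() are forwarded with controller.error(). A hedged end-to-end sketch, assuming both low-level transformers are importable from the package root:

```ts
// Sketch only: assumes LexerTransformer and RecordAssemblerTransformer are public exports.
import { LexerTransformer, RecordAssemblerTransformer } from "web-csv-toolbox";

new ReadableStream<string>({
  start(controller) {
    controller.enqueue("name,name\r\n"); // duplicate header fields in the data
    controller.enqueue("Alice,20\r\n");
    controller.close();
  },
})
  .pipeThrough(new LexerTransformer())
  .pipeThrough(new RecordAssemblerTransformer())
  .pipeTo(new WritableStream({ write(record) { console.log(record); } }))
  .catch((error) => {
    // The ParseError raised while assembling the header row is forwarded with
    // controller.error(), so the pipe rejects instead of emitting records.
    console.error("assembly failed:", error);
  });
```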
@@ -1 +1 @@
-
{"version":3,"file":"RecordAssemblerTransformer.js","sources":["../../src/RecordAssemblerTransformer.ts"],"sourcesContent":["import { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n * @template Header The type of the header row.\n * @param options The options for the parser.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer())\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n *\n * @example Parse a CSV with headers by options\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer({ header: [\"name\", \"age\"] }))\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n */\nexport class RecordAssemblerTransformer<\n Header extends ReadonlyArray<string>,\n> extends TransformStream<Token[], CSVRecord<Header>> {\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n
+
{"version":3,"file":"RecordAssemblerTransformer.js","sources":["../../src/RecordAssemblerTransformer.ts"],"sourcesContent":["import { RecordAssembler } from \"./RecordAssembler.ts\";\nimport type {\n CSVRecord,\n RecordAssemblerOptions,\n Token,\n} from \"./common/types.ts\";\n\n/**\n * A transform stream that converts a stream of tokens into a stream of rows.\n * @template Header The type of the header row.\n * @param options The options for the parser.\n *\n * @category Low-level API\n *\n * @example Parse a CSV with headers by data\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"name,age\\r\\n\");\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer())\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n *\n * @example Parse a CSV with headers by options\n * ```ts\n * new ReadableStream({\n * start(controller) {\n * controller.enqueue(\"Alice,20\\r\\n\");\n * controller.enqueue(\"Bob,25\\r\\n\");\n * controller.enqueue(\"Charlie,30\\r\\n\");\n * controller.close();\n * }\n * })\n * .pipeThrough(new LexerTransformer())\n * .pipeThrough(new RecordAssemblerTransformer({ header: [\"name\", \"age\"] }))\n * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));\n * // { name: \"Alice\", age: \"20\" }\n * // { name: \"Bob\", age: \"25\" }\n * // { name: \"Charlie\", age: \"30\" }\n * ```\n */\nexport class RecordAssemblerTransformer<\n Header extends ReadonlyArray<string>,\n> extends TransformStream<Token[], CSVRecord<Header>> {\n public readonly assembler: RecordAssembler<Header>;\n\n constructor(options: RecordAssemblerOptions<Header> = {}) {\n super({\n transform: (tokens, controller) => {\n try {\n for (const token of this.assembler.assemble(tokens, false)) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n flush: (controller) => {\n try {\n for (const token of this.assembler.flush()) {\n controller.enqueue(token);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n });\n this.assembler = new RecordAssembler(options);\n }\n}\n"],"names":[],"mappings":";;AAkDO,MAAM,mCAEH,eAA4C,CAAA;AAAA,EACpC,SAAA,CAAA;AAAA,EAEhB,WAAA,CAAY,OAA0C,GAAA,EAAI,EAAA;AACxD,IAAM,KAAA,CAAA;AAAA,MACJ,SAAA,EAAW,CAAC,MAAA,EAAQ,UAAe,KAAA;AACjC,QAAI,IAAA;AACF,UAAA,KAAA,MAAW,SAAS,IAAK,CAAA,SAAA,CAAU,QAAS,CAAA,MAAA,EAAQ,KAAK,CAAG,EAAA;AAC1D,YAAA,UAAA,CAAW,QAAQ,KAAK,CAAA,CAAA;AAAA,WAC1B;AAAA,iBACO,KAAO,EAAA;AACd,UAAA,UAAA,CAAW,MAAM,KAAK,CAAA,CAAA;AAAA,SACxB;AAAA,OACF;AAAA,MACA,KAAA,EAAO,CAAC,UAAe,KAAA;AACrB,QAAI,IAAA;AACF,UAAA,KAAA,MAAW,KAAS,IAAA,IAAA,CAAK,SAAU,CAAA,KAAA,EAAS,EAAA;AAC1C,YAAA,UAAA,CAAW,QAAQ,KAAK,CAAA,CAAA;AAAA,WAC1B;AAAA,iBACO,KAAO,EAAA;AACd,UAAA,UAAA,CAAW,MAAM,KAAK,CAAA,CAAA;AAAA,SACxB;AAAA,OACF;AAAA,KACD,CAAA,CAAA;AACD,IAAK,IAAA,CAAA,SAAA,GAAY,IAAI,eAAA,CAAgB,OAAO,CAAA,CAAA;AAAA,GAC9C;AACF;;;;"}
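Taken together, the practical effect for the high-level API should be that malformed CSV surfaces as a ParseError. A speculative sketch, assuming the top-level parse() accepts a CSV string and yields records as in earlier releases:

```ts
// Speculative sketch: parse() and ParseError exports are assumed, not confirmed by this diff.
import { parse, ParseError } from "web-csv-toolbox";

async function main() {
  const csv = 'name,age\r\n"Alice,20\r\n'; // unterminated quoted field

  try {
    for await (const record of parse(csv)) {
      console.log(record);
    }
  } catch (error) {
    console.error(error instanceof ParseError); // expected: true
  }
}

main();
```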