web-csv-toolbox 0.0.0-next-20240116142901 → 0.0.0-next-20240116161004

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (167) hide show
  1. package/dist/cjs/Lexer.js +2 -0
  2. package/dist/cjs/Lexer.js.map +1 -0
  3. package/dist/cjs/LexerTransformer.js +2 -0
  4. package/dist/cjs/LexerTransformer.js.map +1 -0
  5. package/dist/cjs/RecordAssembler.js +2 -0
  6. package/dist/cjs/RecordAssembler.js.map +1 -0
  7. package/dist/cjs/RecordAssemblerTransformer.js +2 -0
  8. package/dist/cjs/RecordAssemblerTransformer.js.map +1 -0
  9. package/dist/cjs/assertCommonOptions.js +2 -0
  10. package/dist/cjs/assertCommonOptions.js.map +1 -0
  11. package/dist/cjs/common/constants.js +2 -0
  12. package/dist/cjs/common/constants.js.map +1 -0
  13. package/dist/cjs/constants.js +2 -0
  14. package/dist/cjs/constants.js.map +1 -0
  15. package/dist/cjs/convertBinaryToString.js +2 -0
  16. package/dist/cjs/convertBinaryToString.js.map +1 -0
  17. package/dist/cjs/getOptionsFromResponse.js +2 -0
  18. package/dist/cjs/getOptionsFromResponse.js.map +1 -0
  19. package/dist/cjs/parse.js +2 -0
  20. package/dist/cjs/parse.js.map +1 -0
  21. package/dist/cjs/parseBinary.js +2 -0
  22. package/dist/cjs/parseBinary.js.map +1 -0
  23. package/dist/cjs/parseBinaryToArraySync.js +2 -0
  24. package/dist/cjs/parseBinaryToArraySync.js.map +1 -0
  25. package/dist/cjs/parseBinaryToIterableIterator.js +2 -0
  26. package/dist/cjs/parseBinaryToIterableIterator.js.map +1 -0
  27. package/dist/cjs/parseBinaryToStream.js +2 -0
  28. package/dist/cjs/parseBinaryToStream.js.map +1 -0
  29. package/dist/cjs/parseResponse.js +2 -0
  30. package/dist/cjs/parseResponse.js.map +1 -0
  31. package/dist/cjs/parseResponseToStream.js +2 -0
  32. package/dist/cjs/parseResponseToStream.js.map +1 -0
  33. package/dist/cjs/parseString.js +2 -0
  34. package/dist/cjs/parseString.js.map +1 -0
  35. package/dist/cjs/parseStringStream.js +2 -0
  36. package/dist/cjs/parseStringStream.js.map +1 -0
  37. package/dist/cjs/parseStringStreamToStream.js +2 -0
  38. package/dist/cjs/parseStringStreamToStream.js.map +1 -0
  39. package/dist/cjs/parseStringToArraySync.js +2 -0
  40. package/dist/cjs/parseStringToArraySync.js.map +1 -0
  41. package/dist/cjs/parseStringToIterableIterator.js +2 -0
  42. package/dist/cjs/parseStringToIterableIterator.js.map +1 -0
  43. package/dist/cjs/parseStringToStream.js +2 -0
  44. package/dist/cjs/parseStringToStream.js.map +1 -0
  45. package/dist/cjs/parseUint8ArrayStream.js +2 -0
  46. package/dist/cjs/parseUint8ArrayStream.js.map +1 -0
  47. package/dist/cjs/parseUint8ArrayStreamToStream.js +2 -0
  48. package/dist/cjs/parseUint8ArrayStreamToStream.js.map +1 -0
  49. package/dist/cjs/utils/convertIterableIteratorToAsync.js +2 -0
  50. package/dist/cjs/utils/convertIterableIteratorToAsync.js.map +1 -0
  51. package/dist/cjs/utils/convertStreamToAsyncIterableIterator.js +2 -0
  52. package/dist/cjs/utils/convertStreamToAsyncIterableIterator.js.map +1 -0
  53. package/dist/cjs/utils/convertThisAsyncIterableIteratorToArray.js +2 -0
  54. package/dist/cjs/utils/convertThisAsyncIterableIteratorToArray.js.map +1 -0
  55. package/dist/cjs/utils/escapeRegExp.js +2 -0
  56. package/dist/cjs/utils/escapeRegExp.js.map +1 -0
  57. package/dist/cjs/utils/parseMime.js +2 -0
  58. package/dist/cjs/utils/parseMime.js.map +1 -0
  59. package/dist/cjs/utils/pipeline.js +2 -0
  60. package/dist/cjs/utils/pipeline.js.map +1 -0
  61. package/dist/cjs/web-csv-toolbox.js +2 -0
  62. package/dist/cjs/web-csv-toolbox.js.map +1 -0
  63. package/dist/es/Lexer.js +151 -0
  64. package/dist/es/Lexer.js.map +1 -0
  65. package/dist/es/LexerTransformer.js +20 -0
  66. package/dist/es/LexerTransformer.js.map +1 -0
  67. package/dist/es/RecordAssembler.js +72 -0
  68. package/dist/es/RecordAssembler.js.map +1 -0
  69. package/dist/es/RecordAssemblerTransformer.js +22 -0
  70. package/dist/es/RecordAssemblerTransformer.js.map +1 -0
  71. package/dist/es/assertCommonOptions.js +24 -0
  72. package/dist/es/assertCommonOptions.js.map +1 -0
  73. package/dist/es/common/constants.js +6 -0
  74. package/dist/es/common/constants.js.map +1 -0
  75. package/dist/es/constants.js +8 -0
  76. package/dist/es/constants.js.map +1 -0
  77. package/dist/es/convertBinaryToString.js +9 -0
  78. package/dist/es/convertBinaryToString.js.map +1 -0
  79. package/dist/es/getOptionsFromResponse.js +20 -0
  80. package/dist/es/getOptionsFromResponse.js.map +1 -0
  81. package/dist/es/parse.js +39 -0
  82. package/dist/es/parse.js.map +1 -0
  83. package/dist/es/parseBinary.js +35 -0
  84. package/dist/es/parseBinary.js.map +1 -0
  85. package/dist/es/parseBinaryToArraySync.js +10 -0
  86. package/dist/es/parseBinaryToArraySync.js.map +1 -0
  87. package/dist/es/parseBinaryToIterableIterator.js +10 -0
  88. package/dist/es/parseBinaryToIterableIterator.js.map +1 -0
  89. package/dist/es/parseBinaryToStream.js +10 -0
  90. package/dist/es/parseBinaryToStream.js.map +1 -0
  91. package/dist/es/parseResponse.js +27 -0
  92. package/dist/es/parseResponse.js.map +1 -0
  93. package/dist/es/parseResponseToStream.js +13 -0
  94. package/dist/es/parseResponseToStream.js.map +1 -0
  95. package/dist/es/parseString.js +33 -0
  96. package/dist/es/parseString.js.map +1 -0
  97. package/dist/es/parseStringStream.js +23 -0
  98. package/dist/es/parseStringStream.js.map +1 -0
  99. package/dist/es/parseStringStreamToStream.js +14 -0
  100. package/dist/es/parseStringStreamToStream.js.map +1 -0
  101. package/dist/es/parseStringToArraySync.js +12 -0
  102. package/dist/es/parseStringToArraySync.js.map +1 -0
  103. package/dist/es/parseStringToIterableIterator.js +12 -0
  104. package/dist/es/parseStringToIterableIterator.js.map +1 -0
  105. package/dist/es/parseStringToStream.js +19 -0
  106. package/dist/es/parseStringToStream.js.map +1 -0
  107. package/dist/es/parseUint8ArrayStream.js +23 -0
  108. package/dist/es/parseUint8ArrayStream.js.map +1 -0
  109. package/dist/es/parseUint8ArrayStreamToStream.js +22 -0
  110. package/dist/es/parseUint8ArrayStreamToStream.js.map +1 -0
  111. package/dist/es/utils/convertIterableIteratorToAsync.js +14 -0
  112. package/dist/es/utils/convertIterableIteratorToAsync.js.map +1 -0
  113. package/dist/es/utils/convertStreamToAsyncIterableIterator.js +12 -0
  114. package/dist/es/utils/convertStreamToAsyncIterableIterator.js.map +1 -0
  115. package/dist/es/utils/convertThisAsyncIterableIteratorToArray.js +10 -0
  116. package/dist/es/utils/convertThisAsyncIterableIteratorToArray.js.map +1 -0
  117. package/dist/es/utils/escapeRegExp.js +6 -0
  118. package/dist/es/utils/escapeRegExp.js.map +1 -0
  119. package/dist/es/utils/parseMime.js +15 -0
  120. package/dist/es/utils/parseMime.js.map +1 -0
  121. package/dist/es/utils/pipeline.js +18 -0
  122. package/dist/es/utils/pipeline.js.map +1 -0
  123. package/dist/es/web-csv-toolbox.js +10 -0
  124. package/dist/es/web-csv-toolbox.js.map +1 -0
  125. package/dist/types/Lexer.d.ts +7 -0
  126. package/dist/types/LexerTransformer.d.ts +34 -0
  127. package/dist/types/RecordAssembler.d.ts +7 -0
  128. package/dist/types/RecordAssemblerTransformer.d.ts +47 -0
  129. package/dist/types/assertCommonOptions.d.ts +7 -0
  130. package/dist/types/common/constants.d.ts +15 -0
  131. package/dist/types/common/types.d.ts +163 -0
  132. package/dist/types/constants.d.ts +11 -0
  133. package/dist/types/convertBinaryToString.d.ts +2 -0
  134. package/dist/types/escapeField.d.ts +13 -0
  135. package/dist/types/getOptionsFromResponse.d.ts +2 -0
  136. package/dist/types/parse.d.ts +182 -0
  137. package/dist/types/parseBinary.d.ts +113 -0
  138. package/dist/types/parseBinaryToArraySync.d.ts +2 -0
  139. package/dist/types/parseBinaryToIterableIterator.d.ts +2 -0
  140. package/dist/types/parseBinaryToStream.d.ts +2 -0
  141. package/dist/types/parseResponse.d.ts +80 -0
  142. package/dist/types/parseResponseToStream.d.ts +2 -0
  143. package/dist/types/parseString.d.ts +120 -0
  144. package/dist/types/parseStringStream.d.ts +98 -0
  145. package/dist/types/parseStringStreamToStream.d.ts +2 -0
  146. package/dist/types/parseStringToArraySync.d.ts +2 -0
  147. package/dist/types/parseStringToIterableIterator.d.ts +2 -0
  148. package/dist/types/parseStringToStream.d.ts +2 -0
  149. package/dist/types/parseUint8ArrayStream.d.ts +94 -0
  150. package/dist/types/parseUint8ArrayStreamToStream.d.ts +2 -0
  151. package/dist/types/utils/SingleValueReadableStream.d.ts +3 -0
  152. package/dist/types/utils/convertIterableIteratorToAsync.d.ts +1 -0
  153. package/dist/types/utils/convertStreamToAsyncIterableIterator.d.ts +1 -0
  154. package/dist/types/utils/convertThisAsyncIterableIteratorToArray.d.ts +1 -0
  155. package/dist/types/utils/escapeRegExp.d.ts +8 -0
  156. package/dist/types/utils/occurrences.d.ts +7 -0
  157. package/dist/types/utils/parseMime.d.ts +7 -0
  158. package/dist/types/utils/pipeline.d.ts +3 -0
  159. package/dist/types/web-csv-toolbox.d.ts +10 -0
  160. package/dist/web-csv-toolbox.umd.cjs +1 -1
  161. package/dist/web-csv-toolbox.umd.cjs.map +1 -1
  162. package/package.json +31 -10
  163. package/dist/web-csv-toolbox.cjs +0 -2
  164. package/dist/web-csv-toolbox.cjs.map +0 -1
  165. package/dist/web-csv-toolbox.d.ts +0 -982
  166. package/dist/web-csv-toolbox.js +0 -576
  167. package/dist/web-csv-toolbox.js.map +0 -1
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parseStringToArraySync.js","sources":["../../src/parseStringToArraySync.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport { CSVRecord, ParseOptions } from \"./common/types.ts\";\n\nexport function parseStringToArraySync<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): CSVRecord<Header>[] {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return [...assembler.assemble(tokens)];\n}\n"],"names":[],"mappings":";;;AAIgB,SAAA,sBAAA,CACd,KACA,OACqB,EAAA;AACrB,EAAM,MAAA,KAAA,GAAQ,IAAI,KAAA,CAAM,OAAO,CAAA,CAAA;AAC/B,EAAM,MAAA,SAAA,GAAY,IAAI,eAAA,CAAgB,OAAO,CAAA,CAAA;AAC7C,EAAM,MAAA,MAAA,GAAS,KAAM,CAAA,GAAA,CAAI,GAAG,CAAA,CAAA;AAC5B,EAAA,OAAO,CAAC,GAAG,SAAU,CAAA,QAAA,CAAS,MAAM,CAAC,CAAA,CAAA;AACvC;;;;"}
@@ -0,0 +1,12 @@
1
+ import { Lexer } from './Lexer.js';
2
+ import { RecordAssembler } from './RecordAssembler.js';
3
+
4
+ function parseStringToIterableIterator(csv, options) {
5
+ const lexer = new Lexer(options);
6
+ const assembler = new RecordAssembler(options);
7
+ const tokens = lexer.lex(csv);
8
+ return assembler.assemble(tokens);
9
+ }
10
+
11
+ export { parseStringToIterableIterator };
12
+ //# sourceMappingURL=parseStringToIterableIterator.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parseStringToIterableIterator.js","sources":["../../src/parseStringToIterableIterator.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport { CSVRecord, ParseOptions } from \"./common/types.ts\";\n\nexport function parseStringToIterableIterator<\n Header extends ReadonlyArray<string>,\n>(\n csv: string,\n options?: ParseOptions<Header>,\n): IterableIterator<CSVRecord<Header>> {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n const tokens = lexer.lex(csv);\n return assembler.assemble(tokens);\n}\n"],"names":[],"mappings":";;;AAIgB,SAAA,6BAAA,CAGd,KACA,OACqC,EAAA;AACrC,EAAM,MAAA,KAAA,GAAQ,IAAI,KAAA,CAAM,OAAO,CAAA,CAAA;AAC/B,EAAM,MAAA,SAAA,GAAY,IAAI,eAAA,CAAgB,OAAO,CAAA,CAAA;AAC7C,EAAM,MAAA,MAAA,GAAS,KAAM,CAAA,GAAA,CAAI,GAAG,CAAA,CAAA;AAC5B,EAAO,OAAA,SAAA,CAAU,SAAS,MAAM,CAAA,CAAA;AAClC;;;;"}
@@ -0,0 +1,19 @@
1
+ import { Lexer } from './Lexer.js';
2
+ import { RecordAssembler } from './RecordAssembler.js';
3
+
4
+ function parseStringToStream(csv, options) {
5
+ const lexer = new Lexer(options);
6
+ const assembler = new RecordAssembler(options);
7
+ return new ReadableStream({
8
+ start(controller) {
9
+ const tokens = lexer.lex(csv);
10
+ for (const record of assembler.assemble(tokens)) {
11
+ controller.enqueue(record);
12
+ }
13
+ controller.close();
14
+ }
15
+ });
16
+ }
17
+
18
+ export { parseStringToStream };
19
+ //# sourceMappingURL=parseStringToStream.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parseStringToStream.js","sources":["../../src/parseStringToStream.ts"],"sourcesContent":["import { Lexer } from \"./Lexer.ts\";\nimport { RecordAssembler } from \"./RecordAssembler.ts\";\nimport { CSVRecord, ParseOptions } from \"./common/types.ts\";\n\nexport function parseStringToStream<Header extends ReadonlyArray<string>>(\n csv: string,\n options?: ParseOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const lexer = new Lexer(options);\n const assembler = new RecordAssembler(options);\n return new ReadableStream({\n start(controller) {\n const tokens = lexer.lex(csv);\n for (const record of assembler.assemble(tokens)) {\n controller.enqueue(record);\n }\n controller.close();\n },\n });\n}\n"],"names":[],"mappings":";;;AAIgB,SAAA,mBAAA,CACd,KACA,OACmC,EAAA;AACnC,EAAM,MAAA,KAAA,GAAQ,IAAI,KAAA,CAAM,OAAO,CAAA,CAAA;AAC/B,EAAM,MAAA,SAAA,GAAY,IAAI,eAAA,CAAgB,OAAO,CAAA,CAAA;AAC7C,EAAA,OAAO,IAAI,cAAe,CAAA;AAAA,IACxB,MAAM,UAAY,EAAA;AAChB,MAAM,MAAA,MAAA,GAAS,KAAM,CAAA,GAAA,CAAI,GAAG,CAAA,CAAA;AAC5B,MAAA,KAAA,MAAW,MAAU,IAAA,SAAA,CAAU,QAAS,CAAA,MAAM,CAAG,EAAA;AAC/C,QAAA,UAAA,CAAW,QAAQ,MAAM,CAAA,CAAA;AAAA,OAC3B;AACA,MAAA,UAAA,CAAW,KAAM,EAAA,CAAA;AAAA,KACnB;AAAA,GACD,CAAA,CAAA;AACH;;;;"}
@@ -0,0 +1,23 @@
1
+ import { parseUint8ArrayStreamToStream } from './parseUint8ArrayStreamToStream.js';
2
+ import { convertStreamToAsyncIterableIterator } from './utils/convertStreamToAsyncIterableIterator.js';
3
+ import { convertThisAsyncIterableIteratorToArray } from './utils/convertThisAsyncIterableIteratorToArray.js';
4
+
5
+ function parseUint8ArrayStream(stream, options) {
6
+ const recordStream = parseUint8ArrayStreamToStream(stream, options);
7
+ return convertStreamToAsyncIterableIterator(recordStream);
8
+ }
9
+ Object.defineProperties(parseUint8ArrayStream, {
10
+ toArray: {
11
+ enumerable: true,
12
+ writable: false,
13
+ value: convertThisAsyncIterableIteratorToArray
14
+ },
15
+ toStream: {
16
+ enumerable: true,
17
+ writable: false,
18
+ value: parseUint8ArrayStreamToStream
19
+ }
20
+ });
21
+
22
+ export { parseUint8ArrayStream };
23
+ //# sourceMappingURL=parseUint8ArrayStream.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parseUint8ArrayStream.js","sources":["../../src/parseUint8ArrayStream.ts"],"sourcesContent":["import { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseStringStream } from \"./parseStringStream.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"./utils/convertStreamToAsyncIterableIterator.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV to records.\n * This function is for parsing a binary stream.\n *\n * @category Middle-level API\n * @remarks\n * If you want to parse a string, use {@link parseStringStream}.\n * @param stream CSV string to parse\n * @param options Parsing options.\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseUint8ArrayStream.toArray} function.\n *\n * @example Parsing CSV binary\n *\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * for await (const record of parseUint8ArrayStream(csv)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseUint8ArrayStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const recordStream = parseUint8ArrayStreamToStream(stream, options);\n return convertStreamToAsyncIterableIterator(recordStream);\n}\n\nexport declare namespace parseUint8ArrayStream {\n /**\n * Parse CSV binary to array of records,\n * ideal for smaller data sets.\n *\n * @returns Array of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = 
Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * const records = await parseUint8ArrayStream.toArray(stream);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV binary to array of records.\n *\n * @returns Stream of records\n *\n * @example Parsing CSV binary\n * ```ts\n * import { parseUint8ArrayStream } from 'web-csv-toolbox';\n *\n * const csv = Uint8Array.from([\n * // ...\n * ]);\n *\n * const stream = new ReadableStream({\n * start(controller) {\n * controller.enqueue(csv);\n * controller.close();\n * },\n * });\n *\n * await parseUint8ArrayStream.toStream(stream)\n * .pipeTo(new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>[]>;\n}\nObject.defineProperties(parseUint8ArrayStream, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseUint8ArrayStreamToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;AAwCgB,SAAA,qBAAA,CACd,QACA,OAC0C,EAAA;AAC1C,EAAM,MAAA,YAAA,GAAe,6BAA8B,CAAA,MAAA,EAAQ,OAAO,CAAA,CAAA;AAClE,EAAA,OAAO,qCAAqC,YAAY,CAAA,CAAA;AAC1D,CAAA;AAkEA,MAAA,CAAO,iBAAiB,qBAAuB,EAAA;AAAA,EAC7C,OAAS,EAAA;AAAA,IACP,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,OAAOA,uCAAS;AAAA,GAClB;AAAA,EACA,QAAU,EAAA;AAAA,IACR,UAAY,EAAA,IAAA;AAAA,IACZ,QAAU,EAAA,KAAA;AAAA,IACV,KAAO,EAAA,6BAAA;AAAA,GACT;AACF,CAAC,CAAA;;;;"}
@@ -0,0 +1,22 @@
1
+ import { LexerTransformer } from './LexerTransformer.js';
2
+ import { RecordAssemblerTransformer } from './RecordAssemblerTransformer.js';
3
+ import { pipeline } from './utils/pipeline.js';
4
+
5
+ function parseUint8ArrayStreamToStream(stream, options) {
6
+ const { charset, fatal, ignoreBOM, decomposition } = options ?? {};
7
+ return decomposition ? pipeline(
8
+ stream,
9
+ new DecompressionStream(decomposition),
10
+ new TextDecoderStream(charset, { fatal, ignoreBOM }),
11
+ new LexerTransformer(options),
12
+ new RecordAssemblerTransformer(options)
13
+ ) : pipeline(
14
+ stream,
15
+ new TextDecoderStream(charset, { fatal, ignoreBOM }),
16
+ new LexerTransformer(options),
17
+ new RecordAssemblerTransformer(options)
18
+ );
19
+ }
20
+
21
+ export { parseUint8ArrayStreamToStream };
22
+ //# sourceMappingURL=parseUint8ArrayStreamToStream.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parseUint8ArrayStreamToStream.js","sources":["../../src/parseUint8ArrayStreamToStream.ts"],"sourcesContent":["import { LexerTransformer } from \"./LexerTransformer.ts\";\nimport { RecordAssemblerTransformer } from \"./RecordAssemblerTransformer.ts\";\nimport { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { pipeline } from \"./utils/pipeline.ts\";\n\nexport function parseUint8ArrayStreamToStream<Header extends readonly string[]>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const { charset, fatal, ignoreBOM, decomposition } = options ?? {};\n return decomposition\n ? pipeline(\n stream,\n new DecompressionStream(decomposition),\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n )\n : pipeline(\n stream,\n new TextDecoderStream(charset, { fatal, ignoreBOM }),\n new LexerTransformer(options),\n new RecordAssemblerTransformer(options),\n );\n}\n"],"names":[],"mappings":";;;;AAKgB,SAAA,6BAAA,CACd,QACA,OACmC,EAAA;AACnC,EAAA,MAAM,EAAE,OAAS,EAAA,KAAA,EAAO,WAAW,aAAc,EAAA,GAAI,WAAW,EAAC,CAAA;AACjE,EAAA,OAAO,aACH,GAAA,QAAA;AAAA,IACE,MAAA;AAAA,IACA,IAAI,oBAAoB,aAAa,CAAA;AAAA,IACrC,IAAI,iBAAkB,CAAA,OAAA,EAAS,EAAE,KAAA,EAAO,WAAW,CAAA;AAAA,IACnD,IAAI,iBAAiB,OAAO,CAAA;AAAA,IAC5B,IAAI,2BAA2B,OAAO,CAAA;AAAA,GAExC,GAAA,QAAA;AAAA,IACE,MAAA;AAAA,IACA,IAAI,iBAAkB,CAAA,OAAA,EAAS,EAAE,KAAA,EAAO,WAAW,CAAA;AAAA,IACnD,IAAI,iBAAiB,OAAO,CAAA;AAAA,IAC5B,IAAI,2BAA2B,OAAO,CAAA;AAAA,GACxC,CAAA;AACN;;;;"}
@@ -0,0 +1,14 @@
1
+ function convertIterableIteratorToAsync(iterator) {
2
+ return {
3
+ async next() {
4
+ const result = iterator.next();
5
+ return Promise.resolve(result);
6
+ },
7
+ [Symbol.asyncIterator]() {
8
+ return this;
9
+ }
10
+ };
11
+ }
12
+
13
+ export { convertIterableIteratorToAsync };
14
+ //# sourceMappingURL=convertIterableIteratorToAsync.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"convertIterableIteratorToAsync.js","sources":["../../../src/utils/convertIterableIteratorToAsync.ts"],"sourcesContent":["export function convertIterableIteratorToAsync<T>(\n iterator: IterableIterator<T>,\n): AsyncIterableIterator<T> {\n return {\n async next() {\n const result = iterator.next();\n return Promise.resolve(result);\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n}\n"],"names":[],"mappings":"AAAO,SAAS,+BACd,QAC0B,EAAA;AAC1B,EAAO,OAAA;AAAA,IACL,MAAM,IAAO,GAAA;AACX,MAAM,MAAA,MAAA,GAAS,SAAS,IAAK,EAAA,CAAA;AAC7B,MAAO,OAAA,OAAA,CAAQ,QAAQ,MAAM,CAAA,CAAA;AAAA,KAC/B;AAAA,IACA,CAAC,MAAO,CAAA,aAAa,CAAI,GAAA;AACvB,MAAO,OAAA,IAAA,CAAA;AAAA,KACT;AAAA,GACF,CAAA;AACF;;;;"}
@@ -0,0 +1,12 @@
1
+ async function* convertStreamToAsyncIterableIterator(stream) {
2
+ const reader = stream.getReader();
3
+ while (true) {
4
+ const { done, value } = await reader.read();
5
+ if (done)
6
+ break;
7
+ yield value;
8
+ }
9
+ }
10
+
11
+ export { convertStreamToAsyncIterableIterator };
12
+ //# sourceMappingURL=convertStreamToAsyncIterableIterator.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"convertStreamToAsyncIterableIterator.js","sources":["../../../src/utils/convertStreamToAsyncIterableIterator.ts"],"sourcesContent":["export async function* convertStreamToAsyncIterableIterator<T>(\n stream: ReadableStream<T>,\n): AsyncIterableIterator<T> {\n const reader = stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n yield value;\n }\n}\n"],"names":[],"mappings":"AAAA,gBAAuB,qCACrB,MAC0B,EAAA;AAC1B,EAAM,MAAA,MAAA,GAAS,OAAO,SAAU,EAAA,CAAA;AAChC,EAAA,OAAO,IAAM,EAAA;AACX,IAAA,MAAM,EAAE,IAAM,EAAA,KAAA,EAAU,GAAA,MAAM,OAAO,IAAK,EAAA,CAAA;AAC1C,IAAI,IAAA,IAAA;AAAM,MAAA,MAAA;AACV,IAAM,MAAA,KAAA,CAAA;AAAA,GACR;AACF;;;;"}
@@ -0,0 +1,10 @@
1
+ async function convertThisAsyncIterableIteratorToArray(...args) {
2
+ const rows = [];
3
+ for await (const row of this(...args)) {
4
+ rows.push(row);
5
+ }
6
+ return rows;
7
+ }
8
+
9
+ export { convertThisAsyncIterableIteratorToArray };
10
+ //# sourceMappingURL=convertThisAsyncIterableIteratorToArray.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"convertThisAsyncIterableIteratorToArray.js","sources":["../../../src/utils/convertThisAsyncIterableIteratorToArray.ts"],"sourcesContent":["export async function convertThisAsyncIterableIteratorToArray<\n O,\n T extends (...args: any[]) => AsyncGenerator<O>,\n>(this: T, ...args: Parameters<T>): Promise<O[]> {\n const rows: O[] = [];\n for await (const row of this(...args)) {\n rows.push(row);\n }\n return rows;\n}\n"],"names":[],"mappings":"AAAA,eAAsB,2CAGR,IAAmC,EAAA;AAC/C,EAAA,MAAM,OAAY,EAAC,CAAA;AACnB,EAAA,WAAA,MAAiB,GAAO,IAAA,IAAA,CAAK,GAAG,IAAI,CAAG,EAAA;AACrC,IAAA,IAAA,CAAK,KAAK,GAAG,CAAA,CAAA;AAAA,GACf;AACA,EAAO,OAAA,IAAA,CAAA;AACT;;;;"}
@@ -0,0 +1,6 @@
1
+ function escapeRegExp(v) {
2
+ return v.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
3
+ }
4
+
5
+ export { escapeRegExp };
6
+ //# sourceMappingURL=escapeRegExp.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"escapeRegExp.js","sources":["../../../src/utils/escapeRegExp.ts"],"sourcesContent":["/**\n * Escape a string for use in a regular expression.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions#escaping Regular expressions#Escaping | MDN}\n * @param v string to escape\n * @returns escaped string\n */\nexport function escapeRegExp(v: string) {\n return v.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n}\n"],"names":[],"mappings":"AAOO,SAAS,aAAa,CAAW,EAAA;AACtC,EAAO,OAAA,CAAA,CAAE,OAAQ,CAAA,qBAAA,EAAuB,MAAM,CAAA,CAAA;AAChD;;;;"}
@@ -0,0 +1,15 @@
1
+ function parseMime(contentType) {
2
+ const [type, ...parameters] = contentType.split(";");
3
+ const result = {
4
+ type: type.trim(),
5
+ parameters: {}
6
+ };
7
+ for (const paramator of parameters) {
8
+ const [key, value] = paramator.split("=");
9
+ result.parameters[key.trim()] = value.trim();
10
+ }
11
+ return result;
12
+ }
13
+
14
+ export { parseMime };
15
+ //# sourceMappingURL=parseMime.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parseMime.js","sources":["../../../src/utils/parseMime.ts"],"sourcesContent":["export interface ParseMimeResult {\n type: string;\n parameters: {\n [key: string]: string;\n };\n}\n\nexport function parseMime(contentType: string) {\n const [type, ...parameters] = contentType.split(\";\");\n const result: ParseMimeResult = {\n type: type.trim(),\n parameters: {},\n };\n for (const paramator of parameters) {\n const [key, value] = paramator.split(\"=\");\n result.parameters[key.trim()] = value.trim();\n }\n return result;\n}\n"],"names":[],"mappings":"AAOO,SAAS,UAAU,WAAqB,EAAA;AAC7C,EAAA,MAAM,CAAC,IAAM,EAAA,GAAG,UAAU,CAAI,GAAA,WAAA,CAAY,MAAM,GAAG,CAAA,CAAA;AACnD,EAAA,MAAM,MAA0B,GAAA;AAAA,IAC9B,IAAA,EAAM,KAAK,IAAK,EAAA;AAAA,IAChB,YAAY,EAAC;AAAA,GACf,CAAA;AACA,EAAA,KAAA,MAAW,aAAa,UAAY,EAAA;AAClC,IAAA,MAAM,CAAC,GAAK,EAAA,KAAK,CAAI,GAAA,SAAA,CAAU,MAAM,GAAG,CAAA,CAAA;AACxC,IAAA,MAAA,CAAO,WAAW,GAAI,CAAA,IAAA,EAAM,CAAA,GAAI,MAAM,IAAK,EAAA,CAAA;AAAA,GAC7C;AACA,EAAO,OAAA,MAAA,CAAA;AACT;;;;"}
@@ -0,0 +1,18 @@
1
+ function pipeline(stream, ...transformers) {
2
+ return new ReadableStream({
3
+ start: (controller) => {
4
+ (() => transformers.reduce(
5
+ (stream2, transformer) => stream2.pipeThrough(transformer),
6
+ stream
7
+ ).pipeTo(
8
+ new WritableStream({
9
+ write: (v) => controller.enqueue(v),
10
+ close: () => controller.close()
11
+ })
12
+ ))();
13
+ }
14
+ });
15
+ }
16
+
17
+ export { pipeline };
18
+ //# sourceMappingURL=pipeline.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pipeline.js","sources":["../../../src/utils/pipeline.ts"],"sourcesContent":["export function pipeline<I, T, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T>,\n transformer2: TransformStream<T, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, O>,\n): ReadableStream<O>;\nexport function pipeline<I, T1, T2, T3, O>(\n stream: ReadableStream<I>,\n transformer1: TransformStream<I, T1>,\n transformer2: TransformStream<T1, T2>,\n transformer3: TransformStream<T2, T3>,\n transformer4: TransformStream<T3, O>,\n): ReadableStream<O>;\nexport function pipeline<I, O>(\n stream: ReadableStream<I>,\n ...transformers: TransformStream[]\n): ReadableStream<O> {\n return new ReadableStream({\n start: (controller) => {\n (() =>\n transformers\n .reduce<ReadableStream>(\n (stream, transformer) => stream.pipeThrough(transformer),\n stream,\n )\n .pipeTo(\n new WritableStream({\n write: (v) => controller.enqueue(v),\n close: () => controller.close(),\n }),\n ))();\n },\n });\n}\n"],"names":["stream"],"mappings":"AAkBgB,SAAA,QAAA,CACd,WACG,YACgB,EAAA;AACnB,EAAA,OAAO,IAAI,cAAe,CAAA;AAAA,IACxB,KAAA,EAAO,CAAC,UAAe,KAAA;AACrB,MAAA,CAAC,MACC,YACG,CAAA,MAAA;AAAA,QACC,CAACA,OAAAA,EAAQ,WAAgBA,KAAAA,OAAAA,CAAO,YAAY,WAAW,CAAA;AAAA,QACvD,MAAA;AAAA,OAED,CAAA,MAAA;AAAA,QACC,IAAI,cAAe,CAAA;AAAA,UACjB,KAAO,EAAA,CAAC,CAAM,KAAA,UAAA,CAAW,QAAQ,CAAC,CAAA;AAAA,UAClC,KAAA,EAAO,MAAM,UAAA,CAAW,KAAM,EAAA;AAAA,SAC/B,CAAA;AAAA,OACA,GAAA,CAAA;AAAA,KACT;AAAA,GACD,CAAA,CAAA;AACH;;;;"}
@@ -0,0 +1,10 @@
1
+ export { Field, FieldDelimiter, RecordDelimiter } from './common/constants.js';
2
+ export { LexerTransformer } from './LexerTransformer.js';
3
+ export { RecordAssemblerTransformer } from './RecordAssemblerTransformer.js';
4
+ export { parseString } from './parseString.js';
5
+ export { parseBinary } from './parseBinary.js';
6
+ export { parseUint8ArrayStream } from './parseUint8ArrayStream.js';
7
+ export { parseStringStream } from './parseStringStream.js';
8
+ export { parseResponse } from './parseResponse.js';
9
+ export { parse } from './parse.js';
10
+ //# sourceMappingURL=web-csv-toolbox.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"web-csv-toolbox.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}
@@ -0,0 +1,7 @@
1
+ import { CommonOptions, Token } from "./common/types.ts";
2
+ export declare class Lexer {
3
+ #private;
4
+ constructor({ delimiter, quotation, }?: CommonOptions);
5
+ lex(chunk: string | null, buffering?: boolean): IterableIterator<Token>;
6
+ flush(): Token[];
7
+ }
@@ -0,0 +1,34 @@
1
+ import { CommonOptions, Token } from "./common/types.ts";
2
+ /**
3
+ * A transform stream that converts a stream of tokens into a stream of rows.
4
+ *
5
+ * @category Low-level API
6
+ *
7
+ * @example Parse a CSV with headers by data
8
+ * ```ts
9
+ * new ReadableStream({
10
+ * start(controller) {
11
+ * controller.enqueue("name,age\r\n");
12
+ * controller.enqueue("Alice,20\r\n");
13
+ * controller.close();
14
+ * }
15
+ * })
16
+ * .pipeThrough(new LexerTransformer())
17
+ * .pipeTo(new WritableStream({ write(tokens) {
18
+ * for (const token of tokens) {
19
+ * console.log(token);
20
+ * }
21
+ * }}));
22
+ * // { type: Field, value: "name" }
23
+ * // FieldDelimiter
24
+ * // { type: Field, value: "age" }
25
+ * // RecordDelimiter
26
+ * // { type: Field, value: "Alice" }
27
+ * // FieldDelimiter
28
+ * // { type: Field, value: "20" }
29
+ * // RecordDelimiter
30
+ * ```
31
+ */
32
+ export declare class LexerTransformer extends TransformStream<string, Token[]> {
33
+ constructor(options?: CommonOptions);
34
+ }
@@ -0,0 +1,7 @@
1
+ import { CSVRecord, RecordAssemblerOptions, Token } from "./common/types.ts";
2
+ export declare class RecordAssembler<Header extends ReadonlyArray<string>> {
3
+ #private;
4
+ constructor(options?: RecordAssemblerOptions<Header>);
5
+ assemble(tokens: Iterable<Token>, flush?: boolean): IterableIterator<CSVRecord<Header>>;
6
+ flush(): Generator<CSVRecord<Header>>;
7
+ }
@@ -0,0 +1,47 @@
1
+ import { CSVRecord, RecordAssemblerOptions, Token } from "./common/types.ts";
2
+ /**
3
+ * A transform stream that converts a stream of tokens into a stream of rows.
4
+ * @template Header The type of the header row.
5
+ * @param options The options for the parser.
6
+ *
7
+ * @category Low-level API
8
+ *
9
+ * @example Parse a CSV with headers by data
10
+ * ```ts
11
+ * new ReadableStream({
12
+ * start(controller) {
13
+ * controller.enqueue("name,age\r\n");
14
+ * controller.enqueue("Alice,20\r\n");
15
+ * controller.enqueue("Bob,25\r\n");
16
+ * controller.enqueue("Charlie,30\r\n");
17
+ * controller.close();
18
+ * })
19
+ * .pipeThrough(new LexerTransformer())
20
+ * .pipeThrough(new RecordAssemblerTransformer())
21
+ * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
22
+ * // { name: "Alice", age: "20" }
23
+ * // { name: "Bob", age: "25" }
24
+ * // { name: "Charlie", age: "30" }
25
+ * ```
26
+ *
27
+ * @example Parse a CSV with headers by options
28
+ * ```ts
29
+ * new ReadableStream({
30
+ * start(controller) {
31
+ * controller.enqueue("Alice,20\r\n");
32
+ * controller.enqueue("Bob,25\r\n");
33
+ * controller.enqueue("Charlie,30\r\n");
34
+ * controller.close();
35
+ * }
36
+ * })
37
+ * .pipeThrough(new LexerTransformer())
38
+ * .pipeThrough(new RecordAssemblerTransformer({ header: ["name", "age"] }))
39
+ * .pipeTo(new WritableStream({ write(row) { console.log(row); }}));
40
+ * // { name: "Alice", age: "20" }
41
+ * // { name: "Bob", age: "25" }
42
+ * // { name: "Charlie", age: "30" }
43
+ * ```
44
+ */
45
+ export declare class RecordAssemblerTransformer<Header extends ReadonlyArray<string>> extends TransformStream<Token[], CSVRecord<Header>> {
46
+ constructor(options?: RecordAssemblerOptions<Header>);
47
+ }
@@ -0,0 +1,7 @@
1
+ import { CommonOptions } from "./common/types.ts";
2
+ /**
3
+ * Assert that the options are valid.
4
+ *
5
+ * @param options The options to assert.
6
+ */
7
+ export declare function assertCommonOptions(options: Required<CommonOptions>): void;
@@ -0,0 +1,15 @@
1
+ /**
2
+ * FieldDelimiter is a symbol for field delimiter of CSV.
3
+ * @category Constants
4
+ */
5
+ export declare const FieldDelimiter: unique symbol;
6
+ /**
7
+ * RecordDelimiter is a symbol for record delimiter of CSV.
8
+ * @category Constants
9
+ */
10
+ export declare const RecordDelimiter: unique symbol;
11
+ /**
12
+ * Field is a symbol for field of CSV.
13
+ * @category Constants
14
+ */
15
+ export declare const Field: unique symbol;
@@ -0,0 +1,163 @@
1
+ import { Field, FieldDelimiter, RecordDelimiter } from "./constants.ts";
2
+ /**
3
+ * Field token type.
4
+ * @category Types
5
+ */
6
+ export interface FieldToken {
7
+ type: typeof Field;
8
+ value: string;
9
+ }
10
+ /**
11
+ * Token is a atomic unit of a CSV file.
12
+ * It can be a field, field delimiter, or record delimiter.
13
+ * @category Types
14
+ */
15
+ export type Token = FieldToken | typeof FieldDelimiter | typeof RecordDelimiter;
16
+ /**
17
+ * CSV Common Options.
18
+ * @category Types
19
+ */
20
+ export interface CommonOptions {
21
+ /**
22
+ * CSV field delimiter.
23
+ *
24
+ * @remarks
25
+ * If you want to parse TSV, specify `'\t'`.
26
+ *
27
+ * This library supports multi-character delimiters.
28
+ * @default ','
29
+ */
30
+ delimiter?: string;
31
+ /**
32
+ * CSV field quotation.
33
+ *
34
+ * @remarks
35
+ * This library supports multi-character quotations.
36
+ *
37
+ * @default '"'
38
+ */
39
+ quotation?: string;
40
+ }
41
+ /**
42
+ * CSV Parsing Options for binary.
43
+ * @category Types
44
+ */
45
+ export interface BinaryOptions {
46
+ /**
47
+ * If the binary is compressed by a compression algorithm,
48
+ * the decompressed CSV can be parsed by specifying the algorithm.
49
+ *
50
+ * @remarks
51
+ * Make sure the runtime you are running supports stream decompression.
52
+ *
53
+ * See {@link https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream#browser_compatibility | DecompressionStream Compatibility}.
54
+ */
55
+ decomposition?: CompressionFormat;
56
+ /**
57
+ * You can specify the character encoding of the binary.
58
+ *
59
+ * @remarks
60
+ * {@link !TextDecoderStream} is used internally.
61
+ *
62
+ * See {@link https://developer.mozilla.org/en-US/docs/Web/API/Encoding_API/Encodings | Encoding API Compatibility}
63
+ * for the encoding formats that can be specified.
64
+ *
65
+ * @default 'utf-8'
66
+ */
67
+ charset?: string;
68
+ /**
69
+ * If the binary has a BOM, you can specify whether to ignore it.
70
+ *
71
+ * @remarks
72
+ * If you specify true, the BOM will be ignored.
73
+ * If you specify false or not specify it, the BOM will be treated as a normal character.
74
+ * See {@link https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/ignoreBOM | TextDecoderOptions.ignoreBOM} for more information about the BOM.
75
+ * @default false
76
+ */
77
+ ignoreBOM?: boolean;
78
+ /**
79
+ * If the binary has a invalid character, you can specify whether to throw an error.
80
+ *
81
+ * @remarks
82
+ * If the property is `true` then a decoder will throw a {@link !TypeError}
83
+ * if it encounters malformed data while decoding.
84
+ *
85
+ * If `false` the decoder will substitute the invalid data
86
+ * with the replacement character `U+FFFD` (�).
87
+ *
88
+ * See {@link https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream/fatal | TextDecoderOptions.fatal} for more information.
89
+ *
90
+ * @default false
91
+ */
92
+ fatal?: boolean;
93
+ }
94
+ /**
95
+ * Record Assembler Options for CSV.
96
+ * @category Types
97
+ *
98
+ * @remarks
99
+ * If you specify `header: ['foo', 'bar']`,
100
+ * the first record will be treated as a normal record.
101
+ *
102
+ * If you don't specify `header`,
103
+ * the first record will be treated as a header.
104
+ */
105
+ export interface RecordAssemblerOptions<Header extends ReadonlyArray<string>> {
106
+ /**
107
+ * CSV header.
108
+ *
109
+ * @remarks
110
+ * If you specify this option,
111
+ * the first record will be treated as a normal record.
112
+ *
113
+ * If you don't specify this option,
114
+ * the first record will be treated as a header.
115
+ *
116
+ * @default undefined
117
+ */
118
+ header?: Header;
119
+ }
120
+ /**
121
+ * Parse options for CSV string.
122
+ * @category Types
123
+ */
124
+ export interface ParseOptions<Header extends ReadonlyArray<string>> extends CommonOptions, RecordAssemblerOptions<Header> {
125
+ }
126
+ /**
127
+ * Parse options for CSV binary.
128
+ * @category Types
129
+ */
130
+ export interface ParseBinaryOptions<Header extends ReadonlyArray<string>> extends ParseOptions<Header>, BinaryOptions {
131
+ }
132
+ /**
133
+ * CSV Record.
134
+ * @category Types
135
+ * @template Header Header of the CSV.
136
+ *
137
+ * @example Header is ["foo", "bar"]
138
+ * ```ts
139
+ * const record: CSVRecord<["foo", "bar"]> = {
140
+ * foo: "1",
141
+ * bar: "2",
142
+ * };
143
+ * ```
144
+ */
145
+ export type CSVRecord<Header extends ReadonlyArray<string>> = Record<Header[number], string>;
146
+ /**
147
+ * CSV String.
148
+ *
149
+ * @category Types
150
+ */
151
+ export type CSVString = string | ReadableStream<string>;
152
+ /**
153
+ * CSV Binary.
154
+ *
155
+ * @category Types
156
+ */
157
+ export type CSVBinary = ReadableStream<Uint8Array> | Response | ArrayBuffer | Uint8Array;
158
+ /**
159
+ * CSV.
160
+ *
161
+ * @category Types
162
+ */
163
+ export type CSV = CSVString | CSVBinary;
@@ -0,0 +1,11 @@
1
+ export declare const CR = "\r";
2
+ export declare const CRLF = "\r\n";
3
+ export declare const LF = "\n";
4
+ /**
5
+ * COMMA is a symbol for comma(,).
6
+ */
7
+ export declare const COMMA = ",";
8
+ /**
9
+ * DOUBLE_QUOTE is a symbol for double quote(").
10
+ */
11
+ export declare const DOUBLE_QUOTE = "\"";
@@ -0,0 +1,2 @@
1
+ import { ParseBinaryOptions } from "./common/types.ts";
2
+ export declare function convertBinaryToString<Header extends ReadonlyArray<string>>(binary: Uint8Array | ArrayBuffer, options: ParseBinaryOptions<Header>): string;
@@ -0,0 +1,13 @@
1
+ import { CommonOptions } from "./common/types.ts";
2
+ export interface EscapeFieldOptions extends CommonOptions {
3
+ quote?: true;
4
+ }
5
+ /**
6
+ * Escape the field.
7
+ *
8
+ * DO NOT USE THIS FUNCTION BEFORE THE OPTIONS ARE ASSERTED BY `{@link assertCommonOptions}`.
9
+ * @param value The field value to escape.
10
+ * @param options The options.
11
+ * @returns The escaped field.
12
+ */
13
+ export declare function escapeField(value: string, { quotation, delimiter, quote, }?: EscapeFieldOptions): string;
@@ -0,0 +1,2 @@
1
+ import { ParseBinaryOptions } from "./common/types.ts";
2
+ export declare function getOptionsFromResponse<Header extends ReadonlyArray<string>>(response: Response, options?: ParseBinaryOptions<Header>): ParseBinaryOptions<Header>;