web-csv-toolbox 0.13.1-next-afac98bd3a41b6e902268ac4ca6a99a8da883c81 → 0.14.0-next-fe8f8c27b5fcf2744b32ad7dca0a70ed7f47c915

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/getOptionsFromRequest.js ADDED
@@ -0,0 +1,41 @@
+ import { SUPPORTED_COMPRESSIONS } from './getOptionsFromResponse.constants.web.js';
+ import { parseMime } from './utils/parseMime.js';
+
+ function getOptionsFromRequest(request, options = {}) {
+   const { headers } = request;
+   const contentType = headers.get("content-type") ?? "text/csv";
+   const mime = parseMime(contentType);
+   if (mime.type !== "text/csv") {
+     throw new TypeError(`Invalid mime type: "${contentType}"`);
+   }
+   const contentEncoding = headers.get("content-encoding");
+   let decompression;
+   if (contentEncoding) {
+     const normalizedEncoding = contentEncoding.trim().toLowerCase();
+     if (normalizedEncoding.includes(",")) {
+       throw new TypeError(
+         `Multiple content-encodings are not supported: "${contentEncoding}"`
+       );
+     }
+     if (SUPPORTED_COMPRESSIONS.has(normalizedEncoding)) {
+       decompression = normalizedEncoding;
+     } else if (normalizedEncoding) {
+       if (options.allowExperimentalCompressions) {
+         decompression = normalizedEncoding;
+       } else {
+         throw new TypeError(
+           `Unsupported content-encoding: "${contentEncoding}". Supported formats: ${Array.from(SUPPORTED_COMPRESSIONS).join(", ")}. To use experimental formats, set allowExperimentalCompressions: true`
+         );
+       }
+     }
+   }
+   const charset = mime.parameters.charset ?? "utf-8";
+   return {
+     decompression,
+     charset,
+     ...options
+   };
+ }
+
+ export { getOptionsFromRequest };
+ //# sourceMappingURL=getOptionsFromRequest.js.map
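The extraction logic above is straightforward to exercise. A minimal sketch of the observable behavior, importing the module directly and assuming the web build's `SUPPORTED_COMPRESSIONS` covers only the standard `CompressionStream` formats (`gzip`, `deflate`, `deflate-raw`); the URLs and bodies are illustrative:

```ts
import { getOptionsFromRequest } from './getOptionsFromRequest.js';

// A recognized encoding surfaces as the `decompression` option.
const gzipped = new Request('https://example.com/upload', {
  method: 'POST',
  headers: { 'content-type': 'text/csv', 'content-encoding': 'gzip' },
  body: new Uint8Array(), // placeholder body; only the headers matter here
});
console.log(getOptionsFromRequest(gzipped));
// => { decompression: 'gzip', charset: 'utf-8' }

// An unrecognized encoding throws unless explicitly opted in.
const brotli = new Request('https://example.com/upload', {
  method: 'POST',
  headers: { 'content-type': 'text/csv', 'content-encoding': 'br' },
  body: new Uint8Array(),
});
try {
  getOptionsFromRequest(brotli);
} catch (error) {
  console.error(error); // TypeError: Unsupported content-encoding: "br". ...
}
console.log(getOptionsFromRequest(brotli, { allowExperimentalCompressions: true }));
// => { decompression: 'br', charset: 'utf-8', allowExperimentalCompressions: true }
```

Note the `...options` spread in the return value: caller-supplied options win over header-derived values, matching the precedence rule documented for `parseRequest` below.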
package/dist/getOptionsFromRequest.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"getOptionsFromRequest.js","sources":["../src/getOptionsFromRequest.ts"],"sourcesContent":[…],"mappings":"…"}
package/dist/parse.d.ts CHANGED
@@ -4,7 +4,7 @@ import { PickCSVHeader } from './utils/types.ts';
  /**
   * Parse CSV to records.
   *
- * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}> and {@link !Response} are supported.
+ * {@link !String}, {@link !ReadableStream}<string | {@link !Uint8Array}>, {@link !Response}, {@link !Request}, {@link !Blob}, and {@link !File} are supported.
   *
   *
   * @typeParam Header Header type like `['name', 'age']`.
@@ -18,7 +18,7 @@ import { PickCSVHeader } from './utils/types.ts';
   *
   * @remarks
   * {@link parseString}, {@link parseBinary}, {@link parseUint8ArrayStream},
- * {@link parseStringStream} and {@link parseResponse} are used internally.
+ * {@link parseStringStream}, {@link parseResponse}, {@link parseRequest}, and {@link parseBlob} are used internally.
   *
   * If you know the type of the CSV, it performs better to use them directly.
   *
@@ -29,6 +29,8 @@ import { PickCSVHeader } from './utils/types.ts';
   * | {@link !Uint8Array} \| {@link !ArrayBuffer} | {@link parseBinary} | {@link ParseBinaryOptions} |
   * | {@link !ReadableStream}<{@link !Uint8Array}> | {@link parseUint8ArrayStream} | {@link ParseBinaryOptions} |
   * | {@link !Response} | {@link parseResponse} | {@link ParseBinaryOptions} |
+ * | {@link !Request} | {@link parseRequest} | {@link ParseBinaryOptions} |
+ * | {@link !Blob} \| {@link !File} | {@link parseBlob} | {@link ParseBinaryOptions} |
   *
   * **Performance Characteristics:**
   * - **Memory usage**: O(1) - constant per record (streaming approach)
package/dist/parse.js CHANGED
@@ -1,4 +1,6 @@
  import { parseBinary } from './parseBinary.js';
+ import { parseBlob } from './parseBlob.js';
+ import { parseRequest } from './parseRequest.js';
  import { parseResponse } from './parseResponse.js';
  import { parseString } from './parseString.js';
  import { parseStringStream } from './parseStringStream.js';
@@ -7,48 +9,28 @@ import { convertThisAsyncIterableIteratorToArray } from './utils/convertThisAsyn

  async function* parse(csv, options) {
    if (typeof csv === "string") {
-     const iterator = parseString(csv, options);
-     yield* iterator;
+     yield* parseString(csv, options);
    } else if (csv instanceof Uint8Array || csv instanceof ArrayBuffer) {
-     const iterator = parseBinary(csv, options);
-     if (iterator instanceof Promise) {
-       yield* await iterator;
-     } else {
-       yield* iterator;
-     }
+     yield* parseBinary(csv, options);
    } else if (csv instanceof ReadableStream) {
      const [branch1, branch2] = csv.tee();
      const reader1 = branch1.getReader();
      const { value: firstChunk } = await reader1.read();
      reader1.releaseLock();
      if (typeof firstChunk === "string") {
-       const iterator = parseStringStream(
-         branch2,
-         options
-       );
-       if (iterator instanceof Promise) {
-         yield* await iterator;
-       } else {
-         yield* iterator;
-       }
+       yield* parseStringStream(branch2, options);
      } else if (firstChunk instanceof Uint8Array) {
-       const iterator = parseUint8ArrayStream(
+       yield* parseUint8ArrayStream(
          branch2,
          options
        );
-       if (iterator instanceof Promise) {
-         yield* await iterator;
-       } else {
-         yield* iterator;
-       }
      }
    } else if (csv instanceof Response) {
-     const iterator = parseResponse(csv, options);
-     if (iterator instanceof Promise) {
-       yield* await iterator;
-     } else {
-       yield* iterator;
-     }
+     yield* parseResponse(csv, options);
+   } else if (csv instanceof Request) {
+     yield* parseRequest(csv, options);
+   } else if (csv instanceof Blob) {
+     yield* parseBlob(csv, options);
    }
  }
  Object.defineProperties(parse, {
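After this change every branch delegates with a plain `yield*`, and two new branches route `Request` and `Blob` inputs to the dedicated parsers. A short usage sketch of the new dispatch (URL and data are illustrative):

```ts
import { parse } from 'web-csv-toolbox';

const csv = 'name,age\nAlice,42\nBob,69';

// Request inputs now dispatch to parseRequest.
const request = new Request('https://example.com/upload', {
  method: 'POST',
  headers: { 'Content-Type': 'text/csv' },
  body: csv,
});
for await (const record of parse(request)) {
  console.log(record); // { name: 'Alice', age: '42' }, then { name: 'Bob', age: '69' }
}

// Blob inputs (and File, which extends Blob) dispatch to parseBlob.
const blob = new Blob([csv], { type: 'text/csv' });
for await (const record of parse(blob)) {
  console.log(record);
}
```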
package/dist/parse.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"parse.js","sources":["../src/parse.ts"],"sourcesContent":[…],"mappings":"…"}
+ {"version":3,"file":"parse.js","sources":["../src/parse.ts"],"sourcesContent":[…],"mappings":"…"}
package/dist/parseBlob.d.ts ADDED
@@ -0,0 +1,102 @@
+ import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
+ /**
+  * Parse CSV from a {@link !Blob} or {@link !File} to records.
+  *
+  * @remarks
+  * This function can parse CSV data from Blob or File objects.
+  * If the Blob has a type with charset parameter, it will be used for decoding.
+  *
+  * File objects (from file inputs or drag-and-drop) extend Blob and are automatically supported.
+  *
+  * @category Middle-level API
+  * @param blob - The blob or file to parse
+  * @param options - Parsing options
+  * @returns Async iterable iterator of records.
+  *
+  * If you want array of records, use {@link parseBlob.toArray} function.
+  *
+  * @example Parsing CSV from Blob
+  *
+  * ```ts
+  * import { parseBlob } from 'web-csv-toolbox';
+  *
+  * const blob = new Blob(['name,age\nAlice,42\nBob,69'], { type: 'text/csv' });
+  *
+  * for await (const record of parseBlob(blob)) {
+  *   console.log(record);
+  * }
+  * ```
+  *
+  * @example Parsing CSV from File (input element)
+  *
+  * ```ts
+  * import { parseBlob } from 'web-csv-toolbox';
+  *
+  * const input = document.querySelector('input[type="file"]');
+  * input.addEventListener('change', async (event) => {
+  *   const file = event.target.files[0];
+  *   for await (const record of parseBlob(file)) {
+  *     console.log(record);
+  *   }
+  * });
+  * ```
+  *
+  * @example Parsing CSV from Blob with charset
+  *
+  * ```ts
+  * import { parseBlob } from 'web-csv-toolbox';
+  *
+  * const blob = new Blob([csvData], { type: 'text/csv;charset=shift-jis' });
+  *
+  * for await (const record of parseBlob(blob)) {
+  *   console.log(record);
+  * }
+  * ```
+  */
+ export declare function parseBlob<Header extends ReadonlyArray<string>>(blob: Blob, options?: ParseBinaryOptions<Header>): AsyncIterableIterator<CSVRecord<Header>>;
+ export declare namespace parseBlob {
+   /**
+    * Parse CSV from a {@link !Blob} or {@link !File} to array of records.
+    *
+    * @returns Array of records
+    *
+    * @example Parsing CSV from Blob
+    *
+    * ```ts
+    * import { parseBlob } from 'web-csv-toolbox';
+    *
+    * const blob = new Blob(['name,age\nAlice,42\nBob,69'], { type: 'text/csv' });
+    *
+    * const records = await parseBlob.toArray(blob);
+    * console.log(records);
+    * ```
+    */
+   function toArray<Header extends ReadonlyArray<string>>(blob: Blob, options?: ParseBinaryOptions<Header>): Promise<CSVRecord<Header>[]>;
+   /**
+    * Parse CSV from a {@link !Blob} or {@link !File} to stream of records.
+    *
+    * @param blob - Blob or File to parse
+    * @returns Stream of records
+    *
+    * @example Parsing CSV from Blob
+    *
+    * ```ts
+    * import { parseBlob } from 'web-csv-toolbox';
+    *
+    * const blob = new Blob(['name,age\nAlice,42\nBob,69'], { type: 'text/csv' });
+    *
+    * await parseBlob.toStream(blob)
+    *   .pipeTo(
+    *     new WritableStream({
+    *       write(record) {
+    *         console.log(record);
+    *       },
+    *     }),
+    *   );
+    * // Prints:
+    * // { name: 'Alice', age: '42' }
+    * // { name: 'Bob', age: '69' }
+    * ```
+    */
+   function toStream<Header extends ReadonlyArray<string>>(blob: Blob, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>>;
+ }
package/dist/parseBlob.js ADDED
@@ -0,0 +1,31 @@
+ import { commonParseErrorHandling } from './commonParseErrorHandling.js';
+ import { getOptionsFromBlob } from './getOptionsFromBlob.js';
+ import { parseBlobToStream } from './parseBlobToStream.js';
+ import { parseUint8ArrayStream } from './parseUint8ArrayStream.js';
+ import { convertThisAsyncIterableIteratorToArray } from './utils/convertThisAsyncIterableIteratorToArray.js';
+
+ function parseBlob(blob, options) {
+   const options_ = getOptionsFromBlob(blob, options);
+   return (async function* () {
+     try {
+       yield* parseUint8ArrayStream(blob.stream(), options_);
+     } catch (error) {
+       commonParseErrorHandling(error);
+     }
+   })();
+ }
+ Object.defineProperties(parseBlob, {
+   toArray: {
+     enumerable: true,
+     writable: false,
+     value: convertThisAsyncIterableIteratorToArray
+   },
+   toStream: {
+     enumerable: true,
+     writable: false,
+     value: parseBlobToStream
+   }
+ });
+
+ export { parseBlob };
+ //# sourceMappingURL=parseBlob.js.map
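`parseBlob` extracts options from the blob eagerly but wraps the actual parsing in an inner async generator, so parse failures surface only once iteration begins, after being normalized through `commonParseErrorHandling`. A sketch of that timing (the malformed input is illustrative):

```ts
import { parseBlob } from 'web-csv-toolbox';

// An unterminated quoted field, to provoke a parse error.
const blob = new Blob(['name,age\n"Alice,42'], { type: 'text/csv' });

const records = parseBlob(blob); // no error yet: nothing has been read

try {
  for await (const record of records) {
    console.log(record);
  }
} catch (error) {
  // The error is thrown here, during iteration, after being routed
  // through commonParseErrorHandling.
  console.error(error);
}
```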
package/dist/parseBlob.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"parseBlob.js","sources":["../src/parseBlob.ts"],"sourcesContent":[…],"mappings":"…"}
package/dist/parseBlobToStream.d.ts ADDED
@@ -0,0 +1,11 @@
+ import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
+ /**
+  * Parse CSV from a {@link !Blob} or {@link !File} to stream of records.
+  *
+  * @param blob - Blob or File to parse
+  * @param options - Parsing options
+  * @returns Stream of records
+  *
+  * @category Middle-level API
+  */
+ export declare function parseBlobToStream<Header extends ReadonlyArray<string>>(blob: Blob, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>>;
package/dist/parseBlobToStream.js ADDED
@@ -0,0 +1,10 @@
+ import { getOptionsFromBlob } from './getOptionsFromBlob.js';
+ import { parseUint8ArrayStreamToStream } from './parseUint8ArrayStreamToStream.js';
+
+ function parseBlobToStream(blob, options) {
+   const options_ = getOptionsFromBlob(blob, options);
+   return parseUint8ArrayStreamToStream(blob.stream(), options_);
+ }
+
+ export { parseBlobToStream };
+ //# sourceMappingURL=parseBlobToStream.js.map
package/dist/parseBlobToStream.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"parseBlobToStream.js","sources":["../src/parseBlobToStream.ts"],"sourcesContent":[…],"mappings":"…"}
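Because `parseBlobToStream` returns a plain `ReadableStream` of records, it composes with other web streams. For example, converting records to NDJSON; a sketch assuming the function is re-exported from the package root like the other middle-level parsers:

```ts
import { parseBlobToStream } from 'web-csv-toolbox';

const blob = new Blob(['name,age\nAlice,42\nBob,69'], { type: 'text/csv' });

// Re-serialize each record as one JSON line.
await parseBlobToStream(blob)
  .pipeThrough(
    new TransformStream({
      transform(record, controller) {
        controller.enqueue(JSON.stringify(record) + '\n');
      },
    }),
  )
  .pipeTo(
    new WritableStream({
      write(line) {
        console.log(line);
      },
    }),
  );
// Prints:
// {"name":"Alice","age":"42"}
// {"name":"Bob","age":"69"}
```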
package/dist/parseFile.d.ts ADDED
@@ -0,0 +1,93 @@
+ import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
+ /**
+  * Parse CSV from a {@link !File} to records.
+  *
+  * @remarks
+  * This is an alias for {@link parseBlob} since File extends Blob.
+  *
+  * This function can parse CSV data from File objects (from file inputs or drag-and-drop).
+  * If the File has a type with charset parameter, it will be used for decoding.
+  *
+  * @category Middle-level API
+  * @param file - The file to parse
+  * @param options - Parsing options
+  * @returns Async iterable iterator of records.
+  *
+  * If you want array of records, use {@link parseFile.toArray} function.
+  *
+  * @example Parsing CSV from File (input element)
+  *
+  * ```ts
+  * import { parseFile } from 'web-csv-toolbox';
+  *
+  * const input = document.querySelector('input[type="file"]');
+  * input.addEventListener('change', async (event) => {
+  *   const file = event.target.files[0];
+  *   for await (const record of parseFile(file)) {
+  *     console.log(record);
+  *   }
+  * });
+  * ```
+  *
+  * @example Parsing CSV from File (drag-and-drop)
+  *
+  * ```ts
+  * import { parseFile } from 'web-csv-toolbox';
+  *
+  * dropZone.addEventListener('drop', async (event) => {
+  *   event.preventDefault();
+  *   const file = event.dataTransfer.files[0];
+  *   for await (const record of parseFile(file)) {
+  *     console.log(record);
+  *   }
+  * });
+  * ```
+  */
+ export declare function parseFile<Header extends ReadonlyArray<string>>(file: File, options?: ParseBinaryOptions<Header>): AsyncIterableIterator<CSVRecord<Header>>;
+ export declare namespace parseFile {
+   /**
+    * Parse CSV from a {@link !File} to array of records.
+    *
+    * @returns Array of records
+    *
+    * @example Parsing CSV from File
+    *
+    * ```ts
+    * import { parseFile } from 'web-csv-toolbox';
+    *
+    * const input = document.querySelector('input[type="file"]');
+    * input.addEventListener('change', async (event) => {
+    *   const file = event.target.files[0];
+    *   const records = await parseFile.toArray(file);
+    *   console.log(records);
+    * });
+    * ```
+    */
+   function toArray<Header extends ReadonlyArray<string>>(file: File, options?: ParseBinaryOptions<Header>): Promise<CSVRecord<Header>[]>;
+   /**
+    * Parse CSV from a {@link !File} to stream of records.
+    *
+    * @param file - File to parse
+    * @returns Stream of records
+    *
+    * @example Parsing CSV from File
+    *
+    * ```ts
+    * import { parseFile } from 'web-csv-toolbox';
+    *
+    * const input = document.querySelector('input[type="file"]');
+    * input.addEventListener('change', async (event) => {
+    *   const file = event.target.files[0];
+    *   await parseFile.toStream(file)
+    *     .pipeTo(
+    *       new WritableStream({
+    *         write(record) {
+    *           console.log(record);
+    *         },
+    *       }),
+    *     );
+    * });
+    * ```
+    */
+   function toStream<Header extends ReadonlyArray<string>>(file: File, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>>;
+ }
package/dist/parseFile.js ADDED
@@ -0,0 +1,20 @@
+ import { parseBlob } from './parseBlob.js';
+
+ function parseFile(file, options) {
+   return parseBlob(file, options);
+ }
+ Object.defineProperties(parseFile, {
+   toArray: {
+     enumerable: true,
+     writable: false,
+     value: parseBlob.toArray
+   },
+   toStream: {
+     enumerable: true,
+     writable: false,
+     value: parseBlob.toStream
+   }
+ });
+
+ export { parseFile };
+ //# sourceMappingURL=parseFile.js.map
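As the diff shows, `parseFile` is a thin alias over `parseBlob`: the call forwards directly, and `toArray`/`toStream` are the very same function objects rather than re-implementations. For instance:

```ts
import { parseBlob, parseFile } from 'web-csv-toolbox';

// The namespace members are shared, not duplicated.
console.log(parseFile.toArray === parseBlob.toArray);   // true
console.log(parseFile.toStream === parseBlob.toStream); // true

// So both entry points behave identically for File inputs.
const file = new File(['name,age\nAlice,42'], 'data.csv', { type: 'text/csv' });
console.log(await parseFile.toArray(file));
// => [ { name: 'Alice', age: '42' } ]
```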
package/dist/parseFile.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"parseFile.js","sources":["../src/parseFile.ts"],"sourcesContent":[…],"mappings":"…"}
package/dist/parseRequest.d.ts ADDED
@@ -0,0 +1,120 @@
+ import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
+ /**
+  * Parse an HTTP Request that contains CSV to records,
+  * ideal for server-side use cases.
+  *
+  * @remarks
+  * This function automatically handles request headers.
+  *
+  * - If `Content-Type` header is not set, it assumes `text/csv`.
+  * - If `Content-Type` header is not `text/csv`, it throws an error.
+  * - If `Content-Type` header has charset parameter, it uses it for decoding.
+  * - If `Content-Encoding` header is set, it decompresses the request.
+  * - Should there be any conflicting information between the header and the options, the option's value will take precedence.
+  *
+  * This function is particularly useful for server-side environments like Cloudflare Workers,
+  * Service Workers, or other edge computing platforms that use the Request API.
+  *
+  * @category Middle-level API
+  * @param request - The request object to parse
+  * @param options - Parsing options
+  * @returns Async iterable iterator of records.
+  *
+  * If you want array of records, use {@link parseRequest.toArray} function.
+  *
+  * @example Parsing CSV from Request (Cloudflare Workers)
+  *
+  * ```ts
+  * import { parseRequest } from 'web-csv-toolbox';
+  *
+  * export default {
+  *   async fetch(request: Request) {
+  *     if (request.method === 'POST' && request.headers.get('content-type')?.includes('text/csv')) {
+  *       for await (const record of parseRequest(request)) {
+  *         console.log(record);
+  *       }
+  *       return new Response('CSV processed', { status: 200 });
+  *     }
+  *     return new Response('Not Found', { status: 404 });
+  *   }
+  * };
+  * ```
+  *
+  * @example Parsing CSV from Request (Service Worker)
+  *
+  * ```ts
+  * import { parseRequest } from 'web-csv-toolbox';
+  *
+  * self.addEventListener('fetch', (event) => {
+  *   const request = event.request;
+  *   if (request.method === 'POST' && request.url.endsWith('/upload-csv')) {
+  *     event.respondWith(
+  *       (async () => {
+  *         const records = [];
+  *         for await (const record of parseRequest(request)) {
+  *           records.push(record);
+  *         }
+  *         return new Response(JSON.stringify(records), {
+  *           headers: { 'Content-Type': 'application/json' }
+  *         });
+  *       })()
+  *     );
+  *   }
+  * });
+  * ```
+  */
+ export declare function parseRequest<Header extends ReadonlyArray<string>>(request: Request, options?: ParseBinaryOptions<Header>): AsyncIterableIterator<CSVRecord<Header>>;
+ export declare namespace parseRequest {
+   /**
+    * Parse CSV Request to array of records.
+    *
+    * @returns Array of records
+    *
+    * @example Parsing CSV Request
+    *
+    * ```ts
+    * import { parseRequest } from 'web-csv-toolbox';
+    *
+    * const request = new Request('https://example.com', {
+    *   method: 'POST',
+    *   headers: { 'Content-Type': 'text/csv' },
+    *   body: 'name,age\nAlice,42\nBob,69'
+    * });
+    *
+    * const records = await parseRequest.toArray(request);
+    * console.log(records);
+    * ```
+    */
+   function toArray<Header extends ReadonlyArray<string>>(request: Request, options?: ParseBinaryOptions<Header>): Promise<CSVRecord<Header>[]>;
+   /**
+    * Parse CSV Request to stream of records.
+    *
+    * @param request - Request to parse
+    * @returns Stream of records
+    *
+    * @example Parsing CSV Request
+    *
+    * ```ts
+    * import { parseRequest } from 'web-csv-toolbox';
+    *
+    * const request = new Request('https://example.com', {
+    *   method: 'POST',
+    *   headers: { 'Content-Type': 'text/csv' },
+    *   body: 'name,age\nAlice,42\nBob,69'
+    * });
+    *
+    * await parseRequest.toStream(request)
+    *   .pipeTo(
+    *     new WritableStream({
+    *       write(record) {
+    *         console.log(record);
+    *       },
+    *     }),
+    *   );
+    * // Prints:
+    * // { name: 'Alice', age: '42' }
+    * // { name: 'Bob', age: '69' }
+    * ```
+    */
+   function toStream<Header extends ReadonlyArray<string>>(request: Request, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>>;
+ }
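Combined with the `Content-Encoding` handling added in `getOptionsFromRequest`, a compressed upload can be parsed directly. A sketch assuming a runtime with `CompressionStream` (URL and payload are illustrative):

```ts
import { parseRequest } from 'web-csv-toolbox';

const csv = 'name,age\nAlice,42\nBob,69';

// Gzip the payload up front so the Request carries a plain ArrayBuffer body.
const body = await new Response(
  new Blob([csv]).stream().pipeThrough(new CompressionStream('gzip')),
).arrayBuffer();

const request = new Request('https://example.com/upload-csv', {
  method: 'POST',
  headers: { 'Content-Type': 'text/csv', 'Content-Encoding': 'gzip' },
  body,
});

// parseRequest reads Content-Encoding and decompresses before parsing.
for await (const record of parseRequest(request)) {
  console.log(record);
}
// Prints:
// { name: 'Alice', age: '42' }
// { name: 'Bob', age: '69' }
```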