web-csv-toolbox 0.14.0-next-e45bc4d089f1fb259a7596b9862b3b34e717dab7 → 0.14.0-next-978b88933762ecc27270ce746b80a3fa7ed8c4f7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -48
- package/dist/CSVLexer.js +8 -5
- package/dist/CSVLexer.js.map +1 -1
- package/dist/CSVLexerTransformer.d.ts +10 -12
- package/dist/CSVLexerTransformer.js +12 -16
- package/dist/CSVLexerTransformer.js.map +1 -1
- package/dist/CSVRecordAssembler.js +14 -4
- package/dist/CSVRecordAssembler.js.map +1 -1
- package/dist/CSVRecordAssemblerTransformer.d.ts +8 -14
- package/dist/CSVRecordAssemblerTransformer.js +10 -16
- package/dist/CSVRecordAssemblerTransformer.js.map +1 -1
- package/dist/assertCommonOptions.d.ts +1 -1
- package/dist/assertCommonOptions.js.map +1 -1
- package/dist/common/errors.d.ts +32 -0
- package/dist/common/errors.js +18 -0
- package/dist/common/errors.js.map +1 -1
- package/dist/common/types.d.ts +249 -66
- package/dist/constants.d.ts +12 -0
- package/dist/constants.js +2 -1
- package/dist/constants.js.map +1 -1
- package/dist/execution/EnginePresets.d.ts +52 -12
- package/dist/execution/EnginePresets.js +1 -1
- package/dist/execution/EnginePresets.js.map +1 -1
- package/dist/execution/InternalEngineConfig.js +40 -18
- package/dist/execution/InternalEngineConfig.js.map +1 -1
- package/dist/execution/worker/parseBinaryInWorker.node.js +3 -4
- package/dist/execution/worker/parseBinaryInWorker.node.js.map +1 -1
- package/dist/execution/worker/parseBinaryInWorker.web.js +3 -4
- package/dist/execution/worker/parseBinaryInWorker.web.js.map +1 -1
- package/dist/execution/worker/parseBinaryInWorkerWASM.node.js +3 -4
- package/dist/execution/worker/parseBinaryInWorkerWASM.node.js.map +1 -1
- package/dist/execution/worker/parseBinaryInWorkerWASM.web.js +3 -4
- package/dist/execution/worker/parseBinaryInWorkerWASM.web.js.map +1 -1
- package/dist/execution/worker/parseStreamInWorker.node.js +3 -4
- package/dist/execution/worker/parseStreamInWorker.node.js.map +1 -1
- package/dist/execution/worker/parseStreamInWorker.web.js +3 -4
- package/dist/execution/worker/parseStreamInWorker.web.js.map +1 -1
- package/dist/execution/worker/parseStringInWorker.node.js +3 -4
- package/dist/execution/worker/parseStringInWorker.node.js.map +1 -1
- package/dist/execution/worker/parseStringInWorker.web.js +3 -4
- package/dist/execution/worker/parseStringInWorker.web.js.map +1 -1
- package/dist/execution/worker/parseStringInWorkerWASM.node.js +3 -4
- package/dist/execution/worker/parseStringInWorkerWASM.node.js.map +1 -1
- package/dist/execution/worker/parseStringInWorkerWASM.web.js +3 -4
- package/dist/execution/worker/parseStringInWorkerWASM.web.js.map +1 -1
- package/dist/execution/worker/parseUint8ArrayStreamInWorker.node.js +3 -4
- package/dist/execution/worker/parseUint8ArrayStreamInWorker.node.js.map +1 -1
- package/dist/execution/worker/parseUint8ArrayStreamInWorker.web.js +3 -4
- package/dist/execution/worker/parseUint8ArrayStreamInWorker.web.js.map +1 -1
- package/dist/getOptionsFromFile.d.ts +14 -0
- package/dist/getOptionsFromFile.js +12 -0
- package/dist/getOptionsFromFile.js.map +1 -0
- package/dist/parseBlob.js +9 -1
- package/dist/parseBlob.js.map +1 -1
- package/dist/parseFile.d.ts +3 -2
- package/dist/parseFile.js +7 -3
- package/dist/parseFile.js.map +1 -1
- package/dist/parseFileToArray.d.ts +27 -0
- package/dist/parseFileToArray.js +12 -0
- package/dist/parseFileToArray.js.map +1 -0
- package/dist/parseFileToStream.d.ts +33 -0
- package/dist/parseFileToStream.js +10 -0
- package/dist/parseFileToStream.js.map +1 -0
- package/dist/utils/types.d.ts +21 -10
- package/dist/web-csv-toolbox.d.ts +3 -0
- package/dist/web-csv-toolbox.js +3 -0
- package/dist/web-csv-toolbox.js.map +1 -1
- package/package.json +1 -1
package/dist/execution/worker/parseUint8ArrayStreamInWorker.node.js
CHANGED
@@ -5,10 +5,9 @@ import { collectUint8ArrayStream } from './utils/streamCollector.node.js';
 
 async function* parseUint8ArrayStreamInWorker(stream, options) {
   const combined = await collectUint8ArrayStream(stream, options?.signal);
-  using session = await WorkerSession.create(
-
-
-  });
+  using session = await WorkerSession.create(
+    options?.engine?.worker === true ? options.engine : void 0
+  );
   yield* sendWorkerMessage(
     session.getWorker(),
     {
package/dist/execution/worker/parseUint8ArrayStreamInWorker.node.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseUint8ArrayStreamInWorker.node.js","sources":["../../../src/execution/worker/parseUint8ArrayStreamInWorker.node.ts"],"sourcesContent":["import type {\n CSVRecord,\n ParseBinaryOptions,\n ParseOptions,\n} from \"../../common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"../../constants.ts\";\nimport { WorkerSession } from \"./helpers/WorkerSession.ts\";\nimport { sendWorkerMessage } from \"./utils/messageHandler.ts\";\nimport { serializeOptions } from \"./utils/serializeOptions.ts\";\nimport { collectUint8ArrayStream } from \"./utils/streamCollector.node.ts\";\n\n/**\n * Parse CSV Uint8Array stream in Worker thread (Node.js).\n * Collects stream into Uint8Array first, then sends to worker.\n *\n * Note: Node.js Worker Threads do not support ReadableStream transfer,\n * so we collect the stream into an array first.\n *\n * @internal\n */\nexport async function* parseUint8ArrayStreamInWorker<\n Header extends ReadonlyArray<string>,\n Delimiter extends string = DEFAULT_DELIMITER,\n Quotation extends string = DEFAULT_QUOTATION,\n>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n // Node.js: Collect stream into Uint8Array first\n const combined = await collectUint8ArrayStream(stream, options?.signal);\n\n using session = await WorkerSession.create(
+
{"version":3,"file":"parseUint8ArrayStreamInWorker.node.js","sources":["../../../src/execution/worker/parseUint8ArrayStreamInWorker.node.ts"],"sourcesContent":["import type {\n CSVRecord,\n ParseBinaryOptions,\n ParseOptions,\n} from \"../../common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"../../constants.ts\";\nimport { WorkerSession } from \"./helpers/WorkerSession.ts\";\nimport { sendWorkerMessage } from \"./utils/messageHandler.ts\";\nimport { serializeOptions } from \"./utils/serializeOptions.ts\";\nimport { collectUint8ArrayStream } from \"./utils/streamCollector.node.ts\";\n\n/**\n * Parse CSV Uint8Array stream in Worker thread (Node.js).\n * Collects stream into Uint8Array first, then sends to worker.\n *\n * Note: Node.js Worker Threads do not support ReadableStream transfer,\n * so we collect the stream into an array first.\n *\n * @internal\n */\nexport async function* parseUint8ArrayStreamInWorker<\n Header extends ReadonlyArray<string>,\n Delimiter extends string = DEFAULT_DELIMITER,\n Quotation extends string = DEFAULT_QUOTATION,\n>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n // Node.js: Collect stream into Uint8Array first\n const combined = await collectUint8ArrayStream(stream, options?.signal);\n\n using session = await WorkerSession.create(\n options?.engine?.worker === true ? options.engine : undefined,\n );\n\n yield* sendWorkerMessage<CSVRecord<Header>>(\n session.getWorker(),\n {\n id: session.getNextRequestId(),\n type: \"parseBinary\",\n data: combined,\n options: serializeOptions(options),\n useWASM: false,\n },\n options as ParseOptions<Header> | ParseBinaryOptions<Header> | undefined,\n );\n}\n"],"names":[],"mappings":";;;;;AAoBA,gBAAuB,6BAAA,CAKrB,QACA,OAAA,EAC0C;AAE1C,EAAA,MAAM,QAAA,GAAW,MAAM,uBAAA,CAAwB,MAAA,EAAQ,SAAS,MAAM,CAAA;AAEtE,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,MAAA;AAAA,IAClC,OAAA,EAAS,MAAA,EAAQ,MAAA,KAAW,IAAA,GAAO,QAAQ,MAAA,GAAS;AAAA,GACtD;AAEA,EAAA,OAAO,iBAAA;AAAA,IACL,QAAQ,SAAA,EAAU;AAAA,IAClB;AAAA,MACE,EAAA,EAAI,QAAQ,gBAAA,EAAiB;AAAA,MAC7B,IAAA,EAAM,aAAA;AAAA,MACN,IAAA,EAAM,QAAA;AAAA,MACN,OAAA,EAAS,iBAAiB,OAAO,CAAA;AAAA,MACjC,OAAA,EAAS;AAAA,KACX;AAAA,IACA;AAAA,GACF;AACF;;;;"}
package/dist/execution/worker/parseUint8ArrayStreamInWorker.web.js
CHANGED
@@ -3,10 +3,9 @@ import { sendWorkerMessage } from './utils/messageHandler.js';
 import { serializeOptions } from './utils/serializeOptions.js';
 
 async function* parseUint8ArrayStreamInWorker(stream, options) {
-  using session = await WorkerSession.create(
-
-
-  });
+  using session = await WorkerSession.create(
+    options?.engine?.worker === true ? options.engine : void 0
+  );
   yield* sendWorkerMessage(
     session.getWorker(),
     {
package/dist/execution/worker/parseUint8ArrayStreamInWorker.web.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseUint8ArrayStreamInWorker.web.js","sources":["../../../src/execution/worker/parseUint8ArrayStreamInWorker.web.ts"],"sourcesContent":["import type {\n CSVRecord,\n ParseBinaryOptions,\n ParseOptions,\n} from \"../../common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"../../constants.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"../../utils/convertStreamToAsyncIterableIterator.ts\";\nimport { WorkerSession } from \"./helpers/WorkerSession.ts\";\nimport { sendWorkerMessage } from \"./utils/messageHandler.ts\";\nimport { serializeOptions } from \"./utils/serializeOptions.ts\";\n\n/**\n * Parse CSV Uint8Array stream in Worker thread (Browser/Deno).\n * Uses Transferable Streams for zero-copy transfer.\n *\n * @internal\n */\nexport async function* parseUint8ArrayStreamInWorker<\n Header extends ReadonlyArray<string>,\n Delimiter extends string = DEFAULT_DELIMITER,\n Quotation extends string = DEFAULT_QUOTATION,\n>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n using session = await WorkerSession.create(
+
{"version":3,"file":"parseUint8ArrayStreamInWorker.web.js","sources":["../../../src/execution/worker/parseUint8ArrayStreamInWorker.web.ts"],"sourcesContent":["import type {\n CSVRecord,\n ParseBinaryOptions,\n ParseOptions,\n} from \"../../common/types.ts\";\nimport type { DEFAULT_DELIMITER, DEFAULT_QUOTATION } from \"../../constants.ts\";\nimport { convertStreamToAsyncIterableIterator } from \"../../utils/convertStreamToAsyncIterableIterator.ts\";\nimport { WorkerSession } from \"./helpers/WorkerSession.ts\";\nimport { sendWorkerMessage } from \"./utils/messageHandler.ts\";\nimport { serializeOptions } from \"./utils/serializeOptions.ts\";\n\n/**\n * Parse CSV Uint8Array stream in Worker thread (Browser/Deno).\n * Uses Transferable Streams for zero-copy transfer.\n *\n * @internal\n */\nexport async function* parseUint8ArrayStreamInWorker<\n Header extends ReadonlyArray<string>,\n Delimiter extends string = DEFAULT_DELIMITER,\n Quotation extends string = DEFAULT_QUOTATION,\n>(\n stream: ReadableStream<Uint8Array>,\n options?: ParseBinaryOptions<Header, Delimiter, Quotation>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n using session = await WorkerSession.create(\n options?.engine?.worker === true ? options.engine : undefined,\n );\n\n yield* sendWorkerMessage<CSVRecord<Header>>(\n session.getWorker(),\n {\n id: session.getNextRequestId(),\n type: \"parseUint8ArrayStream\",\n data: stream,\n options: serializeOptions(options),\n },\n options as ParseOptions<Header> | ParseBinaryOptions<Header> | undefined,\n [stream], // Transfer stream\n );\n}\n"],"names":[],"mappings":";;;;AAiBA,gBAAuB,6BAAA,CAKrB,QACA,OAAA,EAC0C;AAC1C,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,MAAA;AAAA,IAClC,OAAA,EAAS,MAAA,EAAQ,MAAA,KAAW,IAAA,GAAO,QAAQ,MAAA,GAAS;AAAA,GACtD;AAEA,EAAA,OAAO,iBAAA;AAAA,IACL,QAAQ,SAAA,EAAU;AAAA,IAClB;AAAA,MACE,EAAA,EAAI,QAAQ,gBAAA,EAAiB;AAAA,MAC7B,IAAA,EAAM,uBAAA;AAAA,MACN,IAAA,EAAM,MAAA;AAAA,MACN,OAAA,EAAS,iBAAiB,OAAO;AAAA,KACnC;AAAA,IACA,OAAA;AAAA,IACA,CAAC,MAAM;AAAA;AAAA,GACT;AACF;;;;"}
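Both worker entry points above now forward the caller's engine configuration to WorkerSession.create only when engine.worker is exactly true. A hedged sketch of what that looks like from the public API; whether a given call actually routes through a worker also depends on the engine preset, which is not shown in this diff:

```ts
// Illustrative only: `engine.worker` is the flag checked in the compiled code
// above; any other engine behaviour implied here is an assumption.
import { parseBlob } from 'web-csv-toolbox';

const blob = new Blob(['name,age\nAlice,42\nBob,69'], { type: 'text/csv' });

for await (const record of parseBlob(blob, { engine: { worker: true } })) {
  console.log(record); // { name: 'Alice', age: '42' }, { name: 'Bob', age: '69' }
}
```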
package/dist/getOptionsFromFile.d.ts
ADDED
@@ -0,0 +1,14 @@
+import { ParseBinaryOptions } from './common/types.ts';
+import { DEFAULT_DELIMITER } from './constants.ts';
+/**
+ * Extracts the options from the file object.
+ *
+ * @remarks
+ * This function automatically sets the file name as the error source
+ * for better error reporting (unless explicitly overridden via options).
+ *
+ * @param file - The file object from which to extract the options.
+ * @param options - The options to merge with the extracted options.
+ * @returns The options extracted from the file.
+ */
+export declare function getOptionsFromFile<Header extends ReadonlyArray<string>, Delimiter extends string = DEFAULT_DELIMITER>(file: File, options?: ParseBinaryOptions<Header, Delimiter>): ParseBinaryOptions<Header, Delimiter>;
package/dist/getOptionsFromFile.js
ADDED
@@ -0,0 +1,12 @@
+import { getOptionsFromBlob } from './getOptionsFromBlob.js';
+
+function getOptionsFromFile(file, options = {}) {
+  const blobOptions = getOptionsFromBlob(file, options);
+  return {
+    ...blobOptions,
+    source: options.source ?? file.name
+  };
+}
+
+export { getOptionsFromFile };
+//# sourceMappingURL=getOptionsFromFile.js.map
package/dist/getOptionsFromFile.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"getOptionsFromFile.js","sources":["../src/getOptionsFromFile.ts"],"sourcesContent":["import type { ParseBinaryOptions } from \"./common/types.ts\";\nimport type { DEFAULT_DELIMITER } from \"./constants.ts\";\nimport { getOptionsFromBlob } from \"./getOptionsFromBlob.ts\";\n\n/**\n * Extracts the options from the file object.\n *\n * @remarks\n * This function automatically sets the file name as the error source\n * for better error reporting (unless explicitly overridden via options).\n *\n * @param file - The file object from which to extract the options.\n * @param options - The options to merge with the extracted options.\n * @returns The options extracted from the file.\n */\nexport function getOptionsFromFile<\n Header extends ReadonlyArray<string>,\n Delimiter extends string = DEFAULT_DELIMITER,\n>(\n file: File,\n options: ParseBinaryOptions<Header, Delimiter> = {} as ParseBinaryOptions<\n Header,\n Delimiter\n >,\n): ParseBinaryOptions<Header, Delimiter> {\n // Get options from blob (charset extraction)\n const blobOptions = getOptionsFromBlob(file, options);\n\n // Add file name as source for error reporting if not already set\n return {\n ...blobOptions,\n source: options.source ?? file.name,\n };\n}\n"],"names":[],"mappings":";;AAeO,SAAS,kBAAA,CAId,IAAA,EACA,OAAA,GAAiD,EAAC,EAIX;AAEvC,EAAA,MAAM,WAAA,GAAc,kBAAA,CAAmB,IAAA,EAAM,OAAO,CAAA;AAGpD,EAAA,OAAO;AAAA,IACL,GAAG,WAAA;AAAA,IACH,MAAA,EAAQ,OAAA,CAAQ,MAAA,IAAU,IAAA,CAAK;AAAA,GACjC;AACF;;;;"}
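The new helper above merges the Blob-derived options (such as a charset taken from the MIME type) with the file name as the error source. A minimal usage sketch, assuming it is combined with parseBlob the same way parseFile does below; the sample file and data are made up:

```ts
import { getOptionsFromFile, parseBlob } from 'web-csv-toolbox';

const file = new File(['name,age\nAlice,42'], 'people.csv', { type: 'text/csv' });

const options = getOptionsFromFile(file);
// options.source === 'people.csv' unless a `source` was passed in explicitly.

for await (const record of parseBlob(file, options)) {
  console.log(record); // { name: 'Alice', age: '42' }
}
```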
package/dist/parseBlob.js
CHANGED
@@ -1,14 +1,22 @@
 import { commonParseErrorHandling } from './commonParseErrorHandling.js';
+import { DEFAULT_ARRAY_BUFFER_THRESHOLD } from './constants.js';
 import { getOptionsFromBlob } from './getOptionsFromBlob.js';
+import { parseBinary } from './parseBinary.js';
 import { parseBlobToStream } from './parseBlobToStream.js';
 import { parseUint8ArrayStream } from './parseUint8ArrayStream.js';
 import { convertThisAsyncIterableIteratorToArray } from './utils/convertThisAsyncIterableIteratorToArray.js';
 
 function parseBlob(blob, options) {
   const options_ = getOptionsFromBlob(blob, options);
+  const threshold = options_?.engine?.arrayBufferThreshold ?? DEFAULT_ARRAY_BUFFER_THRESHOLD;
   return (async function* () {
     try {
-      yield* parseUint8ArrayStream(blob.stream(), options_);
+      if (blob.size < threshold) {
+        const buffer = await blob.arrayBuffer();
+        yield* parseBinary(new Uint8Array(buffer), options_);
+      } else {
+        yield* parseUint8ArrayStream(blob.stream(), options_);
+      }
     } catch (error) {
       commonParseErrorHandling(error);
     }
package/dist/parseBlob.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseBlob.js","sources":["../src/parseBlob.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { getOptionsFromBlob } from \"./getOptionsFromBlob.ts\";\nimport { parseBlobToStream } from \"./parseBlobToStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV from a {@link !Blob} or {@link !File} to records.\n *\n * @remarks\n * This function can parse CSV data from Blob or File objects.\n * If the Blob has a type with charset parameter, it will be used for decoding.\n *\n * File objects (from file inputs or drag-and-drop) extend Blob and are automatically supported.\n *\n * @category Middle-level API\n * @param blob - The blob or file to parse\n * @param options - Parsing options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseBlob.toArray} function.\n *\n * @example Parsing CSV from Blob\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob(['name,age\\nAlice,42\\nBob,69'], { type: 'text/csv' });\n *\n * for await (const record of parseBlob(blob)) {\n * console.log(record);\n * }\n * ```\n *\n * @example Parsing CSV from File (input element)\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const input = document.querySelector('input[type=\"file\"]');\n * input.addEventListener('change', async (event) => {\n * const file = event.target.files[0];\n * for await (const record of parseBlob(file)) {\n * console.log(record);\n * }\n * });\n * ```\n *\n * @example Parsing CSV from Blob with charset\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob([csvData], { type: 'text/csv;charset=shift-jis' });\n *\n * for await (const record of parseBlob(blob)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseBlob<Header extends ReadonlyArray<string>>(\n blob: Blob,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n // Extract options from blob\n const options_ = getOptionsFromBlob(blob, options);\n\n // Return wrapper async generator for error handling\n return (async function* () {\n try {\n yield* parseUint8ArrayStream(blob.stream(), options_);\n } catch (error) {\n commonParseErrorHandling(error);\n }\n })();\n}\n\nexport declare namespace parseBlob {\n /**\n * Parse CSV from a {@link !Blob} or {@link !File} to array of records.\n *\n * @returns Array of records\n *\n * @example Parsing CSV from Blob\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob(['name,age\\nAlice,42\\nBob,69'], { type: 'text/csv' });\n *\n * const records = await parseBlob.toArray(blob);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n blob: Blob,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV from a {@link !Blob} or {@link !File} to stream of records.\n *\n * @param blob - Blob or File to parse\n * @returns Stream of records\n *\n * @example Parsing CSV from Blob\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob(['name,age\\nAlice,42\\nBob,69'], { type: 'text/csv' });\n *\n * await parseBlob.toStream(blob)\n * .pipeTo(\n * new WritableStream({\n * 
write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n blob: Blob,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseBlob, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseBlobToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":"
+
{"version":3,"file":"parseBlob.js","sources":["../src/parseBlob.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { commonParseErrorHandling } from \"./commonParseErrorHandling.ts\";\nimport { DEFAULT_ARRAY_BUFFER_THRESHOLD } from \"./constants.ts\";\nimport { getOptionsFromBlob } from \"./getOptionsFromBlob.ts\";\nimport { parseBinary } from \"./parseBinary.ts\";\nimport { parseBlobToStream } from \"./parseBlobToStream.ts\";\nimport { parseUint8ArrayStream } from \"./parseUint8ArrayStream.ts\";\nimport * as internal from \"./utils/convertThisAsyncIterableIteratorToArray.ts\";\n\n/**\n * Parse CSV from a {@link !Blob} or {@link !File} to records.\n *\n * @remarks\n * This function can parse CSV data from Blob or File objects.\n * If the Blob has a type with charset parameter, it will be used for decoding.\n *\n * File objects (from file inputs or drag-and-drop) extend Blob and are automatically supported.\n *\n * @category Middle-level API\n * @param blob - The blob or file to parse\n * @param options - Parsing options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseBlob.toArray} function.\n *\n * @example Parsing CSV from Blob\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob(['name,age\\nAlice,42\\nBob,69'], { type: 'text/csv' });\n *\n * for await (const record of parseBlob(blob)) {\n * console.log(record);\n * }\n * ```\n *\n * @example Parsing CSV from File (input element)\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const input = document.querySelector('input[type=\"file\"]');\n * input.addEventListener('change', async (event) => {\n * const file = event.target.files[0];\n * for await (const record of parseBlob(file)) {\n * console.log(record);\n * }\n * });\n * ```\n *\n * @example Parsing CSV from Blob with charset\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob([csvData], { type: 'text/csv;charset=shift-jis' });\n *\n * for await (const record of parseBlob(blob)) {\n * console.log(record);\n * }\n * ```\n */\nexport function parseBlob<Header extends ReadonlyArray<string>>(\n blob: Blob,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n // Extract options from blob\n const options_ = getOptionsFromBlob(blob, options);\n\n // Get threshold from engine config or use default\n const threshold =\n options_?.engine?.arrayBufferThreshold ?? 
DEFAULT_ARRAY_BUFFER_THRESHOLD;\n\n // Return wrapper async generator for error handling\n return (async function* () {\n try {\n // Choose strategy based on blob size and threshold\n if (blob.size < threshold) {\n // Small file: use arrayBuffer for better performance\n const buffer = await blob.arrayBuffer();\n yield* parseBinary(new Uint8Array(buffer), options_);\n } else {\n // Large file: use streaming for memory efficiency\n yield* parseUint8ArrayStream(blob.stream(), options_);\n }\n } catch (error) {\n commonParseErrorHandling(error);\n }\n })();\n}\n\nexport declare namespace parseBlob {\n /**\n * Parse CSV from a {@link !Blob} or {@link !File} to array of records.\n *\n * @returns Array of records\n *\n * @example Parsing CSV from Blob\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob(['name,age\\nAlice,42\\nBob,69'], { type: 'text/csv' });\n *\n * const records = await parseBlob.toArray(blob);\n * console.log(records);\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n blob: Blob,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV from a {@link !Blob} or {@link !File} to stream of records.\n *\n * @param blob - Blob or File to parse\n * @returns Stream of records\n *\n * @example Parsing CSV from Blob\n *\n * ```ts\n * import { parseBlob } from 'web-csv-toolbox';\n *\n * const blob = new Blob(['name,age\\nAlice,42\\nBob,69'], { type: 'text/csv' });\n *\n * await parseBlob.toStream(blob)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * // Prints:\n * // { name: 'Alice', age: '42' }\n * // { name: 'Bob', age: '69' }\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n blob: Blob,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseBlob, {\n toArray: {\n enumerable: true,\n writable: false,\n value: internal.convertThisAsyncIterableIteratorToArray,\n },\n toStream: {\n enumerable: true,\n writable: false,\n value: parseBlobToStream,\n },\n});\n"],"names":["internal.convertThisAsyncIterableIteratorToArray"],"mappings":";;;;;;;;AA+DO,SAAS,SAAA,CACd,MACA,OAAA,EAC0C;AAE1C,EAAA,MAAM,QAAA,GAAW,kBAAA,CAAmB,IAAA,EAAM,OAAO,CAAA;AAGjD,EAAA,MAAM,SAAA,GACJ,QAAA,EAAU,MAAA,EAAQ,oBAAA,IAAwB,8BAAA;AAG5C,EAAA,OAAA,CAAQ,mBAAmB;AACzB,IAAA,IAAI;AAEF,MAAA,IAAI,IAAA,CAAK,OAAO,SAAA,EAAW;AAEzB,QAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,WAAA,EAAY;AACtC,QAAA,OAAO,WAAA,CAAY,IAAI,UAAA,CAAW,MAAM,GAAG,QAAQ,CAAA;AAAA,MACrD,CAAA,MAAO;AAEL,QAAA,OAAO,qBAAA,CAAsB,IAAA,CAAK,MAAA,EAAO,EAAG,QAAQ,CAAA;AAAA,MACtD;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAA,wBAAA,CAAyB,KAAK,CAAA;AAAA,IAChC;AAAA,EACF,CAAA,GAAG;AACL;AAuDA,MAAA,CAAO,iBAAiB,SAAA,EAAW;AAAA,EACjC,OAAA,EAAS;AAAA,IACP,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,OAAOA;AAAS,GAClB;AAAA,EACA,QAAA,EAAU;AAAA,IACR,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA;AAEX,CAAC,CAAA;;;;"}
package/dist/parseFile.d.ts
CHANGED
@@ -3,11 +3,12 @@ import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
  * Parse CSV from a {@link !File} to records.
  *
  * @remarks
- * This is an alias for {@link parseBlob} since File extends Blob.
- *
  * This function can parse CSV data from File objects (from file inputs or drag-and-drop).
  * If the File has a type with charset parameter, it will be used for decoding.
  *
+ * Unlike {@link parseBlob}, this function automatically sets the file name as the
+ * error source for better error reporting (unless explicitly overridden via options).
+ *
  * @category Middle-level API
  * @param file - The file to parse
  * @param options - Parsing options
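Per the updated remarks, parseFile now reports the file name as the error source. A sketch of what a caller might observe; the exact error type and message format are not shown in this diff:

```ts
import { parseFile } from 'web-csv-toolbox';

// Deliberately malformed CSV (unterminated quote) in a named File.
const file = new File(['name,age\n"Alice,42'], 'broken.csv', { type: 'text/csv' });

try {
  for await (const record of parseFile(file)) {
    console.log(record);
  }
} catch (error) {
  // The thrown error should now reference 'broken.csv' as its source.
  console.error(error);
}
```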
package/dist/parseFile.js
CHANGED
@@ -1,18 +1,22 @@
+import { getOptionsFromFile } from './getOptionsFromFile.js';
 import { parseBlob } from './parseBlob.js';
+import { parseFileToArray } from './parseFileToArray.js';
+import { parseFileToStream } from './parseFileToStream.js';
 
 function parseFile(file, options) {
-
+  const options_ = getOptionsFromFile(file, options);
+  return parseBlob(file, options_);
 }
 Object.defineProperties(parseFile, {
   toArray: {
     enumerable: true,
     writable: false,
-    value:
+    value: parseFileToArray
   },
   toStream: {
     enumerable: true,
     writable: false,
-    value:
+    value: parseFileToStream
   }
 });
 
package/dist/parseFile.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parseFile.js","sources":["../src/parseFile.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseBlob } from \"./parseBlob.ts\";\n\n/**\n * Parse CSV from a {@link !File} to records.\n *\n * @remarks\n * This
+
{"version":3,"file":"parseFile.js","sources":["../src/parseFile.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { getOptionsFromFile } from \"./getOptionsFromFile.ts\";\nimport { parseBlob } from \"./parseBlob.ts\";\nimport { parseFileToArray } from \"./parseFileToArray.ts\";\nimport { parseFileToStream } from \"./parseFileToStream.ts\";\n\n/**\n * Parse CSV from a {@link !File} to records.\n *\n * @remarks\n * This function can parse CSV data from File objects (from file inputs or drag-and-drop).\n * If the File has a type with charset parameter, it will be used for decoding.\n *\n * Unlike {@link parseBlob}, this function automatically sets the file name as the\n * error source for better error reporting (unless explicitly overridden via options).\n *\n * @category Middle-level API\n * @param file - The file to parse\n * @param options - Parsing options\n * @returns Async iterable iterator of records.\n *\n * If you want array of records, use {@link parseFile.toArray} function.\n *\n * @example Parsing CSV from File (input element)\n *\n * ```ts\n * import { parseFile } from 'web-csv-toolbox';\n *\n * const input = document.querySelector('input[type=\"file\"]');\n * input.addEventListener('change', async (event) => {\n * const file = event.target.files[0];\n * for await (const record of parseFile(file)) {\n * console.log(record);\n * }\n * });\n * ```\n *\n * @example Parsing CSV from File (drag-and-drop)\n *\n * ```ts\n * import { parseFile } from 'web-csv-toolbox';\n *\n * dropZone.addEventListener('drop', async (event) => {\n * event.preventDefault();\n * const file = event.dataTransfer.files[0];\n * for await (const record of parseFile(file)) {\n * console.log(record);\n * }\n * });\n * ```\n */\nexport function parseFile<Header extends ReadonlyArray<string>>(\n file: File,\n options?: ParseBinaryOptions<Header>,\n): AsyncIterableIterator<CSVRecord<Header>> {\n const options_ = getOptionsFromFile(file, options);\n return parseBlob(file, options_);\n}\n\nexport declare namespace parseFile {\n /**\n * Parse CSV from a {@link !File} to array of records.\n *\n * @returns Array of records\n *\n * @example Parsing CSV from File\n *\n * ```ts\n * import { parseFile } from 'web-csv-toolbox';\n *\n * const input = document.querySelector('input[type=\"file\"]');\n * input.addEventListener('change', async (event) => {\n * const file = event.target.files[0];\n * const records = await parseFile.toArray(file);\n * console.log(records);\n * });\n * ```\n */\n export function toArray<Header extends ReadonlyArray<string>>(\n file: File,\n options?: ParseBinaryOptions<Header>,\n ): Promise<CSVRecord<Header>[]>;\n /**\n * Parse CSV from a {@link !File} to stream of records.\n *\n * @param file - File to parse\n * @returns Stream of records\n *\n * @example Parsing CSV from File\n *\n * ```ts\n * import { parseFile } from 'web-csv-toolbox';\n *\n * const input = document.querySelector('input[type=\"file\"]');\n * input.addEventListener('change', async (event) => {\n * const file = event.target.files[0];\n * await parseFile.toStream(file)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * });\n * ```\n */\n export function toStream<Header extends ReadonlyArray<string>>(\n file: File,\n options?: ParseBinaryOptions<Header>,\n ): ReadableStream<CSVRecord<Header>>;\n}\n\nObject.defineProperties(parseFile, {\n toArray: {\n enumerable: true,\n writable: false,\n value: parseFileToArray,\n },\n 
toStream: {\n enumerable: true,\n writable: false,\n value: parseFileToStream,\n },\n});\n"],"names":[],"mappings":";;;;;AAmDO,SAAS,SAAA,CACd,MACA,OAAA,EAC0C;AAC1C,EAAA,MAAM,QAAA,GAAW,kBAAA,CAAmB,IAAA,EAAM,OAAO,CAAA;AACjD,EAAA,OAAO,SAAA,CAAU,MAAM,QAAQ,CAAA;AACjC;AAwDA,MAAA,CAAO,iBAAiB,SAAA,EAAW;AAAA,EACjC,OAAA,EAAS;AAAA,IACP,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA,GACT;AAAA,EACA,QAAA,EAAU;AAAA,IACR,UAAA,EAAY,IAAA;AAAA,IACZ,QAAA,EAAU,KAAA;AAAA,IACV,KAAA,EAAO;AAAA;AAEX,CAAC,CAAA;;;;"}
package/dist/parseFileToArray.d.ts
ADDED
@@ -0,0 +1,27 @@
+import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
+/**
+ * Parse CSV from a {@link !File} to array of records.
+ *
+ * @remarks
+ * This function automatically sets the file name as the error source for better
+ * error reporting (unless explicitly overridden via options).
+ *
+ * @category Middle-level API
+ * @param file - The file to parse
+ * @param options - Parsing options
+ * @returns Promise of array of records
+ *
+ * @example Parsing CSV from File
+ *
+ * ```ts
+ * import { parseFileToArray } from 'web-csv-toolbox';
+ *
+ * const input = document.querySelector('input[type="file"]');
+ * input.addEventListener('change', async (event) => {
+ *   const file = event.target.files[0];
+ *   const records = await parseFileToArray(file);
+ *   console.log(records);
+ * });
+ * ```
+ */
+export declare function parseFileToArray<Header extends ReadonlyArray<string>>(file: File, options?: ParseBinaryOptions<Header>): Promise<CSVRecord<Header>[]>;
package/dist/parseFileToArray.js
ADDED
@@ -0,0 +1,12 @@
+import { parseFile } from './parseFile.js';
+
+async function parseFileToArray(file, options) {
+  const rows = [];
+  for await (const row of parseFile(file, options)) {
+    rows.push(row);
+  }
+  return rows;
+}
+
+export { parseFileToArray };
+//# sourceMappingURL=parseFileToArray.js.map
package/dist/parseFileToArray.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"parseFileToArray.js","sources":["../src/parseFileToArray.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { parseFile } from \"./parseFile.ts\";\n\n/**\n * Parse CSV from a {@link !File} to array of records.\n *\n * @remarks\n * This function automatically sets the file name as the error source for better\n * error reporting (unless explicitly overridden via options).\n *\n * @category Middle-level API\n * @param file - The file to parse\n * @param options - Parsing options\n * @returns Promise of array of records\n *\n * @example Parsing CSV from File\n *\n * ```ts\n * import { parseFileToArray } from 'web-csv-toolbox';\n *\n * const input = document.querySelector('input[type=\"file\"]');\n * input.addEventListener('change', async (event) => {\n * const file = event.target.files[0];\n * const records = await parseFileToArray(file);\n * console.log(records);\n * });\n * ```\n */\nexport async function parseFileToArray<Header extends ReadonlyArray<string>>(\n file: File,\n options?: ParseBinaryOptions<Header>,\n): Promise<CSVRecord<Header>[]> {\n const rows: CSVRecord<Header>[] = [];\n for await (const row of parseFile(file, options)) {\n rows.push(row);\n }\n return rows;\n}\n"],"names":[],"mappings":";;AA4BA,eAAsB,gBAAA,CACpB,MACA,OAAA,EAC8B;AAC9B,EAAA,MAAM,OAA4B,EAAC;AACnC,EAAA,WAAA,MAAiB,GAAA,IAAO,SAAA,CAAU,IAAA,EAAM,OAAO,CAAA,EAAG;AAChD,IAAA,IAAA,CAAK,KAAK,GAAG,CAAA;AAAA,EACf;AACA,EAAA,OAAO,IAAA;AACT;;;;"}
package/dist/parseFileToStream.d.ts
ADDED
@@ -0,0 +1,33 @@
+import { CSVRecord, ParseBinaryOptions } from './common/types.ts';
+/**
+ * Parse CSV from a {@link !File} to stream of records.
+ *
+ * @remarks
+ * This function automatically sets the file name as the error source for better
+ * error reporting (unless explicitly overridden via options).
+ *
+ * @category Middle-level API
+ * @param file - File to parse
+ * @param options - Parsing options
+ * @returns Stream of records
+ *
+ * @example Parsing CSV from File
+ *
+ * ```ts
+ * import { parseFileToStream } from 'web-csv-toolbox';
+ *
+ * const input = document.querySelector('input[type="file"]');
+ * input.addEventListener('change', async (event) => {
+ *   const file = event.target.files[0];
+ *   await parseFileToStream(file)
+ *     .pipeTo(
+ *       new WritableStream({
+ *         write(record) {
+ *           console.log(record);
+ *         },
+ *       }),
+ *     );
+ * });
+ * ```
+ */
+export declare function parseFileToStream<Header extends ReadonlyArray<string>>(file: File, options?: ParseBinaryOptions<Header>): ReadableStream<CSVRecord<Header>>;
package/dist/parseFileToStream.js
ADDED
@@ -0,0 +1,10 @@
+import { getOptionsFromFile } from './getOptionsFromFile.js';
+import { parseUint8ArrayStreamToStream } from './parseUint8ArrayStreamToStream.js';
+
+function parseFileToStream(file, options) {
+  const options_ = getOptionsFromFile(file, options);
+  return parseUint8ArrayStreamToStream(file.stream(), options_);
+}
+
+export { parseFileToStream };
+//# sourceMappingURL=parseFileToStream.js.map
package/dist/parseFileToStream.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"parseFileToStream.js","sources":["../src/parseFileToStream.ts"],"sourcesContent":["import type { CSVRecord, ParseBinaryOptions } from \"./common/types.ts\";\nimport { getOptionsFromFile } from \"./getOptionsFromFile.ts\";\nimport { parseUint8ArrayStreamToStream } from \"./parseUint8ArrayStreamToStream.ts\";\n\n/**\n * Parse CSV from a {@link !File} to stream of records.\n *\n * @remarks\n * This function automatically sets the file name as the error source for better\n * error reporting (unless explicitly overridden via options).\n *\n * @category Middle-level API\n * @param file - File to parse\n * @param options - Parsing options\n * @returns Stream of records\n *\n * @example Parsing CSV from File\n *\n * ```ts\n * import { parseFileToStream } from 'web-csv-toolbox';\n *\n * const input = document.querySelector('input[type=\"file\"]');\n * input.addEventListener('change', async (event) => {\n * const file = event.target.files[0];\n * await parseFileToStream(file)\n * .pipeTo(\n * new WritableStream({\n * write(record) {\n * console.log(record);\n * },\n * }),\n * );\n * });\n * ```\n */\nexport function parseFileToStream<Header extends ReadonlyArray<string>>(\n file: File,\n options?: ParseBinaryOptions<Header>,\n): ReadableStream<CSVRecord<Header>> {\n const options_ = getOptionsFromFile(file, options);\n return parseUint8ArrayStreamToStream(file.stream(), options_);\n}\n"],"names":[],"mappings":";;;AAmCO,SAAS,iBAAA,CACd,MACA,OAAA,EACmC;AACnC,EAAA,MAAM,QAAA,GAAW,kBAAA,CAAmB,IAAA,EAAM,OAAO,CAAA;AACjD,EAAA,OAAO,6BAAA,CAA8B,IAAA,CAAK,MAAA,EAAO,EAAG,QAAQ,CAAA;AAC9D;;;;"}
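The two new entry points above wrap parseFile and the Uint8Array-stream pipeline respectively. A short sketch that exercises both with a directly constructed File, complementing the input-element examples in their JSDoc; the sample data is made up:

```ts
import { parseFileToArray, parseFileToStream } from 'web-csv-toolbox';

const file = new File(['name,age\nAlice,42\nBob,69'], 'people.csv', { type: 'text/csv' });

// Buffered: resolve every record at once.
const records = await parseFileToArray(file);
console.log(records); // [{ name: 'Alice', age: '42' }, { name: 'Bob', age: '69' }]

// Streaming: consume records as they are produced.
await parseFileToStream(file).pipeTo(
  new WritableStream({
    write(record) {
      console.log(record);
    },
  }),
);
```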
package/dist/utils/types.d.ts
CHANGED
@@ -1,16 +1,21 @@
+import { CSVString } from '../common/types.ts';
 import { DEFAULT_DELIMITER, DEFAULT_QUOTATION, Newline } from '../constants.ts';
-import { CSVString } from '../web-csv-toolbox.ts';
 /**
- *
+ * Join CSV field array into a CSV-formatted string with proper escaping.
  *
  * @category Types
  *
+ * @remarks
+ * This type handles CSV-specific formatting:
+ * - Quotes fields containing delimiters, quotations, or newlines
+ * - Joins fields with the specified delimiter
+ *
  * @example Default
  *
  * ```ts
  * const header = ["name", "age", "city", "zip"];
  *
- * type _ =
+ * type _ = JoinCSVFields<typeof header>
  * // `name,age,city,zip`
  * ```
  *
@@ -19,22 +24,28 @@ import { CSVString } from '../web-csv-toolbox.ts';
  * ```ts
  * const header = ["name", "a\nge", "city", "zip"];
  *
- * type _ =
+ * type _ = JoinCSVFields<typeof header, "@", "$">
  * // `name@$a\nge$@city@zip`
  * ```
  */
-export type
+
export type JoinCSVFields<Chars extends ReadonlyArray<string | number | boolean | bigint>, Delimiter extends string = DEFAULT_DELIMITER, Quotation extends string = DEFAULT_QUOTATION, Nl extends string = Exclude<Newline, Delimiter | Quotation>> = Chars extends readonly [infer F, ...infer R] ? F extends string ? R extends string[] ? `${F extends `${string}${Nl | Delimiter | Quotation}${string}` ? `${Quotation}${F}${Quotation}` : F}${R extends [] ? "" : Delimiter}${JoinCSVFields<R, Delimiter, Quotation>}` : string : string : "";
 /**
- *
+ * Split CSV-formatted string into field array with proper unescaping.
  *
  * @category Types
  *
+ * @remarks
+ * This type handles CSV-specific parsing:
+ * - Unquotes quoted fields
+ * - Handles escaped quotation marks
+ * - Splits by the specified delimiter
+ *
  * @example Default
  *
  * ```ts
  * const header = `name,age,city,zip`;
  *
- * type _ =
+ * type _ = SplitCSVFields<typeof header>
  * // ["name", "age", "city", "zip"]
  * ```
  *
@@ -44,11 +55,11 @@ export type Join<Chars extends ReadonlyArray<string | number | boolean | bigint>
  * const header = `name@$a
  * ge$@city@zip`;
  *
- * type _ =
+ * type _ = SplitCSVFields<typeof header, "@", "$">
  * // ["name", "a\nge", "city", "zip"]
  * ```
  */
-export type
+
export type SplitCSVFields<Char extends string, Delimiter extends string = DEFAULT_DELIMITER, Quotation extends string = DEFAULT_QUOTATION, Escaping extends boolean = false, Col extends string = "", Result extends string[] = []> = Char extends `${Delimiter}${infer R}` ? Escaping extends true ? SplitCSVFields<R, Delimiter, Quotation, true, `${Col}${Delimiter}`, Result> : SplitCSVFields<R, Delimiter, Quotation, false, "", [...Result, Col]> : Char extends `${Quotation}${infer R}` ? Escaping extends true ? R extends "" | Delimiter | `${Delimiter}${string}` ? SplitCSVFields<R, Delimiter, Quotation, false, Col, Result> : SplitCSVFields<R, Delimiter, Quotation, true, `${Col}${Quotation}`, Result> : SplitCSVFields<R, Delimiter, Quotation, true, Col, Result> : Char extends `${infer F}${infer R}` ? SplitCSVFields<R, Delimiter, Quotation, Escaping, `${Col}${F}`, Result> : [...Result, Col] extends [""] ? readonly string[] : readonly [...Result, Col];
type ExtractString<Source extends CSVString> = Source extends `${infer S}` | ReadableStream<infer S> ? S : string;
type ExtractCSVBody<CSVSource extends CSVString, Delimiter extends string = DEFAULT_DELIMITER, Quotation extends string = DEFAULT_QUOTATION, Nl extends string = Exclude<Newline, Delimiter | Quotation>, Escaping extends boolean = false> = ExtractString<CSVSource> extends `${Quotation}${infer R}` ? Escaping extends true ? R extends Delimiter | Nl | `${Delimiter | Nl}${string}` ? ExtractCSVBody<R, Delimiter, Quotation, Nl, false> : ExtractCSVBody<R, Delimiter, Quotation, Nl, true> : ExtractCSVBody<R, Delimiter, Quotation, Nl, true> : ExtractString<CSVSource> extends `${infer _ extends Nl}${infer R}` ? Escaping extends true ? ExtractCSVBody<R, Delimiter, Quotation, Nl, true> : R : ExtractString<CSVSource> extends `${infer _}${infer R}` ? ExtractCSVBody<R, Delimiter, Quotation, Nl, Escaping> : "";
 /**
@@ -110,5 +121,5 @@ export type ExtractCSVHeader<CSVSource extends CSVString, Delimiter extends stri
  * // ["name", "a\nge"]
  * ```
  */
-
export type PickCSVHeader<CSVSource extends CSVString, Delimiter extends string = DEFAULT_DELIMITER, Quotation extends string = DEFAULT_QUOTATION> = ExtractString<CSVSource> extends `${infer S}` ?
+
export type PickCSVHeader<CSVSource extends CSVString, Delimiter extends string = DEFAULT_DELIMITER, Quotation extends string = DEFAULT_QUOTATION> = ExtractString<CSVSource> extends `${infer S}` ? SplitCSVFields<ExtractCSVHeader<S, Delimiter, Quotation>, Delimiter, Quotation> : ReadonlyArray<string>;
 export {};
package/dist/web-csv-toolbox.d.ts
CHANGED
@@ -8,6 +8,9 @@ export * from './parse.ts';
 export * from './parseBinary.ts';
 export * from './parseBlob.ts';
 export * from './parseFile.ts';
+export * from './parseFileToArray.ts';
+export * from './parseFileToStream.ts';
+export * from './getOptionsFromFile.ts';
 export * from './parseRequest.ts';
 export * from './parseResponse.ts';
 export * from './parseString.ts';
package/dist/web-csv-toolbox.js
CHANGED
@@ -7,6 +7,9 @@ export { parse } from './parse.js';
 export { parseBinary } from './parseBinary.js';
 export { parseBlob } from './parseBlob.js';
 export { parseFile } from './parseFile.js';
+export { parseFileToArray } from './parseFileToArray.js';
+export { parseFileToStream } from './parseFileToStream.js';
+export { getOptionsFromFile } from './getOptionsFromFile.js';
 export { parseRequest } from './parseRequest.js';
 export { parseResponse } from './parseResponse.js';
 export { parseString } from './parseString.js';
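The three additions above are re-exported from the package root alongside the existing entry points; a quick import smoke test:

```ts
import {
  getOptionsFromFile,
  parseFile,
  parseFileToArray,
  parseFileToStream,
} from 'web-csv-toolbox';

// All four should be plain functions on the public surface.
console.log(
  typeof getOptionsFromFile, // 'function'
  typeof parseFile,          // 'function'
  typeof parseFileToArray,   // 'function'
  typeof parseFileToStream,  // 'function'
);
```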
package/dist/web-csv-toolbox.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"web-csv-toolbox.js","sources":[],"sourcesContent":[],"names":[],"mappings":"
+
{"version":3,"file":"web-csv-toolbox.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "web-csv-toolbox",
-  "version": "0.14.0-next-e45bc4d089f1fb259a7596b9862b3b34e717dab7",
+  "version": "0.14.0-next-978b88933762ecc27270ce746b80a3fa7ed8c4f7",
   "description": "A CSV Toolbox utilizing Web Standard APIs.",
   "type": "module",
   "module": "dist/web-csv-toolbox.js",