convert-csv-to-json 3.13.0 → 3.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/MIGRATION.md +174 -0
- package/README.md +577 -20
- package/index.d.ts +29 -0
- package/index.js +30 -0
- package/jest.config.js +6 -2
- package/package.json +7 -3
- package/src/browserApi.js +105 -0
- package/src/csvToJsonAsync.js +120 -0
- package/src/util/fileUtils.js +29 -0
- package/tsconfig.json +19 -0
package/MIGRATION.md
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
# Migration Guide: Moving from Sync to Async
|
|
2
|
+
|
|
3
|
+
This guide will help you transition from the synchronous API to the new asynchronous API in csvToJson.
|
|
4
|
+
|
|
5
|
+
## Table of Contents
|
|
6
|
+
|
|
7
|
+
- [Basic Migration Patterns](#basic-migration-patterns)
|
|
8
|
+
- [Common Patterns and Best Practices](#common-patterns-and-best-practices)
|
|
9
|
+
- [Advanced Use Cases](#advanced-use-cases)
|
|
10
|
+
- [Migration Tips](#migration-tips)
|
|
11
|
+
|
|
12
|
+
## Basic Migration Patterns
|
|
13
|
+
|
|
14
|
+
1. Direct file reading:
|
|
15
|
+
```js
|
|
16
|
+
// Before (sync)
|
|
17
|
+
const json = csvToJson.getJsonFromCsv('input.csv');
|
|
18
|
+
console.log(json);
|
|
19
|
+
|
|
20
|
+
// After (async) - using Promises
|
|
21
|
+
csvToJson.getJsonFromCsvAsync('input.csv')
|
|
22
|
+
.then(json => console.log(json))
|
|
23
|
+
.catch(err => console.error('Error:', err));
|
|
24
|
+
|
|
25
|
+
// After (async) - using async/await
|
|
26
|
+
async function readCsv() {
|
|
27
|
+
try {
|
|
28
|
+
const json = await csvToJson.getJsonFromCsvAsync('input.csv');
|
|
29
|
+
console.log(json);
|
|
30
|
+
} catch (err) {
|
|
31
|
+
console.error('Error:', err);
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
2. File generation:
|
|
37
|
+
```js
|
|
38
|
+
// Before (sync)
|
|
39
|
+
csvToJson.generateJsonFileFromCsv('input.csv', 'output.json');
|
|
40
|
+
|
|
41
|
+
// After (async) - using Promises
|
|
42
|
+
csvToJson.generateJsonFileFromCsvAsync('input.csv', 'output.json')
|
|
43
|
+
.then(() => console.log('File created'))
|
|
44
|
+
.catch(err => console.error('Error:', err));
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
3. Chained operations:
|
|
48
|
+
```js
|
|
49
|
+
// Before (sync)
|
|
50
|
+
const json = csvToJson
|
|
51
|
+
.fieldDelimiter(',')
|
|
52
|
+
.formatValueByType()
|
|
53
|
+
.getJsonFromCsv('input.csv');
|
|
54
|
+
|
|
55
|
+
// After (async)
|
|
56
|
+
await csvToJson
|
|
57
|
+
.fieldDelimiter(',')
|
|
58
|
+
.formatValueByType()
|
|
59
|
+
.getJsonFromCsvAsync('input.csv');
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
## Common Patterns and Best Practices
|
|
63
|
+
|
|
64
|
+
1. Processing multiple files:
|
|
65
|
+
```js
|
|
66
|
+
// Sequential processing
|
|
67
|
+
async function processFiles(files) {
|
|
68
|
+
const results = [];
|
|
69
|
+
for (const file of files) {
|
|
70
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
71
|
+
results.push(json);
|
|
72
|
+
}
|
|
73
|
+
return results;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
// Parallel processing
|
|
77
|
+
async function processFilesParallel(files) {
|
|
78
|
+
const promises = files.map(file =>
|
|
79
|
+
csvToJson.getJsonFromCsvAsync(file)
|
|
80
|
+
);
|
|
81
|
+
return Promise.all(promises);
|
|
82
|
+
}
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
2. Error handling:
|
|
86
|
+
```js
|
|
87
|
+
// Robust error handling
|
|
88
|
+
async function processWithRetry(file, maxRetries = 3) {
|
|
89
|
+
for (let i = 0; i < maxRetries; i++) {
|
|
90
|
+
try {
|
|
91
|
+
return await csvToJson.getJsonFromCsvAsync(file);
|
|
92
|
+
} catch (err) {
|
|
93
|
+
if (i === maxRetries - 1) throw err;
|
|
94
|
+
await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1)));
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
3. Processing raw CSV data:
|
|
101
|
+
```js
|
|
102
|
+
// Processing CSV from network request
|
|
103
|
+
async function processCsvFromApi() {
|
|
104
|
+
const response = await fetch('https://api.example.com/data.csv');
|
|
105
|
+
const csvText = await response.text();
|
|
106
|
+
return csvToJson.getJsonFromCsvAsync(csvText, { raw: true });
|
|
107
|
+
}
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
## Advanced Use Cases
|
|
111
|
+
|
|
112
|
+
1. Streaming large files with async iteration:
|
|
113
|
+
```js
|
|
114
|
+
const { createReadStream } = require('fs');
|
|
115
|
+
const { createInterface } = require('readline');
|
|
116
|
+
|
|
117
|
+
async function* processLargeCsv(filePath) {
|
|
118
|
+
const fileStream = createReadStream(filePath);
|
|
119
|
+
const lines = createInterface({
|
|
120
|
+
input: fileStream,
|
|
121
|
+
crlfDelay: Infinity
|
|
122
|
+
});
|
|
123
|
+
|
|
124
|
+
const iterator = lines[Symbol.asyncIterator]();
const headers = await iterator.next();
|
|
125
|
+
for await (const line of iterator) {
|
|
126
|
+
const json = await csvToJson
|
|
127
|
+
.getJsonFromCsvAsync(headers.value + '\n' + line, { raw: true });
|
|
128
|
+
yield json[0];
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
// Usage
|
|
133
|
+
for await (const record of processLargeCsv('large.csv')) {
|
|
134
|
+
console.log(record);
|
|
135
|
+
}
|
|
136
|
+
```
|
|
137
|
+
|
|
138
|
+
2. Custom data transformation:
|
|
139
|
+
```js
|
|
140
|
+
async function processWithTransform(file) {
|
|
141
|
+
const json = await csvToJson
|
|
142
|
+
.formatValueByType()
|
|
143
|
+
.getJsonFromCsvAsync(file);
|
|
144
|
+
|
|
145
|
+
return json.map(record => ({
|
|
146
|
+
...record,
|
|
147
|
+
timestamp: new Date().toISOString(),
|
|
148
|
+
processed: true
|
|
149
|
+
}));
|
|
150
|
+
}
|
|
151
|
+
```
|
|
152
|
+
|
|
153
|
+
3. Validation and filtering:
|
|
154
|
+
```js
|
|
155
|
+
async function processWithValidation(file) {
|
|
156
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
157
|
+
|
|
158
|
+
return json.filter(record => {
|
|
159
|
+
// Validate required fields
|
|
160
|
+
if (!record.id || !record.name) return false;
|
|
161
|
+
// Validate data types
|
|
162
|
+
if (typeof record.age !== 'number') return false;
|
|
163
|
+
return true;
|
|
164
|
+
});
|
|
165
|
+
}
|
|
166
|
+
```
|
|
167
|
+
|
|
168
|
+
## Migration Tips
|
|
169
|
+
|
|
170
|
+
1. **Gradual Migration**: You can mix sync and async code during migration
|
|
171
|
+
2. **Error Handling**: Always include proper error handling with async code
|
|
172
|
+
3. **Testing**: Test both success and error cases
|
|
173
|
+
4. **Performance**: Consider using `Promise.all()` for parallel processing
|
|
174
|
+
5. **Memory**: For large files, consider streaming approaches
|
package/README.md
CHANGED
|
@@ -8,10 +8,13 @@
|
|
|
8
8
|
[](https://npmjs.org/package/convert-csv-to-json)
|
|
9
9
|
[](https://npmjs.org/package/convert-csv-to-json)
|
|
10
10
|
|
|
11
|
-
|
|
11
|
+
|
|
12
|
+

|
|
13
|
+

|
|
12
14
|

|
|
15
|
+

|
|
13
16
|
|
|
14
|
-
**This project is not dependent on others packages or libraries.**
|
|
17
|
+
**This project is not dependent on other packages or libraries, and supports both synchronous and Promise-based asynchronous APIs.**
|
|
15
18
|
|
|
16
19
|
This repository uses [](https://github.com/marketplace/actions/npm-semver-publish)
|
|
17
20
|
|
|
@@ -23,11 +26,10 @@ show your :heart: and support.
|
|
|
23
26
|
<!-- toc -->
|
|
24
27
|
|
|
25
28
|
- [Description](#description)
|
|
26
|
-
- [Support for JS
|
|
29
|
+
- [Support for NodeJS, Browser, JS, TS](#support-for-nodejs-browser-js-ts)
|
|
27
30
|
- [Prerequisites](#prerequisites)
|
|
28
31
|
- [Install npm *convert-csv-to-json package*](#install-npm-convert-csv-to-json-package)
|
|
29
|
-
* [
|
|
30
|
-
* [Usage](#usage)
|
|
32
|
+
* [Sync API Usage](#sync-api-usage)
|
|
31
33
|
+ [Generate JSON file](#generate-json-file)
|
|
32
34
|
+ [Generate Array of Object in JSON format](#generate-array-of-object-in-json-format)
|
|
33
35
|
+ [Generate Object with sub array](#generate-object-with-sub-array)
|
|
@@ -38,11 +40,38 @@ show your :heart: and support.
|
|
|
38
40
|
+ [Index header](#index-header)
|
|
39
41
|
+ [Empty rows](#empty-rows)
|
|
40
42
|
+ [Format property value by type](#format-property-value-by-type)
|
|
41
|
-
- [
|
|
43
|
+
- [Numbers](#numbers)
|
|
42
44
|
- [Boolean](#boolean)
|
|
45
|
+
- [Complete Example](#complete-example)
|
|
43
46
|
+ [Encoding](#encoding)
|
|
44
47
|
+ [Working with CSV strings directly](#working-with-csv-strings-directly)
|
|
45
|
-
* [
|
|
48
|
+
* [Sync API (TypeScript)](#sync-api-typescript)
|
|
49
|
+
- [Browser API Usage](#browser-api-usage)
|
|
50
|
+
* [Basic Browser Operations](#basic-browser-operations)
|
|
51
|
+
* [Parsing File/Blob](#parsing-fileblob)
|
|
52
|
+
* [Browser API Notes](#browser-api-notes)
|
|
53
|
+
* [Browser API (TypeScript)](#browser-api-typescript)
|
|
54
|
+
- [Async API Usage](#async-api-usage)
|
|
55
|
+
* [Async API (TypeScript)](#async-api-typescript)
|
|
56
|
+
* [Basic Async Operations](#basic-async-operations)
|
|
57
|
+
* [Working with Raw CSV Data](#working-with-raw-csv-data)
|
|
58
|
+
* [Processing Large Files](#processing-large-files)
|
|
59
|
+
* [Error Handling and Retries](#error-handling-and-retries)
|
|
60
|
+
* [Batch Processing](#batch-processing)
|
|
61
|
+
- [Chaining Pattern](#chaining-pattern)
|
|
62
|
+
* [Synchronous Chaining](#synchronous-chaining)
|
|
63
|
+
* [Asynchronous Chaining](#asynchronous-chaining)
|
|
64
|
+
- [Common Use Cases](#common-use-cases)
|
|
65
|
+
* [1. Processing CSV from HTTP Response](#1-processing-csv-from-http-response)
|
|
66
|
+
* [2. Batch Processing Multiple Files](#2-batch-processing-multiple-files)
|
|
67
|
+
* [3. Data Transformation Pipeline](#3-data-transformation-pipeline)
|
|
68
|
+
* [4. Error Recovery and Logging](#4-error-recovery-and-logging)
|
|
69
|
+
- [Troubleshooting](#troubleshooting)
|
|
70
|
+
* [Memory Issues with Large Files](#memory-issues-with-large-files)
|
|
71
|
+
* [Handling Different CSV Formats](#handling-different-csv-formats)
|
|
72
|
+
* [Common Error Solutions](#common-error-solutions)
|
|
73
|
+
* [Performance Optimization](#performance-optimization)
|
|
74
|
+
* [TypeScript Support](#typescript-support)
|
|
46
75
|
- [Development](#development)
|
|
47
76
|
- [CI CD github action](#ci-cd-github-action)
|
|
48
77
|
- [License](#license)
|
|
@@ -51,7 +80,7 @@ show your :heart: and support.
|
|
|
51
80
|
<!-- tocstop -->
|
|
52
81
|
|
|
53
82
|
## Description
|
|
54
|
-
Converts *csv* files to *JSON* files with Node.js.
|
|
83
|
+
Converts *csv* files to *JSON* files with Node.js. Supports both synchronous operations and Promise-based asynchronous operations, allowing integration with modern async/await patterns.
|
|
55
84
|
|
|
56
85
|
Give an input file like:
|
|
57
86
|
|
|
@@ -92,9 +121,14 @@ will generate:
|
|
|
92
121
|
}
|
|
93
122
|
]
|
|
94
123
|
```
|
|
95
|
-
## Support for JS
|
|
124
|
+
## Support for NodeJS, Browser, JS, TS
|
|
125
|
+
|
|
126
|
+
This package is compatible with:
|
|
96
127
|
|
|
97
|
-
|
|
128
|
+

|
|
129
|
+

|
|
130
|
+

|
|
131
|
+

|
|
98
132
|
|
|
99
133
|
## Prerequisites
|
|
100
134
|
**NPM** (see [Installing Npm](https://docs.npmjs.com/getting-started/installing-node)).
|
|
@@ -102,7 +136,6 @@ This package is compatible with .
|
|
104
138
|
|
|
105
|
-
### Install
|
|
106
139
|
Install package in your *package.json*
|
|
107
140
|
```bash
|
|
108
141
|
$ npm install convert-csv-to-json --save
|
|
@@ -112,7 +145,7 @@ Install package on your machine
|
|
|
112
145
|
$ npm install -g convert-csv-to-json
|
|
113
146
|
```
|
|
114
147
|
|
|
115
|
-
### Usage
|
|
148
|
+
### Sync API Usage
|
|
116
149
|
|
|
117
150
|
#### Generate JSON file
|
|
118
151
|
```js
|
|
@@ -351,21 +384,545 @@ let jsonArray = csvToJson
|
|
|
351
384
|
.csvStringToJson(csvString);
|
|
352
385
|
```
|
|
353
386
|
|
|
354
|
-
###
|
|
387
|
+
### Sync API (TypeScript)
|
|
388
|
+
|
|
389
|
+
TypeScript typings are available via the included `index.d.ts`. You can import the default converter or use named imports. Below are common patterns when using the synchronous API from TypeScript.
|
|
390
|
+
|
|
391
|
+
```ts
|
|
392
|
+
// Default import (recommended when using ES modules)
|
|
393
|
+
import converter from 'convert-csv-to-json';
|
|
394
|
+
// Access the default converter
|
|
395
|
+
const csvToJson = converter;
|
|
396
|
+
|
|
397
|
+
// Define a type for your CSV records
|
|
398
|
+
interface Person {
|
|
399
|
+
name: string;
|
|
400
|
+
age: number;
|
|
401
|
+
}
|
|
402
|
+
|
|
403
|
+
// Parse CSV string synchronously and assert the returned type
|
|
404
|
+
const csv = 'name,age\nAlice,30';
|
|
405
|
+
const parsed = csvToJson.csvStringToJson(csv) as Person[];
|
|
406
|
+
|
|
407
|
+
// Chain configuration and call sync methods
|
|
408
|
+
const result = csvToJson
|
|
409
|
+
.fieldDelimiter(',')
|
|
410
|
+
.formatValueByType()
|
|
411
|
+
.csvStringToJson('name,age\nBob,25') as Person[];
|
|
412
|
+
```
|
|
413
|
+
|
|
414
|
+
## Browser API Usage
|
|
355
415
|
|
|
356
|
-
The
|
|
416
|
+
The package exposes a `browser` helper that reuses the library's parsing logic but provides browser-friendly helpers for parsing CSV strings and `File`/`Blob` objects. The API mirrors the synchronous and asynchronous Node APIs and supports method chaining for configuration.
|
|
417
|
+
|
|
418
|
+
### Basic Browser Operations
|
|
357
419
|
|
|
358
420
|
```js
|
|
359
|
-
|
|
421
|
+
const convert = require('convert-csv-to-json');
|
|
422
|
+
|
|
423
|
+
// Parse CSV string synchronously
|
|
424
|
+
const arr = convert.browser
|
|
425
|
+
.supportQuotedField(true)
|
|
426
|
+
.fieldDelimiter(',')
|
|
427
|
+
.csvStringToJson('name,age\nAlice,30');
|
|
428
|
+
|
|
429
|
+
// Parse CSV string asynchronously (returns Promise)
|
|
430
|
+
const arrAsync = await convert.browser.csvStringToJsonAsync('name;age\nBob;25');
|
|
431
|
+
|
|
432
|
+
// Get stringified JSON synchronously
|
|
433
|
+
const jsonString = convert.browser.csvStringToJsonStringified('name;age\nEve;40');
|
|
434
|
+
```
|
|
360
435
|
|
|
361
|
-
|
|
362
|
-
.formatValueByType()
|
|
363
|
-
.parseSubArray("*",',')
|
|
364
|
-
.supportQuotedField(true)
|
|
365
|
-
.getJsonFromCsv('myInputFile.csv');
|
|
436
|
+
### Parsing File/Blob
|
|
366
437
|
|
|
438
|
+
`parseFile(file, options)` reads a `File` or `Blob` and returns a Promise that resolves with the parsed array of objects.
|
|
439
|
+
|
|
440
|
+
```js
|
|
441
|
+
// In a browser environment with an <input type="file">
|
|
442
|
+
const file = document.querySelector('input[type=file]').files[0];
|
|
443
|
+
convert.browser
|
|
444
|
+
.fieldDelimiter(',')
|
|
445
|
+
.formatValueByType()
|
|
446
|
+
.parseFile(file)
|
|
447
|
+
.then(json => console.log(json))
|
|
448
|
+
.catch(err => console.error(err));
|
|
449
|
+
```
|
|
450
|
+
|
|
451
|
+
`parseFile` accepts an optional `options` object with `encoding` (passed to `FileReader.readAsText`). If `FileReader` is not available, `parseFile` will reject.
|
|
452
|
+
|
|
453
|
+
### Browser API Notes
|
|
454
|
+
|
|
455
|
+
- The `browser` API proxies the same configuration methods as the Node API and follows the same behavior for quoted fields, sub-array parsing, trimming, and value formatting.
|
|
456
|
+
- `parseFile` depends on the browser `FileReader` API; calling it in Node.js will reject with an informative error.
|
|
457
|
+
|
|
458
|
+
### Browser API (TypeScript)
|
|
459
|
+
|
|
460
|
+
TypeScript typings are provided via the included `index.d.ts`. You can import the default converter and access the `browser` helper, or import `browser` directly. Below are common usage patterns.
|
|
461
|
+
|
|
462
|
+
```ts
|
|
463
|
+
// Named import (recommended for direct use)
|
|
464
|
+
import { browser } from 'convert-csv-to-json';
|
|
465
|
+
|
|
466
|
+
// Or default import and access the browser helper
|
|
467
|
+
import converter from 'convert-csv-to-json';
|
|
468
|
+
const browserApi = converter.browser;
|
|
469
|
+
|
|
470
|
+
// Define a type for your CSV records
|
|
471
|
+
interface Person {
|
|
472
|
+
name: string;
|
|
473
|
+
age: number;
|
|
474
|
+
}
|
|
475
|
+
|
|
476
|
+
// Synchronous parse (assert the returned type)
|
|
477
|
+
const csv = 'name,age\nAlice,30';
|
|
478
|
+
const parsed = browser.csvStringToJson(csv) as Person[];
|
|
479
|
+
|
|
480
|
+
// Async parse
|
|
481
|
+
const parsedAsync = await browser.csvStringToJsonAsync(csv) as Person[];
|
|
482
|
+
|
|
483
|
+
// Parse a File in the browser
|
|
484
|
+
const inputEl = document.querySelector('input[type=file]') as HTMLInputElement;
|
|
485
|
+
const file = inputEl.files![0];
|
|
486
|
+
const data = await browser.parseFile(file) as Person[];
|
|
367
487
|
```
|
|
368
488
|
|
|
489
|
+
The `BrowserApi` interface in `index.d.ts` exposes typed method signatures for IDE autocompletion and compile-time checks.
|
|
490
|
+
|
|
491
|
+
## Async API Usage
|
|
492
|
+
|
|
493
|
+
This library provides a Promise-based async API that's perfect for modern Node.js applications. For a detailed migration guide from sync to async API, see [MIGRATION.md](MIGRATION.md).
|
|
494
|
+
|
|
495
|
+
### Async API (TypeScript)
|
|
496
|
+
|
|
497
|
+
The async API also has TypeScript typings. Typical usage in TypeScript looks like this:
|
|
498
|
+
|
|
499
|
+
```ts
|
|
500
|
+
import csvToJson from 'convert-csv-to-json';
|
|
501
|
+
|
|
502
|
+
interface Person {
|
|
503
|
+
name: string;
|
|
504
|
+
age: number;
|
|
505
|
+
}
|
|
506
|
+
|
|
507
|
+
// Using async/await
|
|
508
|
+
async function load(): Promise<Person[]> {
|
|
509
|
+
const csv = 'name,age\nAlice,30';
|
|
510
|
+
const parsed = await csvToJson.getJsonFromCsvAsync(csv, { raw: true }) as Person[];
|
|
511
|
+
return parsed;
|
|
512
|
+
}
|
|
513
|
+
|
|
514
|
+
// Using the async helper that parses CSV strings
|
|
515
|
+
const parsedDirect = await csvToJson.csvStringToJsonAsync('name;age\nBob;25') as Person[];
|
|
516
|
+
```
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
### Basic Async Operations
|
|
520
|
+
|
|
521
|
+
1. Convert CSV file to JSON:
|
|
522
|
+
```js
|
|
523
|
+
const csvToJson = require('convert-csv-to-json');
|
|
524
|
+
|
|
525
|
+
// Using Promises
|
|
526
|
+
csvToJson.getJsonFromCsvAsync('input.csv')
|
|
527
|
+
.then(json => console.log(json))
|
|
528
|
+
.catch(err => console.error('Error:', err));
|
|
529
|
+
|
|
530
|
+
// Using async/await
|
|
531
|
+
async function convertCsv() {
|
|
532
|
+
try {
|
|
533
|
+
const json = await csvToJson.getJsonFromCsvAsync('input.csv');
|
|
534
|
+
console.log(json);
|
|
535
|
+
} catch (err) {
|
|
536
|
+
console.error('Error:', err);
|
|
537
|
+
}
|
|
538
|
+
}
|
|
539
|
+
```
|
|
540
|
+
|
|
541
|
+
2. Generate JSON file from CSV:
|
|
542
|
+
```js
|
|
543
|
+
// Using async/await with chain configuration
|
|
544
|
+
async function convertAndSave() {
|
|
545
|
+
await csvToJson
|
|
546
|
+
.fieldDelimiter(',')
|
|
547
|
+
.formatValueByType()
|
|
548
|
+
.generateJsonFileFromCsvAsync('input.csv', 'output.json');
|
|
549
|
+
}
|
|
550
|
+
```
|
|
551
|
+
|
|
552
|
+
### Working with Raw CSV Data
|
|
553
|
+
|
|
554
|
+
Process CSV data from memory or network sources:
|
|
555
|
+
|
|
556
|
+
```js
|
|
557
|
+
// Example: Processing CSV from an API
|
|
558
|
+
async function processCsvFromApi() {
|
|
559
|
+
const response = await fetch('https://api.example.com/data.csv');
|
|
560
|
+
const csvText = await response.text();
|
|
561
|
+
|
|
562
|
+
const json = await csvToJson
|
|
563
|
+
.formatValueByType()
|
|
564
|
+
.getJsonFromCsvAsync(csvText, { raw: true });
|
|
565
|
+
|
|
566
|
+
return json;
|
|
567
|
+
}
|
|
568
|
+
```
|
|
569
|
+
|
|
570
|
+
### Processing Large Files
|
|
571
|
+
|
|
572
|
+
For large files, use streaming to manage memory efficiently:
|
|
573
|
+
|
|
574
|
+
```js
|
|
575
|
+
const { createReadStream } = require('fs');
|
|
576
|
+
const { createInterface } = require('readline');
|
|
577
|
+
|
|
578
|
+
async function* processLargeFile(filePath) {
|
|
579
|
+
const fileStream = createReadStream(filePath);
|
|
580
|
+
const rl = createInterface({
|
|
581
|
+
input: fileStream,
|
|
582
|
+
crlfDelay: Infinity
|
|
583
|
+
});
|
|
584
|
+
|
|
585
|
+
for await (const line of rl) {
|
|
586
|
+
yield await csvToJson.getJsonFromCsvAsync(line, { raw: true });
|
|
587
|
+
}
|
|
588
|
+
}
|
|
589
|
+
|
|
590
|
+
// Usage
|
|
591
|
+
async function processData() {
|
|
592
|
+
for await (const record of processLargeFile('large.csv')) {
|
|
593
|
+
await saveToDatabase(record);
|
|
594
|
+
}
|
|
595
|
+
}
|
|
596
|
+
```
|
|
597
|
+
|
|
598
|
+
### Error Handling and Retries
|
|
599
|
+
|
|
600
|
+
Implement robust error handling with retries:
|
|
601
|
+
|
|
602
|
+
```js
|
|
603
|
+
async function processWithRetry(filePath, maxRetries = 3) {
|
|
604
|
+
for (let i = 0; i < maxRetries; i++) {
|
|
605
|
+
try {
|
|
606
|
+
const json = await csvToJson
|
|
607
|
+
.formatValueByType()
|
|
608
|
+
.getJsonFromCsvAsync(filePath);
|
|
609
|
+
|
|
610
|
+
return json;
|
|
611
|
+
} catch (err) {
|
|
612
|
+
if (i === maxRetries - 1) throw err;
|
|
613
|
+
// Exponential backoff
|
|
614
|
+
await new Promise(resolve =>
|
|
615
|
+
setTimeout(resolve, Math.pow(2, i) * 1000)
|
|
616
|
+
);
|
|
617
|
+
}
|
|
618
|
+
}
|
|
619
|
+
}
|
|
620
|
+
```
|
|
621
|
+
|
|
622
|
+
### Batch Processing
|
|
623
|
+
|
|
624
|
+
Process multiple files efficiently:
|
|
625
|
+
|
|
626
|
+
```js
|
|
627
|
+
async function batchProcess(files, batchSize = 3) {
|
|
628
|
+
const results = new Map();
|
|
629
|
+
|
|
630
|
+
for (let i = 0; i < files.length; i += batchSize) {
|
|
631
|
+
const batch = files.slice(i, i + batchSize);
|
|
632
|
+
const processed = await Promise.all(
|
|
633
|
+
batch.map(async file => {
|
|
634
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
635
|
+
return [file, json];
|
|
636
|
+
})
|
|
637
|
+
);
|
|
638
|
+
|
|
639
|
+
processed.forEach(([file, json]) => results.set(file, json));
|
|
640
|
+
}
|
|
641
|
+
|
|
642
|
+
return results;
|
|
643
|
+
}
|
|
644
|
+
|
|
645
|
+
// Usage
|
|
646
|
+
const files = ['data1.csv', 'data2.csv', 'data3.csv', 'data4.csv'];
|
|
647
|
+
const results = await batchProcess(files, 2);
|
|
648
|
+
```
|
|
649
|
+
|
|
650
|
+
## Chaining Pattern
|
|
651
|
+
|
|
652
|
+
The exposed API is implemented with the [Method Chaining Pattern](https://en.wikipedia.org/wiki/Method_chaining), which means that multiple methods can be chained. This pattern works with both synchronous and asynchronous methods:
|
|
653
|
+
|
|
654
|
+
### Synchronous Chaining
|
|
655
|
+
|
|
656
|
+
```js
|
|
657
|
+
const csvToJson = require('convert-csv-to-json');
|
|
658
|
+
|
|
659
|
+
// Chain configuration methods with sync operation
|
|
660
|
+
const json = csvToJson
|
|
661
|
+
.fieldDelimiter(',')
|
|
662
|
+
.formatValueByType()
|
|
663
|
+
.parseSubArray("*", ',')
|
|
664
|
+
.supportQuotedField(true)
|
|
665
|
+
.getJsonFromCsv('myInputFile.csv');
|
|
666
|
+
|
|
667
|
+
// Chain with file generation
|
|
668
|
+
csvToJson
|
|
669
|
+
.fieldDelimiter(';')
|
|
670
|
+
.utf8Encoding()
|
|
671
|
+
.formatValueByType()
|
|
672
|
+
.generateJsonFileFromCsv('input.csv', 'output.json');
|
|
673
|
+
|
|
674
|
+
// Chain with string parsing
|
|
675
|
+
const jsonArray = csvToJson
|
|
676
|
+
.fieldDelimiter(',')
|
|
677
|
+
.trimHeaderFieldWhiteSpace(true)
|
|
678
|
+
.csvStringToJson('name,age\nJohn,30\nJane,25');
|
|
679
|
+
```
|
|
680
|
+
|
|
681
|
+
### Asynchronous Chaining
|
|
682
|
+
|
|
683
|
+
```js
|
|
684
|
+
const csvToJson = require('convert-csv-to-json');
|
|
685
|
+
|
|
686
|
+
// Using async/await
|
|
687
|
+
async function processCSV() {
|
|
688
|
+
// Chain configuration methods with async operation
|
|
689
|
+
const json = await csvToJson
|
|
690
|
+
.fieldDelimiter(',')
|
|
691
|
+
.formatValueByType()
|
|
692
|
+
.parseSubArray("*", ',')
|
|
693
|
+
.supportQuotedField(true)
|
|
694
|
+
.getJsonFromCsvAsync('myInputFile.csv');
|
|
695
|
+
|
|
696
|
+
// Chain with async file generation
|
|
697
|
+
await csvToJson
|
|
698
|
+
.fieldDelimiter(';')
|
|
699
|
+
.utf8Encoding()
|
|
700
|
+
.formatValueByType()
|
|
701
|
+
.generateJsonFileFromCsvAsync('input.csv', 'output.json');
|
|
702
|
+
}
|
|
703
|
+
|
|
704
|
+
// Using Promises
|
|
705
|
+
csvToJson
|
|
706
|
+
.fieldDelimiter(',')
|
|
707
|
+
.formatValueByType()
|
|
708
|
+
.getJsonFromCsvAsync('input.csv')
|
|
709
|
+
.then(json => console.log(json))
|
|
710
|
+
.catch(err => console.error('Error:', err));
|
|
711
|
+
```
|
|
712
|
+
|
|
713
|
+
All configuration methods can be chained in any order before calling the final operation method (like `getJsonFromCsv`, `getJsonFromCsvAsync`, etc.). The configuration will be applied in the order it is chained.
|
|
714
|
+
|
|
715
|
+
## Common Use Cases
|
|
716
|
+
|
|
717
|
+
Here are some common use cases and how to implement them:
|
|
718
|
+
|
|
719
|
+
### 1. Processing CSV from HTTP Response
|
|
720
|
+
```js
|
|
721
|
+
const https = require('https');
|
|
722
|
+
|
|
723
|
+
async function processRemoteCsv(url) {
|
|
724
|
+
const csvData = await new Promise((resolve, reject) => {
|
|
725
|
+
https.get(url, (res) => {
|
|
726
|
+
let data = '';
|
|
727
|
+
res.on('data', chunk => data += chunk);
|
|
728
|
+
res.on('end', () => resolve(data));
|
|
729
|
+
res.on('error', reject);
|
|
730
|
+
});
|
|
731
|
+
});
|
|
732
|
+
|
|
733
|
+
return csvToJson.getJsonFromCsvAsync(csvData, { raw: true });
|
|
734
|
+
}
|
|
735
|
+
```
|
|
736
|
+
|
|
737
|
+
### 2. Batch Processing Multiple Files
|
|
738
|
+
```js
|
|
739
|
+
async function batchProcess(files) {
|
|
740
|
+
const results = new Map();
|
|
741
|
+
|
|
742
|
+
// Process in chunks of 3 files at a time
|
|
743
|
+
for (let i = 0; i < files.length; i += 3) {
|
|
744
|
+
const chunk = files.slice(i, i + 3);
|
|
745
|
+
const processed = await Promise.all(
|
|
746
|
+
chunk.map(async file => {
|
|
747
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
748
|
+
return [file, json];
|
|
749
|
+
})
|
|
750
|
+
);
|
|
751
|
+
|
|
752
|
+
processed.forEach(([file, json]) => results.set(file, json));
|
|
753
|
+
}
|
|
754
|
+
|
|
755
|
+
return results;
|
|
756
|
+
}
|
|
757
|
+
```
|
|
758
|
+
|
|
759
|
+
### 3. Data Transformation Pipeline
|
|
760
|
+
```js
|
|
761
|
+
async function transformData(csvFile) {
|
|
762
|
+
// Step 1: Parse CSV
|
|
763
|
+
const json = await csvToJson
|
|
764
|
+
.formatValueByType()
|
|
765
|
+
.getJsonFromCsvAsync(csvFile);
|
|
766
|
+
|
|
767
|
+
// Step 2: Transform data
|
|
768
|
+
const transformed = json.map(record => ({
|
|
769
|
+
id: record.id,
|
|
770
|
+
fullName: `${record.firstName} ${record.lastName}`,
|
|
771
|
+
age: Number(record.age),
|
|
772
|
+
isAdult: Number(record.age) >= 18,
|
|
773
|
+
email: record.email.toLowerCase()
|
|
774
|
+
}));
|
|
775
|
+
|
|
776
|
+
// Step 3: Filter invalid records
|
|
777
|
+
return transformed.filter(record =>
|
|
778
|
+
record.id &&
|
|
779
|
+
record.fullName.length > 0 &&
|
|
780
|
+
!isNaN(record.age)
|
|
781
|
+
);
|
|
782
|
+
}
|
|
783
|
+
```
|
|
784
|
+
|
|
785
|
+
### 4. Error Recovery and Logging
|
|
786
|
+
```js
|
|
787
|
+
async function processWithLogging(file) {
|
|
788
|
+
const logger = {
|
|
789
|
+
info: (msg) => console.log(`[INFO] ${msg}`),
|
|
790
|
+
error: (msg, err) => console.error(`[ERROR] ${msg}`, err)
|
|
791
|
+
};
|
|
792
|
+
|
|
793
|
+
try {
|
|
794
|
+
logger.info(`Starting processing ${file}`);
|
|
795
|
+
const startTime = Date.now();
|
|
796
|
+
|
|
797
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
798
|
+
|
|
799
|
+
const duration = Date.now() - startTime;
|
|
800
|
+
logger.info(`Processed ${file} in ${duration}ms`);
|
|
801
|
+
|
|
802
|
+
return json;
|
|
803
|
+
} catch (err) {
|
|
804
|
+
logger.error(`Failed to process ${file}`, err);
|
|
805
|
+
throw err;
|
|
806
|
+
}
|
|
807
|
+
}
|
|
808
|
+
```
|
|
809
|
+
|
|
810
|
+
## Troubleshooting
|
|
811
|
+
|
|
812
|
+
Here are solutions to common issues you might encounter:
|
|
813
|
+
|
|
814
|
+
### Memory Issues with Large Files
|
|
815
|
+
|
|
816
|
+
If you're processing large CSV files and encountering memory issues:
|
|
817
|
+
|
|
818
|
+
```js
|
|
819
|
+
// Instead of loading the entire file
|
|
820
|
+
const json = await csvToJson.getJsonFromCsvAsync('large.csv'); // ❌
|
|
821
|
+
|
|
822
|
+
// Use streaming with async iteration
|
|
823
|
+
for await (const record of processLargeCsv('large.csv')) { // ✅
|
|
824
|
+
// Process one record at a time
|
|
825
|
+
await processRecord(record);
|
|
826
|
+
}
|
|
827
|
+
```
|
|
828
|
+
|
|
829
|
+
### Handling Different CSV Formats
|
|
830
|
+
|
|
831
|
+
1. **Mixed Quote Types**:
|
|
832
|
+
```js
|
|
833
|
+
csvToJson
|
|
834
|
+
.supportQuotedField(true) // Enable quoted field support
|
|
835
|
+
.getJsonFromCsvAsync(file);
|
|
836
|
+
```
|
|
837
|
+
|
|
838
|
+
2. **Custom Delimiters**:
|
|
839
|
+
```js
|
|
840
|
+
csvToJson
|
|
841
|
+
.fieldDelimiter(';') // Change delimiter
|
|
842
|
+
.getJsonFromCsvAsync(file);
|
|
843
|
+
```
|
|
844
|
+
|
|
845
|
+
3. **UTF-8 with BOM**:
|
|
846
|
+
```js
|
|
847
|
+
csvToJson
|
|
848
|
+
.encoding('utf8') // Specify encoding
|
|
849
|
+
.getJsonFromCsvAsync(file);
|
|
850
|
+
```
|
|
851
|
+
|
|
852
|
+
### Common Error Solutions
|
|
853
|
+
|
|
854
|
+
1. **ENOENT: no such file or directory**
|
|
855
|
+
- Check if the file path is correct and absolute
|
|
856
|
+
- Verify file permissions
|
|
857
|
+
- Ensure the file exists in the specified location
|
|
858
|
+
|
|
859
|
+
2. **Invalid CSV Structure**
|
|
860
|
+
- Verify CSV format matches expected structure
|
|
861
|
+
- Check for missing or extra delimiters
|
|
862
|
+
- Validate header row exists if expected
|
|
863
|
+
|
|
864
|
+
3. **Memory Leaks**
|
|
865
|
+
- Use streaming for large files
|
|
866
|
+
- Process files in smaller chunks
|
|
867
|
+
- Implement proper cleanup in try/finally blocks
|
|
868
|
+
|
|
869
|
+
4. **Encoding Issues**
|
|
870
|
+
- Specify correct encoding using .encoding()
|
|
871
|
+
- Check for BOM markers
|
|
872
|
+
- Verify source file encoding
|
|
873
|
+
|
|
874
|
+
### Performance Optimization
|
|
875
|
+
|
|
876
|
+
1. **Parallel Processing**:
|
|
877
|
+
```js
|
|
878
|
+
// Instead of sequential processing
|
|
879
|
+
for (const file of files) {
|
|
880
|
+
await process(file); // ❌
|
|
881
|
+
}
|
|
882
|
+
|
|
883
|
+
// Use parallel processing with limits
|
|
884
|
+
async function processWithLimit(files, limit = 3) {
|
|
885
|
+
const results = [];
|
|
886
|
+
for (let i = 0; i < files.length; i += limit) {
|
|
887
|
+
const chunk = files.slice(i, i + limit);
|
|
888
|
+
const chunkResults = await Promise.all(
|
|
889
|
+
chunk.map(file => csvToJson.getJsonFromCsvAsync(file))
|
|
890
|
+
);
|
|
891
|
+
results.push(...chunkResults);
|
|
892
|
+
}
|
|
893
|
+
return results;
|
|
894
|
+
} // ✅
|
|
895
|
+
```
|
|
896
|
+
|
|
897
|
+
2. **Memory Usage**:
|
|
898
|
+
```js
|
|
899
|
+
// Clear references when done
|
|
900
|
+
async function processWithCleanup(file) {
|
|
901
|
+
let json;
|
|
902
|
+
try {
|
|
903
|
+
json = await csvToJson.getJsonFromCsvAsync(file);
|
|
904
|
+
return await processData(json);
|
|
905
|
+
} finally {
|
|
906
|
+
json = null; // Clear reference
|
|
907
|
+
}
|
|
908
|
+
}
|
|
909
|
+
```
|
|
910
|
+
|
|
911
|
+
### TypeScript Support
|
|
912
|
+
|
|
913
|
+
If you're using TypeScript and encounter type issues:
|
|
914
|
+
|
|
915
|
+
```typescript
|
|
916
|
+
// Define custom types for your CSV structure
|
|
917
|
+
interface MyCsvRecord {
|
|
918
|
+
id: number;
|
|
919
|
+
name: string;
|
|
920
|
+
age?: number;
|
|
921
|
+
}
|
|
922
|
+
|
|
923
|
+
// Use type assertion
|
|
924
|
+
const json = await csvToJson.getJsonFromCsvAsync<MyCsvRecord>('data.csv');
|
|
925
|
+
```
|
|
369
926
|
|
|
370
927
|
## Development
|
|
371
928
|
* Download all csvToJson dependencies:
|
package/index.d.ts
CHANGED
|
@@ -91,6 +91,11 @@ declare module 'convert-csv-to-json' {
|
|
|
91
91
|
*/
|
|
92
92
|
getJsonFromCsv(inputFileName: string): any[];
|
|
93
93
|
|
|
94
|
+
/**
|
|
95
|
+
* Async version of getJsonFromCsv. When options.raw is true the input is treated as a CSV string
|
|
96
|
+
*/
|
|
97
|
+
getJsonFromCsvAsync(inputFileNameOrCsv: string, options?: { raw?: boolean }): Promise<any[]>;
|
|
98
|
+
|
|
94
99
|
csvStringToJson(csvString: string): any[];
|
|
95
100
|
|
|
96
101
|
/**
|
|
@@ -111,4 +116,28 @@ declare module 'convert-csv-to-json' {
|
|
|
111
116
|
}
|
|
112
117
|
const converter: ConvertCsvToJson;
|
|
113
118
|
export default converter;
|
|
119
|
+
|
|
120
|
+
/**
|
|
121
|
+
* Browser API exposes parsing helpers for browser environments
|
|
122
|
+
*/
|
|
123
|
+
export interface BrowserApi {
|
|
124
|
+
formatValueByType(active: boolean): this;
|
|
125
|
+
trimHeaderFieldWhiteSpace(active: boolean): this;
|
|
126
|
+
supportQuotedField(active: boolean): this;
|
|
127
|
+
fieldDelimiter(delimiter: string): this;
|
|
128
|
+
indexHeader(index: number): this;
|
|
129
|
+
parseSubArray(delimiter: string, separator: string): this;
|
|
130
|
+
|
|
131
|
+
csvStringToJson(csvString: string): any[];
|
|
132
|
+
csvStringToJsonStringified(csvString: string): string;
|
|
133
|
+
csvStringToJsonAsync(csvString: string): Promise<any[]>;
|
|
134
|
+
csvStringToJsonStringifiedAsync(csvString: string): Promise<string>;
|
|
135
|
+
|
|
136
|
+
/**
|
|
137
|
+
* Parse a File or Blob and return a Promise that resolves to the JSON array
|
|
138
|
+
*/
|
|
139
|
+
parseFile(file: Blob | File, options?: { encoding?: string }): Promise<any[]>;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
export const browser: BrowserApi;
|
|
114
143
|
}
|
package/index.js
CHANGED
|
@@ -152,6 +152,30 @@ exports.getJsonFromCsv = function(inputFileName) {
|
|
|
152
152
|
return csvToJson.getJsonFromCsv(inputFileName);
|
|
153
153
|
};
|
|
154
154
|
|
|
155
|
+
/**
|
|
156
|
+
* Async version of getJsonFromCsv.
|
|
157
|
+
* @param {string} inputFileNameOrCsv path to file or CSV string
|
|
158
|
+
* @param {object} options { raw: boolean } when raw=true the first param is treated as CSV content
|
|
159
|
+
* @returns {Promise<Array>} resolves with the array of objects
|
|
160
|
+
*/
|
|
161
|
+
const csvToJsonAsync = require('./src/csvToJsonAsync');
|
|
162
|
+
|
|
163
|
+
// Re-export all async API methods
|
|
164
|
+
Object.assign(exports, {
|
|
165
|
+
getJsonFromCsvAsync: function(input, options) {
|
|
166
|
+
return csvToJsonAsync.getJsonFromCsvAsync(input, options);
|
|
167
|
+
},
|
|
168
|
+
csvStringToJsonAsync: function(input, options) {
|
|
169
|
+
return csvToJsonAsync.csvStringToJsonAsync(input, options);
|
|
170
|
+
},
|
|
171
|
+
csvStringToJsonStringifiedAsync: function(input) {
|
|
172
|
+
return csvToJsonAsync.csvStringToJsonStringifiedAsync(input);
|
|
173
|
+
},
|
|
174
|
+
generateJsonFileFromCsvAsync: function(input, output) {
|
|
175
|
+
return csvToJsonAsync.generateJsonFileFromCsv(input, output);
|
|
176
|
+
}
|
|
177
|
+
});
|
|
178
|
+
|
|
155
179
|
exports.csvStringToJson = function(csvString) {
|
|
156
180
|
return csvToJson.csvStringToJson(csvString);
|
|
157
181
|
};
|
|
@@ -178,3 +202,9 @@ exports.csvStringToJsonStringified = function(csvString) {
|
|
|
178
202
|
exports.jsonToCsv = function(inputFileName, outputFileName) {
|
|
179
203
|
csvToJson.generateJsonFileFromCsv(inputFileName, outputFileName);
|
|
180
204
|
};
|
|
205
|
+
|
|
206
|
+
/**
|
|
207
|
+
* Browser API
|
|
208
|
+
* Provides parsing helpers suitable for browser environments (parsing strings and File/Blob objects)
|
|
209
|
+
*/
|
|
210
|
+
exports.browser = require('./src/browserApi');
|
package/jest.config.js
CHANGED
|
@@ -1,9 +1,13 @@
|
|
|
1
1
|
/** @type {import('jest').Config} */
|
|
2
2
|
const config = {
|
|
3
|
-
|
|
3
|
+
preset: 'ts-jest',
|
|
4
|
+
testEnvironment: 'node',
|
|
5
|
+
transform: {
|
|
6
|
+
'^.+\\.(ts|tsx)$': 'ts-jest'
|
|
7
|
+
},
|
|
8
|
+
testMatch: ['**/?(*.)+(spec|test).[tj]s?(x)'],
|
|
4
9
|
coverageReporters: ['clover', 'html' ,'json', 'lcov', ['text', {skipFull: true}]],
|
|
5
10
|
collectCoverage: true
|
|
6
|
-
|
|
7
11
|
};
|
|
8
12
|
|
|
9
13
|
module.exports = config;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "convert-csv-to-json",
|
|
3
|
-
"version": "3.
|
|
3
|
+
"version": "3.15.0",
|
|
4
4
|
"description": "Convert CSV to JSON",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"types": "index.d.ts",
|
|
@@ -35,7 +35,8 @@
|
|
|
35
35
|
"js",
|
|
36
36
|
"javascript",
|
|
37
37
|
"ts",
|
|
38
|
-
"typescript"
|
|
38
|
+
"typescript",
|
|
39
|
+
"browser"
|
|
39
40
|
],
|
|
40
41
|
"author": "iuccio",
|
|
41
42
|
"license": "MIT",
|
|
@@ -44,6 +45,9 @@
|
|
|
44
45
|
},
|
|
45
46
|
"homepage": "https://github.com/iuccio/CSVtoJSON#readme",
|
|
46
47
|
"devDependencies": {
|
|
47
|
-
"jest": "^29.7.0"
|
|
48
|
+
"jest": "^29.7.0",
|
|
49
|
+
"ts-jest": "^29.1.0",
|
|
50
|
+
"typescript": "^5.1.6",
|
|
51
|
+
"@types/jest": "^29.5.3"
|
|
48
52
|
}
|
|
49
53
|
}
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const csvToJson = require('./csvToJson');
|
|
4
|
+
|
|
5
|
+
class BrowserApi {
|
|
6
|
+
constructor() {
|
|
7
|
+
// reuse the existing csvToJson instance for parsing and configuration
|
|
8
|
+
this.csvToJson = csvToJson;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
// Configuration proxies (chainable)
|
|
12
|
+
formatValueByType(active = true) {
|
|
13
|
+
this.csvToJson.formatValueByType(active);
|
|
14
|
+
return this;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
supportQuotedField(active = false) {
|
|
18
|
+
this.csvToJson.supportQuotedField(active);
|
|
19
|
+
return this;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
fieldDelimiter(delimiter) {
|
|
23
|
+
this.csvToJson.fieldDelimiter(delimiter);
|
|
24
|
+
return this;
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
trimHeaderFieldWhiteSpace(active = false) {
|
|
28
|
+
this.csvToJson.trimHeaderFieldWhiteSpace(active);
|
|
29
|
+
return this;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
indexHeader(index) {
|
|
33
|
+
this.csvToJson.indexHeader(index);
|
|
34
|
+
return this;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
parseSubArray(delimiter = '*', separator = ',') {
|
|
38
|
+
this.csvToJson.parseSubArray(delimiter, separator);
|
|
39
|
+
return this;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
// Synchronous parse from CSV string (browser friendly)
|
|
43
|
+
csvStringToJson(csvString) {
|
|
44
|
+
if (csvString === undefined || csvString === null) {
|
|
45
|
+
throw new Error('csvString is not defined!!!');
|
|
46
|
+
}
|
|
47
|
+
return this.csvToJson.csvToJson(csvString);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
csvStringToJsonStringified(csvString) {
|
|
51
|
+
if (csvString === undefined || csvString === null) {
|
|
52
|
+
throw new Error('csvString is not defined!!!');
|
|
53
|
+
}
|
|
54
|
+
return this.csvToJson.csvStringToJsonStringified(csvString);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// Async parse from CSV string (returns a Promise)
|
|
58
|
+
csvStringToJsonAsync(csvString) {
|
|
59
|
+
return Promise.resolve(this.csvStringToJson(csvString));
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
csvStringToJsonStringifiedAsync(csvString) {
|
|
63
|
+
return Promise.resolve(this.csvStringToJsonStringified(csvString));
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
/**
|
|
67
|
+
* Parse a browser File or Blob object to JSON array.
|
|
68
|
+
* @param {File|Blob} file - File or Blob to read as text
|
|
69
|
+
* @param {object} options - options: { encoding?: string }
|
|
70
|
+
* @returns {Promise<any[]>}
|
|
71
|
+
*/
|
|
72
|
+
parseFile(file, options = {}) {
|
|
73
|
+
if (!file) {
|
|
74
|
+
return Promise.reject(new Error('file is not defined!!!'));
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
return new Promise((resolve, reject) => {
|
|
78
|
+
if (typeof FileReader === 'undefined') {
|
|
79
|
+
reject(new Error('FileReader is not available in this environment'));
|
|
80
|
+
return;
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
const reader = new FileReader();
|
|
84
|
+
reader.onerror = () => reject(reader.error || new Error('Failed to read file'));
|
|
85
|
+
reader.onload = () => {
|
|
86
|
+
try {
|
|
87
|
+
const text = reader.result;
|
|
88
|
+
const result = this.csvToJson.csvToJson(String(text));
|
|
89
|
+
resolve(result);
|
|
90
|
+
} catch (err) {
|
|
91
|
+
reject(err);
|
|
92
|
+
}
|
|
93
|
+
};
|
|
94
|
+
|
|
95
|
+
// If encoding is provided, pass it to readAsText
|
|
96
|
+
if (options.encoding) {
|
|
97
|
+
reader.readAsText(file, options.encoding);
|
|
98
|
+
} else {
|
|
99
|
+
reader.readAsText(file);
|
|
100
|
+
}
|
|
101
|
+
});
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
module.exports = new BrowserApi();
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fileUtils = require('./util/fileUtils');
|
|
4
|
+
const csvToJson = require('./csvToJson');
|
|
5
|
+
|
|
6
|
+
class CsvToJsonAsync {
|
|
7
|
+
constructor() {
|
|
8
|
+
// Proxy the configuration methods to the sync instance
|
|
9
|
+
this.csvToJson = csvToJson;
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Set value type formatting
|
|
14
|
+
*/
|
|
15
|
+
formatValueByType(active) {
|
|
16
|
+
this.csvToJson.formatValueByType(active);
|
|
17
|
+
return this;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Set quoted field support
|
|
22
|
+
*/
|
|
23
|
+
supportQuotedField(active) {
|
|
24
|
+
this.csvToJson.supportQuotedField(active);
|
|
25
|
+
return this;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Set field delimiter
|
|
30
|
+
*/
|
|
31
|
+
fieldDelimiter(delimiter) {
|
|
32
|
+
this.csvToJson.fieldDelimiter(delimiter);
|
|
33
|
+
return this;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Trim header field whitespace
|
|
38
|
+
*/
|
|
39
|
+
trimHeaderFieldWhiteSpace(active) {
|
|
40
|
+
this.csvToJson.trimHeaderFieldWhiteSpace(active);
|
|
41
|
+
return this;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/**
|
|
45
|
+
* Set header index
|
|
46
|
+
*/
|
|
47
|
+
indexHeader(indexHeader) {
|
|
48
|
+
this.csvToJson.indexHeader(indexHeader);
|
|
49
|
+
return this;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Set sub-array parsing options
|
|
54
|
+
*/
|
|
55
|
+
parseSubArray(delimiter = '*', separator = ',') {
|
|
56
|
+
this.csvToJson.parseSubArray(delimiter, separator);
|
|
57
|
+
return this;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Set encoding
|
|
62
|
+
*/
|
|
63
|
+
encoding(encoding) {
|
|
64
|
+
this.csvToJson.encoding = encoding;
|
|
65
|
+
return this;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Async version of generateJsonFileFromCsv
|
|
70
|
+
*/
|
|
71
|
+
async generateJsonFileFromCsv(fileInputName, fileOutputName) {
|
|
72
|
+
const jsonStringified = await this.getJsonFromCsvStringified(fileInputName);
|
|
73
|
+
await fileUtils.writeFileAsync(jsonStringified, fileOutputName);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* Async version that returns stringified JSON from CSV file
|
|
78
|
+
*/
|
|
79
|
+
async getJsonFromCsvStringified(fileInputName) {
|
|
80
|
+
const json = await this.getJsonFromCsvAsync(fileInputName);
|
|
81
|
+
return JSON.stringify(json, undefined, 1);
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* Main async API method. If options.raw is true, treats input as CSV string.
|
|
86
|
+
* Otherwise reads from file path.
|
|
87
|
+
*/
|
|
88
|
+
async getJsonFromCsvAsync(inputFileNameOrCsv, options = {}) {
|
|
89
|
+
if (inputFileNameOrCsv === null || inputFileNameOrCsv === undefined) {
|
|
90
|
+
throw new Error('inputFileNameOrCsv is not defined!!!');
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
if (options.raw) {
|
|
94
|
+
if (inputFileNameOrCsv === '') {
|
|
95
|
+
return [];
|
|
96
|
+
}
|
|
97
|
+
return this.csvToJson.csvToJson(inputFileNameOrCsv);
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
const parsedCsv = await fileUtils.readFileAsync(inputFileNameOrCsv, this.csvToJson.encoding || 'utf8');
|
|
101
|
+
return this.csvToJson.csvToJson(parsedCsv);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
|
|
105
|
+
* Parse CSV string to JSON asynchronously
|
|
106
|
+
*/
|
|
107
|
+
csvStringToJsonAsync(csvString, options = { raw: true }) {
|
|
108
|
+
return this.getJsonFromCsvAsync(csvString, options);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
/**
|
|
112
|
+
* Parse CSV string to stringified JSON asynchronously
|
|
113
|
+
*/
|
|
114
|
+
async csvStringToJsonStringifiedAsync(csvString) {
|
|
115
|
+
const json = await this.csvStringToJsonAsync(csvString);
|
|
116
|
+
return JSON.stringify(json, undefined, 1);
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
module.exports = new CsvToJsonAsync();
|
package/src/util/fileUtils.js
CHANGED
|
@@ -8,6 +8,23 @@ class FileUtils {
|
|
|
8
8
|
return fs.readFileSync(fileInputName, encoding).toString();
|
|
9
9
|
}
|
|
10
10
|
|
|
11
|
+
readFileAsync(fileInputName, encoding = 'utf8') {
|
|
12
|
+
// Use fs.promises when available for a Promise-based API
|
|
13
|
+
if (fs.promises && typeof fs.promises.readFile === 'function') {
|
|
14
|
+
return fs.promises.readFile(fileInputName, encoding)
|
|
15
|
+
.then(buf => buf.toString());
|
|
16
|
+
}
|
|
17
|
+
return new Promise((resolve, reject) => {
|
|
18
|
+
fs.readFile(fileInputName, encoding, (err, data) => {
|
|
19
|
+
if (err) {
|
|
20
|
+
reject(err);
|
|
21
|
+
return;
|
|
22
|
+
}
|
|
23
|
+
resolve(data.toString());
|
|
24
|
+
});
|
|
25
|
+
});
|
|
26
|
+
}
|
|
27
|
+
|
|
11
28
|
writeFile(json, fileOutputName) {
|
|
12
29
|
fs.writeFile(fileOutputName, json, function (err) {
|
|
13
30
|
if (err) {
|
|
@@ -18,5 +35,17 @@ class FileUtils {
|
|
|
18
35
|
});
|
|
19
36
|
}
|
|
20
37
|
|
|
38
|
+
writeFileAsync(json, fileOutputName) {
|
|
39
|
+
if (fs.promises && typeof fs.promises.writeFile === 'function') {
|
|
40
|
+
return fs.promises.writeFile(fileOutputName, json);
|
|
41
|
+
}
|
|
42
|
+
return new Promise((resolve, reject) => {
|
|
43
|
+
fs.writeFile(fileOutputName, json, (err) => {
|
|
44
|
+
if (err) return reject(err);
|
|
45
|
+
resolve();
|
|
46
|
+
});
|
|
47
|
+
});
|
|
48
|
+
}
|
|
49
|
+
|
|
21
50
|
}
|
|
22
51
|
module.exports = new FileUtils();
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2019",
|
|
4
|
+
"module": "CommonJS",
|
|
5
|
+
"strict": true,
|
|
6
|
+
"esModuleInterop": true,
|
|
7
|
+
"skipLibCheck": true,
|
|
8
|
+
"forceConsistentCasingInFileNames": true,
|
|
9
|
+
"outDir": "dist",
|
|
10
|
+
"moduleResolution": "node",
|
|
11
|
+
"resolveJsonModule": true,
|
|
12
|
+
"types": ["node", "jest"]
|
|
13
|
+
},
|
|
14
|
+
"baseUrl": ".",
|
|
15
|
+
"paths": {
|
|
16
|
+
"convert-csv-to-json": ["./index.d.ts", "./index.js"]
|
|
17
|
+
},
|
|
18
|
+
"include": ["test/**/*.ts"]
|
|
19
|
+
}
|