convert-csv-to-json 3.13.0 → 3.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/MIGRATION.md ADDED
@@ -0,0 +1,174 @@
1
+ # Migration Guide: Moving from Sync to Async
2
+
3
+ This guide will help you transition from the synchronous API to the new asynchronous API in csvToJson.
4
+
5
+ ## Table of Contents
6
+
7
+ - [Basic Migration Patterns](#basic-migration-patterns)
8
+ - [Common Patterns and Best Practices](#common-patterns-and-best-practices)
9
+ - [Advanced Use Cases](#advanced-use-cases)
10
+ - [Migration Tips](#migration-tips)
11
+
12
+ ## Basic Migration Patterns
13
+
14
+ 1. Direct file reading:
15
+ ```js
16
+ // Before (sync)
17
+ const json = csvToJson.getJsonFromCsv('input.csv');
18
+ console.log(json);
19
+
20
+ // After (async) - using Promises
21
+ csvToJson.getJsonFromCsvAsync('input.csv')
22
+ .then(json => console.log(json))
23
+ .catch(err => console.error('Error:', err));
24
+
25
+ // After (async) - using async/await
26
+ async function readCsv() {
27
+ try {
28
+ const json = await csvToJson.getJsonFromCsvAsync('input.csv');
29
+ console.log(json);
30
+ } catch (err) {
31
+ console.error('Error:', err);
32
+ }
33
+ }
34
+ ```
35
+
36
+ 2. File generation:
37
+ ```js
38
+ // Before (sync)
39
+ csvToJson.generateJsonFileFromCsv('input.csv', 'output.json');
40
+
41
+ // After (async) - using Promises
42
+ csvToJson.generateJsonFileFromCsvAsync('input.csv', 'output.json')
43
+ .then(() => console.log('File created'))
44
+ .catch(err => console.error('Error:', err));
45
+ ```
46
+
47
+ 3. Chained operations:
48
+ ```js
49
+ // Before (sync)
50
+ const json = csvToJson
51
+ .fieldDelimiter(',')
52
+ .formatValueByType()
53
+ .getJsonFromCsv('input.csv');
54
+
55
+ // After (async)
56
+ await csvToJson
57
+ .fieldDelimiter(',')
58
+ .formatValueByType()
59
+ .getJsonFromCsvAsync('input.csv');
60
+ ```
61
+
62
+ ## Common Patterns and Best Practices
63
+
64
+ 1. Processing multiple files:
65
+ ```js
66
+ // Sequential processing
67
+ async function processFiles(files) {
68
+ const results = [];
69
+ for (const file of files) {
70
+ const json = await csvToJson.getJsonFromCsvAsync(file);
71
+ results.push(json);
72
+ }
73
+ return results;
74
+ }
75
+
76
+ // Parallel processing
77
+ async function processFilesParallel(files) {
78
+ const promises = files.map(file =>
79
+ csvToJson.getJsonFromCsvAsync(file)
80
+ );
81
+ return Promise.all(promises);
82
+ }
83
+ ```
84
+
85
+ 2. Error handling:
86
+ ```js
87
+ // Robust error handling
88
+ async function processWithRetry(file, maxRetries = 3) {
89
+ for (let i = 0; i < maxRetries; i++) {
90
+ try {
91
+ return await csvToJson.getJsonFromCsvAsync(file);
92
+ } catch (err) {
93
+ if (i === maxRetries - 1) throw err;
94
+ await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1)));
95
+ }
96
+ }
97
+ }
98
+ ```
99
+
100
+ 3. Processing raw CSV data:
101
+ ```js
102
+ // Processing CSV from network request
103
+ async function processCsvFromApi() {
104
+ const response = await fetch('https://api.example.com/data.csv');
105
+ const csvText = await response.text();
106
+ return csvToJson.getJsonFromCsvAsync(csvText, { raw: true });
107
+ }
108
+ ```
109
+
110
+ ## Advanced Use Cases
111
+
112
+ 1. Streaming large files with async iteration:
113
+ ```js
114
+ const { createReadStream } = require('fs');
115
+ const { createInterface } = require('readline');
116
+
117
+ async function* processLargeCsv(filePath) {
118
+ const fileStream = createReadStream(filePath);
119
+ const lines = createInterface({
120
+ input: fileStream,
121
+ crlfDelay: Infinity
122
+ });
123
+
124
+ const headers = await lines[Symbol.asyncIterator]().next();
125
+ for await (const line of lines) {
126
+ const json = await csvToJson
127
+ .getJsonFromCsvAsync(headers.value + '\n' + line, { raw: true });
128
+ yield json[0];
129
+ }
130
+ }
131
+
132
+ // Usage
133
+ for await (const record of processLargeCsv('large.csv')) {
134
+ console.log(record);
135
+ }
136
+ ```
137
+
138
+ 2. Custom data transformation:
139
+ ```js
140
+ async function processWithTransform(file) {
141
+ const json = await csvToJson
142
+ .formatValueByType()
143
+ .getJsonFromCsvAsync(file);
144
+
145
+ return json.map(record => ({
146
+ ...record,
147
+ timestamp: new Date().toISOString(),
148
+ processed: true
149
+ }));
150
+ }
151
+ ```
152
+
153
+ 3. Validation and filtering:
154
+ ```js
155
+ async function processWithValidation(file) {
156
+ const json = await csvToJson.getJsonFromCsvAsync(file);
157
+
158
+ return json.filter(record => {
159
+ // Validate required fields
160
+ if (!record.id || !record.name) return false;
161
+ // Validate data types
162
+ if (typeof record.age !== 'number') return false;
163
+ return true;
164
+ });
165
+ }
166
+ ```
167
+
168
+ ## Migration Tips
169
+
170
+ 1. **Gradual Migration**: You can mix sync and async code during migration
171
+ 2. **Error Handling**: Always include proper error handling with async code
172
+ 3. **Testing**: Test both success and error cases
173
+ 4. **Performance**: Consider using `Promise.all()` for parallel processing
174
+ 5. **Memory**: For large files, consider streaming approaches
package/README.md CHANGED
@@ -11,7 +11,7 @@
11
11
  ![TypeScript](https://img.shields.io/badge/typescript-%23007ACC.svg?style=for-the-badge&logo=typescript&logoColor=white)
12
12
  ![JavaScript](https://img.shields.io/badge/javascript-%23323330.svg?style=for-the-badge&logo=javascript&logoColor=%23F7DF1E)
13
13
 
14
- **This project is not dependent on others packages or libraries.**
14
+ **This project is not dependent on other packages or libraries, and supports both synchronous and Promise-based asynchronous APIs.**
15
15
 
16
16
  This repository uses [![GitHub Action - iuccio/npm-semantic-publish-action@latest](https://img.shields.io/badge/GitHub_Action_-iuccio%2Fnpm--semantic--publish--action%40latest-2ea44f)](https://github.com/marketplace/actions/npm-semver-publish)
17
17
 
@@ -27,7 +27,7 @@ show your :heart: and support.
27
27
  - [Prerequisites](#prerequisites)
28
28
  - [Install npm *convert-csv-to-json package*](#install-npm-convert-csv-to-json-package)
29
29
  * [Install](#install)
30
- * [Usage](#usage)
30
+ * [Sync API Usage](#sync-api-usage)
31
31
  + [Generate JSON file](#generate-json-file)
32
32
  + [Generate Array of Object in JSON format](#generate-array-of-object-in-json-format)
33
33
  + [Generate Object with sub array](#generate-object-with-sub-array)
@@ -42,6 +42,12 @@ show your :heart: and support.
42
42
  - [Boolean](#boolean)
43
43
  + [Encoding](#encoding)
44
44
  + [Working with CSV strings directly](#working-with-csv-strings-directly)
45
+ * [Async API Usage](#async-api-usage)
46
+ + [Basic Async Operations](#basic-async-operations)
47
+ + [Working with Raw CSV Data](#working-with-raw-csv-data)
48
+ + [Processing Large Files](#processing-large-files)
49
+ + [Error Handling and Retries](#error-handling-and-retries)
50
+ + [Batch Processing](#batch-processing)
45
51
  * [Chaining Pattern](#chaining-pattern)
46
52
  - [Development](#development)
47
53
  - [CI CD github action](#ci-cd-github-action)
@@ -51,7 +57,7 @@ show your :heart: and support.
51
57
  <!-- tocstop -->
52
58
 
53
59
  ## Description
54
- Converts *csv* files to *JSON* files with Node.js.
60
+ Converts *csv* files to *JSON* files with Node.js. Supports both synchronous operations and Promise-based asynchronous operations, allowing integration with modern async/await patterns.
55
61
 
56
62
  Give an input file like:
57
63
 
@@ -112,7 +118,7 @@ Install package on your machine
112
118
  $ npm install -g convert-csv-to-json
113
119
  ```
114
120
 
115
- ### Usage
121
+ ### Sync API Usage
116
122
 
117
123
  #### Generate JSON file
118
124
  ```js
@@ -351,21 +357,417 @@ let jsonArray = csvToJson
351
357
  .csvStringToJson(csvString);
352
358
  ```
353
359
 
354
- ### Chaining Pattern
360
+ ## Async API Usage
355
361
 
356
- The exposed API is implemented with the [Method Chaining Pattern](https://en.wikipedia.org/wiki/Method_chaining), which means that multiple methods can be chained, e.g.:
362
+ This library provides a Promise-based async API that's perfect for modern Node.js applications. For a detailed migration guide from sync to async API, see [MIGRATION.md](MIGRATION.md).
357
363
 
364
+ ### Basic Async Operations
365
+
366
+ 1. Convert CSV file to JSON:
358
367
  ```js
359
- let csvToJson = require('convert-csv-to-json');
368
+ const csvToJson = require('convert-csv-to-json');
369
+
370
+ // Using Promises
371
+ csvToJson.getJsonFromCsvAsync('input.csv')
372
+ .then(json => console.log(json))
373
+ .catch(err => console.error('Error:', err));
374
+
375
+ // Using async/await
376
+ async function convertCsv() {
377
+ try {
378
+ const json = await csvToJson.getJsonFromCsvAsync('input.csv');
379
+ console.log(json);
380
+ } catch (err) {
381
+ console.error('Error:', err);
382
+ }
383
+ }
384
+ ```
385
+
386
+ 2. Generate JSON file from CSV:
387
+ ```js
388
+ // Using async/await with chain configuration
389
+ async function convertAndSave() {
390
+ await csvToJson
391
+ .fieldDelimiter(',')
392
+ .formatValueByType()
393
+ .generateJsonFileFromCsvAsync('input.csv', 'output.json');
394
+ }
395
+ ```
396
+
397
+ ### Working with Raw CSV Data
398
+
399
+ Process CSV data from memory or network sources:
400
+
401
+ ```js
402
+ // Example: Processing CSV from an API
403
+ async function processCsvFromApi() {
404
+ const response = await fetch('https://api.example.com/data.csv');
405
+ const csvText = await response.text();
406
+
407
+ const json = await csvToJson
408
+ .formatValueByType()
409
+ .getJsonFromCsvAsync(csvText, { raw: true });
410
+
411
+ return json;
412
+ }
413
+ ```
414
+
415
+ ### Processing Large Files
416
+
417
+ For large files, use streaming to manage memory efficiently:
418
+
419
+ ```js
420
+ const { createReadStream } = require('fs');
421
+ const { createInterface } = require('readline');
422
+
423
+ async function* processLargeFile(filePath) {
424
+ const fileStream = createReadStream(filePath);
425
+ const rl = createInterface({
426
+ input: fileStream,
427
+ crlfDelay: Infinity
428
+ });
429
+
430
+ for await (const line of rl) {
431
+ yield await csvToJson.getJsonFromCsvAsync(line, { raw: true });
432
+ }
433
+ }
434
+
435
+ // Usage
436
+ async function processData() {
437
+ for await (const record of processLargeFile('large.csv')) {
438
+ await saveToDatabase(record);
439
+ }
440
+ }
441
+ ```
442
+
443
+ ### Error Handling and Retries
444
+
445
+ Implement robust error handling with retries:
446
+
447
+ ```js
448
+ async function processWithRetry(filePath, maxRetries = 3) {
449
+ for (let i = 0; i < maxRetries; i++) {
450
+ try {
451
+ const json = await csvToJson
452
+ .formatValueByType()
453
+ .getJsonFromCsvAsync(filePath);
454
+
455
+ return json;
456
+ } catch (err) {
457
+ if (i === maxRetries - 1) throw err;
458
+ // Exponential backoff
459
+ await new Promise(resolve =>
460
+ setTimeout(resolve, Math.pow(2, i) * 1000)
461
+ );
462
+ }
463
+ }
464
+ }
465
+ ```
466
+
467
+ ### Batch Processing
468
+
469
+ Process multiple files efficiently:
470
+
471
+ ```js
472
+ async function batchProcess(files, batchSize = 3) {
473
+ const results = new Map();
474
+
475
+ for (let i = 0; i < files.length; i += batchSize) {
476
+ const batch = files.slice(i, i + batchSize);
477
+ const processed = await Promise.all(
478
+ batch.map(async file => {
479
+ const json = await csvToJson.getJsonFromCsvAsync(file);
480
+ return [file, json];
481
+ })
482
+ );
483
+
484
+ processed.forEach(([file, json]) => results.set(file, json));
485
+ }
486
+
487
+ return results;
488
+ }
489
+
490
+ // Usage
491
+ const files = ['data1.csv', 'data2.csv', 'data3.csv', 'data4.csv'];
492
+ const results = await batchProcess(files, 2);
493
+ ```
494
+
495
+ ## Chaining Pattern
496
+
497
+ The exposed API is implemented with the [Method Chaining Pattern](https://en.wikipedia.org/wiki/Method_chaining), which means that multiple methods can be chained. This pattern works with both synchronous and asynchronous methods:
498
+
499
+ ### Synchronous Chaining
360
500
 
361
- csvToJson.fieldDelimiter(',')
362
- .formatValueByType()
363
- .parseSubArray("*",',')
364
- .supportQuotedField(true)
365
- .getJsonFromCsv('myInputFile.csv');
501
+ ```js
502
+ const csvToJson = require('convert-csv-to-json');
503
+
504
+ // Chain configuration methods with sync operation
505
+ const json = csvToJson
506
+ .fieldDelimiter(',')
507
+ .formatValueByType()
508
+ .parseSubArray("*", ',')
509
+ .supportQuotedField(true)
510
+ .getJsonFromCsv('myInputFile.csv');
511
+
512
+ // Chain with file generation
513
+ csvToJson
514
+ .fieldDelimiter(';')
515
+ .utf8Encoding()
516
+ .formatValueByType()
517
+ .generateJsonFileFromCsv('input.csv', 'output.json');
518
+
519
+ // Chain with string parsing
520
+ const jsonArray = csvToJson
521
+ .fieldDelimiter(',')
522
+ .trimHeaderFieldWhiteSpace(true)
523
+ .csvStringToJson('name,age\nJohn,30\nJane,25');
524
+ ```
366
525
 
526
+ ### Asynchronous Chaining
527
+
528
+ ```js
529
+ const csvToJson = require('convert-csv-to-json');
530
+
531
+ // Using async/await
532
+ async function processCSV() {
533
+ // Chain configuration methods with async operation
534
+ const json = await csvToJson
535
+ .fieldDelimiter(',')
536
+ .formatValueByType()
537
+ .parseSubArray("*", ',')
538
+ .supportQuotedField(true)
539
+ .getJsonFromCsvAsync('myInputFile.csv');
540
+
541
+ // Chain with async file generation
542
+ await csvToJson
543
+ .fieldDelimiter(';')
544
+ .utf8Encoding()
545
+ .formatValueByType()
546
+ .generateJsonFileFromCsvAsync('input.csv', 'output.json');
547
+ }
548
+
549
+ // Using Promises
550
+ csvToJson
551
+ .fieldDelimiter(',')
552
+ .formatValueByType()
553
+ .getJsonFromCsvAsync('input.csv')
554
+ .then(json => console.log(json))
555
+ .catch(err => console.error('Error:', err));
556
+ ```
557
+
558
+ All configuration methods can be chained in any order before calling the final operation method (like `getJsonFromCsv`, `getJsonFromCsvAsync`, etc.). The configuration will be applied in the order it is chained.
559
+
560
+ ## Common Use Cases
561
+
562
+ Here are some common use cases and how to implement them:
563
+
564
+ ### 1. Processing CSV from HTTP Response
565
+ ```js
566
+ const https = require('https');
567
+
568
+ async function processRemoteCsv(url) {
569
+ const csvData = await new Promise((resolve, reject) => {
570
+ https.get(url, (res) => {
571
+ let data = '';
572
+ res.on('data', chunk => data += chunk);
573
+ res.on('end', () => resolve(data));
574
+ res.on('error', reject);
575
+ });
576
+ });
577
+
578
+ return csvToJson.getJsonFromCsvAsync(csvData, { raw: true });
579
+ }
367
580
  ```
368
581
 
582
+ ### 2. Batch Processing Multiple Files
583
+ ```js
584
+ async function batchProcess(files) {
585
+ const results = new Map();
586
+
587
+ // Process in chunks of 3 files at a time
588
+ for (let i = 0; i < files.length; i += 3) {
589
+ const chunk = files.slice(i, i + 3);
590
+ const processed = await Promise.all(
591
+ chunk.map(async file => {
592
+ const json = await csvToJson.getJsonFromCsvAsync(file);
593
+ return [file, json];
594
+ })
595
+ );
596
+
597
+ processed.forEach(([file, json]) => results.set(file, json));
598
+ }
599
+
600
+ return results;
601
+ }
602
+ ```
603
+
604
+ ### 3. Data Transformation Pipeline
605
+ ```js
606
+ async function transformData(csvFile) {
607
+ // Step 1: Parse CSV
608
+ const json = await csvToJson
609
+ .formatValueByType()
610
+ .getJsonFromCsvAsync(csvFile);
611
+
612
+ // Step 2: Transform data
613
+ const transformed = json.map(record => ({
614
+ id: record.id,
615
+ fullName: `${record.firstName} ${record.lastName}`,
616
+ age: Number(record.age),
617
+ isAdult: Number(record.age) >= 18,
618
+ email: record.email.toLowerCase()
619
+ }));
620
+
621
+ // Step 3: Filter invalid records
622
+ return transformed.filter(record =>
623
+ record.id &&
624
+ record.fullName.length > 0 &&
625
+ !isNaN(record.age)
626
+ );
627
+ }
628
+ ```
629
+
630
+ ### 4. Error Recovery and Logging
631
+ ```js
632
+ async function processWithLogging(file) {
633
+ const logger = {
634
+ info: (msg) => console.log(`[INFO] ${msg}`),
635
+ error: (msg, err) => console.error(`[ERROR] ${msg}`, err)
636
+ };
637
+
638
+ try {
639
+ logger.info(`Starting processing ${file}`);
640
+ const startTime = Date.now();
641
+
642
+ const json = await csvToJson.getJsonFromCsvAsync(file);
643
+
644
+ const duration = Date.now() - startTime;
645
+ logger.info(`Processed ${file} in ${duration}ms`);
646
+
647
+ return json;
648
+ } catch (err) {
649
+ logger.error(`Failed to process ${file}`, err);
650
+ throw err;
651
+ }
652
+ }
653
+ ```
654
+
655
+ ## Troubleshooting
656
+
657
+ Here are solutions to common issues you might encounter:
658
+
659
+ ### Memory Issues with Large Files
660
+
661
+ If you're processing large CSV files and encountering memory issues:
662
+
663
+ ```js
664
+ // Instead of loading the entire file
665
+ const json = await csvToJson.getJsonFromCsvAsync('large.csv'); // ❌
666
+
667
+ // Use streaming with async iteration
668
+ for await (const record of processLargeCsv('large.csv')) { // ✅
669
+ // Process one record at a time
670
+ await processRecord(record);
671
+ }
672
+ ```
673
+
674
+ ### Handling Different CSV Formats
675
+
676
+ 1. **Mixed Quote Types**:
677
+ ```js
678
+ csvToJson
679
+ .supportQuotedField(true) // Enable quoted field support
680
+ .getJsonFromCsvAsync(file);
681
+ ```
682
+
683
+ 2. **Custom Delimiters**:
684
+ ```js
685
+ csvToJson
686
+ .fieldDelimiter(';') // Change delimiter
687
+ .getJsonFromCsvAsync(file);
688
+ ```
689
+
690
+ 3. **UTF-8 with BOM**:
691
+ ```js
692
+ csvToJson
693
+ .encoding('utf8') // Specify encoding
694
+ .getJsonFromCsvAsync(file);
695
+ ```
696
+
697
+ ### Common Error Solutions
698
+
699
+ 1. **ENOENT: no such file or directory**
700
+ - Check if the file path is correct and absolute
701
+ - Verify file permissions
702
+ - Ensure the file exists in the specified location
703
+
704
+ 2. **Invalid CSV Structure**
705
+ - Verify CSV format matches expected structure
706
+ - Check for missing or extra delimiters
707
+ - Validate header row exists if expected
708
+
709
+ 3. **Memory Leaks**
710
+ - Use streaming for large files
711
+ - Process files in smaller chunks
712
+ - Implement proper cleanup in try/finally blocks
713
+
714
+ 4. **Encoding Issues**
715
+ - Specify correct encoding using .encoding()
716
+ - Check for BOM markers
717
+ - Verify source file encoding
718
+
719
+ ### Performance Optimization
720
+
721
+ 1. **Parallel Processing**:
722
+ ```js
723
+ // Instead of sequential processing
724
+ for (const file of files) {
725
+ await process(file); // ❌
726
+ }
727
+
728
+ // Use parallel processing with limits
729
+ async function processWithLimit(files, limit = 3) {
730
+ const results = [];
731
+ for (let i = 0; i < files.length; i += limit) {
732
+ const chunk = files.slice(i, i + limit);
733
+ const chunkResults = await Promise.all(
734
+ chunk.map(file => csvToJson.getJsonFromCsvAsync(file))
735
+ );
736
+ results.push(...chunkResults);
737
+ }
738
+ return results;
739
+ } // ✅
740
+ ```
741
+
742
+ 2. **Memory Usage**:
743
+ ```js
744
+ // Clear references when done
745
+ async function processWithCleanup(file) {
746
+ let json;
747
+ try {
748
+ json = await csvToJson.getJsonFromCsvAsync(file);
749
+ return await processData(json);
750
+ } finally {
751
+ json = null; // Clear reference
752
+ }
753
+ }
754
+ ```
755
+
756
+ ### TypeScript Support
757
+
758
+ If you're using TypeScript and encounter type issues:
759
+
760
+ ```typescript
761
+ // Define custom types for your CSV structure
762
+ interface MyCsvRecord {
763
+ id: number;
764
+ name: string;
765
+ age?: number;
766
+ }
767
+
768
+ // Use type assertion
769
+ const json = (await csvToJson.getJsonFromCsvAsync('data.csv')) as MyCsvRecord[];
770
+ ```
369
771
 
370
772
  ## Development
371
773
  * Download all csvToJson dependencies:
package/index.d.ts CHANGED
@@ -91,6 +91,11 @@ declare module 'convert-csv-to-json' {
91
91
  */
92
92
  getJsonFromCsv(inputFileName: string): any[];
93
93
 
94
+ /**
95
+ * Async version of getJsonFromCsv. When options.raw is true the input is treated as a CSV string
96
+ */
97
+ getJsonFromCsvAsync(inputFileNameOrCsv: string, options?: { raw?: boolean }): Promise<any[]>;
98
+
94
99
  csvStringToJson(csvString: string): any[];
95
100
 
96
101
  /**
package/index.js CHANGED
@@ -152,6 +152,30 @@ exports.getJsonFromCsv = function(inputFileName) {
152
152
  return csvToJson.getJsonFromCsv(inputFileName);
153
153
  };
154
154
 
155
+ /**
156
+ * Async version of getJsonFromCsv.
157
+ * @param {string} inputFileNameOrCsv path to file or CSV string
158
+ * @param {object} options { raw: boolean } when raw=true the first param is treated as CSV content
159
+ * @returns {Promise<Array>} resolves with the array of objects
160
+ */
161
+ const csvToJsonAsync = require('./src/csvToJsonAsync');
162
+
163
+ // Re-export all async API methods
164
+ Object.assign(exports, {
165
+ getJsonFromCsvAsync: function(input, options) {
166
+ return csvToJsonAsync.getJsonFromCsvAsync(input, options);
167
+ },
168
+ csvStringToJsonAsync: function(input, options) {
169
+ return csvToJsonAsync.csvStringToJsonAsync(input, options);
170
+ },
171
+ csvStringToJsonStringifiedAsync: function(input) {
172
+ return csvToJsonAsync.csvStringToJsonStringifiedAsync(input);
173
+ },
174
+ generateJsonFileFromCsvAsync: function(input, output) {
175
+ return csvToJsonAsync.generateJsonFileFromCsv(input, output);
176
+ }
177
+ });
178
+
155
179
  exports.csvStringToJson = function(csvString) {
156
180
  return csvToJson.csvStringToJson(csvString);
157
181
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "convert-csv-to-json",
3
- "version": "3.13.0",
3
+ "version": "3.14.0",
4
4
  "description": "Convert CSV to JSON",
5
5
  "main": "index.js",
6
6
  "types": "index.d.ts",
@@ -0,0 +1,120 @@
1
+ 'use strict';
2
+
3
+ const fileUtils = require('./util/fileUtils');
4
+ const csvToJson = require('./csvToJson');
5
+
6
+ class CsvToJsonAsync {
7
+ constructor() {
8
+ // Proxy the configuration methods to the sync instance
9
+ this.csvToJson = csvToJson;
10
+ }
11
+
12
+ /**
13
+ * Set value type formatting
14
+ */
15
+ formatValueByType(active) {
16
+ this.csvToJson.formatValueByType(active);
17
+ return this;
18
+ }
19
+
20
+ /**
21
+ * Set quoted field support
22
+ */
23
+ supportQuotedField(active) {
24
+ this.csvToJson.supportQuotedField(active);
25
+ return this;
26
+ }
27
+
28
+ /**
29
+ * Set field delimiter
30
+ */
31
+ fieldDelimiter(delimiter) {
32
+ this.csvToJson.fieldDelimiter(delimiter);
33
+ return this;
34
+ }
35
+
36
+ /**
37
+ * Trim header field whitespace
38
+ */
39
+ trimHeaderFieldWhiteSpace(active) {
40
+ this.csvToJson.trimHeaderFieldWhiteSpace(active);
41
+ return this;
42
+ }
43
+
44
+ /**
45
+ * Set header index
46
+ */
47
+ indexHeader(indexHeader) {
48
+ this.csvToJson.indexHeader(indexHeader);
49
+ return this;
50
+ }
51
+
52
+ /**
53
+ * Set sub-array parsing options
54
+ */
55
+ parseSubArray(delimiter = '*', separator = ',') {
56
+ this.csvToJson.parseSubArray(delimiter, separator);
57
+ return this;
58
+ }
59
+
60
+ /**
61
+ * Set encoding
62
+ */
63
+ encoding(encoding) {
64
+ this.csvToJson.encoding = encoding;
65
+ return this;
66
+ }
67
+
68
+ /**
69
+ * Async version of generateJsonFileFromCsv
70
+ */
71
+ async generateJsonFileFromCsv(fileInputName, fileOutputName) {
72
+ const jsonStringified = await this.getJsonFromCsvStringified(fileInputName);
73
+ await fileUtils.writeFileAsync(jsonStringified, fileOutputName);
74
+ }
75
+
76
+ /**
77
+ * Async version that returns stringified JSON from CSV file
78
+ */
79
+ async getJsonFromCsvStringified(fileInputName) {
80
+ const json = await this.getJsonFromCsvAsync(fileInputName);
81
+ return JSON.stringify(json, undefined, 1);
82
+ }
83
+
84
+ /**
85
+ * Main async API method. If options.raw is true, treats input as CSV string.
86
+ * Otherwise reads from file path.
87
+ */
88
+ async getJsonFromCsvAsync(inputFileNameOrCsv, options = {}) {
89
+ if (inputFileNameOrCsv === null || inputFileNameOrCsv === undefined) {
90
+ throw new Error('inputFileNameOrCsv is not defined!!!');
91
+ }
92
+
93
+ if (options.raw) {
94
+ if (inputFileNameOrCsv === '') {
95
+ return [];
96
+ }
97
+ return this.csvToJson.csvToJson(inputFileNameOrCsv);
98
+ }
99
+
100
+ const parsedCsv = await fileUtils.readFileAsync(inputFileNameOrCsv, this.csvToJson.encoding || 'utf8');
101
+ return this.csvToJson.csvToJson(parsedCsv);
102
+ }
103
+
104
+ /**
105
+ * Parse CSV string to JSON asynchronously
106
+ */
107
+ csvStringToJsonAsync(csvString, options = { raw: true }) {
108
+ return this.getJsonFromCsvAsync(csvString, options);
109
+ }
110
+
111
+ /**
112
+ * Parse CSV string to stringified JSON asynchronously
113
+ */
114
+ async csvStringToJsonStringifiedAsync(csvString) {
115
+ const json = await this.csvStringToJsonAsync(csvString);
116
+ return JSON.stringify(json, undefined, 1);
117
+ }
118
+ }
119
+
120
+ module.exports = new CsvToJsonAsync();
@@ -8,6 +8,23 @@ class FileUtils {
8
8
  return fs.readFileSync(fileInputName, encoding).toString();
9
9
  }
10
10
 
11
+ readFileAsync(fileInputName, encoding = 'utf8') {
12
+ // Use fs.promises when available for a Promise-based API
13
+ if (fs.promises && typeof fs.promises.readFile === 'function') {
14
+ return fs.promises.readFile(fileInputName, encoding)
15
+ .then(buf => buf.toString());
16
+ }
17
+ return new Promise((resolve, reject) => {
18
+ fs.readFile(fileInputName, encoding, (err, data) => {
19
+ if (err) {
20
+ reject(err);
21
+ return;
22
+ }
23
+ resolve(data.toString());
24
+ });
25
+ });
26
+ }
27
+
11
28
  writeFile(json, fileOutputName) {
12
29
  fs.writeFile(fileOutputName, json, function (err) {
13
30
  if (err) {
@@ -18,5 +35,17 @@ class FileUtils {
18
35
  });
19
36
  }
20
37
 
38
+ writeFileAsync(json, fileOutputName) {
39
+ if (fs.promises && typeof fs.promises.writeFile === 'function') {
40
+ return fs.promises.writeFile(fileOutputName, json);
41
+ }
42
+ return new Promise((resolve, reject) => {
43
+ fs.writeFile(fileOutputName, json, (err) => {
44
+ if (err) return reject(err);
45
+ resolve();
46
+ });
47
+ });
48
+ }
49
+
21
50
  }
22
51
  module.exports = new FileUtils();