convert-csv-to-json 3.12.1 → 3.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/MIGRATION.md +174 -0
- package/README.md +460 -38
- package/index.d.ts +5 -0
- package/index.js +24 -0
- package/package.json +1 -1
- package/src/csvToJsonAsync.js +120 -0
- package/src/util/fileUtils.js +29 -0
- package/src/util/stringUtils.js +90 -21
package/MIGRATION.md
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
# Migration Guide: Moving from Sync to Async
|
|
2
|
+
|
|
3
|
+
This guide will help you transition from the synchronous API to the new asynchronous API in csvToJson.
|
|
4
|
+
|
|
5
|
+
## Table of Contents
|
|
6
|
+
|
|
7
|
+
- [Basic Migration Patterns](#basic-migration-patterns)
|
|
8
|
+
- [Common Patterns and Best Practices](#common-patterns-and-best-practices)
|
|
9
|
+
- [Advanced Use Cases](#advanced-use-cases)
|
|
10
|
+
- [Migration Tips](#migration-tips)
|
|
11
|
+
|
|
12
|
+
## Basic Migration Patterns
|
|
13
|
+
|
|
14
|
+
1. Direct file reading:
|
|
15
|
+
```js
|
|
16
|
+
// Before (sync)
|
|
17
|
+
const json = csvToJson.getJsonFromCsv('input.csv');
|
|
18
|
+
console.log(json);
|
|
19
|
+
|
|
20
|
+
// After (async) - using Promises
|
|
21
|
+
csvToJson.getJsonFromCsvAsync('input.csv')
|
|
22
|
+
.then(json => console.log(json))
|
|
23
|
+
.catch(err => console.error('Error:', err));
|
|
24
|
+
|
|
25
|
+
// After (async) - using async/await
|
|
26
|
+
async function readCsv() {
|
|
27
|
+
try {
|
|
28
|
+
const json = await csvToJson.getJsonFromCsvAsync('input.csv');
|
|
29
|
+
console.log(json);
|
|
30
|
+
} catch (err) {
|
|
31
|
+
console.error('Error:', err);
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
2. File generation:
|
|
37
|
+
```js
|
|
38
|
+
// Before (sync)
|
|
39
|
+
csvToJson.generateJsonFileFromCsv('input.csv', 'output.json');
|
|
40
|
+
|
|
41
|
+
// After (async) - using Promises
|
|
42
|
+
csvToJson.generateJsonFileFromCsvAsync('input.csv', 'output.json')
|
|
43
|
+
.then(() => console.log('File created'))
|
|
44
|
+
.catch(err => console.error('Error:', err));
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
3. Chained operations:
|
|
48
|
+
```js
|
|
49
|
+
// Before (sync)
|
|
50
|
+
const json = csvToJson
|
|
51
|
+
.fieldDelimiter(',')
|
|
52
|
+
.formatValueByType()
|
|
53
|
+
.getJsonFromCsv('input.csv');
|
|
54
|
+
|
|
55
|
+
// After (async)
|
|
56
|
+
await csvToJson
|
|
57
|
+
.fieldDelimiter(',')
|
|
58
|
+
.formatValueByType()
|
|
59
|
+
.getJsonFromCsvAsync('input.csv');
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
## Common Patterns and Best Practices
|
|
63
|
+
|
|
64
|
+
1. Processing multiple files:
|
|
65
|
+
```js
|
|
66
|
+
// Sequential processing
|
|
67
|
+
async function processFiles(files) {
|
|
68
|
+
const results = [];
|
|
69
|
+
for (const file of files) {
|
|
70
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
71
|
+
results.push(json);
|
|
72
|
+
}
|
|
73
|
+
return results;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
// Parallel processing
|
|
77
|
+
async function processFilesParallel(files) {
|
|
78
|
+
const promises = files.map(file =>
|
|
79
|
+
csvToJson.getJsonFromCsvAsync(file)
|
|
80
|
+
);
|
|
81
|
+
return Promise.all(promises);
|
|
82
|
+
}
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
2. Error handling:
|
|
86
|
+
```js
|
|
87
|
+
// Robust error handling
|
|
88
|
+
async function processWithRetry(file, maxRetries = 3) {
|
|
89
|
+
for (let i = 0; i < maxRetries; i++) {
|
|
90
|
+
try {
|
|
91
|
+
return await csvToJson.getJsonFromCsvAsync(file);
|
|
92
|
+
} catch (err) {
|
|
93
|
+
if (i === maxRetries - 1) throw err;
|
|
94
|
+
await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1)));
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
3. Processing raw CSV data:
|
|
101
|
+
```js
|
|
102
|
+
// Processing CSV from network request
|
|
103
|
+
async function processCsvFromApi() {
|
|
104
|
+
const response = await fetch('https://api.example.com/data.csv');
|
|
105
|
+
const csvText = await response.text();
|
|
106
|
+
return csvToJson.getJsonFromCsvAsync(csvText, { raw: true });
|
|
107
|
+
}
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
## Advanced Use Cases
|
|
111
|
+
|
|
112
|
+
1. Streaming large files with async iteration:
|
|
113
|
+
```js
|
|
114
|
+
const { createReadStream } = require('fs');
|
|
115
|
+
const { createInterface } = require('readline');
|
|
116
|
+
|
|
117
|
+
async function* processLargeCsv(filePath) {
|
|
118
|
+
const fileStream = createReadStream(filePath);
|
|
119
|
+
const lines = createInterface({
|
|
120
|
+
input: fileStream,
|
|
121
|
+
crlfDelay: Infinity
|
|
122
|
+
});
|
|
123
|
+
|
|
124
|
+
const headers = await lines.next();
|
|
125
|
+
for await (const line of lines) {
|
|
126
|
+
const json = await csvToJson
|
|
127
|
+
.getJsonFromCsvAsync(headers.value + '\n' + line, { raw: true });
|
|
128
|
+
yield json[0];
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
// Usage
|
|
133
|
+
for await (const record of processLargeCsv('large.csv')) {
|
|
134
|
+
console.log(record);
|
|
135
|
+
}
|
|
136
|
+
```
|
|
137
|
+
|
|
138
|
+
2. Custom data transformation:
|
|
139
|
+
```js
|
|
140
|
+
async function processWithTransform(file) {
|
|
141
|
+
const json = await csvToJson
|
|
142
|
+
.formatValueByType()
|
|
143
|
+
.getJsonFromCsvAsync(file);
|
|
144
|
+
|
|
145
|
+
return json.map(record => ({
|
|
146
|
+
...record,
|
|
147
|
+
timestamp: new Date().toISOString(),
|
|
148
|
+
processed: true
|
|
149
|
+
}));
|
|
150
|
+
}
|
|
151
|
+
```
|
|
152
|
+
|
|
153
|
+
3. Validation and filtering:
|
|
154
|
+
```js
|
|
155
|
+
async function processWithValidation(file) {
|
|
156
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
157
|
+
|
|
158
|
+
return json.filter(record => {
|
|
159
|
+
// Validate required fields
|
|
160
|
+
if (!record.id || !record.name) return false;
|
|
161
|
+
// Validate data types
|
|
162
|
+
if (typeof record.age !== 'number') return false;
|
|
163
|
+
return true;
|
|
164
|
+
});
|
|
165
|
+
}
|
|
166
|
+
```
|
|
167
|
+
|
|
168
|
+
## Migration Tips
|
|
169
|
+
|
|
170
|
+
1. **Gradual Migration**: You can mix sync and async code during migration
|
|
171
|
+
2. **Error Handling**: Always include proper error handling with async code
|
|
172
|
+
3. **Testing**: Test both success and error cases
|
|
173
|
+
4. **Performance**: Consider using `Promise.all()` for parallel processing
|
|
174
|
+
5. **Memory**: For large files, consider streaming approaches
|
package/README.md
CHANGED
|
@@ -11,7 +11,7 @@
|
|
|
11
11
|

|
|
12
12
|

|
|
13
13
|
|
|
14
|
-
**This project is not dependent on others packages or libraries.**
|
|
14
|
+
**This project is not dependent on other packages or libraries, and supports both synchronous and Promise-based asynchronous APIs.**
|
|
15
15
|
|
|
16
16
|
This repository uses [npm-semver-publish](https://github.com/marketplace/actions/npm-semver-publish)
|
|
17
17
|
|
|
@@ -27,7 +27,7 @@ show your :heart: and support.
|
|
|
27
27
|
- [Prerequisites](#prerequisites)
|
|
28
28
|
- [Install npm *convert-csv-to-json package*](#install-npm-convert-csv-to-json-package)
|
|
29
29
|
* [Install](#install)
|
|
30
|
-
* [Usage](#usage)
|
|
30
|
+
* [Sync API Usage](#sync-api-usage)
|
|
31
31
|
+ [Generate JSON file](#generate-json-file)
|
|
32
32
|
+ [Generate Array of Object in JSON format](#generate-array-of-object-in-json-format)
|
|
33
33
|
+ [Generate Object with sub array](#generate-object-with-sub-array)
|
|
@@ -42,6 +42,12 @@ show your :heart: and support.
|
|
|
42
42
|
- [Boolean](#boolean)
|
|
43
43
|
+ [Encoding](#encoding)
|
|
44
44
|
+ [Working with CSV strings directly](#working-with-csv-strings-directly)
|
|
45
|
+
* [Async API Usage](#async-api-usage)
|
|
46
|
+
+ [Basic Async Operations](#basic-async-operations)
|
|
47
|
+
+ [Working with Raw CSV Data](#working-with-raw-csv-data)
|
|
48
|
+
+ [Processing Large Files](#processing-large-files)
|
|
49
|
+
+ [Error Handling and Retries](#error-handling-and-retries)
|
|
50
|
+
+ [Batch Processing](#batch-processing)
|
|
45
51
|
* [Chaining Pattern](#chaining-pattern)
|
|
46
52
|
- [Development](#development)
|
|
47
53
|
- [CI CD github action](#ci-cd-github-action)
|
|
@@ -51,7 +57,7 @@ show your :heart: and support.
|
|
|
51
57
|
<!-- tocstop -->
|
|
52
58
|
|
|
53
59
|
## Description
|
|
54
|
-
Converts *csv* files to *JSON* files with Node.js.
|
|
60
|
+
Converts *csv* files to *JSON* files with Node.js. Supports both synchronous operations and Promise-based asynchronous operations, allowing integration with modern async/await patterns.
|
|
55
61
|
|
|
56
62
|
Give an input file like:
|
|
57
63
|
|
|
@@ -112,7 +118,7 @@ Install package on your machine
|
|
|
112
118
|
$ npm install -g convert-csv-to-json
|
|
113
119
|
```
|
|
114
120
|
|
|
115
|
-
### Usage
|
|
121
|
+
### Sync API Usage
|
|
116
122
|
|
|
117
123
|
#### Generate JSON file
|
|
118
124
|
```js
|
|
@@ -221,13 +227,49 @@ If the header is not on the first line you can define the header index like:
|
|
|
221
227
|
Empty rows are ignored and not parsed.
|
|
222
228
|
|
|
223
229
|
#### Format property value by type
|
|
224
|
-
|
|
230
|
+
The `formatValueByType()` function intelligently converts string values to their appropriate types while preserving data integrity. To enable automatic type conversion:
|
|
231
|
+
|
|
225
232
|
```js
|
|
226
|
-
|
|
227
|
-
|
|
233
|
+
csvToJson.formatValueByType()
|
|
234
|
+
.getJsonFromCsv(fileInputName);
|
|
235
|
+
```
|
|
236
|
+
|
|
237
|
+
This conversion follows these rules:
|
|
238
|
+
|
|
239
|
+
##### Numbers
|
|
240
|
+
- Regular integers and decimals are converted to Number type
|
|
241
|
+
- Numbers with leading zeros are preserved as strings (e.g., "0012" stays "0012")
|
|
242
|
+
- Large integers outside JavaScript's safe range are preserved as strings
|
|
243
|
+
- Valid decimal numbers are converted to Number type
|
|
244
|
+
|
|
245
|
+
For example:
|
|
246
|
+
```json
|
|
247
|
+
{
|
|
248
|
+
"normalInteger": 42, // Converted to number
|
|
249
|
+
"decimal": 3.14, // Converted to number
|
|
250
|
+
"leadingZeros": "0012345", // Kept as string to preserve leading zeros
|
|
251
|
+
"largeNumber": "9007199254740992" // Kept as string to preserve precision
|
|
252
|
+
}
|
|
228
253
|
```
|
|
229
|
-
For example:
|
|
230
254
|
|
|
255
|
+
##### Boolean
|
|
256
|
+
Case-insensitive "true" or "false" strings are converted to boolean values:
|
|
257
|
+
```json
|
|
258
|
+
{
|
|
259
|
+
"registered": true, // From "true" or "TRUE" or "True"
|
|
260
|
+
"active": false // From "false" or "FALSE" or "False"
|
|
261
|
+
}
|
|
262
|
+
```
|
|
263
|
+
|
|
264
|
+
##### Complete Example
|
|
265
|
+
Input CSV:
|
|
266
|
+
```csv
|
|
267
|
+
first_name;last_name;email;gender;age;id;zip;registered
|
|
268
|
+
Constantin;Langsdon;clangsdon0@hc360.com;Male;96;00123;123;true
|
|
269
|
+
Norah;Raison;nraison1@wired.com;Female;32;987;00456;FALSE
|
|
270
|
+
```
|
|
271
|
+
|
|
272
|
+
Output JSON:
|
|
231
273
|
```json
|
|
232
274
|
[
|
|
233
275
|
{
|
|
@@ -236,8 +278,9 @@ For example:
|
|
|
236
278
|
"email": "clangsdon0@hc360.com",
|
|
237
279
|
"gender": "Male",
|
|
238
280
|
"age": 96,
|
|
239
|
-
"
|
|
240
|
-
"
|
|
281
|
+
"id": "00123", // Preserved leading zeros
|
|
282
|
+
"zip": 123, // Converted to number
|
|
283
|
+
"registered": true // Converted to boolean
|
|
241
284
|
},
|
|
242
285
|
{
|
|
243
286
|
"first_name": "Norah",
|
|
@@ -245,29 +288,12 @@ For example:
|
|
|
245
288
|
"email": "nraison1@wired.com",
|
|
246
289
|
"gender": "Female",
|
|
247
290
|
"age": 32,
|
|
248
|
-
"
|
|
249
|
-
"
|
|
291
|
+
"id": "987",
|
|
292
|
+
"zip": "00456", // Preserved leading zeros
|
|
293
|
+
"registered": false // Case-insensitive boolean conversion
|
|
250
294
|
}
|
|
251
295
|
]
|
|
252
296
|
```
|
|
253
|
-
##### Number
|
|
254
|
-
The property **age** is printed as
|
|
255
|
-
```json
|
|
256
|
-
"age": 32
|
|
257
|
-
```
|
|
258
|
-
instead of
|
|
259
|
-
```json
|
|
260
|
-
"age": "32"
|
|
261
|
-
```
|
|
262
|
-
##### Boolean
|
|
263
|
-
The property **registered** is printed as
|
|
264
|
-
```json
|
|
265
|
-
"registered": true
|
|
266
|
-
```
|
|
267
|
-
instead of
|
|
268
|
-
```json
|
|
269
|
-
"registered": "true"
|
|
270
|
-
```
|
|
271
297
|
|
|
272
298
|
#### Encoding
|
|
273
299
|
You can read and decode files with the following encoding:
|
|
@@ -331,21 +357,417 @@ let jsonArray = csvToJson
|
|
|
331
357
|
.csvStringToJson(csvString);
|
|
332
358
|
```
|
|
333
359
|
|
|
334
|
-
|
|
360
|
+
## Async API Usage
|
|
335
361
|
|
|
336
|
-
|
|
362
|
+
This library provides a Promise-based async API that's perfect for modern Node.js applications. For a detailed migration guide from sync to async API, see [MIGRATION.md](MIGRATION.md).
|
|
337
363
|
|
|
364
|
+
### Basic Async Operations
|
|
365
|
+
|
|
366
|
+
1. Convert CSV file to JSON:
|
|
338
367
|
```js
|
|
339
|
-
|
|
368
|
+
const csvToJson = require('convert-csv-to-json');
|
|
369
|
+
|
|
370
|
+
// Using Promises
|
|
371
|
+
csvToJson.getJsonFromCsvAsync('input.csv')
|
|
372
|
+
.then(json => console.log(json))
|
|
373
|
+
.catch(err => console.error('Error:', err));
|
|
374
|
+
|
|
375
|
+
// Using async/await
|
|
376
|
+
async function convertCsv() {
|
|
377
|
+
try {
|
|
378
|
+
const json = await csvToJson.getJsonFromCsvAsync('input.csv');
|
|
379
|
+
console.log(json);
|
|
380
|
+
} catch (err) {
|
|
381
|
+
console.error('Error:', err);
|
|
382
|
+
}
|
|
383
|
+
}
|
|
384
|
+
```
|
|
385
|
+
|
|
386
|
+
2. Generate JSON file from CSV:
|
|
387
|
+
```js
|
|
388
|
+
// Using async/await with chain configuration
|
|
389
|
+
async function convertAndSave() {
|
|
390
|
+
await csvToJson
|
|
391
|
+
.fieldDelimiter(',')
|
|
392
|
+
.formatValueByType()
|
|
393
|
+
.generateJsonFileFromCsvAsync('input.csv', 'output.json');
|
|
394
|
+
}
|
|
395
|
+
```
|
|
340
396
|
|
|
341
|
-
|
|
342
|
-
.formatValueByType()
|
|
343
|
-
.parseSubArray("*",',')
|
|
344
|
-
.supportQuotedField(true)
|
|
345
|
-
.getJsonFromCsv('myInputFile.csv');
|
|
397
|
+
### Working with Raw CSV Data
|
|
346
398
|
|
|
399
|
+
Process CSV data from memory or network sources:
|
|
400
|
+
|
|
401
|
+
```js
|
|
402
|
+
// Example: Processing CSV from an API
|
|
403
|
+
async function processCsvFromApi() {
|
|
404
|
+
const response = await fetch('https://api.example.com/data.csv');
|
|
405
|
+
const csvText = await response.text();
|
|
406
|
+
|
|
407
|
+
const json = await csvToJson
|
|
408
|
+
.formatValueByType()
|
|
409
|
+
.getJsonFromCsvAsync(csvText, { raw: true });
|
|
410
|
+
|
|
411
|
+
return json;
|
|
412
|
+
}
|
|
413
|
+
```
|
|
414
|
+
|
|
415
|
+
### Processing Large Files
|
|
416
|
+
|
|
417
|
+
For large files, use streaming to manage memory efficiently:
|
|
418
|
+
|
|
419
|
+
```js
|
|
420
|
+
const { createReadStream } = require('fs');
|
|
421
|
+
const { createInterface } = require('readline');
|
|
422
|
+
|
|
423
|
+
async function* processLargeFile(filePath) {
|
|
424
|
+
const fileStream = createReadStream(filePath);
|
|
425
|
+
const rl = createInterface({
|
|
426
|
+
input: fileStream,
|
|
427
|
+
crlfDelay: Infinity
|
|
428
|
+
});
|
|
429
|
+
|
|
430
|
+
for await (const line of rl) {
|
|
431
|
+
yield await csvToJson.getJsonFromCsvAsync(line, { raw: true });
|
|
432
|
+
}
|
|
433
|
+
}
|
|
434
|
+
|
|
435
|
+
// Usage
|
|
436
|
+
async function processData() {
|
|
437
|
+
for await (const record of processLargeFile('large.csv')) {
|
|
438
|
+
await saveToDatabase(record);
|
|
439
|
+
}
|
|
440
|
+
}
|
|
441
|
+
```
|
|
442
|
+
|
|
443
|
+
### Error Handling and Retries
|
|
444
|
+
|
|
445
|
+
Implement robust error handling with retries:
|
|
446
|
+
|
|
447
|
+
```js
|
|
448
|
+
async function processWithRetry(filePath, maxRetries = 3) {
|
|
449
|
+
for (let i = 0; i < maxRetries; i++) {
|
|
450
|
+
try {
|
|
451
|
+
const json = await csvToJson
|
|
452
|
+
.formatValueByType()
|
|
453
|
+
.getJsonFromCsvAsync(filePath);
|
|
454
|
+
|
|
455
|
+
return json;
|
|
456
|
+
} catch (err) {
|
|
457
|
+
if (i === maxRetries - 1) throw err;
|
|
458
|
+
// Exponential backoff
|
|
459
|
+
await new Promise(resolve =>
|
|
460
|
+
setTimeout(resolve, Math.pow(2, i) * 1000)
|
|
461
|
+
);
|
|
462
|
+
}
|
|
463
|
+
}
|
|
464
|
+
}
|
|
465
|
+
```
|
|
466
|
+
|
|
467
|
+
### Batch Processing
|
|
468
|
+
|
|
469
|
+
Process multiple files efficiently:
|
|
470
|
+
|
|
471
|
+
```js
|
|
472
|
+
async function batchProcess(files, batchSize = 3) {
|
|
473
|
+
const results = new Map();
|
|
474
|
+
|
|
475
|
+
for (let i = 0; i < files.length; i += batchSize) {
|
|
476
|
+
const batch = files.slice(i, i + batchSize);
|
|
477
|
+
const processed = await Promise.all(
|
|
478
|
+
batch.map(async file => {
|
|
479
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
480
|
+
return [file, json];
|
|
481
|
+
})
|
|
482
|
+
);
|
|
483
|
+
|
|
484
|
+
processed.forEach(([file, json]) => results.set(file, json));
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
return results;
|
|
488
|
+
}
|
|
489
|
+
|
|
490
|
+
// Usage
|
|
491
|
+
const files = ['data1.csv', 'data2.csv', 'data3.csv', 'data4.csv'];
|
|
492
|
+
const results = await batchProcess(files, 2);
|
|
493
|
+
```
|
|
494
|
+
|
|
495
|
+
## Chaining Pattern
|
|
496
|
+
|
|
497
|
+
The exposed API is implemented with the [Method Chaining Pattern](https://en.wikipedia.org/wiki/Method_chaining), which means that multiple methods can be chained. This pattern works with both synchronous and asynchronous methods:
|
|
498
|
+
|
|
499
|
+
### Synchronous Chaining
|
|
500
|
+
|
|
501
|
+
```js
|
|
502
|
+
const csvToJson = require('convert-csv-to-json');
|
|
503
|
+
|
|
504
|
+
// Chain configuration methods with sync operation
|
|
505
|
+
const json = csvToJson
|
|
506
|
+
.fieldDelimiter(',')
|
|
507
|
+
.formatValueByType()
|
|
508
|
+
.parseSubArray("*", ',')
|
|
509
|
+
.supportQuotedField(true)
|
|
510
|
+
.getJsonFromCsv('myInputFile.csv');
|
|
511
|
+
|
|
512
|
+
// Chain with file generation
|
|
513
|
+
csvToJson
|
|
514
|
+
.fieldDelimiter(';')
|
|
515
|
+
.utf8Encoding()
|
|
516
|
+
.formatValueByType()
|
|
517
|
+
.generateJsonFileFromCsv('input.csv', 'output.json');
|
|
518
|
+
|
|
519
|
+
// Chain with string parsing
|
|
520
|
+
const jsonArray = csvToJson
|
|
521
|
+
.fieldDelimiter(',')
|
|
522
|
+
.trimHeaderFieldWhiteSpace(true)
|
|
523
|
+
.csvStringToJson('name,age\nJohn,30\nJane,25');
|
|
347
524
|
```
|
|
348
525
|
|
|
526
|
+
### Asynchronous Chaining
|
|
527
|
+
|
|
528
|
+
```js
|
|
529
|
+
const csvToJson = require('convert-csv-to-json');
|
|
530
|
+
|
|
531
|
+
// Using async/await
|
|
532
|
+
async function processCSV() {
|
|
533
|
+
// Chain configuration methods with async operation
|
|
534
|
+
const json = await csvToJson
|
|
535
|
+
.fieldDelimiter(',')
|
|
536
|
+
.formatValueByType()
|
|
537
|
+
.parseSubArray("*", ',')
|
|
538
|
+
.supportQuotedField(true)
|
|
539
|
+
.getJsonFromCsvAsync('myInputFile.csv');
|
|
540
|
+
|
|
541
|
+
// Chain with async file generation
|
|
542
|
+
await csvToJson
|
|
543
|
+
.fieldDelimiter(';')
|
|
544
|
+
.utf8Encoding()
|
|
545
|
+
.formatValueByType()
|
|
546
|
+
.generateJsonFileFromCsvAsync('input.csv', 'output.json');
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
// Using Promises
|
|
550
|
+
csvToJson
|
|
551
|
+
.fieldDelimiter(',')
|
|
552
|
+
.formatValueByType()
|
|
553
|
+
.getJsonFromCsvAsync('input.csv')
|
|
554
|
+
.then(json => console.log(json))
|
|
555
|
+
.catch(err => console.error('Error:', err));
|
|
556
|
+
```
|
|
557
|
+
|
|
558
|
+
All configuration methods can be chained in any order before calling the final operation method (like `getJsonFromCsv`, `getJsonFromCsvAsync`, etc.). The configuration will be applied in the order it is chained.
|
|
559
|
+
|
|
560
|
+
## Common Use Cases
|
|
561
|
+
|
|
562
|
+
Here are some common use cases and how to implement them:
|
|
563
|
+
|
|
564
|
+
### 1. Processing CSV from HTTP Response
|
|
565
|
+
```js
|
|
566
|
+
const https = require('https');
|
|
567
|
+
|
|
568
|
+
async function processRemoteCsv(url) {
|
|
569
|
+
const csvData = await new Promise((resolve, reject) => {
|
|
570
|
+
https.get(url, (res) => {
|
|
571
|
+
let data = '';
|
|
572
|
+
res.on('data', chunk => data += chunk);
|
|
573
|
+
res.on('end', () => resolve(data));
|
|
574
|
+
res.on('error', reject);
|
|
575
|
+
});
|
|
576
|
+
});
|
|
577
|
+
|
|
578
|
+
return csvToJson.getJsonFromCsvAsync(csvData, { raw: true });
|
|
579
|
+
}
|
|
580
|
+
```
|
|
581
|
+
|
|
582
|
+
### 2. Batch Processing Multiple Files
|
|
583
|
+
```js
|
|
584
|
+
async function batchProcess(files) {
|
|
585
|
+
const results = new Map();
|
|
586
|
+
|
|
587
|
+
// Process in chunks of 3 files at a time
|
|
588
|
+
for (let i = 0; i < files.length; i += 3) {
|
|
589
|
+
const chunk = files.slice(i, i + 3);
|
|
590
|
+
const processed = await Promise.all(
|
|
591
|
+
chunk.map(async file => {
|
|
592
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
593
|
+
return [file, json];
|
|
594
|
+
})
|
|
595
|
+
);
|
|
596
|
+
|
|
597
|
+
processed.forEach(([file, json]) => results.set(file, json));
|
|
598
|
+
}
|
|
599
|
+
|
|
600
|
+
return results;
|
|
601
|
+
}
|
|
602
|
+
```
|
|
603
|
+
|
|
604
|
+
### 3. Data Transformation Pipeline
|
|
605
|
+
```js
|
|
606
|
+
async function transformData(csvFile) {
|
|
607
|
+
// Step 1: Parse CSV
|
|
608
|
+
const json = await csvToJson
|
|
609
|
+
.formatValueByType()
|
|
610
|
+
.getJsonFromCsvAsync(csvFile);
|
|
611
|
+
|
|
612
|
+
// Step 2: Transform data
|
|
613
|
+
const transformed = json.map(record => ({
|
|
614
|
+
id: record.id,
|
|
615
|
+
fullName: `${record.firstName} ${record.lastName}`,
|
|
616
|
+
age: Number(record.age),
|
|
617
|
+
isAdult: Number(record.age) >= 18,
|
|
618
|
+
email: record.email.toLowerCase()
|
|
619
|
+
}));
|
|
620
|
+
|
|
621
|
+
// Step 3: Filter invalid records
|
|
622
|
+
return transformed.filter(record =>
|
|
623
|
+
record.id &&
|
|
624
|
+
record.fullName.length > 0 &&
|
|
625
|
+
!isNaN(record.age)
|
|
626
|
+
);
|
|
627
|
+
}
|
|
628
|
+
```
|
|
629
|
+
|
|
630
|
+
### 4. Error Recovery and Logging
|
|
631
|
+
```js
|
|
632
|
+
async function processWithLogging(file) {
|
|
633
|
+
const logger = {
|
|
634
|
+
info: (msg) => console.log(`[INFO] ${msg}`),
|
|
635
|
+
error: (msg, err) => console.error(`[ERROR] ${msg}`, err)
|
|
636
|
+
};
|
|
637
|
+
|
|
638
|
+
try {
|
|
639
|
+
logger.info(`Starting processing ${file}`);
|
|
640
|
+
const startTime = Date.now();
|
|
641
|
+
|
|
642
|
+
const json = await csvToJson.getJsonFromCsvAsync(file);
|
|
643
|
+
|
|
644
|
+
const duration = Date.now() - startTime;
|
|
645
|
+
logger.info(`Processed ${file} in ${duration}ms`);
|
|
646
|
+
|
|
647
|
+
return json;
|
|
648
|
+
} catch (err) {
|
|
649
|
+
logger.error(`Failed to process ${file}`, err);
|
|
650
|
+
throw err;
|
|
651
|
+
}
|
|
652
|
+
}
|
|
653
|
+
```
|
|
654
|
+
|
|
655
|
+
## Troubleshooting
|
|
656
|
+
|
|
657
|
+
Here are solutions to common issues you might encounter:
|
|
658
|
+
|
|
659
|
+
### Memory Issues with Large Files
|
|
660
|
+
|
|
661
|
+
If you're processing large CSV files and encountering memory issues:
|
|
662
|
+
|
|
663
|
+
```js
|
|
664
|
+
// Instead of loading the entire file
|
|
665
|
+
const json = await csvToJson.getJsonFromCsvAsync('large.csv'); // ❌
|
|
666
|
+
|
|
667
|
+
// Use streaming with async iteration
|
|
668
|
+
for await (const record of processLargeCsv('large.csv')) { // ✅
|
|
669
|
+
// Process one record at a time
|
|
670
|
+
await processRecord(record);
|
|
671
|
+
}
|
|
672
|
+
```
|
|
673
|
+
|
|
674
|
+
### Handling Different CSV Formats
|
|
675
|
+
|
|
676
|
+
1. **Mixed Quote Types**:
|
|
677
|
+
```js
|
|
678
|
+
csvToJson
|
|
679
|
+
.supportQuotedField(true) // Enable quoted field support
|
|
680
|
+
.getJsonFromCsvAsync(file);
|
|
681
|
+
```
|
|
682
|
+
|
|
683
|
+
2. **Custom Delimiters**:
|
|
684
|
+
```js
|
|
685
|
+
csvToJson
|
|
686
|
+
.fieldDelimiter(';') // Change delimiter
|
|
687
|
+
.getJsonFromCsvAsync(file);
|
|
688
|
+
```
|
|
689
|
+
|
|
690
|
+
3. **UTF-8 with BOM**:
|
|
691
|
+
```js
|
|
692
|
+
csvToJson
|
|
693
|
+
.encoding('utf8') // Specify encoding
|
|
694
|
+
.getJsonFromCsvAsync(file);
|
|
695
|
+
```
|
|
696
|
+
|
|
697
|
+
### Common Error Solutions
|
|
698
|
+
|
|
699
|
+
1. **ENOENT: no such file or directory**
|
|
700
|
+
- Check if the file path is correct and absolute
|
|
701
|
+
- Verify file permissions
|
|
702
|
+
- Ensure the file exists in the specified location
|
|
703
|
+
|
|
704
|
+
2. **Invalid CSV Structure**
|
|
705
|
+
- Verify CSV format matches expected structure
|
|
706
|
+
- Check for missing or extra delimiters
|
|
707
|
+
- Validate header row exists if expected
|
|
708
|
+
|
|
709
|
+
3. **Memory Leaks**
|
|
710
|
+
- Use streaming for large files
|
|
711
|
+
- Process files in smaller chunks
|
|
712
|
+
- Implement proper cleanup in try/finally blocks
|
|
713
|
+
|
|
714
|
+
4. **Encoding Issues**
|
|
715
|
+
- Specify correct encoding using .encoding()
|
|
716
|
+
- Check for BOM markers
|
|
717
|
+
- Verify source file encoding
|
|
718
|
+
|
|
719
|
+
### Performance Optimization
|
|
720
|
+
|
|
721
|
+
1. **Parallel Processing**:
|
|
722
|
+
```js
|
|
723
|
+
// Instead of sequential processing
|
|
724
|
+
for (const file of files) {
|
|
725
|
+
await process(file); // ❌
|
|
726
|
+
}
|
|
727
|
+
|
|
728
|
+
// Use parallel processing with limits
|
|
729
|
+
async function processWithLimit(files, limit = 3) {
|
|
730
|
+
const results = [];
|
|
731
|
+
for (let i = 0; i < files.length; i += limit) {
|
|
732
|
+
const chunk = files.slice(i, i + limit);
|
|
733
|
+
const chunkResults = await Promise.all(
|
|
734
|
+
chunk.map(file => csvToJson.getJsonFromCsvAsync(file))
|
|
735
|
+
);
|
|
736
|
+
results.push(...chunkResults);
|
|
737
|
+
}
|
|
738
|
+
return results;
|
|
739
|
+
} // ✅
|
|
740
|
+
```
|
|
741
|
+
|
|
742
|
+
2. **Memory Usage**:
|
|
743
|
+
```js
|
|
744
|
+
// Clear references when done
|
|
745
|
+
async function processWithCleanup(file) {
|
|
746
|
+
let json;
|
|
747
|
+
try {
|
|
748
|
+
json = await csvToJson.getJsonFromCsvAsync(file);
|
|
749
|
+
return await processData(json);
|
|
750
|
+
} finally {
|
|
751
|
+
json = null; // Clear reference
|
|
752
|
+
}
|
|
753
|
+
}
|
|
754
|
+
```
|
|
755
|
+
|
|
756
|
+
### TypeScript Support
|
|
757
|
+
|
|
758
|
+
If you're using TypeScript and encounter type issues:
|
|
759
|
+
|
|
760
|
+
```typescript
|
|
761
|
+
// Define custom types for your CSV structure
|
|
762
|
+
interface MyCsvRecord {
|
|
763
|
+
id: number;
|
|
764
|
+
name: string;
|
|
765
|
+
age?: number;
|
|
766
|
+
}
|
|
767
|
+
|
|
768
|
+
// Use type assertion
|
|
769
|
+
const json = await csvToJson.getJsonFromCsvAsync<MyCsvRecord>('data.csv');
|
|
770
|
+
```
|
|
349
771
|
|
|
350
772
|
## Development
|
|
351
773
|
* Download all csvToJson dependencies:
|
package/index.d.ts
CHANGED
|
@@ -91,6 +91,11 @@ declare module 'convert-csv-to-json' {
|
|
|
91
91
|
*/
|
|
92
92
|
getJsonFromCsv(inputFileName: string): any[];
|
|
93
93
|
|
|
94
|
+
/**
|
|
95
|
+
* Async version of getJsonFromCsv. When options.raw is true the input is treated as a CSV string
|
|
96
|
+
*/
|
|
97
|
+
getJsonFromCsvAsync(inputFileNameOrCsv: string, options?: { raw?: boolean }): Promise<any[]>;
|
|
98
|
+
|
|
94
99
|
csvStringToJson(csvString: string): any[];
|
|
95
100
|
|
|
96
101
|
/**
|
package/index.js
CHANGED
|
@@ -152,6 +152,30 @@ exports.getJsonFromCsv = function(inputFileName) {
|
|
|
152
152
|
return csvToJson.getJsonFromCsv(inputFileName);
|
|
153
153
|
};
|
|
154
154
|
|
|
155
|
+
/**
|
|
156
|
+
* Async version of getJsonFromCsv.
|
|
157
|
+
* @param {string} inputFileNameOrCsv path to file or CSV string
|
|
158
|
+
* @param {object} options { raw: boolean } when raw=true the first param is treated as CSV content
|
|
159
|
+
* @returns {Promise<Array>} resolves with the array of objects
|
|
160
|
+
*/
|
|
161
|
+
const csvToJsonAsync = require('./src/csvToJsonAsync');
|
|
162
|
+
|
|
163
|
+
// Re-export all async API methods
|
|
164
|
+
Object.assign(exports, {
|
|
165
|
+
getJsonFromCsvAsync: function(input, options) {
|
|
166
|
+
return csvToJsonAsync.getJsonFromCsvAsync(input, options);
|
|
167
|
+
},
|
|
168
|
+
csvStringToJsonAsync: function(input, options) {
|
|
169
|
+
return csvToJsonAsync.csvStringToJsonAsync(input, options);
|
|
170
|
+
},
|
|
171
|
+
csvStringToJsonStringifiedAsync: function(input) {
|
|
172
|
+
return csvToJsonAsync.csvStringToJsonStringifiedAsync(input);
|
|
173
|
+
},
|
|
174
|
+
generateJsonFileFromCsvAsync: function(input, output) {
|
|
175
|
+
return csvToJsonAsync.generateJsonFileFromCsv(input, output);
|
|
176
|
+
}
|
|
177
|
+
});
|
|
178
|
+
|
|
155
179
|
exports.csvStringToJson = function(csvString) {
|
|
156
180
|
return csvToJson.csvStringToJson(csvString);
|
|
157
181
|
};
|
package/src/csvToJsonAsync.js
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fileUtils = require('./util/fileUtils');
|
|
4
|
+
const csvToJson = require('./csvToJson');
|
|
5
|
+
|
|
6
|
+
class CsvToJsonAsync {
|
|
7
|
+
constructor() {
|
|
8
|
+
// Proxy the configuration methods to the sync instance
|
|
9
|
+
this.csvToJson = csvToJson;
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Set value type formatting
|
|
14
|
+
*/
|
|
15
|
+
formatValueByType(active) {
|
|
16
|
+
this.csvToJson.formatValueByType(active);
|
|
17
|
+
return this;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Set quoted field support
|
|
22
|
+
*/
|
|
23
|
+
supportQuotedField(active) {
|
|
24
|
+
this.csvToJson.supportQuotedField(active);
|
|
25
|
+
return this;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Set field delimiter
|
|
30
|
+
*/
|
|
31
|
+
fieldDelimiter(delimiter) {
|
|
32
|
+
this.csvToJson.fieldDelimiter(delimiter);
|
|
33
|
+
return this;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Trim header field whitespace
|
|
38
|
+
*/
|
|
39
|
+
trimHeaderFieldWhiteSpace(active) {
|
|
40
|
+
this.csvToJson.trimHeaderFieldWhiteSpace(active);
|
|
41
|
+
return this;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/**
|
|
45
|
+
* Set header index
|
|
46
|
+
*/
|
|
47
|
+
indexHeader(indexHeader) {
|
|
48
|
+
this.csvToJson.indexHeader(indexHeader);
|
|
49
|
+
return this;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Set sub-array parsing options
|
|
54
|
+
*/
|
|
55
|
+
parseSubArray(delimiter = '*', separator = ',') {
|
|
56
|
+
this.csvToJson.parseSubArray(delimiter, separator);
|
|
57
|
+
return this;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Set encoding
|
|
62
|
+
*/
|
|
63
|
+
encoding(encoding) {
|
|
64
|
+
this.csvToJson.encoding = encoding;
|
|
65
|
+
return this;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Async version of generateJsonFileFromCsv
|
|
70
|
+
*/
|
|
71
|
+
async generateJsonFileFromCsv(fileInputName, fileOutputName) {
|
|
72
|
+
const jsonStringified = await this.getJsonFromCsvStringified(fileInputName);
|
|
73
|
+
await fileUtils.writeFileAsync(jsonStringified, fileOutputName);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* Async version that returns stringified JSON from CSV file
|
|
78
|
+
*/
|
|
79
|
+
async getJsonFromCsvStringified(fileInputName) {
|
|
80
|
+
const json = await this.getJsonFromCsvAsync(fileInputName);
|
|
81
|
+
return JSON.stringify(json, undefined, 1);
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* Main async API method. If options.raw is true, treats input as CSV string.
|
|
86
|
+
* Otherwise reads from file path.
|
|
87
|
+
*/
|
|
88
|
+
async getJsonFromCsvAsync(inputFileNameOrCsv, options = {}) {
|
|
89
|
+
if (inputFileNameOrCsv === null || inputFileNameOrCsv === undefined) {
|
|
90
|
+
throw new Error('inputFileNameOrCsv is not defined!!!');
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
if (options.raw) {
|
|
94
|
+
if (inputFileNameOrCsv === '') {
|
|
95
|
+
return [];
|
|
96
|
+
}
|
|
97
|
+
return this.csvToJson.csvToJson(inputFileNameOrCsv);
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
const parsedCsv = await fileUtils.readFileAsync(inputFileNameOrCsv, this.csvToJson.encoding || 'utf8');
|
|
101
|
+
return this.csvToJson.csvToJson(parsedCsv);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
|
|
105
|
+
* Parse CSV string to JSON asynchronously
|
|
106
|
+
*/
|
|
107
|
+
csvStringToJsonAsync(csvString, options = { raw: true }) {
|
|
108
|
+
return this.getJsonFromCsvAsync(csvString, options);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
/**
|
|
112
|
+
* Parse CSV string to stringified JSON asynchronously
|
|
113
|
+
*/
|
|
114
|
+
async csvStringToJsonStringifiedAsync(csvString) {
|
|
115
|
+
const json = await this.csvStringToJsonAsync(csvString);
|
|
116
|
+
return JSON.stringify(json, undefined, 1);
|
|
117
|
+
}
|
|
118
|
+
}

// Singleton export: the module shares one pre-built CsvToJsonAsync instance.
module.exports = new CsvToJsonAsync();
|
package/src/util/fileUtils.js
CHANGED
|
@@ -8,6 +8,23 @@ class FileUtils {
|
|
|
8
8
|
return fs.readFileSync(fileInputName, encoding).toString();
|
|
9
9
|
}
|
|
10
10
|
|
|
11
|
+
readFileAsync(fileInputName, encoding = 'utf8') {
|
|
12
|
+
// Use fs.promises when available for a Promise-based API
|
|
13
|
+
if (fs.promises && typeof fs.promises.readFile === 'function') {
|
|
14
|
+
return fs.promises.readFile(fileInputName, encoding)
|
|
15
|
+
.then(buf => buf.toString());
|
|
16
|
+
}
|
|
17
|
+
return new Promise((resolve, reject) => {
|
|
18
|
+
fs.readFile(fileInputName, encoding, (err, data) => {
|
|
19
|
+
if (err) {
|
|
20
|
+
reject(err);
|
|
21
|
+
return;
|
|
22
|
+
}
|
|
23
|
+
resolve(data.toString());
|
|
24
|
+
});
|
|
25
|
+
});
|
|
26
|
+
}
|
|
27
|
+
|
|
11
28
|
writeFile(json, fileOutputName) {
|
|
12
29
|
fs.writeFile(fileOutputName, json, function (err) {
|
|
13
30
|
if (err) {
|
|
@@ -18,5 +35,17 @@ class FileUtils {
|
|
|
18
35
|
});
|
|
19
36
|
}
|
|
20
37
|
|
|
38
|
+
writeFileAsync(json, fileOutputName) {
|
|
39
|
+
if (fs.promises && typeof fs.promises.writeFile === 'function') {
|
|
40
|
+
return fs.promises.writeFile(fileOutputName, json);
|
|
41
|
+
}
|
|
42
|
+
return new Promise((resolve, reject) => {
|
|
43
|
+
fs.writeFile(fileOutputName, json, (err) => {
|
|
44
|
+
if (err) return reject(err);
|
|
45
|
+
resolve();
|
|
46
|
+
});
|
|
47
|
+
});
|
|
48
|
+
}
|
|
49
|
+
|
|
21
50
|
}
// Singleton export: the module shares one pre-built FileUtils instance.
module.exports = new FileUtils();
|
package/src/util/stringUtils.js
CHANGED
|
@@ -1,40 +1,109 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
3
|
class StringUtils {
  // Regular expressions as constants for better maintainability
  static PATTERNS = {
    INTEGER: /^-?\d+$/,
    FLOAT: /^-?\d*\.\d+$/,
    WHITESPACE: /\s/g
  };

  static BOOLEAN_VALUES = {
    TRUE: 'true',
    FALSE: 'false'
  };

  /**
   * Removes whitespace from property names based on configuration
   * @param {boolean} shouldTrimAll - If true, removes all whitespace, otherwise only trims edges
   * @param {string} propertyName - The property name to process
   * @returns {string} The processed property name
   */
  trimPropertyName(shouldTrimAll, propertyName) {
    if (!propertyName) {
      return '';
    }
    return shouldTrimAll ?
      propertyName.replace(StringUtils.PATTERNS.WHITESPACE, '') :
      propertyName.trim();
  }

  /**
   * Converts a string value to its appropriate type while preserving data integrity
   * @param {string} value - The input value to convert
   * @returns {string|number|boolean} The converted value
   */
  getValueFormatByType(value) {
    if (this.isEmpty(value)) {
      return String();
    }

    if (this.isBoolean(value)) {
      return this.convertToBoolean(value);
    }

    if (this.isInteger(value)) {
      return this.convertInteger(value);
    }

    if (this.isFloat(value)) {
      return this.convertFloat(value);
    }

    return String(value);
  }

  /**
   * Checks if a value array contains any non-empty values
   * @param {Array} values - Array to check for content
   * @returns {boolean} True if array has any non-empty values
   */
  hasContent(values = []) {
    return Array.isArray(values) &&
      values.some(value => Boolean(value));
  }

  // Private helper methods for type checking and conversion

  // Bug fix: also treat null as empty. Previously getValueFormatByType(null)
  // fell through to isBoolean and threw on value.toLowerCase().
  isEmpty(value) {
    return value === undefined || value === null || value === '';
  }

  isBoolean(value) {
    // Guard: only string input can be the textual booleans; non-strings
    // have no toLowerCase and previously caused a TypeError here.
    if (typeof value !== 'string') {
      return false;
    }
    const normalizedValue = value.toLowerCase();
    return normalizedValue === StringUtils.BOOLEAN_VALUES.TRUE ||
      normalizedValue === StringUtils.BOOLEAN_VALUES.FALSE;
  }

  isInteger(value) {
    return StringUtils.PATTERNS.INTEGER.test(value);
  }

  isFloat(value) {
    return StringUtils.PATTERNS.FLOAT.test(value);
  }

  // Leading zeros (e.g. '007', '-012') mark values that must stay strings.
  hasLeadingZero(value) {
    const isPositiveWithLeadingZero = value.length > 1 && value[0] === '0';
    const isNegativeWithLeadingZero = value.length > 2 && value[0] === '-' && value[1] === '0';
    return isPositiveWithLeadingZero || isNegativeWithLeadingZero;
  }

  convertToBoolean(value) {
    return JSON.parse(value.toLowerCase());
  }

  convertInteger(value) {
    if (this.hasLeadingZero(value)) {
      return String(value);
    }

    // Integers beyond Number's safe range are kept as strings to avoid
    // silent precision loss.
    const num = Number(value);
    return Number.isSafeInteger(num) ? num : String(value);
  }

  convertFloat(value) {
    const num = Number(value);
    return Number.isFinite(num) ? num : String(value);
  }
}
|
|
40
109
|
|