jtcsv 2.2.7 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (140)
  1. package/README.md +31 -1
  2. package/bin/jtcsv.js +891 -821
  3. package/bin/jtcsv.ts +2534 -0
  4. package/csv-to-json.js +168 -145
  5. package/dist/jtcsv-core.cjs.js +1407 -0
  6. package/dist/jtcsv-core.cjs.js.map +1 -0
  7. package/dist/jtcsv-core.esm.js +1379 -0
  8. package/dist/jtcsv-core.esm.js.map +1 -0
  9. package/dist/jtcsv-core.umd.js +1413 -0
  10. package/dist/jtcsv-core.umd.js.map +1 -0
  11. package/dist/jtcsv-full.cjs.js +1912 -0
  12. package/dist/jtcsv-full.cjs.js.map +1 -0
  13. package/dist/jtcsv-full.esm.js +1880 -0
  14. package/dist/jtcsv-full.esm.js.map +1 -0
  15. package/dist/jtcsv-full.umd.js +1918 -0
  16. package/dist/jtcsv-full.umd.js.map +1 -0
  17. package/dist/jtcsv-workers.esm.js +759 -0
  18. package/dist/jtcsv-workers.esm.js.map +1 -0
  19. package/dist/jtcsv-workers.umd.js +773 -0
  20. package/dist/jtcsv-workers.umd.js.map +1 -0
  21. package/dist/jtcsv.cjs.js +61 -19
  22. package/dist/jtcsv.cjs.js.map +1 -1
  23. package/dist/jtcsv.esm.js +61 -19
  24. package/dist/jtcsv.esm.js.map +1 -1
  25. package/dist/jtcsv.umd.js +61 -19
  26. package/dist/jtcsv.umd.js.map +1 -1
  27. package/errors.js +188 -2
  28. package/examples/advanced/conditional-transformations.js +446 -0
  29. package/examples/advanced/conditional-transformations.ts +446 -0
  30. package/examples/advanced/csv-parser.worker.js +89 -0
  31. package/examples/advanced/csv-parser.worker.ts +89 -0
  32. package/examples/advanced/nested-objects-example.js +306 -0
  33. package/examples/advanced/nested-objects-example.ts +306 -0
  34. package/examples/advanced/performance-optimization.js +504 -0
  35. package/examples/advanced/performance-optimization.ts +504 -0
  36. package/examples/advanced/run-demo-server.js +116 -0
  37. package/examples/advanced/run-demo-server.ts +116 -0
  38. package/examples/advanced/web-worker-usage.html +874 -0
  39. package/examples/async-multithreaded-example.ts +335 -0
  40. package/examples/cli-advanced-usage.md +288 -0
  41. package/examples/cli-batch-processing.ts +38 -0
  42. package/examples/cli-tool.js +0 -3
  43. package/examples/cli-tool.ts +183 -0
  44. package/examples/error-handling.js +21 -7
  45. package/examples/error-handling.ts +356 -0
  46. package/examples/express-api.js +0 -3
  47. package/examples/express-api.ts +164 -0
  48. package/examples/large-dataset-example.js +0 -3
  49. package/examples/large-dataset-example.ts +204 -0
  50. package/examples/ndjson-processing.js +1 -1
  51. package/examples/ndjson-processing.ts +456 -0
  52. package/examples/plugin-excel-exporter.js +3 -4
  53. package/examples/plugin-excel-exporter.ts +406 -0
  54. package/examples/react-integration.tsx +637 -0
  55. package/examples/schema-validation.ts +640 -0
  56. package/examples/simple-usage.js +254 -254
  57. package/examples/simple-usage.ts +194 -0
  58. package/examples/streaming-example.js +4 -5
  59. package/examples/streaming-example.ts +419 -0
  60. package/examples/web-workers-advanced.ts +28 -0
  61. package/index.d.ts +1 -3
  62. package/index.js +15 -1
  63. package/json-save.js +9 -3
  64. package/json-to-csv.js +168 -21
  65. package/package.json +69 -10
  66. package/plugins/express-middleware/README.md +21 -2
  67. package/plugins/express-middleware/example.js +3 -4
  68. package/plugins/express-middleware/example.ts +135 -0
  69. package/plugins/express-middleware/index.d.ts +1 -1
  70. package/plugins/express-middleware/index.js +270 -118
  71. package/plugins/express-middleware/index.ts +557 -0
  72. package/plugins/fastify-plugin/index.js +2 -4
  73. package/plugins/fastify-plugin/index.ts +443 -0
  74. package/plugins/hono/index.ts +226 -0
  75. package/plugins/nestjs/index.ts +201 -0
  76. package/plugins/nextjs-api/examples/ConverterComponent.tsx +386 -0
  77. package/plugins/nextjs-api/examples/api-convert.js +0 -2
  78. package/plugins/nextjs-api/examples/api-convert.ts +67 -0
  79. package/plugins/nextjs-api/index.tsx +339 -0
  80. package/plugins/nextjs-api/route.js +2 -3
  81. package/plugins/nextjs-api/route.ts +370 -0
  82. package/plugins/nuxt/index.ts +94 -0
  83. package/plugins/nuxt/runtime/composables/useJtcsv.ts +100 -0
  84. package/plugins/nuxt/runtime/plugin.ts +71 -0
  85. package/plugins/remix/index.js +1 -1
  86. package/plugins/remix/index.ts +260 -0
  87. package/plugins/sveltekit/index.js +1 -1
  88. package/plugins/sveltekit/index.ts +301 -0
  89. package/plugins/trpc/index.ts +267 -0
  90. package/src/browser/browser-functions.ts +402 -0
  91. package/src/browser/core.js +92 -0
  92. package/src/browser/core.ts +152 -0
  93. package/src/browser/csv-to-json-browser.d.ts +3 -0
  94. package/src/browser/csv-to-json-browser.js +36 -14
  95. package/src/browser/csv-to-json-browser.ts +264 -0
  96. package/src/browser/errors-browser.ts +303 -0
  97. package/src/browser/extensions/plugins.js +92 -0
  98. package/src/browser/extensions/plugins.ts +93 -0
  99. package/src/browser/extensions/workers.js +39 -0
  100. package/src/browser/extensions/workers.ts +39 -0
  101. package/src/browser/globals.d.ts +5 -0
  102. package/src/browser/index.ts +192 -0
  103. package/src/browser/json-to-csv-browser.d.ts +3 -0
  104. package/src/browser/json-to-csv-browser.js +13 -3
  105. package/src/browser/json-to-csv-browser.ts +262 -0
  106. package/src/browser/streams.js +12 -2
  107. package/src/browser/streams.ts +336 -0
  108. package/src/browser/workers/csv-parser.worker.ts +377 -0
  109. package/src/browser/workers/worker-pool.ts +548 -0
  110. package/src/core/delimiter-cache.js +22 -8
  111. package/src/core/delimiter-cache.ts +310 -0
  112. package/src/core/node-optimizations.ts +449 -0
  113. package/src/core/plugin-system.js +29 -11
  114. package/src/core/plugin-system.ts +400 -0
  115. package/src/core/transform-hooks.ts +558 -0
  116. package/src/engines/fast-path-engine-new.ts +347 -0
  117. package/src/engines/fast-path-engine.ts +854 -0
  118. package/src/errors.ts +72 -0
  119. package/src/formats/ndjson-parser.ts +469 -0
  120. package/src/formats/tsv-parser.ts +334 -0
  121. package/src/index-with-plugins.js +16 -9
  122. package/src/index-with-plugins.ts +395 -0
  123. package/src/types/index.ts +255 -0
  124. package/src/utils/bom-utils.js +259 -0
  125. package/src/utils/bom-utils.ts +373 -0
  126. package/src/utils/encoding-support.js +124 -0
  127. package/src/utils/encoding-support.ts +155 -0
  128. package/src/utils/schema-validator.js +19 -19
  129. package/src/utils/schema-validator.ts +819 -0
  130. package/src/utils/transform-loader.js +1 -1
  131. package/src/utils/transform-loader.ts +389 -0
  132. package/src/utils/zod-adapter.js +170 -0
  133. package/src/utils/zod-adapter.ts +280 -0
  134. package/src/web-server/index.js +10 -10
  135. package/src/web-server/index.ts +683 -0
  136. package/src/workers/csv-multithreaded.ts +310 -0
  137. package/src/workers/csv-parser.worker.ts +227 -0
  138. package/src/workers/worker-pool.ts +409 -0
  139. package/stream-csv-to-json.js +26 -8
  140. package/stream-json-to-csv.js +1 -0
@@ -0,0 +1,446 @@
1
+ /**
2
+ * Advanced Example: Conditional Field Transformations
3
+ *
4
+ * Demonstrates how to use jtcsv for complex data transformations
5
+ * with conditional logic, field validation, and custom formatting.
6
+ */
7
+
8
+ const { csvToJson, jsonToCsv, createCsvToJsonStream } = await import("../../index.js");
9
+ import fs from "fs";
10
+ const { pipeline } = await import("stream/promises");
11
+
12
+ /**
13
+ * Example 1: Conditional Transformation with Business Logic
14
+ *
15
+ * Scenario: Process sales data with different tax rates based on region
16
+ * and apply discounts for bulk purchases.
17
+ */
18
+ async function exampleConditionalTransformation() {
19
+ console.log('=== Example 1: Conditional Transformation ===\n');
20
+
21
+ const salesCsv = `order_id,customer_id,region,product,quantity,unit_price,tax_rate
22
+ ORD001,CUST001,US,Widget A,10,25.99,0.08
23
+ ORD002,CUST002,EU,Widget B,5,49.99,0.21
24
+ ORD003,CUST003,US,Widget C,100,12.50,0.08
25
+ ORD004,CUST004,ASIA,Widget D,25,89.99,0.10
26
+ ORD005,CUST005,EU,Widget E,2,199.99,0.21`;
27
+
28
+ const transformedData = csvToJson(salesCsv, {
29
+ hasHeaders: true,
30
+ parseNumbers: true,
31
+ transform: (row) => {
32
+ // Calculate subtotal
33
+ const subtotal = row.quantity * row.unit_price;
34
+
35
+ // Apply bulk discount (10% for 50+ units)
36
+ let discountRate = 0;
37
+ if (row.quantity >= 50) {
38
+ discountRate = 0.10;
39
+ } else if (row.quantity >= 20) {
40
+ discountRate = 0.05;
41
+ }
42
+
43
+ const discountAmount = subtotal * discountRate;
44
+ const discountedSubtotal = subtotal - discountAmount;
45
+
46
+ // Calculate tax based on region
47
+ let taxAmount = discountedSubtotal * row.tax_rate;
48
+
49
+ // Special tax rules for specific regions
50
+ if (row.region === 'EU' && discountedSubtotal > 1000) {
51
+ taxAmount *= 0.9; // 10% tax reduction for large EU orders
52
+ }
53
+
54
+ const total = discountedSubtotal + taxAmount;
55
+
56
+ // Add derived fields
57
+ return {
58
+ ...row,
59
+ subtotal: Math.round(subtotal * 100) / 100,
60
+ discount_rate: discountRate,
61
+ discount_amount: Math.round(discountAmount * 100) / 100,
62
+ discounted_subtotal: Math.round(discountedSubtotal * 100) / 100,
63
+ tax_amount: Math.round(taxAmount * 100) / 100,
64
+ total: Math.round(total * 100) / 100,
65
+ currency: row.region === 'EU' ? 'EUR' : 'USD',
66
+ processed_at: new Date().toISOString(),
67
+
68
+ // Add business logic flags
69
+ is_bulk_order: row.quantity >= 20,
70
+ requires_vat_certificate: row.region === 'EU' && total > 1000,
71
+ is_export: row.region !== 'US'
72
+ };
73
+ }
74
+ });
75
+
76
+ console.log('Transformed Sales Data:');
77
+ transformedData.forEach((order, index) => {
78
+ console.log(`\nOrder ${index + 1}:`);
79
+ console.log(` ID: ${order.order_id}, Region: ${order.region}`);
80
+ console.log(` Product: ${order.product}, Quantity: ${order.quantity}`);
81
+ console.log(` Subtotal: ${order.currency} ${order.subtotal}`);
82
+ console.log(` Discount: ${order.currency} ${order.discount_amount} (${order.discount_rate * 100}%)`);
83
+ console.log(` Tax: ${order.currency} ${order.tax_amount}`);
84
+ console.log(` Total: ${order.currency} ${order.total}`);
85
+ console.log(` Flags: Bulk: ${order.is_bulk_order}, VAT Cert: ${order.requires_vat_certificate}, Export: ${order.is_export}`);
86
+ });
87
+
88
+ // Convert back to CSV with selected columns
89
+ const outputCsv = jsonToCsv(transformedData, {
90
+ template: {
91
+ order_id: '',
92
+ region: '',
93
+ product: '',
94
+ quantity: '',
95
+ subtotal: '',
96
+ discount_amount: '',
97
+ tax_amount: '',
98
+ total: '',
99
+ currency: '',
100
+ is_bulk_order: '',
101
+ requires_vat_certificate: ''
102
+ }
103
+ });
104
+
105
+ console.log('\nGenerated CSV for accounting system:');
106
+ console.log(outputCsv);
107
+ }
108
+
109
+ /**
110
+ * Example 2: Streaming with Real-time Validation
111
+ *
112
+ * Scenario: Process large log files with validation and error handling
113
+ */
114
+ async function exampleStreamingValidation() {
115
+ console.log('\n\n=== Example 2: Streaming with Validation ===\n');
116
+
117
+ // Simulate log data
118
+ const logData = `timestamp,user_id,action,resource,status_code,response_time
119
+ 2024-01-15T10:30:00Z,user123,GET,/api/users,200,145
120
+ 2024-01-15T10:31:00Z,user456,POST,/api/orders,201,230
121
+ 2024-01-15T10:32:00Z,user789,GET,/api/products,404,89
122
+ 2024-01-15T10:33:00Z,user123,DELETE,/api/users/456,403,312
123
+ 2024-01-15T10:34:00Z,user999,GET,/api/health,200,45
124
+ 2024-01-15T10:35:00Z,user123,PUT,/api/orders/789,200,567`;
125
+
126
+ const errorLog = [];
127
+ const stats = {
128
+ total: 0,
129
+ success: 0,
130
+ errors: 0,
131
+ slowRequests: 0
132
+ };
133
+
134
+ const transformStream = createCsvToJsonStream({
135
+ hasHeaders: true,
136
+ parseNumbers: true,
137
+ onError: (error, row, rowNumber) => {
138
+ stats.errors++;
139
+ errorLog.push({
140
+ rowNumber,
141
+ row,
142
+ error: error.message,
143
+ timestamp: new Date().toISOString()
144
+ });
145
+ return null; // Skip this row
146
+ },
147
+ validate: (row) => {
148
+ // Validate required fields
149
+ if (!row.timestamp || !row.user_id || !row.action) {
150
+ throw new Error('Missing required fields');
151
+ }
152
+
153
+ // Validate timestamp format
154
+ if (isNaN(new Date(row.timestamp).getTime())) {
155
+ throw new Error('Invalid timestamp format');
156
+ }
157
+
158
+ // Validate status code
159
+ if (row.status_code < 100 || row.status_code > 599) {
160
+ throw new Error('Invalid HTTP status code');
161
+ }
162
+
163
+ // Validate response time
164
+ if (row.response_time < 0 || row.response_time > 10000) {
165
+ throw new Error('Invalid response time');
166
+ }
167
+
168
+ return true;
169
+ },
170
+ transform: (row) => {
171
+ stats.total++;
172
+
173
+ // Add derived fields
174
+ const processedRow = {
175
+ ...row,
176
+ is_success: row.status_code >= 200 && row.status_code < 300,
177
+ is_error: row.status_code >= 400,
178
+ is_slow: row.response_time > 500,
179
+ processing_timestamp: new Date().toISOString()
180
+ };
181
+
182
+ if (processedRow.is_success) {
183
+ stats.success++;
184
+ }
185
+ if (processedRow.is_slow) {
186
+ stats.slowRequests++;
187
+ }
188
+
189
+ return processedRow;
190
+ }
191
+ });
192
+
193
+ // Process the data
194
+ const { Readable, Writable } = await import("stream");
195
+ const readable = Readable.from([logData]);
196
+ const results = [];
197
+
198
+ const collector = new Writable({
199
+ objectMode: true,
200
+ write(chunk, encoding, callback) {
201
+ results.push(chunk);
202
+ callback();
203
+ }
204
+ });
205
+
206
+ await pipeline(readable, transformStream, collector);
207
+
208
+ console.log('Processing Statistics:');
209
+ console.log(` Total rows: ${stats.total}`);
210
+ console.log(` Successful: ${stats.success}`);
211
+ console.log(` Errors: ${stats.errors}`);
212
+ console.log(` Slow requests (>500ms): ${stats.slowRequests}`);
213
+
214
+ if (errorLog.length > 0) {
215
+ console.log('\nError Log:');
216
+ errorLog.forEach(error => {
217
+ console.log(` Row ${error.rowNumber}: ${error.error}`);
218
+ });
219
+ }
220
+
221
+ console.log('\nSample Processed Rows:');
222
+ results.slice(0, 3).forEach((row, index) => {
223
+ console.log(` ${index + 1}. ${row.user_id} - ${row.action} ${row.resource} - ${row.status_code} (${row.response_time}ms)`);
224
+ console.log(` Success: ${row.is_success}, Error: ${row.is_error}, Slow: ${row.is_slow}`);
225
+ });
226
+ }
227
+
228
+ /**
229
+ * Example 3: Database Export/Import Workflow
230
+ *
231
+ * Scenario: Export data from database, transform, and import to another system
232
+ */
233
+ async function exampleDatabaseWorkflow() {
234
+ console.log('\n\n=== Example 3: Database Export/Import Workflow ===\n');
235
+
236
+ // Simulate database export (in real scenario, this would come from PostgreSQL/MongoDB)
237
+ const databaseData = [
238
+ {
239
+ id: 1,
240
+ name: 'John Doe',
241
+ email: 'john@example.com',
242
+ department: 'Engineering',
243
+ salary: 85000,
244
+ hire_date: '2020-03-15',
245
+ active: true
246
+ },
247
+ {
248
+ id: 2,
249
+ name: 'Jane Smith',
250
+ email: 'jane@example.com',
251
+ department: 'Marketing',
252
+ salary: 72000,
253
+ hire_date: '2021-07-22',
254
+ active: true
255
+ },
256
+ {
257
+ id: 3,
258
+ name: 'Bob Johnson',
259
+ email: 'bob@example.com',
260
+ department: 'Sales',
261
+ salary: 68000,
262
+ hire_date: '2019-11-30',
263
+ active: false
264
+ }
265
+ ];
266
+
267
+ // Step 1: Export to CSV for HR system
268
+ console.log('Step 1: Exporting to HR System CSV');
269
+ const hrCsv = jsonToCsv(databaseData, {
270
+ renameMap: {
271
+ id: 'Employee_ID',
272
+ name: 'Full_Name',
273
+ email: 'Email_Address',
274
+ department: 'Department',
275
+ salary: 'Annual_Salary',
276
+ hire_date: 'Hire_Date',
277
+ active: 'Employment_Status'
278
+ },
279
+ transform: (row) => ({
280
+ ...row,
281
+ Employment_Status: row.active ? 'ACTIVE' : 'INACTIVE',
282
+ Annual_Salary: `$${row.salary.toLocaleString()}`,
283
+ Export_Date: new Date().toISOString().split('T')[0]
284
+ })
285
+ });
286
+
287
+ console.log('HR System CSV:');
288
+ console.log(hrCsv);
289
+
290
+ // Step 2: Transform for payroll system (different format)
291
+ console.log('\nStep 2: Transforming for Payroll System');
292
+ const payrollData = databaseData
293
+ .filter(employee => employee.active)
294
+ .map(employee => ({
295
+ employee_id: `EMP${employee.id.toString().padStart(4, '0')}`,
296
+ employee_name: employee.name.toUpperCase(),
297
+ department_code: employee.department.substring(0, 3).toUpperCase(),
298
+ monthly_salary: Math.round(employee.salary / 12),
299
+ tax_id: `TAX${employee.id.toString().padStart(6, '0')}`,
300
+ payment_method: 'DIRECT_DEPOSIT',
301
+ bank_account: `****${employee.id.toString().padStart(4, '0')}`
302
+ }));
303
+
304
+ const payrollCsv = jsonToCsv(payrollData, {
305
+ delimiter: '|', // Payroll system uses pipe delimiter
306
+ rfc4180Compliant: true
307
+ });
308
+
309
+ console.log('Payroll System CSV (pipe-delimited):');
310
+ console.log(payrollCsv);
311
+
312
+ // Step 3: Import transformed data (simulate reading CSV back)
313
+ console.log('\nStep 3: Importing Transformed Data');
314
+ const importedData = csvToJson(payrollCsv, {
315
+ delimiter: '|',
316
+ hasHeaders: true,
317
+ parseNumbers: true
318
+ });
319
+
320
+ console.log('Imported Payroll Data:');
321
+ importedData.forEach(employee => {
322
+ console.log(` ${employee.employee_id}: ${employee.employee_name} - ${employee.department_code} - $${employee.monthly_salary}/month`);
323
+ });
324
+ }
325
+
326
+ /**
327
+ * Example 4: API Stream Handling with Error Recovery
328
+ *
329
+ * Scenario: Process streaming API responses with retry logic
330
+ */
331
+ async function exampleApiStreamHandling() {
332
+ console.log('\n\n=== Example 4: API Stream Handling ===\n');
333
+
334
+ // Simulate API response stream
335
+ const apiResponses = [
336
+ 'id,name,age,city\n1,Alice,30,New York\n2,Bob,25,Los Angeles\n3,Charlie,35,Chicago',
337
+ 'id,name,age,city\n4,Diana,28,Miami\n5,Eve,32,Seattle\n6,Frank,40,Boston',
338
+ 'id,name,age,city\n7,Grace,29,Denver\n8,Henry,31,Atlanta\n9,Ivy,27,Portland'
339
+ ];
340
+
341
+ const processedChunks = [];
342
+ let chunkCount = 0;
343
+
344
+ // Simulate processing each API response chunk
345
+ for (const chunk of apiResponses) {
346
+ chunkCount++;
347
+
348
+ try {
349
+ console.log(`Processing API chunk ${chunkCount}...`);
350
+
351
+ const data = csvToJson(chunk, {
352
+ hasHeaders: true,
353
+ parseNumbers: true,
354
+ onError: (error, row, rowNumber) => {
355
+ console.warn(` Warning in chunk ${chunkCount}, row ${rowNumber}: ${error.message}`);
356
+ // In real scenario, you might log to monitoring system
357
+ return null; // Skip problematic row
358
+ },
359
+ transform: (row) => {
360
+ // Add metadata
361
+ return {
362
+ ...row,
363
+ processed_chunk: chunkCount,
364
+ processing_timestamp: new Date().toISOString(),
365
+ data_source: 'api_stream'
366
+ };
367
+ }
368
+ });
369
+
370
+ processedChunks.push(...data);
371
+ console.log(` Successfully processed ${data.length} rows`);
372
+
373
+ } catch (error) {
374
+ console.error(` Error processing chunk ${chunkCount}: ${error.message}`);
375
+ // In real scenario, implement retry logic here
376
+ console.log(' Implementing retry logic...');
377
+
378
+ // Simulate retry
379
+ try {
380
+ const retryData = csvToJson(chunk, {
381
+ hasHeaders: true,
382
+ parseNumbers: false, // Try without number parsing
383
+ transform: (row) => ({
384
+ ...row,
385
+ age: parseInt(row.age, 10) || 0, // Manual parsing
386
+ processed_chunk: chunkCount,
387
+ processing_timestamp: new Date().toISOString(),
388
+ data_source: 'api_stream_retry',
389
+ had_error: true
390
+ })
391
+ });
392
+
393
+ processedChunks.push(...retryData);
394
+ console.log(` Retry successful: processed ${retryData.length} rows`);
395
+ } catch (retryError) {
396
+ console.error(` Retry failed: ${retryError.message}`);
397
+ }
398
+ }
399
+ }
400
+
401
+ console.log('\nAPI Stream Processing Summary:');
402
+ console.log(` Total chunks: ${chunkCount}`);
403
+ console.log(` Total rows processed: ${processedChunks.length}`);
404
+
405
+ // Aggregate data
406
+ const averageAge = processedChunks.reduce((sum, row) => sum + row.age, 0) / processedChunks.length;
407
+ const cities = [...new Set(processedChunks.map(row => row.city))];
408
+
409
+ console.log(` Average age: ${averageAge.toFixed(1)}`);
410
+ console.log(` Unique cities: ${cities.join(', ')}`);
411
+ }
412
+
413
+ /**
414
+ * Main function to run all examples
415
+ */
416
+ async function main() {
417
+ console.log('='.repeat(80));
418
+ console.log('ADVANCED JTCSV EXAMPLES');
419
+ console.log('='.repeat(80));
420
+
421
+ try {
422
+ await exampleConditionalTransformation();
423
+ await exampleStreamingValidation();
424
+ await exampleDatabaseWorkflow();
425
+ await exampleApiStreamHandling();
426
+
427
+ console.log('\n' + '='.repeat(80));
428
+ console.log('ALL EXAMPLES COMPLETED SUCCESSFULLY');
429
+ console.log('='.repeat(80));
430
+ } catch (error) {
431
+ console.error('\nError running examples:', error);
432
+ process.exit(1);
433
+ }
434
+ }
435
+
436
+ // Run examples if this file is executed directly
437
+ if (require.main === module) {
438
+ main();
439
+ }
440
+
441
+ export default {
442
+ exampleConditionalTransformation,
443
+ exampleStreamingValidation,
444
+ exampleDatabaseWorkflow,
445
+ exampleApiStreamHandling
446
+ };
@@ -0,0 +1,89 @@
1
+ // Simplified Web Worker for demo purposes
2
+ // This is a standalone worker that doesn't depend on jtcsv bundle
3
+
4
+ self.onmessage = function (event) {
5
+ const { type, csv, options } = event.data;
6
+
7
+ if (type === 'parseCsv') {
8
+ // Simulate processing time based on CSV size
9
+ const startTime = performance.now();
10
+ const lines = csv.split('\n');
11
+ const totalLines = lines.length;
12
+
13
+ // Send initial progress
14
+ self.postMessage({
15
+ type: 'progress',
16
+ progress: 10
17
+ });
18
+
19
+ // Simple CSV parsing (for demo only)
20
+ const headers = lines[0] ? lines[0].split(',') : [];
21
+ const result = [];
22
+
23
+ // Process in chunks to show progress
24
+ const CHUNK_SIZE = Math.max(1, Math.floor(totalLines / 10));
25
+
26
+ for (let i = 1; i < lines.length; i++) {
27
+ if (lines[i].trim() === '') {
28
+ continue;
29
+ }
30
+
31
+ const values = lines[i].split(',');
32
+ const obj = {};
33
+
34
+ headers.forEach((header, index) => {
35
+ let value = values[index] || '';
36
+
37
+ // Try to parse numbers and booleans if options say so
38
+ if (options.parseNumbers) {
39
+ const num = parseFloat(value);
40
+ if (!isNaN(num) && value.trim() !== '') {
41
+ value = num;
42
+ }
43
+ }
44
+
45
+ if (options.parseBooleans) {
46
+ const lower = value.toString().toLowerCase();
47
+ if (lower === 'true' || lower === 'false') {
48
+ value = lower === 'true';
49
+ }
50
+ }
51
+
52
+ obj[header.trim()] = value;
53
+ });
54
+
55
+ result.push(obj);
56
+
57
+ // Send progress updates
58
+ if (i % CHUNK_SIZE === 0) {
59
+ const progress = Math.min(95, Math.floor((i / totalLines) * 100));
60
+ self.postMessage({
61
+ type: 'progress',
62
+ progress: progress
63
+ });
64
+ }
65
+ }
66
+
67
+ // Send final progress
68
+ self.postMessage({
69
+ type: 'progress',
70
+ progress: 100
71
+ });
72
+
73
+ const endTime = performance.now();
74
+
75
+ // Send result
76
+ self.postMessage({
77
+ type: 'result',
78
+ data: result,
79
+ stats: {
80
+ rows: result.length,
81
+ processingTime: endTime - startTime,
82
+ headers: headers.length
83
+ }
84
+ });
85
+ }
86
+ };
87
+
88
+ // Signal that worker is ready
89
+ self.postMessage({ type: 'ready' });
@@ -0,0 +1,89 @@
1
+ // Simplified Web Worker for demo purposes
2
+ // This is a standalone worker that doesn't depend on jtcsv bundle
3
+
4
+ self.onmessage = function (event) {
5
+ const { type, csv, options } = event.data;
6
+
7
+ if (type === 'parseCsv') {
8
+ // Simulate processing time based on CSV size
9
+ const startTime = performance.now();
10
+ const lines = csv.split('\n');
11
+ const totalLines = lines.length;
12
+
13
+ // Send initial progress
14
+ self.postMessage({
15
+ type: 'progress',
16
+ progress: 10
17
+ });
18
+
19
+ // Simple CSV parsing (for demo only)
20
+ const headers = lines[0] ? lines[0].split(',') : [];
21
+ const result = [];
22
+
23
+ // Process in chunks to show progress
24
+ const CHUNK_SIZE = Math.max(1, Math.floor(totalLines / 10));
25
+
26
+ for (let i = 1; i < lines.length; i++) {
27
+ if (lines[i].trim() === '') {
28
+ continue;
29
+ }
30
+
31
+ const values = lines[i].split(',');
32
+ const obj = {};
33
+
34
+ headers.forEach((header, index) => {
35
+ let value = values[index] || '';
36
+
37
+ // Try to parse numbers and booleans if options say so
38
+ if (options.parseNumbers) {
39
+ const num = parseFloat(value);
40
+ if (!isNaN(num) && value.trim() !== '') {
41
+ value = num;
42
+ }
43
+ }
44
+
45
+ if (options.parseBooleans) {
46
+ const lower = value.toString().toLowerCase();
47
+ if (lower === 'true' || lower === 'false') {
48
+ value = lower === 'true';
49
+ }
50
+ }
51
+
52
+ obj[header.trim()] = value;
53
+ });
54
+
55
+ result.push(obj);
56
+
57
+ // Send progress updates
58
+ if (i % CHUNK_SIZE === 0) {
59
+ const progress = Math.min(95, Math.floor((i / totalLines) * 100));
60
+ self.postMessage({
61
+ type: 'progress',
62
+ progress: progress
63
+ });
64
+ }
65
+ }
66
+
67
+ // Send final progress
68
+ self.postMessage({
69
+ type: 'progress',
70
+ progress: 100
71
+ });
72
+
73
+ const endTime = performance.now();
74
+
75
+ // Send result
76
+ self.postMessage({
77
+ type: 'result',
78
+ data: result,
79
+ stats: {
80
+ rows: result.length,
81
+ processingTime: endTime - startTime,
82
+ headers: headers.length
83
+ }
84
+ });
85
+ }
86
+ };
87
+
88
+ // Signal that worker is ready
89
+ self.postMessage({ type: 'ready' });