jtcsv 2.2.7 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (140)
  1. package/README.md +31 -1
  2. package/bin/jtcsv.js +891 -821
  3. package/bin/jtcsv.ts +2534 -0
  4. package/csv-to-json.js +168 -145
  5. package/dist/jtcsv-core.cjs.js +1407 -0
  6. package/dist/jtcsv-core.cjs.js.map +1 -0
  7. package/dist/jtcsv-core.esm.js +1379 -0
  8. package/dist/jtcsv-core.esm.js.map +1 -0
  9. package/dist/jtcsv-core.umd.js +1413 -0
  10. package/dist/jtcsv-core.umd.js.map +1 -0
  11. package/dist/jtcsv-full.cjs.js +1912 -0
  12. package/dist/jtcsv-full.cjs.js.map +1 -0
  13. package/dist/jtcsv-full.esm.js +1880 -0
  14. package/dist/jtcsv-full.esm.js.map +1 -0
  15. package/dist/jtcsv-full.umd.js +1918 -0
  16. package/dist/jtcsv-full.umd.js.map +1 -0
  17. package/dist/jtcsv-workers.esm.js +759 -0
  18. package/dist/jtcsv-workers.esm.js.map +1 -0
  19. package/dist/jtcsv-workers.umd.js +773 -0
  20. package/dist/jtcsv-workers.umd.js.map +1 -0
  21. package/dist/jtcsv.cjs.js +61 -19
  22. package/dist/jtcsv.cjs.js.map +1 -1
  23. package/dist/jtcsv.esm.js +61 -19
  24. package/dist/jtcsv.esm.js.map +1 -1
  25. package/dist/jtcsv.umd.js +61 -19
  26. package/dist/jtcsv.umd.js.map +1 -1
  27. package/errors.js +188 -2
  28. package/examples/advanced/conditional-transformations.js +446 -0
  29. package/examples/advanced/conditional-transformations.ts +446 -0
  30. package/examples/advanced/csv-parser.worker.js +89 -0
  31. package/examples/advanced/csv-parser.worker.ts +89 -0
  32. package/examples/advanced/nested-objects-example.js +306 -0
  33. package/examples/advanced/nested-objects-example.ts +306 -0
  34. package/examples/advanced/performance-optimization.js +504 -0
  35. package/examples/advanced/performance-optimization.ts +504 -0
  36. package/examples/advanced/run-demo-server.js +116 -0
  37. package/examples/advanced/run-demo-server.ts +116 -0
  38. package/examples/advanced/web-worker-usage.html +874 -0
  39. package/examples/async-multithreaded-example.ts +335 -0
  40. package/examples/cli-advanced-usage.md +288 -0
  41. package/examples/cli-batch-processing.ts +38 -0
  42. package/examples/cli-tool.js +0 -3
  43. package/examples/cli-tool.ts +183 -0
  44. package/examples/error-handling.js +21 -7
  45. package/examples/error-handling.ts +356 -0
  46. package/examples/express-api.js +0 -3
  47. package/examples/express-api.ts +164 -0
  48. package/examples/large-dataset-example.js +0 -3
  49. package/examples/large-dataset-example.ts +204 -0
  50. package/examples/ndjson-processing.js +1 -1
  51. package/examples/ndjson-processing.ts +456 -0
  52. package/examples/plugin-excel-exporter.js +3 -4
  53. package/examples/plugin-excel-exporter.ts +406 -0
  54. package/examples/react-integration.tsx +637 -0
  55. package/examples/schema-validation.ts +640 -0
  56. package/examples/simple-usage.js +254 -254
  57. package/examples/simple-usage.ts +194 -0
  58. package/examples/streaming-example.js +4 -5
  59. package/examples/streaming-example.ts +419 -0
  60. package/examples/web-workers-advanced.ts +28 -0
  61. package/index.d.ts +1 -3
  62. package/index.js +15 -1
  63. package/json-save.js +9 -3
  64. package/json-to-csv.js +168 -21
  65. package/package.json +69 -10
  66. package/plugins/express-middleware/README.md +21 -2
  67. package/plugins/express-middleware/example.js +3 -4
  68. package/plugins/express-middleware/example.ts +135 -0
  69. package/plugins/express-middleware/index.d.ts +1 -1
  70. package/plugins/express-middleware/index.js +270 -118
  71. package/plugins/express-middleware/index.ts +557 -0
  72. package/plugins/fastify-plugin/index.js +2 -4
  73. package/plugins/fastify-plugin/index.ts +443 -0
  74. package/plugins/hono/index.ts +226 -0
  75. package/plugins/nestjs/index.ts +201 -0
  76. package/plugins/nextjs-api/examples/ConverterComponent.tsx +386 -0
  77. package/plugins/nextjs-api/examples/api-convert.js +0 -2
  78. package/plugins/nextjs-api/examples/api-convert.ts +67 -0
  79. package/plugins/nextjs-api/index.tsx +339 -0
  80. package/plugins/nextjs-api/route.js +2 -3
  81. package/plugins/nextjs-api/route.ts +370 -0
  82. package/plugins/nuxt/index.ts +94 -0
  83. package/plugins/nuxt/runtime/composables/useJtcsv.ts +100 -0
  84. package/plugins/nuxt/runtime/plugin.ts +71 -0
  85. package/plugins/remix/index.js +1 -1
  86. package/plugins/remix/index.ts +260 -0
  87. package/plugins/sveltekit/index.js +1 -1
  88. package/plugins/sveltekit/index.ts +301 -0
  89. package/plugins/trpc/index.ts +267 -0
  90. package/src/browser/browser-functions.ts +402 -0
  91. package/src/browser/core.js +92 -0
  92. package/src/browser/core.ts +152 -0
  93. package/src/browser/csv-to-json-browser.d.ts +3 -0
  94. package/src/browser/csv-to-json-browser.js +36 -14
  95. package/src/browser/csv-to-json-browser.ts +264 -0
  96. package/src/browser/errors-browser.ts +303 -0
  97. package/src/browser/extensions/plugins.js +92 -0
  98. package/src/browser/extensions/plugins.ts +93 -0
  99. package/src/browser/extensions/workers.js +39 -0
  100. package/src/browser/extensions/workers.ts +39 -0
  101. package/src/browser/globals.d.ts +5 -0
  102. package/src/browser/index.ts +192 -0
  103. package/src/browser/json-to-csv-browser.d.ts +3 -0
  104. package/src/browser/json-to-csv-browser.js +13 -3
  105. package/src/browser/json-to-csv-browser.ts +262 -0
  106. package/src/browser/streams.js +12 -2
  107. package/src/browser/streams.ts +336 -0
  108. package/src/browser/workers/csv-parser.worker.ts +377 -0
  109. package/src/browser/workers/worker-pool.ts +548 -0
  110. package/src/core/delimiter-cache.js +22 -8
  111. package/src/core/delimiter-cache.ts +310 -0
  112. package/src/core/node-optimizations.ts +449 -0
  113. package/src/core/plugin-system.js +29 -11
  114. package/src/core/plugin-system.ts +400 -0
  115. package/src/core/transform-hooks.ts +558 -0
  116. package/src/engines/fast-path-engine-new.ts +347 -0
  117. package/src/engines/fast-path-engine.ts +854 -0
  118. package/src/errors.ts +72 -0
  119. package/src/formats/ndjson-parser.ts +469 -0
  120. package/src/formats/tsv-parser.ts +334 -0
  121. package/src/index-with-plugins.js +16 -9
  122. package/src/index-with-plugins.ts +395 -0
  123. package/src/types/index.ts +255 -0
  124. package/src/utils/bom-utils.js +259 -0
  125. package/src/utils/bom-utils.ts +373 -0
  126. package/src/utils/encoding-support.js +124 -0
  127. package/src/utils/encoding-support.ts +155 -0
  128. package/src/utils/schema-validator.js +19 -19
  129. package/src/utils/schema-validator.ts +819 -0
  130. package/src/utils/transform-loader.js +1 -1
  131. package/src/utils/transform-loader.ts +389 -0
  132. package/src/utils/zod-adapter.js +170 -0
  133. package/src/utils/zod-adapter.ts +280 -0
  134. package/src/web-server/index.js +10 -10
  135. package/src/web-server/index.ts +683 -0
  136. package/src/workers/csv-multithreaded.ts +310 -0
  137. package/src/workers/csv-parser.worker.ts +227 -0
  138. package/src/workers/worker-pool.ts +409 -0
  139. package/stream-csv-to-json.js +26 -8
  140. package/stream-json-to-csv.js +1 -0
@@ -0,0 +1,194 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Простой пример использования новых возможностей JTCSV 2.1.0
5
+ * Демонстрация Fast-Path Engine, NDJSON и Plugin System
6
+ *
7
+ * @version 1.0.0
8
+ * @date 2026-01-22
9
+ */
10
+
11
+ // Используем require для совместимости с текущей структурой проекта
12
+ const { jsonToCsv, csvToJson } = require('../index.js');
13
+ import type { JsonToCsvOptions, CsvToJsonOptions } from '../src/types';
14
+
15
+ (async () => {
16
+ console.log('🚀 JTCSV 2.1.0 - Демонстрация новых возможностей\n');
17
+
18
+ // ============================================================================
19
+ // 1. Базовое использование (обратная совместимость)
20
+ // ============================================================================
21
+
22
+ console.log('1. 📦 Базовое использование (обратная совместимость)');
23
+ console.log('='.repeat(60));
24
+
25
+ const sampleData = [
26
+ { id: 1, name: 'John Doe', age: 30, city: 'New York' },
27
+ { id: 2, name: 'Jane Smith', age: 25, city: 'London' },
28
+ { id: 3, name: 'Bob Johnson', age: 35, city: 'Tokyo' }
29
+ ];
30
+
31
+ // Конвертация JSON → CSV
32
+ const csv = jsonToCsv(sampleData, { delimiter: ',' });
33
+ console.log('📄 CSV результат:');
34
+ console.log(csv);
35
+ console.log();
36
+
37
+ // Конвертация CSV → JSON
38
+ const json = csvToJson(csv, { delimiter: ',' });
39
+ console.log('📊 JSON результат:');
40
+ console.log(JSON.stringify(json, null, 2));
41
+ console.log();
42
+
43
+ // ============================================================================
44
+ // 2. Fast-Path Engine (оптимизированный парсинг)
45
+ // ============================================================================
46
+
47
+ console.log('\n2. ⚡ Fast-Path Engine (оптимизированный парсинг)');
48
+ console.log('='.repeat(60));
49
+
50
+ const FastPathEngine = require('../src/engines/fast-path-engine');
51
+ const engine = new FastPathEngine();
52
+
53
+ const largeCsv = `id,name,age,city
54
+ 1,John Doe,30,New York
55
+ 2,Jane Smith,25,London
56
+ 3,Bob Johnson,35,Tokyo
57
+ 4,Alice Brown,28,Paris
58
+ 5,Charlie Wilson,42,Berlin`;
59
+
60
+ console.log('📊 Парсинг CSV с Fast-Path Engine:');
61
+ const fastPathResult = engine.parse(largeCsv, { delimiter: ',' });
62
+ console.log(JSON.stringify(fastPathResult, null, 2));
63
+ console.log();
64
+
65
+ // ============================================================================
66
+ // 3. NDJSON поддержка (Newline Delimited JSON)
67
+ // ============================================================================
68
+
69
+ console.log('\n3. 📝 NDJSON поддержка (Newline Delimited JSON)');
70
+ console.log('='.repeat(60));
71
+
72
+ const { jsonToNdjson, ndjsonToJson } = require('jtcsv');
73
+
74
+ const ndjsonData = [
75
+ { id: 1, name: 'John', active: true },
76
+ { id: 2, name: 'Jane', active: false },
77
+ { id: 3, name: 'Bob', active: true }
78
+ ];
79
+
80
+ // Конвертация в NDJSON
81
+ const ndjson = jsonToNdjson(ndjsonData);
82
+ console.log('📄 NDJSON результат:');
83
+ console.log(ndjson);
84
+ console.log();
85
+
86
+ // Обратная конвертация
87
+ const fromNdjson = ndjsonToJson(ndjson);
88
+ console.log('📊 JSON из NDJSON:');
89
+ console.log(JSON.stringify(fromNdjson, null, 2));
90
+ console.log();
91
+
92
+ // ============================================================================
93
+ // 4. Потоковая обработка
94
+ // ============================================================================
95
+
96
+ console.log('\n4. 🌊 Потоковая обработка');
97
+ console.log('='.repeat(60));
98
+
99
+ const { streamCsvToJson, streamJsonToCsv } = require('jtcsv');
100
+
101
+ console.log('📊 Пример потоковой обработки CSV:');
102
+
103
+ // Создание потока CSV данных
104
+ const csvStream = `id,name,age
105
+ 1,John,30
106
+ 2,Jane,25
107
+ 3,Bob,35`;
108
+
109
+ const jsonStream = streamCsvToJson(csvStream, { delimiter: ',' });
110
+
111
+ // В реальном приложении здесь была бы обработка потока
112
+ console.log('✅ Поток создан успешно');
113
+ console.log();
114
+
115
+ // ============================================================================
116
+ // 5. Асинхронные функции
117
+ // ============================================================================
118
+
119
+ console.log('\n5. ⏱️ Асинхронные функции');
120
+ console.log('='.repeat(60));
121
+
122
+ try {
123
+ // Использование асинхронных версий функций
124
+ const asyncCsv = await jsonToCsv(sampleData, { delimiter: ';' });
125
+ console.log('📄 Асинхронный CSV результат:');
126
+ console.log(asyncCsv.substring(0, 100) + '...');
127
+ console.log();
128
+ } catch (error: any) {
129
+ console.error('❌ Ошибка при асинхронной конвертации:', error.message);
130
+ }
131
+
132
+ // ============================================================================
133
+ // 6. Многопоточная обработка
134
+ // ============================================================================
135
+
136
+ console.log('\n6. 🚀 Многопоточная обработка');
137
+ console.log('='.repeat(60));
138
+
139
+ try {
140
+ const { processCsvMultithreaded } = require('../src/workers/csv-multithreaded');
141
+
142
+ console.log('📊 Запуск многопоточной обработки...');
143
+
144
+ // В реальном приложении здесь была бы обработка больших данных
145
+ console.log('✅ Многопоточная система готова к использованию');
146
+ console.log();
147
+ } catch (error) {
148
+ console.log('ℹ️ Многопоточная обработка доступна только в Node.js');
149
+ console.log();
150
+ }
151
+
152
+ // ============================================================================
153
+ // 7. TypeScript типы
154
+ // ============================================================================
155
+
156
+ console.log('\n7. 📘 TypeScript типы');
157
+ console.log('='.repeat(60));
158
+
159
+ // Демонстрация TypeScript типов
160
+ const typedOptions: JsonToCsvOptions = {
161
+ delimiter: ',',
162
+ includeHeaders: true,
163
+ maxRecords: 100,
164
+ preventCsvInjection: true,
165
+ rfc4180Compliant: true
166
+ };
167
+
168
+ console.log('✅ TypeScript типы корректно работают');
169
+ console.log('📋 Пример опций:', JSON.stringify(typedOptions, null, 2));
170
+ console.log();
171
+
172
+ // ============================================================================
173
+ // Заключение
174
+ // ============================================================================
175
+
176
+ console.log('\n🎉 Демонстрация завершена!');
177
+ console.log('='.repeat(60));
178
+ console.log('\n📚 Основные возможности JTCSV 2.1.0:');
179
+ console.log(' • ⚡ Fast-Path Engine для оптимизированного парсинга');
180
+ console.log(' • 📝 Поддержка NDJSON (Newline Delimited JSON)');
181
+ console.log(' • 🌊 Потоковая обработка больших данных');
182
+ console.log(' • ⏱️ Асинхронные и многопоточные функции');
183
+ console.log(' • 📘 Полная поддержка TypeScript');
184
+ console.log(' • 🔌 Расширяемая плагинная система');
185
+ console.log(' • 🛡️ Защита от CSV инъекций');
186
+ console.log(' • 📊 Поддержка различных форматов (CSV, TSV, NDJSON)');
187
+ console.log('\n🚀 Готово к использованию в production!');
188
+ })();
189
+
190
+ // Обработка ошибок
191
+ process.on('unhandledRejection', (error) => {
192
+ console.error('❌ Необработанная ошибка:', error);
193
+ process.exit(1);
194
+ });
@@ -364,7 +364,9 @@ async function exampleBidirectionalStreaming() {
364
364
  // Cleanup on error
365
365
  try {
366
366
  await fs.promises.unlink(tempFile);
367
- } catch (e) {}
367
+ } catch (_e) {
368
+ // Ignore cleanup errors
369
+ }
368
370
  }
369
371
  }
370
372
 
@@ -413,7 +415,4 @@ module.exports = {
413
415
  exampleJsonToCsvStreaming,
414
416
  exampleCsvToJsonStreaming,
415
417
  exampleBidirectionalStreaming
416
- };
417
-
418
-
419
-
418
+ };
@@ -0,0 +1,419 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Streaming Example for jtcsv
5
+ *
6
+ * Demonstrates bidirectional streaming with large datasets
7
+ * and real-time progress monitoring.
8
+ */
9
+
10
+ import fs from "fs";
11
+ import path from "path";
12
+ const { pipeline } = await import("stream/promises");
13
+ import jtcsv from "../index.js";
14
+
15
+ async function generateLargeJsonFile(filePath, recordCount) {
16
+ console.log(`Generating ${recordCount} records...`);
17
+
18
+ const writeStream = fs.createWriteStream(filePath, 'utf8');
19
+
20
+ // Write opening bracket for JSON array
21
+ writeStream.write('[');
22
+
23
+ for (let i = 1; i <= recordCount; i++) {
24
+ const record = {
25
+ id: i,
26
+ name: `User ${i}`,
27
+ email: `user${i}@example.com`,
28
+ age: Math.floor(Math.random() * 50) + 18,
29
+ active: Math.random() > 0.5,
30
+ score: Math.random() * 100,
31
+ tags: ['customer', `tier${Math.floor(Math.random() * 3) + 1}`],
32
+ metadata: {
33
+ created: new Date().toISOString(),
34
+ updated: new Date().toISOString(),
35
+ version: '1.0.0'
36
+ }
37
+ };
38
+
39
+ const jsonRecord = JSON.stringify(record);
40
+ writeStream.write(jsonRecord);
41
+
42
+ if (i < recordCount) {
43
+ writeStream.write(',');
44
+ }
45
+
46
+ // Show progress every 10,000 records
47
+ if (i % 10000 === 0) {
48
+ process.stdout.write(` Generated ${i} records\r`);
49
+ }
50
+ }
51
+
52
+ // Write closing bracket
53
+ writeStream.write(']');
54
+
55
+ await new Promise((resolve) => {
56
+ writeStream.end(() => {
57
+ console.log(`\nGenerated ${recordCount} records in ${filePath}`);
58
+ resolve();
59
+ });
60
+ });
61
+ }
62
+
63
+ async function exampleJsonToCsvStreaming() {
64
+ console.log('\n=== Example 1: JSON to CSV Streaming ===');
65
+
66
+ const inputFile = './examples/large-data.json';
67
+ const outputFile = './examples/large-data.csv';
68
+
69
+ // Generate test data if needed
70
+ if (!fs.existsSync(inputFile)) {
71
+ await generateLargeJsonFile(inputFile, 100000);
72
+ }
73
+
74
+ console.log('Converting JSON to CSV using streaming...');
75
+
76
+ const startTime = Date.now();
77
+
78
+ try {
79
+ // Create readable stream from file
80
+ const readStream = fs.createReadStream(inputFile, 'utf8');
81
+
82
+ // Parse JSON stream (simplified - in real app use proper JSON stream parser)
83
+ let buffer = '';
84
+ let recordCount = 0;
85
+
86
+ const streamModule = require("stream");
87
+ const jsonStream = new streamModule.Transform({
88
+ objectMode: true,
89
+ transform(chunk, encoding, callback) {
90
+ buffer += chunk;
91
+
92
+ // Simple JSON parsing for demonstration
93
+ // In production, use a proper JSON stream parser
94
+ try {
95
+ const data = JSON.parse(buffer);
96
+ if (Array.isArray(data)) {
97
+ data.forEach(item => {
98
+ this.push(item);
99
+ recordCount++;
100
+
101
+ if (recordCount % 10000 === 0) {
102
+ console.log(` Processed ${recordCount} records`);
103
+ }
104
+ });
105
+ }
106
+ buffer = '';
107
+ } catch (error) {
108
+ // Incomplete JSON, wait for more data
109
+ }
110
+
111
+ callback();
112
+ },
113
+
114
+ flush(callback) {
115
+ // Process any remaining data
116
+ if (buffer.trim()) {
117
+ try {
118
+ const data = JSON.parse(buffer);
119
+ if (Array.isArray(data)) {
120
+ data.forEach(item => {
121
+ this.push(item);
122
+ recordCount++;
123
+ });
124
+ }
125
+ } catch (error) {
126
+ console.error('Error parsing final JSON:', error.message);
127
+ }
128
+ }
129
+ callback();
130
+ }
131
+ });
132
+
133
+ // Create CSV transform stream
134
+ const csvStream = jtcsv.createJsonToCsvStream({
135
+ delimiter: ',',
136
+ includeHeaders: true,
137
+ renameMap: {
138
+ id: 'ID',
139
+ name: 'Full Name',
140
+ email: 'Email Address',
141
+ age: 'Age',
142
+ active: 'Active Status',
143
+ score: 'Score',
144
+ 'metadata.created': 'Created Date',
145
+ 'metadata.updated': 'Updated Date'
146
+ },
147
+ preventCsvInjection: true,
148
+ rfc4180Compliant: true
149
+ });
150
+
151
+ // Create write stream
152
+ const writeStream = fs.createWriteStream(outputFile, 'utf8');
153
+
154
+ // Add UTF-8 BOM for Excel compatibility
155
+ writeStream.write('\uFEFF');
156
+
157
+ // Pipe streams together
158
+ await pipeline(
159
+ readStream,
160
+ jsonStream,
161
+ csvStream,
162
+ writeStream
163
+ );
164
+
165
+ const elapsed = Date.now() - startTime;
166
+ console.log(`✓ Converted ${recordCount} records in ${elapsed}ms`);
167
+
168
+ // Show file sizes
169
+ const inputStats = fs.statSync(inputFile);
170
+ const outputStats = fs.statSync(outputFile);
171
+
172
+ console.log(` Input: ${(inputStats.size / 1024 / 1024).toFixed(2)} MB`);
173
+ console.log(` Output: ${(outputStats.size / 1024 / 1024).toFixed(2)} MB`);
174
+
175
+ } catch (error) {
176
+ console.error('Error:', error.message);
177
+ }
178
+ }
179
+
180
+ async function exampleCsvToJsonStreaming() {
181
+ console.log('\n=== Example 2: CSV to JSON Streaming ===');
182
+
183
+ const inputFile = './examples/large-data.csv';
184
+ const outputFile = './examples/streamed-output.json';
185
+
186
+ if (!fs.existsSync(inputFile)) {
187
+ console.log('CSV file not found. Run Example 1 first.');
188
+ return;
189
+ }
190
+
191
+ console.log('Converting CSV to JSON using streaming...');
192
+
193
+ const startTime = Date.now();
194
+
195
+ try {
196
+ // Create CSV read stream
197
+ const readStream = fs.createReadStream(inputFile, 'utf8');
198
+
199
+ // Create CSV to JSON transform stream
200
+ const jsonStream = jtcsv.createCsvToJsonStream({
201
+ delimiter: ',',
202
+ hasHeaders: true,
203
+ parseNumbers: true,
204
+ parseBooleans: true,
205
+ renameMap: {
206
+ 'ID': 'id',
207
+ 'Full Name': 'name',
208
+ 'Email Address': 'email',
209
+ 'Age': 'age',
210
+ 'Active Status': 'active',
211
+ 'Score': 'score',
212
+ 'Created Date': 'created',
213
+ 'Updated Date': 'updated'
214
+ }
215
+ });
216
+
217
+ // Create JSON write stream
218
+ const writeStream = fs.createWriteStream(outputFile, 'utf8');
219
+
220
+ // Write opening bracket
221
+ writeStream.write('[');
222
+
223
+ let recordCount = 0;
224
+ let firstRecord = true;
225
+
226
+ // Custom transform to format JSON array
227
+ const arrayFormatter = (require("stream").Transform)({
228
+ objectMode: true,
229
+ transform(chunk, encoding, callback) {
230
+ const jsonRecord = JSON.stringify(chunk);
231
+
232
+ if (!firstRecord) {
233
+ writeStream.write(',');
234
+ } else {
235
+ firstRecord = false;
236
+ }
237
+
238
+ writeStream.write(jsonRecord);
239
+
240
+ recordCount++;
241
+ if (recordCount % 10000 === 0) {
242
+ console.log(` Processed ${recordCount} records`);
243
+ }
244
+
245
+ callback();
246
+ },
247
+
248
+ flush(callback) {
249
+ // Write closing bracket
250
+ writeStream.write(']');
251
+ callback();
252
+ }
253
+ });
254
+
255
+ // Pipe streams together
256
+ await pipeline(
257
+ readStream,
258
+ jsonStream,
259
+ arrayFormatter
260
+ );
261
+
262
+ // Close write stream
263
+ await new Promise((resolve) => {
264
+ writeStream.end(() => resolve());
265
+ });
266
+
267
+ const elapsed = Date.now() - startTime;
268
+ console.log(`✓ Converted ${recordCount} records in ${elapsed}ms`);
269
+
270
+ // Verify roundtrip
271
+ const inputStats = fs.statSync(inputFile);
272
+ const outputStats = fs.statSync(outputFile);
273
+
274
+ console.log(` Input: ${(inputStats.size / 1024 / 1024).toFixed(2)} MB`);
275
+ console.log(` Output: ${(outputStats.size / 1024 / 1024).toFixed(2)} MB`);
276
+
277
+ // Sample verification
278
+ const sampleData = require(outputFile).slice(0, 3);
279
+ console.log('\nSample of converted data (first 3 records):');
280
+ console.log(JSON.stringify(sampleData, null, 2));
281
+
282
+ } catch (error) {
283
+ console.error('Error:', error.message);
284
+ }
285
+ }
286
+
287
+ async function exampleBidirectionalStreaming() {
288
+ console.log('\n=== Example 3: Bidirectional Streaming Pipeline ===');
289
+
290
+ const inputFile = './examples/large-data.json';
291
+ const tempFile = './examples/temp-stream.csv';
292
+ const outputFile = './examples/final-stream.json';
293
+
294
+ if (!fs.existsSync(inputFile)) {
295
+ await generateLargeJsonFile(inputFile, 50000);
296
+ }
297
+
298
+ console.log('Running bidirectional streaming pipeline...');
299
+ console.log('JSON → CSV → JSON roundtrip');
300
+
301
+ const startTime = Date.now();
302
+
303
+ try {
304
+ // Step 1: JSON to CSV
305
+ console.log('\nStep 1: Converting JSON to CSV...');
306
+
307
+ const jsonToCsvOptions = {
308
+ delimiter: ',',
309
+ includeHeaders: true,
310
+ preventCsvInjection: true
311
+ };
312
+
313
+ // For simplicity, we'll use the regular conversion
314
+ // In production, you would use proper streaming
315
+ const jsonData = require(inputFile);
316
+ const csvData = jtcsv.jsonToCsv(jsonData, jsonToCsvOptions);
317
+
318
+ await fs.promises.writeFile(tempFile, csvData, 'utf8');
319
+
320
+ const step1Time = Date.now() - startTime;
321
+ console.log(` ✓ Step 1 completed in ${step1Time}ms`);
322
+
323
+ // Step 2: CSV to JSON
324
+ console.log('\nStep 2: Converting CSV to JSON...');
325
+
326
+ const csvToJsonOptions = {
327
+ delimiter: ',',
328
+ hasHeaders: true,
329
+ parseNumbers: true,
330
+ parseBooleans: true
331
+ };
332
+
333
+ const finalData = await jtcsv.readCsvAsJson(tempFile, csvToJsonOptions);
334
+
335
+ await fs.promises.writeFile(
336
+ outputFile,
337
+ JSON.stringify(finalData, null, 2),
338
+ 'utf8'
339
+ );
340
+
341
+ const totalTime = Date.now() - startTime;
342
+ console.log(` ✓ Step 2 completed in ${totalTime - step1Time}ms`);
343
+
344
+ // Verification
345
+ console.log('\nVerification:');
346
+ console.log(` Original records: ${jsonData.length}`);
347
+ console.log(` Final records: ${finalData.length}`);
348
+
349
+ // Check if data survived roundtrip
350
+ const sampleOriginal = jsonData[0];
351
+ const sampleFinal = finalData[0];
352
+
353
+ console.log('\nSample comparison (first record):');
354
+ console.log(' Original:', JSON.stringify(sampleOriginal).substring(0, 100) + '...');
355
+ console.log(' Final: ', JSON.stringify(sampleFinal).substring(0, 100) + '...');
356
+
357
+ // Cleanup temp file
358
+ await fs.promises.unlink(tempFile).catch(() => {});
359
+
360
+ console.log(`\n✓ Bidirectional streaming completed in ${totalTime}ms`);
361
+
362
+ } catch (error) {
363
+ console.error('Error:', error.message);
364
+
365
+ // Cleanup on error
366
+ try {
367
+ await fs.promises.unlink(tempFile);
368
+ } catch (_e) {
369
+ // Ignore cleanup errors
370
+ }
371
+ }
372
+ }
373
+
374
+ async function main() {
375
+ console.log('='.repeat(60));
376
+ console.log('jtcsv Streaming Examples');
377
+ console.log('='.repeat(60));
378
+
379
+ // Create examples directory
380
+ const examplesDir = './examples';
381
+ if (!fs.existsSync(examplesDir)) {
382
+ fs.mkdirSync(examplesDir, { recursive: true });
383
+ }
384
+
385
+ try {
386
+ await exampleJsonToCsvStreaming();
387
+ await exampleCsvToJsonStreaming();
388
+ await exampleBidirectionalStreaming();
389
+
390
+ console.log('\n' + '='.repeat(60));
391
+ console.log('All examples completed successfully!');
392
+ console.log('='.repeat(60));
393
+
394
+ console.log('\nTry these commands:');
395
+ console.log(' • Launch TUI: npx jtcsv tui');
396
+ console.log(' • Convert JSON: npx jtcsv json2csv examples/large-data.json output.csv');
397
+ console.log(' • Convert CSV: npx jtcsv csv2json examples/large-data.csv output.json');
398
+ console.log(' • Streaming mode: npx jtcsv stream json2csv examples/large-data.json streamed.csv');
399
+
400
+ } catch (error) {
401
+ console.error('\nError running examples:', error.message);
402
+ process.exit(1);
403
+ }
404
+ }
405
+
406
+ // Run examples
407
+ if (require.main === module) {
408
+ main().catch((error) => {
409
+ console.error('Fatal error:', error);
410
+ process.exit(1);
411
+ });
412
+ }
413
+
414
+ export default {
415
+ generateLargeJsonFile,
416
+ exampleJsonToCsvStreaming,
417
+ exampleCsvToJsonStreaming,
418
+ exampleBidirectionalStreaming
419
+ };
@@ -0,0 +1,28 @@
1
+ // Advanced Web Workers usage (browser example)
2
+ // Requires jtcsv loaded in the browser (UMD or ESM).
3
+
4
+ async function runWorkerDemo(file) {
5
+ if (!window.jtcsv) {
6
+ throw new Error('jtcsv is not available on window');
7
+ }
8
+
9
+ const pool = window.jtcsv.createWorkerPool({
10
+ workerCount: 4,
11
+ maxQueueSize: 50,
12
+ autoScale: true
13
+ });
14
+
15
+ const result = await window.jtcsv.parseCSVWithWorker(file, {
16
+ delimiter: ',',
17
+ parseNumbers: true
18
+ }, (progress) => {
19
+ console.log(`Processed ${progress.processed} rows (${progress.percentage.toFixed(1)}%)`);
20
+ }, pool);
21
+
22
+ console.log('Rows:', result.length);
23
+ return result;
24
+ }
25
+
26
+ // Example usage in the browser:
27
+ // const file = document.querySelector('input[type="file"]').files[0];
28
+ // runWorkerDemo(file);
package/index.d.ts CHANGED
@@ -1,5 +1,4 @@
1
- declare module 'jtcsv' {
2
- import { Readable, Writable, Transform } from 'stream';
1
+ import { Readable, Writable, Transform } from 'stream';
3
2
 
4
3
  // JSON to CSV interfaces
5
4
  export interface JsonToCsvOptions {
@@ -649,4 +648,3 @@ declare module 'jtcsv' {
649
648
  export function createTsvToJsonStream(
650
649
  options?: TsvOptions
651
650
  ): TransformStream;
652
- }
package/index.js CHANGED
@@ -1,3 +1,4 @@
1
+ // @ts-nocheck
1
2
  // Main entry point for the jtcsv module
2
3
  // Exports both JSON→CSV and CSV→JSON functions
3
4
 
@@ -9,6 +10,8 @@ const streamJsonToCsvModule = require('./stream-json-to-csv');
9
10
  const streamCsvToJsonModule = require('./stream-csv-to-json');
10
11
  const ndjsonParser = require('./src/formats/ndjson-parser');
11
12
  const tsvParser = require('./src/formats/tsv-parser');
13
+ const zodAdapter = require('./src/utils/zod-adapter');
14
+ const encodingSupport = require('./src/utils/encoding-support');
12
15
 
13
16
  // Combine all exports
14
17
  module.exports = {
@@ -64,5 +67,16 @@ module.exports = {
64
67
  createTsvToJsonStream: tsvParser.createTsvToJsonStream,
65
68
 
66
69
  // Error classes
67
- ...errorsModule
70
+ ...errorsModule,
71
+
72
+ // Schema validation adapters (optional dependencies)
73
+ createZodValidationHook: zodAdapter.createZodValidationHook,
74
+ createYupValidationHook: zodAdapter.createYupValidationHook,
75
+ createValidatedParser: zodAdapter.createValidatedParser,
76
+
77
+ // Encoding detection and conversion
78
+ detectEncoding: encodingSupport.detectEncoding,
79
+ convertToUtf8: encodingSupport.convertToUtf8,
80
+ autoDetectAndConvert: encodingSupport.autoDetectAndConvert,
81
+ csvToJsonWithEncoding: encodingSupport.csvToJsonWithEncoding
68
82
  };