jtcsv 1.2.0 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. package/README.md +252 -337
  2. package/bin/jtcsv.js +167 -85
  3. package/cli-tui.js +0 -0
  4. package/dist/jtcsv.cjs.js +1619 -0
  5. package/dist/jtcsv.cjs.js.map +1 -0
  6. package/dist/jtcsv.esm.js +1599 -0
  7. package/dist/jtcsv.esm.js.map +1 -0
  8. package/dist/jtcsv.umd.js +1625 -0
  9. package/dist/jtcsv.umd.js.map +1 -0
  10. package/examples/cli-tool.js +186 -0
  11. package/examples/express-api.js +167 -0
  12. package/examples/large-dataset-example.js +185 -0
  13. package/examples/plugin-excel-exporter.js +407 -0
  14. package/examples/simple-usage.js +280 -0
  15. package/examples/streaming-example.js +419 -0
  16. package/index.d.ts +4 -0
  17. package/json-save.js +1 -1
  18. package/package.json +128 -14
  19. package/plugins/README.md +373 -0
  20. package/plugins/express-middleware/README.md +306 -0
  21. package/plugins/express-middleware/example.js +136 -0
  22. package/plugins/express-middleware/index.d.ts +114 -0
  23. package/plugins/express-middleware/index.js +360 -0
  24. package/plugins/express-middleware/package.json +52 -0
  25. package/plugins/fastify-plugin/index.js +406 -0
  26. package/plugins/fastify-plugin/package.json +55 -0
  27. package/plugins/nextjs-api/README.md +452 -0
  28. package/plugins/nextjs-api/examples/ConverterComponent.jsx +386 -0
  29. package/plugins/nextjs-api/examples/api-convert.js +69 -0
  30. package/plugins/nextjs-api/index.js +388 -0
  31. package/plugins/nextjs-api/package.json +63 -0
  32. package/plugins/nextjs-api/route.js +372 -0
  33. package/src/browser/browser-functions.js +189 -0
  34. package/src/browser/csv-to-json-browser.js +442 -0
  35. package/src/browser/errors-browser.js +194 -0
  36. package/src/browser/index.js +79 -0
  37. package/src/browser/json-to-csv-browser.js +309 -0
  38. package/src/browser/workers/csv-parser.worker.js +359 -0
  39. package/src/browser/workers/worker-pool.js +467 -0
  40. package/src/core/plugin-system.js +472 -0
  41. package/src/engines/fast-path-engine-new.js +338 -0
  42. package/src/engines/fast-path-engine.js +347 -0
  43. package/src/formats/ndjson-parser.js +419 -0
  44. package/src/index-with-plugins.js +349 -0
  45. package/stream-csv-to-json.js +1 -1
  46. package/stream-json-to-csv.js +1 -1
@@ -0,0 +1,280 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Простой пример использования новых возможностей JTCSV 2.1.0
5
+ * Демонстрация Fast-Path Engine, NDJSON и Plugin System
6
+ *
7
+ * @version 1.0.0
8
+ * @date 2026-01-22
9
+ */
10
+
11
+ console.log('🚀 JTCSV 2.1.0 - Демонстрация новых возможностей\n');
12
+
13
+ // ============================================================================
14
+ // 1. Базовое использование (обратная совместимость)
15
+ // ============================================================================
16
+
17
+ console.log('1. 📦 Базовое использование (обратная совместимость)');
18
+ console.log('='.repeat(60));
19
+
20
+ const { jsonToCsv, csvToJson } = require('jtcsv');
21
+
22
+ const sampleData = [
23
+ { id: 1, name: 'John Doe', age: 30, city: 'New York' },
24
+ { id: 2, name: 'Jane Smith', age: 25, city: 'London' },
25
+ { id: 3, name: 'Bob Johnson', age: 35, city: 'Tokyo' }
26
+ ];
27
+
28
+ // Конвертация JSON → CSV
29
+ const csv = jsonToCsv(sampleData, { delimiter: ',' });
30
+ console.log('📄 CSV результат:');
31
+ console.log(csv);
32
+ console.log();
33
+
34
+ // Конвертация CSV → JSON
35
+ const json = csvToJson(csv, { delimiter: ',' });
36
+ console.log('📊 JSON результат:');
37
+ console.log(JSON.stringify(json, null, 2));
38
+ console.log();
39
+
40
+ // ============================================================================
41
+ // 2. Fast-Path Engine (оптимизированный парсинг)
42
+ // ============================================================================
43
+
44
+ console.log('\n2. ⚡ Fast-Path Engine (оптимизированный парсинг)');
45
+ console.log('='.repeat(60));
46
+
47
+ const { FastPathEngine } = require('../src/engines/fast-path-engine');
48
+ const engine = new FastPathEngine();
49
+
50
+ // Создаем тестовый CSV
51
+ let testCsv = 'id,name,description\n';
52
+ for (let i = 1; i <= 1000; i++) {
53
+ testCsv += `${i},User${i},"Description for user ${i}"\n`;
54
+ }
55
+
56
+ console.log('📊 Анализ структуры CSV...');
57
+ const sample = testCsv.substring(0, 500);
58
+ const structure = engine.analyzeStructure(sample);
59
+ console.log('Структура:', {
60
+ delimiter: structure.delimiter,
61
+ hasQuotes: structure.hasQuotes,
62
+ recommendedEngine: structure.recommendedEngine,
63
+ complexity: structure.complexity
64
+ });
65
+
66
+ console.log('\n⚡ Парсинг 1000 строк с оптимизацией...');
67
+ const startTime = Date.now();
68
+ const parsed = engine.parse(testCsv);
69
+ const duration = Date.now() - startTime;
70
+
71
+ console.log(`✅ Парсинг завершен за ${duration}ms`);
72
+ console.log(`📈 Скорость: ${Math.round(1000 / (duration / 1000))} строк/сек`);
73
+ console.log(`📊 Результат: ${parsed.length} строк (${parsed[0].length} колонок)`);
74
+
75
+ const stats = engine.getStats();
76
+ console.log('\n📊 Статистика Fast-Path Engine:');
77
+ console.log(` Simple парсеры: ${stats.simpleParserCount}`);
78
+ console.log(` Quote-aware парсеры: ${stats.quoteAwareParserCount}`);
79
+ console.log(` Cache hits: ${stats.cacheHits}`);
80
+ console.log(` Cache miss: ${stats.cacheMisses}`);
81
+ console.log(` Hit rate: ${(stats.hitRate * 100).toFixed(1)}%`);
82
+
83
+ // ============================================================================
84
+ // 3. NDJSON поддержка (потоковая обработка)
85
+ // ============================================================================
86
+
87
+ console.log('\n3. 📝 NDJSON поддержка (потоковая обработка)');
88
+ console.log('='.repeat(60));
89
+
90
+ const { NdjsonParser } = require('../src/formats/ndjson-parser');
91
+
92
+ // Конвертация в NDJSON
93
+ const ndjson = NdjsonParser.toNdjson(sampleData, { space: 2 });
94
+ console.log('📄 NDJSON результат:');
95
+ console.log(ndjson);
96
+ console.log();
97
+
98
+ // Обратная конвертация
99
+ const fromNdjson = NdjsonParser.fromNdjson(ndjson);
100
+ console.log('🔁 Обратная конвертация:');
101
+ console.log(JSON.stringify(fromNdjson, null, 2));
102
+ console.log();
103
+
104
+ // Статистика
105
+ const ndjsonStats = NdjsonParser.getStats(ndjson);
106
+ console.log('📊 Статистика NDJSON:');
107
+ console.log(` Строк: ${ndjsonStats.totalLines}`);
108
+ console.log(` Валидных: ${ndjsonStats.validLines}`);
109
+ console.log(` Успешность: ${ndjsonStats.successRate}%`);
110
+
111
+ // ============================================================================
112
+ // 4. Plugin System (расширяемость)
113
+ // ============================================================================
114
+
115
+ console.log('\n4. 🔌 Plugin System (расширяемость)');
116
+ console.log('='.repeat(60));
117
+
118
+ const { PluginManager } = require('../src/core/plugin-system');
119
+
120
+ // Создаем простой плагин для логирования
121
+ const loggingPlugin = {
122
+ name: 'Logging Plugin',
123
+ version: '1.0.0',
124
+ description: 'Логирование операций конвертации',
125
+
126
+ hooks: {
127
+ 'before:csvToJson': (csv, context) => {
128
+ console.log(` 📥 Начало csvToJson (${csv.length} байт)`);
129
+ return csv;
130
+ },
131
+
132
+ 'after:csvToJson': (result, context) => {
133
+ console.log(` 📤 Завершение csvToJson (${result.length} записей)`);
134
+ return result;
135
+ },
136
+
137
+ 'before:jsonToCsv': (json, context) => {
138
+ console.log(` 📥 Начало jsonToCsv (${json.length} записей)`);
139
+ return json;
140
+ },
141
+
142
+ 'after:jsonToCsv': (csv, context) => {
143
+ console.log(` 📤 Завершение jsonToCsv (${csv.length} байт)`);
144
+ return csv;
145
+ }
146
+ },
147
+
148
+ middlewares: [
149
+ async (ctx, next) => {
150
+ console.log(` 🔄 Middleware: ${ctx.operation} начат`);
151
+ const start = Date.now();
152
+ await next();
153
+ const duration = Date.now() - start;
154
+ console.log(` ✅ Middleware: ${ctx.operation} завершен за ${duration}ms`);
155
+ }
156
+ ]
157
+ };
158
+
159
+ // Создаем плагин для трансформации данных
160
+ const transformPlugin = {
161
+ name: 'Transform Plugin',
162
+ version: '1.0.0',
163
+ description: 'Трансформация данных перед обработкой',
164
+
165
+ hooks: {
166
+ 'before:jsonToCsv': (json, context) => {
167
+ // Добавляем timestamp к каждой записи
168
+ return json.map(item => ({
169
+ ...item,
170
+ processedAt: new Date().toISOString(),
171
+ processedBy: 'transform-plugin'
172
+ }));
173
+ }
174
+ }
175
+ };
176
+
177
+ // Инициализируем менеджер плагинов
178
+ const pluginManager = new PluginManager();
179
+ pluginManager.use('logging', loggingPlugin);
180
+ pluginManager.use('transform', transformPlugin);
181
+
182
+ console.log('📋 Зарегистрированные плагины:');
183
+ pluginManager.listPlugins().forEach(plugin => {
184
+ console.log(` • ${plugin.name} v${plugin.version} - ${plugin.description}`);
185
+ });
186
+
187
+ console.log('\n🔄 Выполнение с плагинами...');
188
+
189
+ // Основная функция для демонстрации
190
+ const processData = async () => {
191
+ const testJson = [
192
+ { id: 1, value: 'test1' },
193
+ { id: 2, value: 'test2' }
194
+ ];
195
+
196
+ // Выполняем с плагинами
197
+ const result = await pluginManager.executeWithPlugins(
198
+ 'jsonToCsv',
199
+ testJson,
200
+ { delimiter: '|' },
201
+ (data, options) => {
202
+ // Имитация основной функции
203
+ const headers = Object.keys(data[0]);
204
+ const rows = data.map(item => headers.map(h => item[h]).join(options.delimiter));
205
+ return [headers.join(options.delimiter), ...rows].join('\n');
206
+ }
207
+ );
208
+
209
+ console.log('\n📄 Результат с плагинами:');
210
+ console.log(result);
211
+ };
212
+
213
+ processData().catch(console.error); // top-level await is invalid in CommonJS; run the async fn and surface errors
214
+
215
+ // Статистика плагинов
216
+ const pluginStats = pluginManager.getStats();
217
+ console.log('\n📊 Статистика Plugin System:');
218
+ console.log(` Плагинов: ${pluginStats.plugins}`);
219
+ console.log(` Hooks выполнено: ${pluginStats.hookExecutions}`);
220
+ console.log(` Middleware выполнено: ${pluginStats.middlewareExecutions}`);
221
+ console.log(` Уникальных hooks: ${pluginStats.uniqueHooks}`);
222
+
223
+ // ============================================================================
224
+ // 5. JTCSV с плагинами (полная интеграция)
225
+ // ============================================================================
226
+
227
+ console.log('\n5. 🎯 JTCSV с плагинами (полная интеграция)');
228
+ console.log('='.repeat(60));
229
+
230
+ // Проверяем доступность полной интеграции
231
+ try {
232
+ const JtcsvWithPlugins = require('../src/index-with-plugins');
233
+
234
+ console.log('✅ Полная интеграция доступна');
235
+ console.log('Запустите для полной демонстрации:');
236
+ console.log(' npm run example:plugins');
237
+ console.log('\nИли посмотрите пример:');
238
+ console.log(' examples/plugin-excel-exporter.js');
239
+ } catch (error) {
240
+ console.log('⚠️ Полная интеграция требует дополнительных зависимостей');
241
+ console.log('Установите exceljs для демонстрации Excel плагина:');
242
+ console.log(' npm install exceljs --save-optional');
243
+ }
244
+
245
+ // ============================================================================
246
+ // Итоги
247
+ // ============================================================================
248
+
249
+ console.log('\n' + '='.repeat(60));
250
+ console.log('🎉 ДЕМОНСТРАЦИЯ ЗАВЕРШЕНА!');
251
+ console.log('='.repeat(60));
252
+
253
+ console.log('\n📈 ИТОГИ JTCSV 2.1.0:');
254
+ console.log('✅ Обратная совместимость с 2.0.0');
255
+ console.log('✅ Fast-Path Engine (до 3-4x быстрее)');
256
+ console.log('✅ NDJSON поддержка для потоковой обработки');
257
+ console.log('✅ Plugin System для расширяемости');
258
+ console.log('✅ Excel интеграция (через плагины)');
259
+ console.log('✅ Детальная статистика и мониторинг');
260
+
261
+ console.log('\n🚀 Следующие шаги:');
262
+ console.log('1. Изучите README-PLUGINS.md для подробной документации');
263
+ console.log('2. Запустите тесты: npm test');
264
+ console.log('3. Попробуйте примеры: npm run example:plugins');
265
+ console.log('4. Создайте свой плагин!');
266
+
267
+ console.log('\n💡 Совет: Для production используйте:');
268
+ console.log(' const jtcsv = require("jtcsv").create();');
269
+ console.log(' jtcsv.use("your-plugin", yourPluginConfig);');
270
+
271
+ console.log('\n📚 Документация: https://github.com/Linol-Hamelton/jtcsv');
272
+ console.log('🐛 Issues: https://github.com/Linol-Hamelton/jtcsv/issues');
273
+ console.log('⭐ Star на GitHub если понравилось!');
274
+
275
+ console.log('\n' + '✨'.repeat(30));
276
+ console.log('✨ JTCSV 2.1.0 - Next Generation JSON/CSV Converter ✨');
277
+ console.log('✨'.repeat(30));
278
+
279
+
280
+
@@ -0,0 +1,419 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Streaming Example for jtcsv
5
+ *
6
+ * Demonstrates bidirectional streaming with large datasets
7
+ * and real-time progress monitoring.
8
+ */
9
+
10
+ const fs = require('fs');
11
+ const path = require('path');
12
+ const { pipeline } = require('stream/promises');
13
+ const jtcsv = require('../index.js');
14
+
15
+ async function generateLargeJsonFile(filePath, recordCount) {
16
+ console.log(`Generating ${recordCount} records...`);
17
+
18
+ const writeStream = fs.createWriteStream(filePath, 'utf8');
19
+
20
+ // Write opening bracket for JSON array
21
+ writeStream.write('[');
22
+
23
+ for (let i = 1; i <= recordCount; i++) {
24
+ const record = {
25
+ id: i,
26
+ name: `User ${i}`,
27
+ email: `user${i}@example.com`,
28
+ age: Math.floor(Math.random() * 50) + 18,
29
+ active: Math.random() > 0.5,
30
+ score: Math.random() * 100,
31
+ tags: ['customer', `tier${Math.floor(Math.random() * 3) + 1}`],
32
+ metadata: {
33
+ created: new Date().toISOString(),
34
+ updated: new Date().toISOString(),
35
+ version: '1.0.0'
36
+ }
37
+ };
38
+
39
+ const jsonRecord = JSON.stringify(record);
40
+ writeStream.write(jsonRecord);
41
+
42
+ if (i < recordCount) {
43
+ writeStream.write(',');
44
+ }
45
+
46
+ // Show progress every 10,000 records
47
+ if (i % 10000 === 0) {
48
+ process.stdout.write(` Generated ${i} records\r`);
49
+ }
50
+ }
51
+
52
+ // Write closing bracket
53
+ writeStream.write(']');
54
+
55
+ await new Promise((resolve) => {
56
+ writeStream.end(() => {
57
+ console.log(`\nGenerated ${recordCount} records in ${filePath}`);
58
+ resolve();
59
+ });
60
+ });
61
+ }
62
+
63
+ async function exampleJsonToCsvStreaming() {
64
+ console.log('\n=== Example 1: JSON to CSV Streaming ===');
65
+
66
+ const inputFile = './examples/large-data.json';
67
+ const outputFile = './examples/large-data.csv';
68
+
69
+ // Generate test data if needed
70
+ if (!fs.existsSync(inputFile)) {
71
+ await generateLargeJsonFile(inputFile, 100000);
72
+ }
73
+
74
+ console.log('Converting JSON to CSV using streaming...');
75
+
76
+ const startTime = Date.now();
77
+
78
+ try {
79
+ // Create readable stream from file
80
+ const readStream = fs.createReadStream(inputFile, 'utf8');
81
+
82
+ // Parse JSON stream (simplified - in real app use proper JSON stream parser)
83
+ let buffer = '';
84
+ let recordCount = 0;
85
+
86
+ const jsonStream = new (require('stream').Transform)({
87
+ objectMode: true,
88
+ transform(chunk, encoding, callback) {
89
+ buffer += chunk;
90
+
91
+ // Simple JSON parsing for demonstration
92
+ // In production, use a proper JSON stream parser
93
+ try {
94
+ const data = JSON.parse(buffer);
95
+ if (Array.isArray(data)) {
96
+ data.forEach(item => {
97
+ this.push(item);
98
+ recordCount++;
99
+
100
+ if (recordCount % 10000 === 0) {
101
+ console.log(` Processed ${recordCount} records`);
102
+ }
103
+ });
104
+ }
105
+ buffer = '';
106
+ } catch (error) {
107
+ // Incomplete JSON, wait for more data
108
+ }
109
+
110
+ callback();
111
+ },
112
+
113
+ flush(callback) {
114
+ // Process any remaining data
115
+ if (buffer.trim()) {
116
+ try {
117
+ const data = JSON.parse(buffer);
118
+ if (Array.isArray(data)) {
119
+ data.forEach(item => {
120
+ this.push(item);
121
+ recordCount++;
122
+ });
123
+ }
124
+ } catch (error) {
125
+ console.error('Error parsing final JSON:', error.message);
126
+ }
127
+ }
128
+ callback();
129
+ }
130
+ });
131
+
132
+ // Create CSV transform stream
133
+ const csvStream = jtcsv.createJsonToCsvStream({
134
+ delimiter: ',',
135
+ includeHeaders: true,
136
+ renameMap: {
137
+ id: 'ID',
138
+ name: 'Full Name',
139
+ email: 'Email Address',
140
+ age: 'Age',
141
+ active: 'Active Status',
142
+ score: 'Score',
143
+ 'metadata.created': 'Created Date',
144
+ 'metadata.updated': 'Updated Date'
145
+ },
146
+ preventCsvInjection: true,
147
+ rfc4180Compliant: true
148
+ });
149
+
150
+ // Create write stream
151
+ const writeStream = fs.createWriteStream(outputFile, 'utf8');
152
+
153
+ // Add UTF-8 BOM for Excel compatibility
154
+ writeStream.write('\uFEFF');
155
+
156
+ // Pipe streams together
157
+ await pipeline(
158
+ readStream,
159
+ jsonStream,
160
+ csvStream,
161
+ writeStream
162
+ );
163
+
164
+ const elapsed = Date.now() - startTime;
165
+ console.log(`✓ Converted ${recordCount} records in ${elapsed}ms`);
166
+
167
+ // Show file sizes
168
+ const inputStats = fs.statSync(inputFile);
169
+ const outputStats = fs.statSync(outputFile);
170
+
171
+ console.log(` Input: ${(inputStats.size / 1024 / 1024).toFixed(2)} MB`);
172
+ console.log(` Output: ${(outputStats.size / 1024 / 1024).toFixed(2)} MB`);
173
+
174
+ } catch (error) {
175
+ console.error('Error:', error.message);
176
+ }
177
+ }
178
+
179
+ async function exampleCsvToJsonStreaming() {
180
+ console.log('\n=== Example 2: CSV to JSON Streaming ===');
181
+
182
+ const inputFile = './examples/large-data.csv';
183
+ const outputFile = './examples/streamed-output.json';
184
+
185
+ if (!fs.existsSync(inputFile)) {
186
+ console.log('CSV file not found. Run Example 1 first.');
187
+ return;
188
+ }
189
+
190
+ console.log('Converting CSV to JSON using streaming...');
191
+
192
+ const startTime = Date.now();
193
+
194
+ try {
195
+ // Create CSV read stream
196
+ const readStream = fs.createReadStream(inputFile, 'utf8');
197
+
198
+ // Create CSV to JSON transform stream
199
+ const jsonStream = jtcsv.createCsvToJsonStream({
200
+ delimiter: ',',
201
+ hasHeaders: true,
202
+ parseNumbers: true,
203
+ parseBooleans: true,
204
+ renameMap: {
205
+ 'ID': 'id',
206
+ 'Full Name': 'name',
207
+ 'Email Address': 'email',
208
+ 'Age': 'age',
209
+ 'Active Status': 'active',
210
+ 'Score': 'score',
211
+ 'Created Date': 'created',
212
+ 'Updated Date': 'updated'
213
+ }
214
+ });
215
+
216
+ // Create JSON write stream
217
+ const writeStream = fs.createWriteStream(outputFile, 'utf8');
218
+
219
+ // Write opening bracket
220
+ writeStream.write('[');
221
+
222
+ let recordCount = 0;
223
+ let firstRecord = true;
224
+
225
+ // Custom transform to format JSON array
226
+ const arrayFormatter = new (require('stream').Transform)({
227
+ objectMode: true,
228
+ transform(chunk, encoding, callback) {
229
+ const jsonRecord = JSON.stringify(chunk);
230
+
231
+ if (!firstRecord) {
232
+ writeStream.write(',');
233
+ } else {
234
+ firstRecord = false;
235
+ }
236
+
237
+ writeStream.write(jsonRecord);
238
+
239
+ recordCount++;
240
+ if (recordCount % 10000 === 0) {
241
+ console.log(` Processed ${recordCount} records`);
242
+ }
243
+
244
+ callback();
245
+ },
246
+
247
+ flush(callback) {
248
+ // Write closing bracket
249
+ writeStream.write(']');
250
+ callback();
251
+ }
252
+ });
253
+
254
+ // Pipe streams together
255
+ await pipeline(
256
+ readStream,
257
+ jsonStream,
258
+ arrayFormatter
259
+ );
260
+
261
+ // Close write stream
262
+ await new Promise((resolve) => {
263
+ writeStream.end(() => resolve());
264
+ });
265
+
266
+ const elapsed = Date.now() - startTime;
267
+ console.log(`✓ Converted ${recordCount} records in ${elapsed}ms`);
268
+
269
+ // Verify roundtrip
270
+ const inputStats = fs.statSync(inputFile);
271
+ const outputStats = fs.statSync(outputFile);
272
+
273
+ console.log(` Input: ${(inputStats.size / 1024 / 1024).toFixed(2)} MB`);
274
+ console.log(` Output: ${(outputStats.size / 1024 / 1024).toFixed(2)} MB`);
275
+
276
+ // Sample verification
277
+ const sampleData = require(outputFile).slice(0, 3);
278
+ console.log('\nSample of converted data (first 3 records):');
279
+ console.log(JSON.stringify(sampleData, null, 2));
280
+
281
+ } catch (error) {
282
+ console.error('Error:', error.message);
283
+ }
284
+ }
285
+
286
+ async function exampleBidirectionalStreaming() {
287
+ console.log('\n=== Example 3: Bidirectional Streaming Pipeline ===');
288
+
289
+ const inputFile = './examples/large-data.json';
290
+ const tempFile = './examples/temp-stream.csv';
291
+ const outputFile = './examples/final-stream.json';
292
+
293
+ if (!fs.existsSync(inputFile)) {
294
+ await generateLargeJsonFile(inputFile, 50000);
295
+ }
296
+
297
+ console.log('Running bidirectional streaming pipeline...');
298
+ console.log('JSON → CSV → JSON roundtrip');
299
+
300
+ const startTime = Date.now();
301
+
302
+ try {
303
+ // Step 1: JSON to CSV
304
+ console.log('\nStep 1: Converting JSON to CSV...');
305
+
306
+ const jsonToCsvOptions = {
307
+ delimiter: ',',
308
+ includeHeaders: true,
309
+ preventCsvInjection: true
310
+ };
311
+
312
+ // For simplicity, we'll use the regular conversion
313
+ // In production, you would use proper streaming
314
+ const jsonData = require(inputFile);
315
+ const csvData = jtcsv.jsonToCsv(jsonData, jsonToCsvOptions);
316
+
317
+ await fs.promises.writeFile(tempFile, csvData, 'utf8');
318
+
319
+ const step1Time = Date.now() - startTime;
320
+ console.log(` ✓ Step 1 completed in ${step1Time}ms`);
321
+
322
+ // Step 2: CSV to JSON
323
+ console.log('\nStep 2: Converting CSV to JSON...');
324
+
325
+ const csvToJsonOptions = {
326
+ delimiter: ',',
327
+ hasHeaders: true,
328
+ parseNumbers: true,
329
+ parseBooleans: true
330
+ };
331
+
332
+ const finalData = await jtcsv.readCsvAsJson(tempFile, csvToJsonOptions);
333
+
334
+ await fs.promises.writeFile(
335
+ outputFile,
336
+ JSON.stringify(finalData, null, 2),
337
+ 'utf8'
338
+ );
339
+
340
+ const totalTime = Date.now() - startTime;
341
+ console.log(` ✓ Step 2 completed in ${totalTime - step1Time}ms`);
342
+
343
+ // Verification
344
+ console.log('\nVerification:');
345
+ console.log(` Original records: ${jsonData.length}`);
346
+ console.log(` Final records: ${finalData.length}`);
347
+
348
+ // Check if data survived roundtrip
349
+ const sampleOriginal = jsonData[0];
350
+ const sampleFinal = finalData[0];
351
+
352
+ console.log('\nSample comparison (first record):');
353
+ console.log(' Original:', JSON.stringify(sampleOriginal).substring(0, 100) + '...');
354
+ console.log(' Final: ', JSON.stringify(sampleFinal).substring(0, 100) + '...');
355
+
356
+ // Cleanup temp file
357
+ await fs.promises.unlink(tempFile).catch(() => {});
358
+
359
+ console.log(`\n✓ Bidirectional streaming completed in ${totalTime}ms`);
360
+
361
+ } catch (error) {
362
+ console.error('Error:', error.message);
363
+
364
+ // Cleanup on error
365
+ try {
366
+ await fs.promises.unlink(tempFile);
367
+ } catch (e) {}
368
+ }
369
+ }
370
+
371
+ async function main() {
372
+ console.log('='.repeat(60));
373
+ console.log('jtcsv Streaming Examples');
374
+ console.log('='.repeat(60));
375
+
376
+ // Create examples directory
377
+ const examplesDir = './examples';
378
+ if (!fs.existsSync(examplesDir)) {
379
+ fs.mkdirSync(examplesDir, { recursive: true });
380
+ }
381
+
382
+ try {
383
+ await exampleJsonToCsvStreaming();
384
+ await exampleCsvToJsonStreaming();
385
+ await exampleBidirectionalStreaming();
386
+
387
+ console.log('\n' + '='.repeat(60));
388
+ console.log('All examples completed successfully!');
389
+ console.log('='.repeat(60));
390
+
391
+ console.log('\nTry these commands:');
392
+ console.log(' • Launch TUI: npx jtcsv tui');
393
+ console.log(' • Convert JSON: npx jtcsv json2csv examples/large-data.json output.csv');
394
+ console.log(' • Convert CSV: npx jtcsv csv2json examples/large-data.csv output.json');
395
+ console.log(' • Streaming mode: npx jtcsv stream json2csv examples/large-data.json streamed.csv');
396
+
397
+ } catch (error) {
398
+ console.error('\nError running examples:', error.message);
399
+ process.exit(1);
400
+ }
401
+ }
402
+
403
+ // Run examples
404
+ if (require.main === module) {
405
+ main().catch((error) => {
406
+ console.error('Fatal error:', error);
407
+ process.exit(1);
408
+ });
409
+ }
410
+
411
+ module.exports = {
412
+ generateLargeJsonFile,
413
+ exampleJsonToCsvStreaming,
414
+ exampleCsvToJsonStreaming,
415
+ exampleBidirectionalStreaming
416
+ };
417
+
418
+
419
+
package/index.d.ts CHANGED
@@ -361,3 +361,7 @@ declare module 'jtcsv' {
361
361
  */
362
362
  export function createJsonCollectorStream(): Writable;
363
363
  }
364
+
365
+
366
+
367
+