@arela/uploader 0.2.3 → 0.2.5

This diff shows the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/src/index.js CHANGED
@@ -1,2211 +1,282 @@
  #!/usr/bin/env node
- import { createClient } from '@supabase/supabase-js';
- import cliProgress from 'cli-progress';
  import { Command } from 'commander';
- import { config } from 'dotenv';
- import FormData from 'form-data';
- import fs from 'fs';
- import { globby } from 'globby';
- import mime from 'mime-types';
- import fetch from 'node-fetch';
- import path from 'path';
 
- import { FileDetectionService } from './file-detection.js';
-
- config();
-
- const program = new Command();
-
- // Read package.json version at startup
- let packageVersion = '1.0.0'; // fallback
- try {
-   const __filename = new URL(import.meta.url).pathname;
-   const __dirname = path.dirname(__filename);
-   const packageJsonPath = path.resolve(__dirname, '../package.json');
-   const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
-   packageVersion = packageJson.version || '1.0.0';
- } catch (error) {
-   console.warn('⚠️ Could not read package.json version, using fallback');
- }
-
- // Supabase configuration (original)
- const supabaseUrl = process.env.SUPABASE_URL;
- const supabaseKey = process.env.SUPABASE_KEY;
- const bucket = process.env.SUPABASE_BUCKET;
-
- // API configuration (new)
- const API_BASE_URL = process.env.ARELA_API_URL;
- const API_TOKEN = process.env.ARELA_API_TOKEN;
-
- // Enhanced uploader configuration
- const basePath = process.env.UPLOAD_BASE_PATH;
- const sources = process.env.UPLOAD_SOURCES?.split('|')
-   .map((s) => s.trim())
-   .filter(Boolean);
-
- // RFC configuration for upload
- console.log('🔧 Configured RFCs for upload:', process.env.UPLOAD_RFCS);
- const uploadRfcs = process.env.UPLOAD_RFCS?.split('|')
-   .map((s) => s.trim())
-   .filter(Boolean);
-
- let supabase;
- let apiMode = false;
-
- // Pre-compiled regex patterns for better performance (from original complex uploader)
- const SANITIZATION_PATTERNS = [
-   [/[áàâäãåāăą]/gi, 'a'],
-   [/[éèêëēĕėę]/gi, 'e'],
-   [/[íìîïīĭį]/gi, 'i'],
-   [/[óòôöõōŏő]/gi, 'o'],
-   [/[úùûüūŭů]/gi, 'u'],
-   [/[ñň]/gi, 'n'],
-   [/[ç]/gi, 'c'],
-   [/[ý]/gi, 'y'],
-   [/[멕]/g, 'meok'],
-   [/[시]/g, 'si'],
-   [/[코]/g, 'ko'],
-   [/[용]/g, 'yong'],
-   [/[가-힣]/g, 'kr'],
-   [/[\u0300-\u036f]/g, ''],
-   [/[\\?%*:|"<>[\]~`^]/g, '-'],
-   [/[{}]/g, '-'],
-   [/[&]/g, 'and'],
-   [/[()]/g, ''],
-   [/\s+/g, '-'],
-   [/-+/g, '-'],
-   [/^-+|-+$/g, ''],
-   [/^\.+/, ''],
-   [/[^\w.-]/g, ''],
- ];
-
- const sanitizationCache = new Map();
-
- const sanitizeFileName = (fileName) => {
-   if (sanitizationCache.has(fileName)) {
-     return sanitizationCache.get(fileName);
-   }
-
-   const ext = path.extname(fileName);
-   const nameWithoutExt = path.basename(fileName, ext);
-
-   if (/^[a-zA-Z0-9._-]+$/.test(nameWithoutExt)) {
-     const result = fileName;
-     sanitizationCache.set(fileName, result);
-     return result;
-   }
-
-   let sanitized = nameWithoutExt.normalize('NFD');
-
-   for (const [pattern, replacement] of SANITIZATION_PATTERNS) {
-     sanitized = sanitized.replace(pattern, replacement);
-   }
-
-   // Additional sanitization for problematic characters
-   sanitized = sanitized
-     .replace(/~/g, '-') // Replace tildes
-     .replace(/\s+/g, '-') // Replace spaces with dashes
-     .replace(/\.+/g, '-') // Replace multiple dots with dashes
-     .replace(/-+/g, '-') // Collapse multiple dashes
-     .replace(/^-+|-+$/g, ''); // Remove leading/trailing dashes
-
-   if (!sanitized) {
-     sanitized = 'unnamed_file';
-   }
-
-   const result = sanitized + ext;
-   sanitizationCache.set(fileName, result);
-   return result;
- };
-
- const checkCredentials = async (forceSupabase = false) => {
-   // Force Supabase mode if explicitly requested
-   if (forceSupabase) {
-     console.log('🔧 Force Supabase mode enabled - skipping API');
-     apiMode = false;
-   } else if (API_BASE_URL && API_TOKEN) {
-     console.log(
-       '🌐 API mode enabled - files will be uploaded to Arela API with automatic processing',
-     );
-     apiMode = true;
-
-     try {
-       const response = await fetch(`${API_BASE_URL}/api/health`, {
-         headers: {
-           'x-api-key': API_TOKEN,
-         },
-       });
-
-       if (!response.ok) {
-         console.warn(
-           '⚠️ API connection failed, falling back to direct Supabase upload',
-         );
-         apiMode = false;
-       } else {
-         console.log('✅ Connected to Arela API');
-         return;
-       }
-     } catch (err) {
-       console.warn(
-         '⚠️ API connection failed, falling back to direct Supabase upload',
-       );
-       apiMode = false;
-     }
-   }
-
-   // Initialize Supabase client if not in API mode or if forced
-   if (!apiMode || forceSupabase) {
-     if (!supabaseUrl || !supabaseKey || !bucket) {
-       console.error(
-         '⚠️ Missing credentials. Please set either:\n' +
-           ' - ARELA_API_URL and ARELA_API_TOKEN for API mode, or\n' +
-           ' - SUPABASE_URL, SUPABASE_KEY, and SUPABASE_BUCKET for direct mode',
-       );
-       process.exit(1);
-     }
-
-     supabase = createClient(supabaseUrl, supabaseKey);
-
-     try {
-       const { error } = await supabase.storage.from(bucket).list('');
-       if (error) {
-         console.error('⚠️ Error connecting to Supabase:', error.message);
-         process.exit(1);
-       }
-       console.log('✅ Connected to Supabase (direct mode)');
-     } catch (err) {
-       console.error('⚠️ Error:', err.message);
-       process.exit(1);
-     }
-   }
- };
-
- const logFilePath = path.resolve(process.cwd(), 'arela-upload.log');
-
- /**
-  * OPTIMIZED: Log buffer to reduce I/O operations
-  */
- let logBuffer = [];
- const LOG_BUFFER_SIZE = 100; // Flush every 100 log entries
- let lastFlushTime = Date.now();
- const LOG_FLUSH_INTERVAL = 5000; // Flush every 5 seconds
-
- const flushLogBuffer = () => {
-   if (logBuffer.length === 0) return;
-
-   try {
-     const logContent = logBuffer.join('\n') + '\n';
-     fs.appendFileSync(logFilePath, logContent);
-     logBuffer = [];
-     lastFlushTime = Date.now();
-   } catch (error) {
-     console.error(`❌ Error writing to log file: ${error.code} | ${error.message} | path: ${logFilePath}`);
-   }
- };
-
- const writeLog = (message) => {
-   try {
-     const timestamp = new Date().toISOString();
-     logBuffer.push(`[${timestamp}] ${message}`);
-
-     // Flush if buffer is full or enough time has passed
-     const now = Date.now();
-     if (
-       logBuffer.length >= LOG_BUFFER_SIZE ||
-       now - lastFlushTime >= LOG_FLUSH_INTERVAL
-     ) {
-       flushLogBuffer();
-     }
-   } catch (error) {
-     console.error(`❌ Error buffering log message: ${error.message}`);
-   }
- };
-
- // Ensure logs are flushed on process exit
- process.on('exit', flushLogBuffer);
- process.on('SIGINT', () => {
-   flushLogBuffer();
-   process.exit(0);
- });
- process.on('SIGTERM', () => {
-   flushLogBuffer();
-   process.exit(0);
- });
-
- /**
-  * OPTIMIZED: Conditional logging to reduce console overhead
-  */
- const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true';
- const BATCH_DELAY = parseInt(process.env.BATCH_DELAY) || 100; // Configurable delay between batches
- const PROGRESS_UPDATE_INTERVAL =
-   parseInt(process.env.PROGRESS_UPDATE_INTERVAL) || 10; // Update progress every N items
-
- const logVerbose = (message) => {
-   if (VERBOSE_LOGGING) {
-     console.log(message);
-   }
- };
- const batchReadFileStats = (filePaths) => {
-   const results = [];
-
-   for (const filePath of filePaths) {
-     try {
-       const stats = fs.statSync(filePath);
-       results.push({ path: filePath, stats, error: null });
-     } catch (error) {
-       results.push({ path: filePath, stats: null, error: error.message });
-     }
-   }
-
-   return results;
- };
-
- /**
-  * OPTIMIZED: Cache for year/pedimento detection results to avoid redundant parsing
-  */
- const pathDetectionCache = new Map();
-
- /**
-  * OPTIMIZED: Clear the path detection cache (useful for testing or long-running processes)
-  */
- const clearPathDetectionCache = () => {
-   pathDetectionCache.clear();
- };
-
- /**
-  * OPTIMIZED: Get detection results with caching
-  */
- const getCachedPathDetection = (filePath, basePath) => {
-   const cacheKey = `${filePath}|${basePath}`;
-
-   if (pathDetectionCache.has(cacheKey)) {
-     return pathDetectionCache.get(cacheKey);
-   }
-
-   const detection = extractYearAndPedimentoFromPath(filePath, basePath);
-   pathDetectionCache.set(cacheKey, detection);
-
-   return detection;
- };
-
- /**
-  * Extracts year and pedimento number from file path
-  * Supports patterns like:
-  * - /path/to/2024/4023260/file.pdf
-  * - /path/to/pedimentos/2024/4023260/file.pdf
-  * - /path/to/docs/año2024/ped4023260/file.pdf
-  */
- const extractYearAndPedimentoFromPath = (filePath, basePath) => {
-   try {
-     const relativePath = path.relative(basePath, filePath);
-     const pathParts = relativePath.split(path.sep);
-
-     let year = null;
-     let pedimento = null;
-
-     // Pattern 1: Direct year/pedimento structure (2024/4023260)
-     for (let i = 0; i < pathParts.length - 1; i++) {
-       const part = pathParts[i];
-       const nextPart = pathParts[i + 1];
-
-       // Check if current part looks like a year (2020-2030)
-       const yearMatch = part.match(/^(202[0-9])$/);
-       if (yearMatch && nextPart) {
-         year = yearMatch[1];
-
-         // Check if next part looks like a pedimento (4-8 digits)
-         const pedimentoMatch = nextPart.match(/^(\d{4,8})$/);
-         if (pedimentoMatch) {
-           pedimento = pedimentoMatch[1];
-           break;
-         }
-       }
-     }
-
-     // Pattern 2: Named patterns (año2024, ped4023260)
-     if (!year || !pedimento) {
-       for (const part of pathParts) {
-         if (!year) {
-           const namedYearMatch = part.match(/(?:año|year|anio)(\d{4})/i);
-           if (namedYearMatch) {
-             year = namedYearMatch[1];
-           }
-         }
-
-         if (!pedimento) {
-           const namedPedimentoMatch = part.match(
-             /(?:ped|pedimento|pedi)(\d{4,8})/i,
-           );
-           if (namedPedimentoMatch) {
-             pedimento = namedPedimentoMatch[1];
-           }
-         }
-       }
-     }
-
-     // Pattern 3: Loose year detection in any part
-     if (!year) {
-       for (const part of pathParts) {
-         const yearMatch = part.match(/(202[0-9])/);
-         if (yearMatch) {
-           year = yearMatch[1];
-           break;
-         }
-       }
-     }
-
-     // Pattern 4: Loose pedimento detection (4-8 consecutive digits)
-     if (!pedimento) {
-       for (const part of pathParts) {
-         const pedimentoMatch = part.match(/(\d{4,8})/);
-         if (pedimentoMatch && pedimentoMatch[1].length >= 4) {
-           pedimento = pedimentoMatch[1];
-           break;
-         }
-       }
-     }
-
-     return { year, pedimento, detected: !!(year && pedimento) };
-   } catch (error) {
-     return {
-       year: null,
-       pedimento: null,
-       detected: false,
-       error: error.message,
-     };
-   }
- };
-
- /**
-  * OPTIMIZED: Get processed paths with caching and buffered log reading
-  */
- let processedPathsCache = null;
- let lastLogModTime = 0;
-
- const getProcessedPaths = () => {
-   try {
-     // Check if log file exists
-     if (!fs.existsSync(logFilePath)) {
-       return new Set();
-     }
-
-     // Check if cache is still valid
-     const logStats = fs.statSync(logFilePath);
-     if (processedPathsCache && logStats.mtime.getTime() === lastLogModTime) {
-       return processedPathsCache;
-     }
-
-     // Read and parse log file
-     const processed = new Set();
-     const content = fs.readFileSync(logFilePath, 'utf-8');
-
-     // Use more efficient regex with global flag
-     const regex = /(SUCCESS|SKIPPED): .*? -> (.+)/g;
-     let match;
-
-     while ((match = regex.exec(content)) !== null) {
-       const path = match[2];
-       if (path) {
-         processed.add(path.trim());
-       }
-     }
-
-     // Update cache
-     processedPathsCache = processed;
-     lastLogModTime = logStats.mtime.getTime();
-
-     return processed;
-   } catch (error) {
-     console.error(`⚠️ Error reading processed paths: ${error.message}`);
-     return new Set();
-   }
- };
-
- /**
-  * Upload files to Arela API with automatic detection and organization
-  */
- const uploadToApi = async (files, options) => {
-   const formData = new FormData();
-
-   files.forEach((file) => {
-     const fileBuffer = fs.readFileSync(file.path);
-     formData.append('files', fileBuffer, {
-       filename: file.name,
-       contentType: file.contentType,
-     });
-   });
-
-   if (bucket) formData.append('bucket', bucket);
-   if (options.prefix) formData.append('prefix', options.prefix);
-
-   // New feature: custom folder structure
-   let combinedStructure = null;
-   let cachedDetection = null; // Cache detection result to avoid redundant calls
-
-   if (
-     options.folderStructure &&
-     options.autoDetectStructure &&
-     files.length > 0
-   ) {
-     // Combine custom folder structure with auto-detection
-     const firstFile = files[0];
-     cachedDetection = getCachedPathDetection(firstFile.path, process.cwd());
-
-     if (cachedDetection.detected) {
-       const autoStructure = `${cachedDetection.year}/${cachedDetection.pedimento}`;
-       combinedStructure = `${options.folderStructure}/${autoStructure}`;
-       formData.append('folderStructure', combinedStructure);
-       console.log(
-         `📁 Combined folder structure: ${options.folderStructure} + ${autoStructure} = ${combinedStructure}`,
-       );
-     } else {
-       // Fallback to just custom structure if auto-detection fails
-       formData.append('folderStructure', options.folderStructure);
-       console.log(
-         `📁 Using custom folder structure (auto-detection failed): ${options.folderStructure}`,
-       );
-     }
-   } else if (options.folderStructure) {
-     formData.append('folderStructure', options.folderStructure);
-     console.log(`📁 Using custom folder structure: ${options.folderStructure}`);
-   } else if (options.autoDetectStructure && files.length > 0) {
-     // Try to auto-detect from the first file if no explicit structure is provided
-     const firstFile = files[0];
-     cachedDetection = getCachedPathDetection(firstFile.path, process.cwd());
-
-     if (cachedDetection.detected) {
-       const autoStructure = `${cachedDetection.year}/${cachedDetection.pedimento}`;
-       formData.append('folderStructure', autoStructure);
-     }
-   }
-
-   // If clientPath is specified for user_metadata
-   if (options.clientPath) {
-     formData.append('clientPath', options.clientPath);
-   }
-
-   formData.append('autoDetect', String(options.autoDetect ?? true));
-   formData.append('autoOrganize', String(options.autoOrganize ?? true));
-   formData.append('batchSize', String(options.batchSize || 10));
-   formData.append('clientVersion', packageVersion);
-
-   const response = await fetch(
-     `${API_BASE_URL}/api/storage/batch-upload-and-process`,
-     {
-       method: 'POST',
-       headers: {
-         'x-api-key': API_TOKEN,
-       },
-       body: formData,
-     },
-   );
-
-   if (!response.ok) {
-     const errorText = await response.text();
-     throw new Error(
-       `API request failed: ${response.status} ${response.statusText} - ${errorText}`,
-     );
-   }
-
-   return response.json();
- };
-
- /**
-  * Upload file directly to Supabase (fallback method)
-  */
- const uploadToSupabase = async (file, uploadPath) => {
-   const content = fs.readFileSync(file);
-   const contentType = mime.lookup(file) || 'application/octet-stream';
-
-   const { data, error } = await supabase.storage
-     .from(bucket)
-     .upload(uploadPath.replace(/\\/g, '/'), content, {
-       upsert: true,
-       contentType,
-     });
-
-   if (error) {
-     throw new Error(error.message);
-   }
-
-   return data;
- };
-
- /**
-  * Insert file stats into uploader table with document detection
-  */
- const insertStatsToUploaderTable = async (files, options) => {
-   if (!supabase) {
-     throw new Error(
-       'Supabase client not initialized. Stats mode requires Supabase connection.',
-     );
-   }
-
-   const detectionService = new FileDetectionService();
-   const records = [];
-
-   for (const file of files) {
-     // OPTIMIZED: Use pre-computed stats if available, otherwise call fs.statSync
-     const stats = file.stats || fs.statSync(file.path);
-     const originalPath = options.clientPath || file.path;
-
-     // Check if record already exists
-     const { data: existingRecords, error: checkError } = await supabase
-       .from('uploader')
-       .select('id, original_path')
-       .eq('original_path', originalPath)
-       .limit(1);
-
-     if (checkError) {
-       console.error(
-         `❌ Error checking for existing record: ${checkError.message}`,
-       );
-       continue;
-     }
-
-     if (existingRecords && existingRecords.length > 0) {
-       console.log(`⏭️ Skipping duplicate: ${path.basename(file.path)}`);
-       continue;
-     }
-
-     // Initialize record with basic file stats
-     const record = {
-       document_type: null,
-       size: stats.size,
-       num_pedimento: null,
-       filename: file.originalName || path.basename(file.path),
-       original_path: originalPath,
-       arela_path: null,
-       status: 'stats',
-       rfc: null,
-       message: null,
-     };
-
-     // Try to detect document type for supported files
-     if (detectionService.isSupportedFileType(file.path)) {
-       try {
-         const detection = await detectionService.detectFile(file.path);
-
-         if (detection.detectedType) {
-           record.document_type = detection.detectedType;
-           record.num_pedimento = detection.detectedPedimento;
-           record.status = 'detected';
-
-           // Set arela_path for pedimento_simplificado documents
-           if (detection.arelaPath) {
-             record.arela_path = detection.arelaPath;
-           }
-
-           // Extract RFC from fields if available
-           const rfcField = detection.fields.find(
-             (f) => f.name === 'rfc' && f.found,
-           );
-           if (rfcField) {
-             record.rfc = rfcField.value;
-           }
-         } else {
-           record.status = 'not-detected';
-           if (detection.error) {
-             record.message = detection.error;
-           }
-         }
-       } catch (error) {
-         console.error(`❌ Error detecting ${record.filename}:`, error.message);
-         record.status = 'detection-error';
-         record.message = error.message;
-       }
-     } else {
-       record.status = 'unsupported';
-       record.message = 'File type not supported for detection';
-     }
-
-     records.push(record);
-   }
-
-   if (records.length === 0) {
-     console.log('📝 No new records to insert (all were duplicates or errors)');
-     return [];
-   }
-
-   console.log(
-     `💾 Inserting ${records.length} new records into uploader table...`,
-   );
-
-   const { data, error } = await supabase
-     .from('uploader')
-     .insert(records)
-     .select();
-
-   if (error) {
-     throw new Error(`Failed to insert stats records: ${error.message}`);
-   }
-
-   return data;
- };
-
- /**
-  * OPTIMIZED: Insert ONLY file stats into uploader table (Phase 1)
-  * No file reading, no detection - just filesystem metadata
-  * Returns summary statistics instead of full records for better performance
-  */
- const insertStatsOnlyToUploaderTable = async (files, options) => {
-   if (!supabase) {
-     throw new Error(
-       'Supabase client not initialized. Stats mode requires Supabase connection.',
-     );
-   }
-
-   const batchSize = 1000; // Large batch size for performance
-   const allRecords = [];
-
-   // Prepare all file stats data first - OPTIMIZED to use pre-computed stats
-   console.log('📊 Collecting filesystem stats...');
-   for (const file of files) {
-     try {
-       // Use pre-computed stats if available, otherwise call fs.statSync
-       const stats = file.stats || fs.statSync(file.path);
-       const originalPath = options.clientPath || file.path;
-       const fileExtension = path
-         .extname(file.path)
-         .toLowerCase()
-         .replace('.', '');
-
-       const record = {
-         document_type: null,
-         size: stats.size,
-         num_pedimento: null,
-         filename: file.originalName || path.basename(file.path),
-         original_path: originalPath,
-         arela_path: null,
-         status: 'fs-stats',
-         rfc: null,
-         message: null,
-         file_extension: fileExtension,
-         created_at: new Date().toISOString(),
-         modified_at: stats.mtime.toISOString(),
-       };
-
-       allRecords.push(record);
-     } catch (error) {
-       console.error(`❌ Error reading stats for ${file.path}:`, error.message);
-     }
-   }
-
-   if (allRecords.length === 0) {
-     console.log('📝 No file stats to insert');
-     return { totalInserted: 0, totalSkipped: 0, totalProcessed: 0 };
-   }
-
-   console.log(
-     `💾 Bulk inserting ${allRecords.length} file stats in batches of ${batchSize}...`,
-   );
-
-   let totalInserted = 0;
-   let totalSkipped = 0;
-
-   // Process in batches for optimal performance
-   for (let i = 0; i < allRecords.length; i += batchSize) {
-     const batch = allRecords.slice(i, i + batchSize);
-
-     try {
-       // OPTIMIZED: Use upsert without select to avoid unnecessary data transfer
-       const { error, count } = await supabase.from('uploader').upsert(batch, {
-         onConflict: 'original_path',
-         ignoreDuplicates: false,
-         count: 'exact',
-       });
-
-       if (error) {
-         console.error(
-           `❌ Error inserting batch ${Math.floor(i / batchSize) + 1}:`,
-           error.message,
-         );
-         continue;
-       }
-
-       // For upsert operations, we can't easily distinguish between inserts and updates
-       // from the count alone, but we can estimate based on the assumption that most
-       // operations in --stats-only mode are likely new inserts
-       const batchProcessed = batch.length;
-
-       // Since we're using upsert with ignoreDuplicates: false, the count represents
-       // the actual number of rows affected (both inserts and updates)
-       const affected = count || batchProcessed;
-
-       // For simplicity and performance, we'll assume most are new inserts in stats-only mode
-       // This is reasonable since stats-only is typically run on new file sets
-       totalInserted += affected;
-
-       console.log(
-         `✅ Batch ${Math.floor(i / batchSize) + 1}: ${affected} rows processed`,
-       );
-     } catch (error) {
-       console.error(
-         `❌ Unexpected error in batch ${Math.floor(i / batchSize) + 1}:`,
-         error.message,
-       );
-     }
-   }
-
-   // Calculate skipped as difference between total records and inserted
-   totalSkipped = allRecords.length - totalInserted;
-
-   console.log(
-     `📊 Phase 1 Summary: ${totalInserted} records processed, estimated ${totalSkipped} were updates`,
-   );
-
-   return {
-     totalInserted,
-     totalSkipped,
-     totalProcessed: allRecords.length,
-   };
- };
-
- /**
-  * PHASE 2: Process PDF files for pedimento-simplificado detection
-  * Only processes files with status 'fs-stats' and file_extension 'pdf'
-  * Processes records in chunks of 1000 to avoid loading all records into memory
-  */
- const detectPedimentosInDatabase = async (options = {}) => {
-   if (!supabase) {
-     throw new Error('Supabase client not initialized.');
-   }
-
-   console.log(
-     '🔍 Phase 2: Starting PDF detection for pedimento-simplificado documents...',
-   );
-
-   const detectionService = new FileDetectionService();
-   const processingBatchSize = parseInt(options.batchSize) || 10; // Smaller batches for file I/O
-   const queryBatchSize = 1000; // Process 1000 records at a time
-
-   let totalDetected = 0;
-   let totalProcessed = 0;
-   let totalErrors = 0;
-   let offset = 0;
-   let chunkNumber = 1;
-
-   console.log('� Processing PDF files in chunks of 1000 records...');
-
-   // Process records in chunks of 1000
-   while (true) {
-     console.log(
-       `\n📥 Fetching chunk ${chunkNumber} (records ${offset + 1} to ${offset + queryBatchSize})...`,
-     );
-
-     // Fetch next chunk of PDF records
-     const { data: pdfRecords, error: queryError } = await supabase
-       .from('uploader')
-       .select('id, original_path, filename, file_extension, status')
-       .eq('status', 'fs-stats')
-       .eq('file_extension', 'pdf')
-       .ilike('filename', '%simp%')
-       .range(offset, offset + queryBatchSize - 1);
-
-     if (queryError) {
-       throw new Error(
-         `Failed to fetch PDF records chunk ${chunkNumber}: ${queryError.message}`,
-       );
-     }
-
-     // If no records found, we're done
-     if (!pdfRecords || pdfRecords.length === 0) {
-       console.log(`📝 No more PDF files found. Processing completed.`);
-       break;
-     }
-
-     console.log(
-       `� Processing chunk ${chunkNumber}: ${pdfRecords.length} PDF records`,
-     );
-
-     // Create progress bar for this chunk
-     const progressBar = new cliProgress.SingleBar({
-       format: `🔍 Chunk ${chunkNumber} |{bar}| {percentage}% | {value}/{total} | Detected: {detected} | Errors: {errors}`,
-       barCompleteChar: '█',
-       barIncompleteChar: '░',
-       hideCursor: true,
-     });
-
-     progressBar.start(pdfRecords.length, 0, { detected: 0, errors: 0 });
-
-     let chunkDetected = 0;
-     let chunkProcessed = 0;
-     let chunkErrors = 0;
-
-     // Process files in smaller batches within this chunk
-     for (let i = 0; i < pdfRecords.length; i += processingBatchSize) {
-       const batch = pdfRecords.slice(i, i + processingBatchSize);
-       const updatePromises = [];
-
-       for (const record of batch) {
-         try {
-           // Check if file still exists
-           if (!fs.existsSync(record.original_path)) {
-             updatePromises.push(
-               supabase
-                 .from('uploader')
-                 .update({
-                   status: 'file-not-found',
-                   message: 'File no longer exists at original path',
-                 })
-                 .eq('id', record.id),
-             );
-             chunkErrors++;
-             totalErrors++;
-             continue;
-           }
-
-           // Perform detection
-           const detection = await detectionService.detectFile(
-             record.original_path,
-           );
-           chunkProcessed++;
-           totalProcessed++;
-
-           const updateData = {
-             status: detection.detectedType ? 'detected' : 'not-detected',
-             document_type: detection.detectedType,
-             num_pedimento: detection.detectedPedimento,
-             arela_path: detection.arelaPath,
-             message: detection.error || null,
-           };
-
-           // Extract RFC from fields if available
-           if (detection.fields) {
-             const rfcField = detection.fields.find(
-               (f) => f.name === 'rfc' && f.found,
-             );
-             if (rfcField) {
-               updateData.rfc = rfcField.value;
-             }
-           }
-
-           if (detection.detectedType) {
-             chunkDetected++;
-             totalDetected++;
-           }
-
-           updatePromises.push(
-             supabase.from('uploader').update(updateData).eq('id', record.id),
-           );
-         } catch (error) {
-           console.error(
-             `❌ Error detecting ${record.filename}:`,
-             error.message,
-           );
-           chunkErrors++;
-           totalErrors++;
-
-           updatePromises.push(
-             supabase
-               .from('uploader')
-               .update({
-                 status: 'detection-error',
-                 message: error.message,
-               })
-               .eq('id', record.id),
-           );
-         }
-       }
-
-       // Execute all updates in parallel for this batch
-       try {
-         await Promise.all(updatePromises);
-       } catch (error) {
-         console.error(
-           `❌ Error updating batch in chunk ${chunkNumber}:`,
-           error.message,
-         );
-       }
-
-       // Update progress for this chunk
-       progressBar.update(Math.min(i + processingBatchSize, pdfRecords.length), {
-         detected: chunkDetected,
-         errors: chunkErrors,
-       });
-     }
-
-     progressBar.stop();
-
-     console.log(
-       `✅ Chunk ${chunkNumber} completed: ${chunkDetected} detected, ${chunkProcessed} processed, ${chunkErrors} errors`,
-     );
-
-     // Move to next chunk
-     offset += queryBatchSize;
-     chunkNumber++;
-
-     // If we got fewer records than queryBatchSize, we've reached the end
-     if (pdfRecords.length < queryBatchSize) {
-       console.log(
-         `📝 Reached end of records (chunk had ${pdfRecords.length} records).`,
-       );
-       break;
-     }
-
-     // Small delay between chunks to avoid overwhelming the database
-     await new Promise((resolve) => setTimeout(resolve, 500));
-   }
-
-   console.log(
-     `📊 Phase 2 Summary: ${totalDetected} detected, ${totalProcessed} processed, ${totalErrors} errors`,
-   );
-   return {
-     detectedCount: totalDetected,
-     processedCount: totalProcessed,
-     errorCount: totalErrors,
-   };
- };
-
- const processFilesInBatches = async (
-   files,
-   batchSize,
-   options,
-   basePath,
-   folder,
-   sourcePath,
-   processedPaths,
- ) => {
-   let totalUploaded = 0;
-   let totalDetected = 0;
-   let totalOrganized = 0;
-   let totalErrors = 0;
-   let totalSkipped = 0;
-
-   const messageBuffer = [];
-
-   const progressBarFormat = options.statsOnly
-     ? '📊 Processing [{bar}] {percentage}% | {value}/{total} files | Stats: {successCount} | Errors: {failureCount} | Duplicates: {skippedCount}'
-     : '📂 Processing [{bar}] {percentage}% | {value}/{total} files | Success: {successCount} | Errors: {failureCount} | Skipped: {skippedCount}';
-
-   const progressBar = new cliProgress.SingleBar({
-     format: progressBarFormat,
-     barCompleteChar: '█',
-     barIncompleteChar: '░',
-     hideCursor: true,
-   });
-
-   progressBar.start(files.length, 0, {
-     successCount: 0,
-     failureCount: 0,
-     skippedCount: 0,
-   });
-
-   if (options.statsOnly) {
-     // OPTIMIZED Stats-only mode - Only read filesystem stats, no file detection
-     console.log(
-       '📊 Phase 1: Processing files in optimized stats-only mode (no detection)...',
-     );
-
-     for (let i = 0; i < files.length; i += batchSize) {
-       const batch = files.slice(i, i + batchSize);
-
-       // OPTIMIZED: Batch read file stats to reduce I/O overhead
-       const fileStatsResults = batchReadFileStats(batch);
-       const statsFiles = fileStatsResults
-         .filter((result) => result.stats !== null) // Only include files with valid stats
-         .map((result) => {
-           const originalFileName = path.basename(result.path);
-
-           return {
-             path: result.path,
-             originalName: originalFileName,
-             stats: result.stats, // Pass pre-computed stats to avoid redundant calls
-           };
-         });
-
-       // Log any files that couldn't be read
-       const failedFiles = fileStatsResults.filter(
-         (result) => result.error !== null,
-       );
-       if (failedFiles.length > 0) {
-         console.log(
-           `⚠️ Could not read stats for ${failedFiles.length} files in batch`,
-         );
-         failedFiles.forEach((failed) => {
-           console.error(` ❌ ${failed.path}: ${failed.error}`);
-         });
-       }
-
-       try {
-         const result = await insertStatsOnlyToUploaderTable(
-           statsFiles,
-           options,
-         );
-
-         totalUploaded += result.totalInserted;
-         totalSkipped += result.totalSkipped;
-         totalErrors += failedFiles.length; // Count failed file reads as errors
-
-         progressBar.update(Math.min(i + batch.length, files.length), {
-           successCount: totalUploaded,
-           failureCount: totalErrors,
-           skippedCount: totalSkipped,
-         });
-       } catch (error) {
-         console.error(`❌ Error processing stats batch:`, error.message);
-         totalErrors += batch.length;
-
-         progressBar.update(Math.min(i + batch.length, files.length), {
-           successCount: totalUploaded,
-           failureCount: totalErrors,
-           skippedCount: totalSkipped,
-         });
-       }
-     }
-   } else if (apiMode && !options.forceSupabase) {
-     // API Mode - Process in batches
-     for (let i = 0; i < files.length; i += batchSize) {
-       const batch = files.slice(i, i + batchSize);
-       let sanitizedRelativePath;
-
-       const apiFiles = batch
-         .map((file) => {
-           const relativePathRaw = path
-             .relative(basePath, file)
-             .replace(/^[\\/]+/, '')
-             .replace(/\\/g, '/');
-
-           const pathParts = relativePathRaw.split('/');
-           const originalFileName = pathParts[pathParts.length - 1];
-           const sanitizedFileName = sanitizeFileName(originalFileName);
-           pathParts[pathParts.length - 1] = sanitizedFileName;
-           sanitizedRelativePath = pathParts.join('/');
-
-           let uploadPath;
-
-           // Handle combined folder structure + auto-detection
-           if (options.folderStructure && options.autoDetectStructure) {
-             // OPTIMIZED: Use cached detection to avoid redundant parsing
-             const detection = getCachedPathDetection(file, basePath);
-             if (detection.detected) {
-               const autoStructure = `${detection.year}/${detection.pedimento}`;
-               const combinedStructure = `${options.folderStructure}/${autoStructure}`;
-               uploadPath = path.posix.join(
-                 combinedStructure,
-                 sanitizedFileName,
-               );
-               logVerbose(
-                 `📁 Combined structure: ${options.folderStructure}/${autoStructure} for ${originalFileName} -> ${uploadPath}`,
-               );
-             } else {
-               // Fallback to just custom structure if auto-detection fails
-               uploadPath = path.posix.join(
-                 options.folderStructure,
-                 sanitizedFileName,
-               );
-               logVerbose(
-                 `📁 Custom structure (auto-detection failed): ${uploadPath}`,
-               );
-             }
-           } else if (options.folderStructure) {
-             // Use custom folder structure only
-             uploadPath = path.posix.join(
-               options.folderStructure,
-               sanitizedFileName,
-             );
-             logVerbose(`📁 Custom structure: ${uploadPath}`);
-           } else if (options.autoDetectStructure) {
-             // Auto-detect structure from path if enabled - OPTIMIZED: Use cached detection
-             const detection = getCachedPathDetection(file, basePath);
-             if (detection.detected) {
-               const autoStructure = `${detection.year}/${detection.pedimento}`;
-               uploadPath = path.posix.join(autoStructure, sanitizedFileName);
-               console.log(
-                 `🔍 Auto-detected: ${autoStructure} for ${originalFileName} -> ${uploadPath}`,
-               );
-             } else {
-               uploadPath = options.prefix
-                 ? path.posix.join(options.prefix, sanitizedRelativePath)
-                 : sanitizedRelativePath;
-               console.log(`📁 Using relative path: ${uploadPath}`);
-             }
-           } else {
-             uploadPath = options.prefix
-               ? path.posix.join(options.prefix, sanitizedRelativePath)
-               : sanitizedRelativePath;
-             console.log(`📁 Using standard path: ${uploadPath}`);
-           }
-
-           if (processedPaths.has(uploadPath)) {
-             totalSkipped++;
-             writeLog(`SKIPPED: ${file} -> ${uploadPath}`);
-             return null;
-           }
-
-           return {
-             path: file,
-             name: sanitizedFileName,
-             originalName: originalFileName,
-             uploadPath: uploadPath.replace(/\\/g, '/'), // Ensure forward slashes
-             contentType: mime.lookup(file) || 'application/octet-stream',
-           };
-         })
-         .filter(Boolean);
-
-       if (apiFiles.length > 0) {
-         // console.log(`🔄 Processing batch of ${apiFiles.length} files`);
-         // apiFiles.forEach(f => console.log(` 📄 ${f.name} -> ${f.uploadPath}`));
+ import appConfig from './config/config.js';
+ import UploadCommand from './commands/UploadCommand.js';
+ import ErrorHandler from './errors/ErrorHandler.js';
+ import logger from './services/LoggingService.js';
 
+ /**
+  * Arela Uploader CLI
+  * Professional file uploader with document detection and organization
+  */
+ class ArelaUploaderCLI {
+   constructor() {
+     this.program = new Command();
+     this.errorHandler = new ErrorHandler(logger);
+     this.uploadCommand = new UploadCommand();
+
+     this.#setupProgram();
+     this.#setupCommands();
+     this.#setupErrorHandling();
+   }
+
+   /**
+    * Setup the main program configuration
+    * @private
+    */
+   #setupProgram() {
+     this.program
+       .name('arela')
+       .description('CLI to upload files/directories to Arela with automatic processing')
+       .version(appConfig.packageVersion)
+       .option('-v, --verbose', 'Enable verbose logging')
+       .option('--clear-log', 'Clear the log file before starting');
+   }
+
+   /**
+    * Setup CLI commands
+    * @private
+    */
+   #setupCommands() {
+     // Main upload command
+     this.program
+       .command('upload')
+       .description('Upload files to Arela with automatic processing')
+       .option('-b, --batch-size <size>', 'Number of files to process in each batch', '10')
+       .option('-p, --prefix <prefix>', 'Prefix for uploaded files')
+       .option('--folder-structure <structure>', 'Custom folder structure for organization')
+       .option('--client-path <path>', 'Override client path for metadata')
+       .option('--auto-detect-structure', 'Automatically detect folder structure from file paths')
+       .option('--auto-detect', 'Enable automatic document type detection')
+       .option('--auto-organize', 'Enable automatic file organization')
+       .option('--force-supabase', 'Force direct Supabase upload mode')
+       .option('--skip-processed', 'Skip files that have already been processed')
+       .option('--show-stats', 'Show performance statistics')
+       .option('--upload-by-rfc', 'Upload files based on specific RFC values from UPLOAD_RFCS')
+       .option('--run-all-phases', 'Run all processing phases (stats, detection, organization)')
+       .action(async (options) => {
          try {
-           // Use clientPath from options if specified, otherwise construct from detection or folder
-           let clientPath = options.clientPath;
-
-           if (!clientPath && apiFiles.length > 0) {
-             const firstFile = apiFiles[0];
-             // OPTIMIZED: Use cached detection to avoid redundant parsing
-             const detection = getCachedPathDetection(firstFile.path, basePath);
-             if (detection.detected) {
-               // clientPath = `${detection.year}/${detection.pedimento}/`;
-               clientPath = path
-                 .resolve(basePath, sanitizedRelativePath)
-                 .replace(/\\/g, '/');
-             } else {
-               // Fallback to folder structure if no year/pedimento detected
-               clientPath = path.resolve(basePath, folder).replace(/\\/g, '/');
-             }
+           // Handle --upload-by-rfc as a specific operation
+           if (options.uploadByRfc) {
+             const databaseService = await import('./services/DatabaseService.js');
+             console.log('🎯 Running RFC-based upload...');
+             const result = await databaseService.default.uploadFilesByRfc({
+               batchSize: parseInt(options.batchSize) || 10,
+               showProgress: true,
+               folderStructure: options.folderStructure,
+             });
+             console.log(`✅ RFC upload completed: ${result.processedCount} processed, ${result.uploadedCount} uploaded, ${result.errorCount} errors`);
+             return;
            }
-
-           const result = await uploadToApi(apiFiles, {
-             ...options,
-             clientPath: clientPath,
-           });
-
-           totalUploaded += result.stats.uploadedCount;
-           totalDetected += result.stats.detectedCount;
-           totalOrganized += result.stats.organizedCount;
-           totalErrors += result.stats.errorCount;
-
-           result.uploaded.forEach((upload) => {
-             const apiFile = apiFiles.find(
-               (f) => f.name === upload.originalName,
-             );
-             if (apiFile) {
-               writeLog(`SUCCESS: ${apiFile.path} -> ${apiFile.uploadPath}`);
-               processedPaths.add(apiFile.uploadPath);
-             }
-           });
-
-           result.errors.forEach((error) => {
-             writeLog(
-               `ERROR: ${error.fileName}: ${error.error} (${error.step})`,
-             );
-             messageBuffer.push(
-               `❌ ${error.fileName}: ${error.error} (${error.step})`,
-             );
-           });
+
+           await this.uploadCommand.execute(options);
          } catch (error) {
-           totalErrors += apiFiles.length;
-           apiFiles.forEach((file) => {
-             writeLog(`ERROR: ${file.path}: ${error.message}`);
-             messageBuffer.push(`❌ ${file.name}: ${error.message}`);
-           });
+           this.errorHandler.handleFatalError(error, { command: 'upload' });
          }
-       }
-
-       progressBar.update(i + batch.length, {
-         successCount: totalUploaded,
-         failureCount: totalErrors,
-         skippedCount: totalSkipped,
        });
 
-       if (i + batchSize < files.length) {
-         await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY));
-       }
-     }
-   } else {
-     // Direct Supabase mode
-     for (let i = 0; i < files.length; i++) {
-       const file = files[i];
-       try {
-         const relativePath = path.relative(basePath, file);
-         let uploadPath;
-
-         // Handle combined folder structure + auto-detection
-         if (options.folderStructure && options.autoDetectStructure) {
-           const detection = getCachedPathDetection(file, basePath);
-           if (detection.detected) {
-             const autoStructure = `${detection.year}/${detection.pedimento}`;
-             const combinedStructure = `${options.folderStructure}/${autoStructure}`;
-             const fileName = path.basename(file);
-             uploadPath = path.join(combinedStructure, fileName);
-             console.log(
-               `📁 Combined structure: ${options.folderStructure}/${autoStructure} for ${fileName}`,
-             );
-           } else {
-             // Fallback to just custom structure if auto-detection fails
-             const fileName = path.basename(file);
-             uploadPath = path.join(options.folderStructure, fileName);
-             console.log(
-               `📁 Custom structure (auto-detection failed): ${uploadPath}`,
-             );
-           }
-         } else if (options.folderStructure) {
-           // Use custom folder structure only
-           const fileName = path.basename(file);
-           uploadPath = path.join(options.folderStructure, fileName);
-           console.log(`📁 Custom structure: ${uploadPath}`);
-         } else if (options.autoDetectStructure) {
-           // Auto-detect structure from path if enabled - OPTIMIZED: Use cached detection
-           const detection = getCachedPathDetection(file, basePath);
-           if (detection.detected) {
-             const autoStructure = `${detection.year}/${detection.pedimento}`;
-             const fileName = path.basename(file);
-             uploadPath = path.join(autoStructure, fileName);
-           } else {
-             uploadPath = options.prefix
-               ? path.join(options.prefix, relativePath)
-               : relativePath;
-           }
-         } else {
-           uploadPath = options.prefix
-             ? path.join(options.prefix, relativePath)
-             : relativePath;
-         }
-
-         if (processedPaths.has(uploadPath)) {
-           totalSkipped++;
-           writeLog(`SKIPPED: ${file} -> ${uploadPath}`);
-         } else {
-           await uploadToSupabase(file, uploadPath);
-           totalUploaded++;
-           writeLog(`SUCCESS: ${file} -> ${uploadPath}`);
-           processedPaths.add(uploadPath);
+       // Stats-only command
+       this.program
+         .command('stats')
+         .description('Collect file statistics without uploading')
+         .option('-b, --batch-size <size>', 'Number of files to process in each batch', '10')
+         .option('--client-path <path>', 'Override client path for metadata')
+         .option('--stats-only', 'Collect file statistics without uploading (backward compatibility)')
+         .option('--run-all-phases', 'Run all processing phases (stats, detection, organization)')
+         .option('--show-stats', 'Show performance statistics')
+         .action(async (options) => {
+           try {
+             const statsOptions = { ...options, statsOnly: true };
+             await this.uploadCommand.execute(statsOptions);
+           } catch (error) {
+             this.errorHandler.handleFatalError(error, { command: 'stats' });
          }
-       } catch (error) {
-         totalErrors++;
-         writeLog(`ERROR: ${file}: ${error.message}`);
-         messageBuffer.push(`❌ ${path.basename(file)}: ${error.message}`);
-       }
-
-       progressBar.update(i + 1, {
-         successCount: totalUploaded,
-         failureCount: totalErrors,
-         skippedCount: totalSkipped,
        });
-     }
-   }
-
-   progressBar.stop();
-
-   const errorMessages = messageBuffer.filter((msg) => msg.startsWith('❌'));
-   if (errorMessages.length > 0) {
-     console.log('\n🚨 Errors encountered during processing:');
-     errorMessages.forEach((msg) => console.error(msg));
-   }
-
-   return {
-     successCount: totalUploaded,
-     detectedCount: totalDetected,
-     organizedCount: totalOrganized,
-     failureCount: totalErrors,
-     skippedCount: totalSkipped,
-   };
- };
-
- /**
-  * Upload files to Arela API based on specific RFC values
-  */
- const uploadFilesByRfc = async (options = {}) => {
-   if (!supabase) {
-     console.error('❌ Supabase client not initialized');
-     process.exit(1);
-   }
-
-   if (!API_BASE_URL || !API_TOKEN) {
-     console.error(
-       '❌ Arela API configuration missing. Please set ARELA_API_URL and ARELA_API_TOKEN environment variables.',
-     );
-     process.exit(1);
-   }
-
-   if (!uploadRfcs || uploadRfcs.length === 0) {
-     console.error(
-       '❌ No RFCs specified. Please set UPLOAD_RFCS environment variable with pipe-separated RFC values.',
-     );
-     console.error(
-       ' Example: UPLOAD_RFCS="RFC123456789|RFC987654321|RFC555444333"',
-     );
-     process.exit(1);
-   }
-
-   console.log('🎯 RFC-based Upload Mode');
-   console.log(`📋 Target RFCs: ${uploadRfcs.join(', ')}`);
-   console.log('🔍 Searching for files to upload...');
-
-   // Step 1: Get all records that match the specified RFCs and have arela_path
-   const { data: rfcRecords, error: rfcError } = await supabase
-     .from('uploader')
-     .select('arela_path')
-     .in('rfc', uploadRfcs)
-     .not('arela_path', 'is', null);
-
-   if (rfcError) {
-     console.error('❌ Error fetching RFC records:', rfcError.message);
-     return { processedCount: 0, uploadedCount: 0, errorCount: 1 };
-   }
-
-   if (!rfcRecords || rfcRecords.length === 0) {
-     console.log('ℹ️ No files found for the specified RFCs with arela_path');
-     console.log(
-       ` Make sure files for RFCs [${uploadRfcs.join(', ')}] have been processed and have arela_path values`,
-     );
-     return { processedCount: 0, uploadedCount: 0, errorCount: 0 };
-   }
-
-   // Step 2: Get unique arela_paths from the RFC matches
-   const uniqueArelaPaths = [...new Set(rfcRecords.map((r) => r.arela_path))];
-   console.log(
-     `� Found ${uniqueArelaPaths.length} unique arela_path(s) for the specified RFCs`,
-   );
-
-   // Step 3: Get ALL files that have these arela_paths (including supporting documents)
-   // Process arela_paths in smaller chunks to avoid URI length limits
-   let allRelatedFiles = [];
-   const arelaPathChunkSize = 50; // Process 50 arela_paths at a time to avoid URI limits
-   const queryBatchSize = 1000;
-
-   console.log('📥 Fetching all related files (processing arela_paths in chunks to avoid URI limits)...');
-
-   // Process arela_paths in chunks
-   for (let i = 0; i < uniqueArelaPaths.length; i += arelaPathChunkSize) {
-     const arelaPathChunk = uniqueArelaPaths.slice(i, i + arelaPathChunkSize);
-     console.log(` Processing arela_path chunk ${Math.floor(i / arelaPathChunkSize) + 1}/${Math.ceil(uniqueArelaPaths.length / arelaPathChunkSize)} (${arelaPathChunk.length} paths)`);
-
-     // For each chunk of arela_paths, use pagination to get all related files
-     let hasMore = true;
-     let offset = 0;
-
-     while (hasMore) {
-       const { data: batch, error: queryError } = await supabase
-         .from('uploader')
-         .select('id, original_path, arela_path, filename, rfc, document_type')
-         .in('arela_path', arelaPathChunk)
-         .not('original_path', 'is', null)
-         .range(offset, offset + queryBatchSize - 1);
-
-       if (queryError) {
-         console.error(`❌ Error fetching related files for chunk ${Math.floor(i / arelaPathChunkSize) + 1}:`, queryError.message);
-         return { processedCount: 0, uploadedCount: 0, errorCount: 1 };
-       }
-
-       if (!batch || batch.length === 0) {
-         hasMore = false;
-       } else {
-         allRelatedFiles = allRelatedFiles.concat(batch);
-         offset += queryBatchSize;
-
-         // If we got less than queryBatchSize, we've reached the end for this chunk
-         if (batch.length < queryBatchSize) {
-           hasMore = false;
-         }
-       }
-     }
-
-     // Small delay between chunks to avoid overwhelming the database
-     if (i + arelaPathChunkSize < uniqueArelaPaths.length) {
-       await new Promise((resolve) => setTimeout(resolve, 100));
-     }
-   }
-
-   if (!allRelatedFiles || allRelatedFiles.length === 0) {
-     console.log('ℹ️ No related files found for the arela_paths');
-     return { processedCount: 0, uploadedCount: 0, errorCount: 0 };
-   }
-
-   console.log(
-     `📁 Found ${allRelatedFiles.length} total files to upload (including supporting documents)`,
-   );
-
-   // Group by RFC and arela_path for better organization
-   const filesByRfc = allRelatedFiles.reduce((acc, record) => {
-     const rfc = record.rfc || 'No RFC';
-     if (!acc[rfc]) {
-       acc[rfc] = [];
-     }
-     acc[rfc].push(record);
-     return acc;
-   }, {});
-
-   console.log('📊 Files by RFC (including supporting documents):');
-   for (const [rfc, files] of Object.entries(filesByRfc)) {
-     const documentTypes = [
-       ...new Set(files.map((f) => f.document_type || 'Unknown')),
-     ];
-     console.log(
-       ` ${rfc}: ${files.length} files (${documentTypes.join(', ')})`,
-     );
-   }
-
-   // Group by arela_path for upload organization
-   const filesByPath = allRelatedFiles.reduce((acc, record) => {
-     const path = record.arela_path;
-     if (!acc[path]) {
-       acc[path] = [];
-     }
-     acc[path].push(record);
-     return acc;
-   }, {});
-
-   console.log('� Files grouped by arela_path:');
-   for (const [path, files] of Object.entries(filesByPath)) {
-     console.log(` ${path}: ${files.length} files`);
-   }
-
-   let totalProcessed = 0;
-   let totalUploaded = 0;
-   let totalErrors = 0;
-   let totalSkipped = 0;
-
-   // Create progress bar
-   const progressBar = new cliProgress.SingleBar({
-     format:
-       '🚀 Uploading files |{bar}| {percentage}% | {value}/{total} | Uploaded: {uploaded} | Errors: {errors} | Skipped: {skipped}',
-     barCompleteChar: '█',
-     barIncompleteChar: '░',
-     hideCursor: true,
-   });
-
-   if (options.showProgress !== false) {
-     progressBar.start(allRelatedFiles.length, 0, {
-       uploaded: 0,
-       errors: 0,
-       skipped: 0,
-     });
-   }
-
-   const batchSize = parseInt(options.batchSize) || 10;
-   console.log(`📦 Processing in batches of ${batchSize} files`);
-
-   // Process files in batches
-   for (let i = 0; i < allRelatedFiles.length; i += batchSize) {
-     const batch = allRelatedFiles.slice(i, i + batchSize);
-     const batchNumber = Math.floor(i / batchSize) + 1;
-     const totalBatches = Math.ceil(allRelatedFiles.length / batchSize);
-
-     console.log(
-       `\n📦 Processing batch ${batchNumber}/${totalBatches} (${batch.length} files)`,
-     );
-
-     // Prepare files for upload
-     const filesToUpload = [];
-
-     for (const record of batch) {
-       totalProcessed++;
-
-       try {
-         const originalPath = record.original_path;
-
-         // Check if file exists
-         if (!fs.existsSync(originalPath)) {
-           console.log(` ⚠️ File not found: ${originalPath}`);
-           totalSkipped++;
-           continue;
-         }
-
-         // OPTIMIZED: Read file and get size from buffer instead of separate fs.statSync call
-         const fileBuffer = fs.readFileSync(originalPath);
-
-         filesToUpload.push({
-           path: originalPath,
-           buffer: fileBuffer,
-           size: fileBuffer.length, // Get size from buffer instead of fs.statSync
-           name: record.filename,
-           arelaPath: record.arela_path,
-           rfc: record.rfc,
-           documentType: record.document_type,
-         });
-       } catch (error) {
-         console.error(
-           ` ❌ Error reading file ${record.original_path}:`,
-           error.message,
-         );
-         totalErrors++;
-       }
 
-       if (options.showProgress !== false) {
-         progressBar.update(totalProcessed, {
-           uploaded: totalUploaded,
-           errors: totalErrors,
-           skipped: totalSkipped,
-         });
-       }
-     }
-
-     // Upload the batch if we have files
-     if (filesToUpload.length > 0) {
-       try {
-         console.log(
-           ` 🚀 Uploading ${filesToUpload.length} files to Arela API...`,
-         );
-
-         const formData = new FormData();
-
-         // Add files to form data
-         filesToUpload.forEach((file, index) => {
-           formData.append(`files`, file.buffer, {
-             filename: file.name,
-             contentType: mime.lookup(file.name) || 'application/octet-stream',
-           });
-         });
-
-         // Instead of using per-file folder structures, we'll group by arela_path and upload separately
-         // Group files by their arela_path to upload them in correct structure
-         const filesByPath = filesToUpload.reduce((acc, file) => {
-           const path = file.arelaPath.replace(/\/$/, '');
-           if (!acc[path]) {
-             acc[path] = [];
-           }
-           acc[path].push(file);
-           return acc;
-         }, {});
-
-         // Upload each group separately with its folder structure
-         for (const [arelaPath, pathFiles] of Object.entries(filesByPath)) {
-           const pathFormData = new FormData();
-
-           pathFiles.forEach((file) => {
-             pathFormData.append('files', file.buffer, {
-               filename: file.name,
-               contentType: mime.lookup(file.name) || 'application/octet-stream',
+       // Detection command
+       this.program
+         .command('detect')
+         .description('Run document detection on existing file records')
+         .option('-b, --batch-size <size>', 'Number of files to process in each batch', '10')
+         .option('--detect-pdfs', 'Run PDF detection on existing database records (backward compatibility)')
+         .option('--propagate-arela-path', 'Propagate arela_path from pedimento records to related files')
+         .action(async (options) => {
+           try {
+             const databaseService = await import('./services/DatabaseService.js');
+
+             // Handle --propagate-arela-path as a specific operation
+             if (options.propagateArelaPath) {
+               console.log('🔄 Running arela_path propagation...');
+               const result = await databaseService.default.propagateArelaPath({
+                 showProgress: true,
              });
-           });
-
-           // Set folder structure for this group - concatenate custom prefix with arela_path
-           const folderStructure = options.folderStructure
-             ? `${options.folderStructure}/${arelaPath}`
-                 .replace(/\/+/g, '/')
-                 .replace(/\/$/, '')
-             : arelaPath;
-           pathFormData.append('folderStructure', folderStructure);
-           pathFormData.append('autoDetect', 'true');
-           pathFormData.append('autoOrganize', 'false');
-           pathFormData.append('batchSize', String(pathFiles.length));
-           pathFormData.append('clientVersion', packageVersion);
-           if (bucket) {
-             pathFormData.append('bucket', bucket);
-           }
-
-           console.log(
-             ` 📁 Uploading ${pathFiles.length} files to: ${folderStructure}`,
-           );
-
-           const response = await fetch(
-             `${API_BASE_URL}/api/storage/batch-upload-and-process`,
-             {
-               method: 'POST',
-               headers: {
-                 'x-api-key': API_TOKEN,
-               },
-               body: pathFormData,
-             },
-           );
-
-           if (!response.ok) {
-             const errorText = await response.text();
-             throw new Error(`HTTP ${response.status}: ${errorText}`);
-           }
-
-           const result = await response.json();
-
-           // Check if upload was successful based on stats rather than success field
-           const isSuccessful =
-             result.stats &&
-             result.stats.uploadedCount > 0 &&
-             result.stats.errorCount === 0;
-
-           if (isSuccessful) {
-             console.log(
-               ` ✅ Group uploaded: ${result.stats.uploadedCount} files to ${folderStructure}`,
-             );
-             totalUploaded += result.stats.uploadedCount;
-
-             if (result.stats.detectedCount > 0) {
-               console.log(
-                 ` 🔍 Files detected: ${result.stats.detectedCount}`,
-               );
-             }
-             if (result.stats.organizedCount > 0) {
-               console.log(
-                 ` 📁 Files organized: ${result.stats.organizedCount}`,
-               );
-             }
-           } else {
-             console.error(` ❌ Upload failed for ${folderStructure}:`);
-             if (result.errors && result.errors.length > 0) {
-               result.errors.forEach((error) => {
-                 console.error(` - ${error.fileName}: ${error.error}`);
-               });
-             }
-             totalErrors += pathFiles.length;
+               console.log(`✅ Propagation completed: ${result.processedCount} processed, ${result.updatedCount} updated, ${result.errorCount} errors`);
+               return;
            }
-
-           // Small delay between path groups
-           await new Promise((resolve) => setTimeout(resolve, 100));
+
+             // Default behavior: run PDF detection
+             console.log('🔍 Running PDF detection on existing database records...');
+             const result = await databaseService.default.detectPedimentosInDatabase({
+               batchSize: parseInt(options.batchSize) || 10,
+             });
+             console.log(`✅ Detection completed: ${result.detectedCount} detected, ${result.processedCount} processed, ${result.errorCount} errors`);
+           } catch (error) {
+             this.errorHandler.handleFatalError(error, { command: 'detect' });
          }
-       } catch (error) {
-         console.error(
-           ` ❌ Error uploading batch ${batchNumber}:`,
-           error.message,
-         );
-         totalErrors += filesToUpload.length;
-       }
-     }
-
-     // Small delay between batches
-     if (i + batchSize < allRelatedFiles.length) {
-       await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY));
-     }
-   }
-
-   if (options.showProgress !== false) {
-     progressBar.stop();
-   }
-
-   console.log(`\n${'='.repeat(60)}`);
-   console.log(`🎯 RFC-BASED UPLOAD COMPLETED`);
-   console.log(`${'='.repeat(60)}`);
-   console.log(` 📋 Files processed: ${totalProcessed}`);
-   console.log(` ✅ Files uploaded: ${totalUploaded}`);
-   console.log(` ⏭️ Files skipped: ${totalSkipped}`);
-   console.log(` ❌ Errors: ${totalErrors}`);
-   console.log(`${'='.repeat(60)}\n`);
1673
-
1674
- return {
1675
- processedCount: totalProcessed,
1676
- uploadedCount: totalUploaded,
1677
- skippedCount: totalSkipped,
1678
- errorCount: totalErrors,
1679
- };
1680
- };
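
The removed uploadFilesByRfc logic above reduces to one core pattern: group files by their arela_path, then POST each group as a single multipart batch. A minimal standalone sketch of that pattern, assuming the form-data and node-fetch dependencies that 0.2.3 imports; the function name and parameters are illustrative, while the endpoint, header, and form field names are taken from the removed code:

    import FormData from 'form-data';
    import fetch from 'node-fetch';

    // Group files by destination folder, then POST one multipart batch per group.
    async function uploadGroupedByPath(files, apiBaseUrl, apiToken) {
      const groups = files.reduce((acc, file) => {
        const key = file.arelaPath.replace(/\/$/, ''); // strip trailing slash
        (acc[key] ??= []).push(file);
        return acc;
      }, {});

      for (const [folderStructure, groupFiles] of Object.entries(groups)) {
        const form = new FormData();
        for (const file of groupFiles) {
          form.append('files', file.buffer, { filename: file.name });
        }
        form.append('folderStructure', folderStructure);

        // Endpoint and auth header as in the removed code above.
        const response = await fetch(
          `${apiBaseUrl}/api/storage/batch-upload-and-process`,
          { method: 'POST', headers: { 'x-api-key': apiToken }, body: form },
        );
        if (!response.ok) {
          throw new Error(`HTTP ${response.status}: ${await response.text()}`);
        }
      }
    }

Grouping first means each request carries exactly one folderStructure value, so the server never has to map individual files to folders.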
-
- /**
- * Propagate arela_path from pedimento_simplificado records to related files with same base path
- */
- const propagateArelaPath = async (options = {}) => {
- if (!supabase) {
- console.error('❌ Supabase client not initialized');
- process.exit(1);
- }
-
- console.log('🔍 Finding pedimento_simplificado records with arela_path...');
-
- // Get all pedimento_simplificado records that have arela_path
- const { data: pedimentoRecords, error: pedimentoError } = await supabase
- .from('uploader')
- .select('id, original_path, arela_path, filename')
- .eq('document_type', 'pedimento_simplificado')
- .not('arela_path', 'is', null);
-
- if (pedimentoError) {
- console.error(
- '❌ Error fetching pedimento records:',
- pedimentoError.message,
- );
- return { processedCount: 0, updatedCount: 0, errorCount: 1 };
- }
-
- if (!pedimentoRecords || pedimentoRecords.length === 0) {
- console.log('ℹ️ No pedimento_simplificado records with arela_path found');
- return { processedCount: 0, updatedCount: 0, errorCount: 0 };
- }
-
- console.log(
- `📋 Found ${pedimentoRecords.length} pedimento records with arela_path`,
- );
-
- let totalProcessed = 0;
- let totalUpdated = 0;
- let totalErrors = 0;
-
- // Create progress bar
- const progressBar = new cliProgress.SingleBar({
- format:
- '🔄 Propagating paths |{bar}| {percentage}% | {value}/{total} | Updated: {updated} | Errors: {errors}',
- barCompleteChar: '█',
- barIncompleteChar: '░',
- hideCursor: true,
- });
-
- if (options.showProgress !== false) {
- progressBar.start(pedimentoRecords.length, 0, {
- updated: 0,
- errors: 0,
- });
- }
-
- // Process each pedimento record
- for (const pedimento of pedimentoRecords) {
- try {
- totalProcessed++;
-
- // Extract base path from original_path (remove filename)
- const basePath = path.dirname(pedimento.original_path);
-
- console.log(`\n🔍 Processing: ${pedimento.filename}`);
- console.log(` 📁 Base path: ${basePath}`);
-
- // Extract folder part from existing arela_path by removing the filename
- const existingPath = pedimento.arela_path;
- const folderArelaPath = existingPath.includes('/')
- ? existingPath.substring(0, existingPath.lastIndexOf('/')) + '/'
- : existingPath.endsWith('/')
- ? existingPath
- : existingPath + '/';
-
- console.log(` 🎯 Original arela path: ${existingPath}`);
- console.log(` 📁 Folder arela path: ${folderArelaPath}`);
-
- // Find all files with the same base path that don't have arela_path yet
- const { data: relatedFiles, error: relatedError } = await supabase
- .from('uploader')
- .select('id, filename, original_path')
- .like('original_path', `${basePath}%`)
- .is('arela_path', null)
- .neq('id', pedimento.id); // Exclude the pedimento itself
-
- if (relatedError) {
- console.error(
- `❌ Error finding related files for ${pedimento.filename}:`,
- relatedError.message,
- );
- totalErrors++;
- continue;
- }
-
- if (!relatedFiles || relatedFiles.length === 0) {
- console.log(` ℹ️ No related files found needing arela_path update`);
- continue;
- }
-
- console.log(
- ` 📄 Found ${relatedFiles.length} related files to update:`,
- );
-
- // Show first 10 files, then indicate if there are more
- const filesToShow = relatedFiles.slice(0, 10);
- filesToShow.forEach((file) => {
- console.log(` - ${file.filename}`);
  });
 
- if (relatedFiles.length > 10) {
- console.log(` ... and ${relatedFiles.length - 10} more files`);
- }
-
- // Process files in batches to avoid URI length limitations
- const BATCH_SIZE = 50; // Process 50 files at a time
- const fileIds = relatedFiles.map((f) => f.id);
- let batchErrors = 0;
- let batchUpdated = 0;
-
- console.log(
- ` 🔄 Processing ${relatedFiles.length} files in batches of ${BATCH_SIZE}...`,
- );
-
- for (let i = 0; i < fileIds.length; i += BATCH_SIZE) {
- const batchIds = fileIds.slice(i, i + BATCH_SIZE);
- const batchNumber = Math.floor(i / BATCH_SIZE) + 1;
- const totalBatches = Math.ceil(fileIds.length / BATCH_SIZE);
-
- console.log(
- ` 📦 Batch ${batchNumber}/${totalBatches}: Updating ${batchIds.length} files...`,
- );
-
+ // Configuration command
+ this.program
+ .command('config')
+ .description('Show current configuration')
+ .action(() => {
  try {
- const { error: updateError } = await supabase
- .from('uploader')
- .update({ arela_path: folderArelaPath })
- .in('id', batchIds);
-
- if (updateError) {
- console.error(
- ` ❌ Error in batch ${batchNumber}:`,
- updateError.message,
- );
- batchErrors++;
- } else {
- console.log(
- ` ✅ Batch ${batchNumber} completed: ${batchIds.length} files updated`,
- );
- batchUpdated += batchIds.length;
- }
+ this.#showConfiguration();
  } catch (error) {
- console.error(
- ` ❌ Exception in batch ${batchNumber}:`,
- error.message,
- );
- batchErrors++;
- }
-
- // Small delay between batches to avoid overwhelming the database
- if (i + BATCH_SIZE < fileIds.length) {
- await new Promise((resolve) => setTimeout(resolve, 100));
+ this.errorHandler.handleFatalError(error, { command: 'config' });
  }
- }
-
- if (batchErrors > 0) {
- console.error(
- `❌ ${batchErrors} batch(es) failed for ${pedimento.filename}`,
- );
- totalErrors++;
- } else {
- console.log(` 🎯 Successfully updated ${batchUpdated} related files`);
- totalUpdated += batchUpdated;
- }
- } catch (error) {
- console.error(
- `❌ Error processing ${pedimento.filename}:`,
- error.message,
- );
- totalErrors++;
- }
-
- if (options.showProgress !== false) {
- progressBar.update(totalProcessed, {
- updated: totalUpdated,
- errors: totalErrors,
  });
- }
- }
-
- if (options.showProgress !== false) {
- progressBar.stop();
- }
-
- console.log(`\n${'='.repeat(60)}`);
- console.log(`🎯 ARELA PATH PROPAGATION COMPLETED`);
- console.log(`${'='.repeat(60)}`);
- console.log(` 📋 Pedimento records processed: ${totalProcessed}`);
- console.log(` ✅ Related files updated: ${totalUpdated}`);
- console.log(` ❌ Errors: ${totalErrors}`);
- console.log(`${'='.repeat(60)}\n`);
 
- return {
- processedCount: totalProcessed,
- updatedCount: totalUpdated,
- errorCount: totalErrors,
- };
- };
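
The removed propagateArelaPath above works around URI length limits by updating ids in fixed-size slices of 50 rather than one large IN clause. A minimal sketch of that chunked-update pattern, assuming the @supabase/supabase-js client that 0.2.3 imports; table and column names mirror the removed code, while the helper name is illustrative:

    import { createClient } from '@supabase/supabase-js';

    const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_KEY);

    // Update rows in fixed-size id slices so each generated IN (...) filter
    // stays within URL length limits (the removed code uses slices of 50).
    async function updateArelaPathInChunks(ids, folderArelaPath, chunkSize = 50) {
      for (let i = 0; i < ids.length; i += chunkSize) {
        const chunk = ids.slice(i, i + chunkSize);
        const { error } = await supabase
          .from('uploader')
          .update({ arela_path: folderArelaPath })
          .in('id', chunk);
        if (error) {
          throw new Error(`Batch ${Math.floor(i / chunkSize) + 1} failed: ${error.message}`);
        }
      }
    }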
-
- program
- .name('arela-uploader')
- .description(
- 'CLI to upload folders to Arela API or Supabase Storage with automatic processing',
- )
- .option('-v, --version', 'output the version number')
- .option('-p, --prefix <prefix>', 'Prefix path in bucket', '')
- .option('-b, --bucket <bucket>', 'Bucket name override')
- .option('--force-supabase', 'Force direct Supabase upload (skip API)')
- .option(
- '--no-auto-detect',
- 'Disable automatic file detection (API mode only)',
- )
- .option(
- '--no-auto-organize',
- 'Disable automatic file organization (API mode only)',
- )
- .option(
- '-c, --concurrency <number>',
- 'Files per batch for processing (default: 10)',
- '10',
- )
- .option('--batch-size <number>', 'API batch size (default: 10)', '10')
- .option('--show-stats', 'Show detailed processing statistics')
- .option(
- '--folder-structure <structure>',
- 'Custom folder structure (e.g., "2024/4023260" or "cliente1/pedimentos")',
- )
- .option(
- '--auto-detect-structure',
- 'Automatically detect year/pedimento from file paths',
- )
- .option('--client-path <path>', 'Client path for metadata tracking')
- .option(
- '--stats-only',
- 'Phase 1: Only read filesystem stats and insert to database (no file reading or detection)',
- )
- .option('--no-detect', 'Disable document type detection in stats-only mode')
- .option(
- '--detect-pdfs',
- 'Phase 2: Process PDF files in database for pedimento-simplificado detection',
- )
- .option(
- '--propagate-arela-path',
- 'Phase 3: Propagate arela_path from pedimento_simplificado records to related files with same base path',
- )
- .option(
- '--upload-by-rfc',
- 'Phase 4: Upload files to Arela API based on RFC values from UPLOAD_RFCS environment variable',
- )
- .option(
- '--run-all-phases',
- 'Run all 4 phases in sequence: stats → detect → propagate → upload',
- )
- .action(async (options) => {
- if (options.version) {
- console.log(packageVersion);
- process.exit(0);
- }
-
- // Handle detect-pdfs option (Phase 2)
- if (options.detectPdfs) {
- console.log('🔍 Starting Phase 2: PDF Detection');
- await checkCredentials(true); // Force Supabase mode
-
- const result = await detectPedimentosInDatabase({
- batchSize: parseInt(options.batchSize) || 10,
+ // Query command for inspection
+ this.program
+ .command('query')
+ .description('Query database for file status and information')
+ .option('--ready-files', 'Show files that are ready for upload (detected but not uploaded)')
+ .action(async (options) => {
+ try {
+ const databaseService = await import('./services/DatabaseService.js');
+
+ if (options.readyFiles) {
+ console.log('🔍 Querying files ready for upload...');
+
+ const readyFiles = await databaseService.default.getFilesReadyForUpload();
+
+ if (readyFiles.length === 0) {
+ console.log('ℹ️ No files are currently ready for upload');
+ console.log(' Tip: Run "arela detect" and "arela detect --propagate-arela-path" first to prepare files for upload');
+ } else {
+ console.log(`\n📋 ${readyFiles.length} files are ready for upload!`);
+ console.log(' Use "arela upload --upload-by-rfc" to upload them to Arela API');
+ }
+ return;
+ }
+
+ // Default behavior: show help for query command
+ console.log('Available query options:');
+ console.log(' --ready-files Show files ready for upload');
+ } catch (error) {
+ this.errorHandler.handleFatalError(error, { command: 'query' });
+ }
  });
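
Each new subcommand lazy-loads its service with a dynamic import() inside the action handler, so unrelated commands and --help never pay that module's startup cost. A minimal sketch of the pattern, assuming commander; only the module path and method name are taken from the added code, the rest is illustrative:

    import { Command } from 'commander';

    const program = new Command();
    program
      .command('detect')
      .action(async () => {
        // Service module is loaded only when this command actually runs.
        const databaseService = await import('./services/DatabaseService.js');
        await databaseService.default.detectPedimentosInDatabase({ batchSize: 10 });
      });
    await program.parseAsync();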
 
- console.log(
- `✅ Phase 2 Complete: ${result.detectedCount} detected, ${result.errorCount} errors`,
- );
- return;
- }
-
- // Handle run-all-phases option
- if (options.runAllPhases) {
- console.log('🚀 Starting all 4 phases in sequence...');
- await checkCredentials(true); // Force Supabase mode
-
- // Phase 1: Stats collection
- console.log('\n📊 === PHASE 1: Filesystem Stats ===');
- options.statsOnly = true;
- // Continue with normal processing to run Phase 1
-
- // The rest will be handled after Phase 1 completes
- }
-
- // Handle propagate-arela-path option
- if (options.propagateArelaPath) {
- // Initialize Supabase credentials for propagation
- await checkCredentials(true); // Force Supabase mode
-
- const result = await propagateArelaPath({
- showProgress: options.showStats || true,
+ // Version command (already handled by program.version())
+
+ // Help command
+ this.program
+ .command('help')
+ .description('Show help information')
+ .action(() => {
+ this.program.help();
  });
+ }
 
- if (result.errorCount > 0) {
- process.exit(1);
- }
- return;
- }
-
- // Handle upload-by-rfc option
- if (options.uploadByRfc) {
- // RFC upload needs both Supabase (for database queries) and API (for uploads)
- await checkCredentials(false); // Initialize API mode
-
- // Also initialize Supabase for database queries
- if (!supabase) {
- if (!supabaseUrl || !supabaseKey) {
- console.error(
- '❌ RFC upload requires Supabase credentials for database queries.',
- );
- console.error(
- ' Please set SUPABASE_URL and SUPABASE_KEY environment variables.',
- );
- process.exit(1);
- }
-
- supabase = createClient(supabaseUrl, supabaseKey);
- console.log('✅ Connected to Supabase for database queries');
- }
+ /**
+ * Setup global error handling
+ * @private
+ */
+ #setupErrorHandling() {
+ // Handle uncaught exceptions
+ process.on('uncaughtException', (error) => {
+ this.errorHandler.handleFatalError(error, { context: 'uncaughtException' });
+ });
 
- const result = await uploadFilesByRfc({
- showProgress: options.showStats || true,
- batchSize: parseInt(options.batchSize) || 10,
- folderStructure: options.folderStructure,
+ // Handle unhandled promise rejections
+ process.on('unhandledRejection', (reason, promise) => {
+ const error = reason instanceof Error ? reason : new Error(String(reason));
+ this.errorHandler.handleFatalError(error, {
+ context: 'unhandledRejection',
+ promise: promise.toString()
  });
+ });
 
- if (result.errorCount > 0) {
- process.exit(1);
- }
- return;
- }
-
- // Initialize credentials with force supabase flag (for stats mode, always need Supabase)
- await checkCredentials(options.forceSupabase || options.statsOnly);
-
- if (!basePath || !sources || sources.length === 0) {
- console.error(
- '⚠️ UPLOAD_BASE_PATH or UPLOAD_SOURCES not defined in environment variables.',
- );
- process.exit(1);
- }
-
- const batchSize = parseInt(options.batchSize) || 10;
- const concurrency = parseInt(options.concurrency) || 10;
-
- if (options.statsOnly) {
- console.log(
- '📊 Mode: Stats Only - Reading file stats and inserting to uploader table',
- );
- console.log('🚫 Files will NOT be uploaded');
- if (options.detect !== false) {
- console.log('🔍 Document type detection ENABLED for supported files');
- } else {
- console.log('🔍 Document type detection DISABLED');
- }
- } else {
- console.log(
- `🚀 Mode: ${apiMode ? 'Arela API with auto-processing' : 'Direct Supabase'}`,
- );
- }
- console.log(`📦 Batch size: ${batchSize}`);
- console.log(`⚡ Concurrency: ${concurrency}`);
-
- const processedPaths = getProcessedPaths();
- let globalSuccess = 0;
- let globalDetected = 0;
- let globalOrganized = 0;
- let globalFailure = 0;
- let globalSkipped = 0;
-
- for (const folder of sources) {
- const sourcePath = path.resolve(basePath, folder).replace(/\\/g, '/');
- console.log(`📂 Processing folder: ${sourcePath}`);
-
- try {
- const stats = fs.statSync(sourcePath);
- const files = stats.isDirectory()
- ? await globby([`${sourcePath}/**/*`], { onlyFiles: true })
- : [sourcePath];
-
- console.log(`📊 Found ${files.length} files to process`);
-
- const result = await processFilesInBatches(
- files,
- batchSize,
- options,
- basePath,
- folder,
- sourcePath,
- processedPaths,
- );
-
- globalSuccess += result.successCount;
- globalDetected += result.detectedCount || 0;
- globalOrganized += result.organizedCount || 0;
- globalFailure += result.failureCount;
- globalSkipped += result.skippedCount;
+ // Handle SIGINT (Ctrl+C)
+ process.on('SIGINT', () => {
+ console.log('\n👋 Received SIGINT. Gracefully shutting down...');
+ logger.info('Application interrupted by user (SIGINT)');
+ logger.flush();
+ process.exit(0);
+ });
 
- console.log(`\n📦 Summary for ${folder}:`);
- if (options.statsOnly) {
- console.log(` 📊 Stats recorded: ${result.successCount}`);
- } else {
- console.log(` ✅ Uploaded: ${result.successCount}`);
- if (apiMode) {
- console.log(` 🔍 Detected: ${result.detectedCount || 0}`);
- console.log(` 📁 Organized: ${result.organizedCount || 0}`);
- }
- }
- console.log(` ❌ Errors: ${result.failureCount}`);
- if (options.statsOnly) {
- console.log(` ⏭️ Duplicates: ${result.skippedCount}`);
- } else {
- console.log(` ⏭️ Skipped: ${result.skippedCount}`);
- }
+ // Handle SIGTERM
+ process.on('SIGTERM', () => {
+ console.log('\n👋 Received SIGTERM. Gracefully shutting down...');
+ logger.info('Application terminated by system (SIGTERM)');
+ logger.flush();
+ process.exit(0);
+ });
+ }
 
- writeLog(
- `📦 Summary for ${folder}: Success: ${result.successCount}, Detected: ${result.detectedCount || 0}, Organized: ${result.organizedCount || 0}, Errors: ${result.failureCount}, ${options.statsOnly ? 'Duplicates' : 'Skipped'}: ${result.skippedCount}`,
- );
- } catch (err) {
- console.error(`⚠️ Error processing folder ${folder}:`, err.message);
- writeLog(`⚠️ Error processing folder ${folder}: ${err.message}`);
- globalFailure++;
+ /**
+ * Show current configuration
+ * @private
+ */
+ #showConfiguration() {
+ console.log('🔧 Current Configuration:');
+ console.log(` Version: ${appConfig.packageVersion}`);
+ console.log('\n📡 API Configuration:');
+ console.log(` Base URL: ${appConfig.api.baseUrl || 'Not configured'}`);
+ console.log(` Token: ${appConfig.api.token ? '✅ Set' : '❌ Not set'}`);
+ console.log('\n🗄️ Supabase Configuration:');
+ console.log(` URL: ${appConfig.supabase.url || 'Not configured'}`);
+ console.log(` Key: ${appConfig.supabase.key ? '✅ Set' : '❌ Not set'}`);
+ console.log(` Bucket: ${appConfig.supabase.bucket || 'Not configured'}`);
+ console.log('\n📁 Upload Configuration:');
+ console.log(` Base Path: ${appConfig.upload.basePath || 'Not configured'}`);
+ console.log(` Sources: ${appConfig.upload.sources?.join(', ') || 'Not configured'}`);
+ console.log(` RFCs: ${appConfig.upload.rfcs?.join(', ') || 'Not configured'}`);
+ console.log('\n⚡ Performance Configuration:');
+ console.log(` Batch Delay: ${appConfig.performance.batchDelay}ms`);
+ console.log(` Progress Update Interval: ${appConfig.performance.progressUpdateInterval}`);
+ console.log(` Log Buffer Size: ${appConfig.performance.logBufferSize}`);
+ console.log('\n📝 Logging Configuration:');
+ console.log(` Verbose: ${appConfig.logging.verbose ? '✅ Enabled' : '❌ Disabled'}`);
+ console.log(` Log File: ${appConfig.logging.logFilePath}`);
+ console.log('\n🎯 Service Availability:');
+ console.log(` API Mode: ${appConfig.isApiModeAvailable() ? '✅ Available' : '❌ Not available'}`);
+ console.log(` Supabase Mode: ${appConfig.isSupabaseModeAvailable() ? '✅ Available' : '❌ Not available'}`);
+ }
+
+ /**
+ * Parse command line arguments and execute
+ */
+ async run() {
+ try {
+ // Set verbose mode if requested globally
+ const args = process.argv;
+ if (args.includes('-v') || args.includes('--verbose')) {
+ logger.setVerbose(true);
  }
- }
-
- console.log(`\n${'='.repeat(60)}`);
- if (options.statsOnly) {
- console.log(`📊 STATS COLLECTION COMPLETED`);
- console.log(`${'='.repeat(60)}`);
- console.log(` 📊 Total stats recorded: ${globalSuccess}`);
- } else {
- console.log(`🎯 ${apiMode ? 'ARELA API' : 'SUPABASE'} UPLOAD COMPLETED`);
- console.log(`${'='.repeat(60)}`);
- console.log(` ✅ Total uploaded: ${globalSuccess}`);
- if (apiMode) {
- console.log(` 🔍 Total detected: ${globalDetected}`);
- console.log(` 📁 Total organized: ${globalOrganized}`);
+
+ // Clear log if requested globally
+ if (args.includes('--clear-log')) {
+ logger.clearLogFile();
+ logger.info('Log file cleared');
  }
- }
- if (options.statsOnly) {
- console.log(` ⏭️ Total duplicates: ${globalSkipped}`);
- } else {
- console.log(` ⏭️ Total skipped: ${globalSkipped}`);
- }
- console.log(` ❌ Total errors: ${globalFailure}`);
- console.log(` 📜 Log file: ${logFilePath}`);
- console.log(`${'='.repeat(60)}\n`);
 
- // Continue with remaining phases if running all phases
- if (options.runAllPhases && options.statsOnly) {
- try {
- // Phase 2: PDF Detection
- console.log('\n🔍 === PHASE 2: PDF Detection ===');
- const detectionResult = await detectPedimentosInDatabase({
- batchSize: parseInt(options.batchSize) || 10,
- });
- console.log(
- `✅ Phase 2 Complete: ${detectionResult.detectedCount} detected, ${detectionResult.errorCount} errors`,
- );
+ // Log application start
+ logger.info(`Arela Uploader v${appConfig.packageVersion} started`)
+ logger.info(`Command: ${args.slice(2).join(' ')}`);
 
- // Phase 3: Propagate arela_path
- console.log('\n📁 === PHASE 3: Propagate Arela Paths ===');
- const propagateResult = await propagateArelaPath({
- showProgress: options.showStats || true,
- });
- console.log(
- `✅ Phase 3 Complete: ${propagateResult.updatedCount || 0} paths propagated`,
- );
-
- // Phase 4: Upload by RFC
- if (uploadRfcs && uploadRfcs.length > 0) {
- console.log('\n🚀 === PHASE 4: Upload by RFC ===');
-
- // Initialize API mode for uploads
- await checkCredentials(false);
-
- const uploadResult = await uploadFilesByRfc({
- showProgress: options.showStats || true,
- batchSize: parseInt(options.batchSize) || 10,
- folderStructure: options.folderStructure,
- });
- console.log(`✅ Phase 4 Complete: Upload finished`);
- } else {
- console.log('\n⚠️ === PHASE 4: Upload by RFC ===');
- console.log(
- '⚠️ UPLOAD_RFCS environment variable not configured, skipping Phase 4',
- );
- }
-
- console.log('\n🎉 All 4 phases completed successfully!');
- } catch (error) {
- console.error(`❌ Error in multi-phase execution:`, error.message);
- process.exit(1);
- }
- }
-
- if (
- options.showStats &&
- (sanitizationCache.size > 0 || pathDetectionCache.size > 0)
- ) {
- console.log(`📊 Performance Statistics:`);
- if (sanitizationCache.size > 0) {
- console.log(
- ` 🗂️ Sanitization cache entries: ${sanitizationCache.size}`,
- );
- }
- if (pathDetectionCache.size > 0) {
- console.log(
- ` 📁 Path detection cache entries: ${pathDetectionCache.size}`,
- );
- }
+ // Parse and execute commands
+ await this.program.parseAsync();
+
+ } catch (error) {
+ this.errorHandler.handleFatalError(error, { context: 'cli-execution' });
  }
+ }
+ }
 
- // OPTIMIZED: Ensure log buffer is flushed before exit
- flushLogBuffer();
- });
-
- program.parse();
+ // Create and run the CLI application
+ const cli = new ArelaUploaderCLI();
+ await cli.run();
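
Taken together, 0.2.5 replaces the single flag-driven command of 0.2.3 with a class-based CLI whose subcommands split the old phases apart. Going by the hints the added code prints in its own messages (the published binary name itself is not visible in this diff), the old flag invocations map onto calls such as:

    arela detect                          # was: --detect-pdfs
    arela detect --propagate-arela-path   # was: --propagate-arela-path
    arela upload --upload-by-rfc          # was: --upload-by-rfc
    arela query --ready-files             # new inspection command
    arela config                          # show resolved configuration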