@arela/uploader 0.2.4 → 0.2.5

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/src/index.js CHANGED
@@ -1,2628 +1,282 @@
1
1
  #!/usr/bin/env node
2
- import { createClient } from '@supabase/supabase-js';
3
- import cliProgress from 'cli-progress';
4
2
  import { Command } from 'commander';
5
- import { config } from 'dotenv';
6
- import FormData from 'form-data';
7
- import fs from 'fs';
8
- import { globby } from 'globby';
9
- import mime from 'mime-types';
10
- import fetch from 'node-fetch';
11
- import path from 'path';
12
3
 
13
- import { FileDetectionService } from './file-detection.js';
14
-
15
- config();
16
-
17
- const program = new Command();
18
-
19
- // Read package.json version at startup
20
- let packageVersion = '0.2.4'; // fallback
21
- try {
22
- const __filename = new URL(import.meta.url).pathname;
23
- const __dirname = path.dirname(__filename);
24
- const packageJsonPath = path.resolve(__dirname, '../package.json');
25
- const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
26
- packageVersion = packageJson.version || '1.0.0';
27
- } catch (error) {
28
- console.warn('⚠️ Could not read package.json version, using fallback');
29
- }
30
-
31
- // Supabase configuration (original)
32
- const supabaseUrl = process.env.SUPABASE_URL;
33
- const supabaseKey = process.env.SUPABASE_KEY;
34
- const bucket = process.env.SUPABASE_BUCKET;
35
-
36
- // API configuration (new)
37
- const API_BASE_URL = process.env.ARELA_API_URL;
38
- const API_TOKEN = process.env.ARELA_API_TOKEN;
39
-
40
- // Enhanced uploader configuration
41
- const basePath = process.env.UPLOAD_BASE_PATH;
42
- const sources = process.env.UPLOAD_SOURCES?.split('|')
43
- .map((s) => s.trim())
44
- .filter(Boolean);
45
-
46
- // RFC configuration for upload
47
- console.log('🔧 Configured RFCs for upload:', process.env.UPLOAD_RFCS);
48
- const uploadRfcs = process.env.UPLOAD_RFCS?.split('|')
49
- .map((s) => s.trim())
50
- .filter(Boolean);
51
-
52
- let supabase;
53
- let apiMode = false;
54
-
55
- // Pre-compiled regex patterns for better performance (from original complex uploader)
56
- const SANITIZATION_PATTERNS = [
57
- [/[áàâäãåāăą]/gi, 'a'],
58
- [/[éèêëēĕėę]/gi, 'e'],
59
- [/[íìîïīĭį]/gi, 'i'],
60
- [/[óòôöõōŏő]/gi, 'o'],
61
- [/[úùûüūŭů]/gi, 'u'],
62
- [/[ñň]/gi, 'n'],
63
- [/[ç]/gi, 'c'],
64
- [/[ý]/gi, 'y'],
65
- [/[멕]/g, 'meok'],
66
- [/[시]/g, 'si'],
67
- [/[코]/g, 'ko'],
68
- [/[용]/g, 'yong'],
69
- [/[가-힣]/g, 'kr'],
70
- [/[\u0300-\u036f]/g, ''],
71
- [/[\\?%*:|"<>[\]~`^]/g, '-'],
72
- [/[{}]/g, '-'],
73
- [/[&]/g, 'and'],
74
- [/[()]/g, ''],
75
- [/\s+/g, '-'],
76
- [/-+/g, '-'],
77
- [/^-+|-+$/g, ''],
78
- [/^\.+/, ''],
79
- [/[^\w.-]/g, ''],
80
- ];
81
-
82
- const sanitizationCache = new Map();
83
-
84
- const sanitizeFileName = (fileName) => {
85
- if (sanitizationCache.has(fileName)) {
86
- return sanitizationCache.get(fileName);
87
- }
88
-
89
- const ext = path.extname(fileName);
90
- const nameWithoutExt = path.basename(fileName, ext);
91
-
92
- if (/^[a-zA-Z0-9._-]+$/.test(nameWithoutExt)) {
93
- const result = fileName;
94
- sanitizationCache.set(fileName, result);
95
- return result;
96
- }
97
-
98
- let sanitized = nameWithoutExt.normalize('NFD');
99
-
100
- for (const [pattern, replacement] of SANITIZATION_PATTERNS) {
101
- sanitized = sanitized.replace(pattern, replacement);
102
- }
103
-
104
- // Additional sanitization for problematic characters
105
- sanitized = sanitized
106
- .replace(/~/g, '-') // Replace tildes
107
- .replace(/\s+/g, '-') // Replace spaces with dashes
108
- .replace(/\.+/g, '-') // Replace multiple dots with dashes
109
- .replace(/-+/g, '-') // Collapse multiple dashes
110
- .replace(/^-+|-+$/g, ''); // Remove leading/trailing dashes
111
-
112
- if (!sanitized) {
113
- sanitized = 'unnamed_file';
114
- }
115
-
116
- const result = sanitized + ext;
117
- sanitizationCache.set(fileName, result);
118
- return result;
119
- };
120
-
121
- const checkCredentials = async (forceSupabase = false) => {
122
- // Force Supabase mode if explicitly requested
123
- if (forceSupabase) {
124
- console.log('🔧 Force Supabase mode enabled - skipping API');
125
- apiMode = false;
126
- } else if (API_BASE_URL && API_TOKEN) {
127
- console.log(
128
- '🌐 API mode enabled - files will be uploaded to Arela API with automatic processing',
129
- );
130
- apiMode = true;
131
-
132
- try {
133
- const response = await fetch(`${API_BASE_URL}/api/health`, {
134
- headers: {
135
- 'x-api-key': API_TOKEN,
136
- },
137
- });
138
-
139
- if (!response.ok) {
140
- console.warn(
141
- '⚠️ API connection failed, falling back to direct Supabase upload',
142
- );
143
- apiMode = false;
144
- } else {
145
- console.log('✅ Connected to Arela API');
146
- return;
147
- }
148
- } catch (err) {
149
- console.warn(
150
- '⚠️ API connection failed, falling back to direct Supabase upload',
151
- );
152
- apiMode = false;
153
- }
154
- }
155
-
156
- // Initialize Supabase client if not in API mode or if forced
157
- if (!apiMode || forceSupabase) {
158
- if (!supabaseUrl || !supabaseKey || !bucket) {
159
- console.error(
160
- '⚠️ Missing credentials. Please set either:\n' +
161
- ' - ARELA_API_URL and ARELA_API_TOKEN for API mode, or\n' +
162
- ' - SUPABASE_URL, SUPABASE_KEY, and SUPABASE_BUCKET for direct mode',
163
- );
164
- process.exit(1);
165
- }
166
-
167
- supabase = createClient(supabaseUrl, supabaseKey);
168
-
169
- try {
170
- const { error } = await supabase.storage.from(bucket).list('');
171
- if (error) {
172
- console.error('⚠️ Error connecting to Supabase:', error.message);
173
- process.exit(1);
174
- }
175
- console.log('✅ Connected to Supabase (direct mode)');
176
- } catch (err) {
177
- console.error('⚠️ Error:', err.message);
178
- process.exit(1);
179
- }
180
- }
181
- };
182
-
183
- const logFilePath = path.resolve(process.cwd(), 'arela-upload.log');
184
-
185
- /**
186
- * OPTIMIZED: Log buffer to reduce I/O operations
187
- */
188
- let logBuffer = [];
189
- const LOG_BUFFER_SIZE = 100; // Flush every 100 log entries
190
- let lastFlushTime = Date.now();
191
- const LOG_FLUSH_INTERVAL = 5000; // Flush every 5 seconds
192
-
193
- const flushLogBuffer = () => {
194
- if (logBuffer.length === 0) return;
195
-
196
- try {
197
- const logContent = logBuffer.join('\n') + '\n';
198
- fs.appendFileSync(logFilePath, logContent);
199
- logBuffer = [];
200
- lastFlushTime = Date.now();
201
- } catch (error) {
202
- console.error(`❌ Error writing to log file: ${error.code} | ${error.message} | path: ${logFilePath}`);
203
- }
204
- };
205
-
206
- const writeLog = (message) => {
207
- try {
208
- const timestamp = new Date().toISOString();
209
- logBuffer.push(`[${timestamp}] ${message}`);
210
-
211
- // Flush if buffer is full or enough time has passed
212
- const now = Date.now();
213
- if (
214
- logBuffer.length >= LOG_BUFFER_SIZE ||
215
- now - lastFlushTime >= LOG_FLUSH_INTERVAL
216
- ) {
217
- flushLogBuffer();
218
- }
219
- } catch (error) {
220
- console.error(`❌ Error buffering log message: ${error.message}`);
221
- }
222
- };
223
-
224
- // Ensure logs are flushed on process exit
225
- process.on('exit', flushLogBuffer);
226
- process.on('SIGINT', () => {
227
- flushLogBuffer();
228
- process.exit(0);
229
- });
230
- process.on('SIGTERM', () => {
231
- flushLogBuffer();
232
- process.exit(0);
233
- });
234
-
235
- /**
236
- * OPTIMIZED: Conditional logging to reduce console overhead
237
- */
238
- const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true';
239
- const BATCH_DELAY = parseInt(process.env.BATCH_DELAY) || 100; // Configurable delay between batches
240
- const PROGRESS_UPDATE_INTERVAL =
241
- parseInt(process.env.PROGRESS_UPDATE_INTERVAL) || 10; // Update progress every N items
242
-
243
- const logVerbose = (message) => {
244
- if (VERBOSE_LOGGING) {
245
- console.log(message);
246
- }
247
- };
248
- const batchReadFileStats = (filePaths) => {
249
- const results = [];
250
-
251
- for (const filePath of filePaths) {
252
- try {
253
- const stats = fs.statSync(filePath);
254
- results.push({ path: filePath, stats, error: null });
255
- } catch (error) {
256
- results.push({ path: filePath, stats: null, error: error.message });
257
- }
258
- }
259
-
260
- return results;
261
- };
262
-
263
- /**
264
- * OPTIMIZED: Cache for year/pedimento detection results to avoid redundant parsing
265
- */
266
- const pathDetectionCache = new Map();
267
-
268
- /**
269
- * OPTIMIZED: Clear the path detection cache (useful for testing or long-running processes)
270
- */
271
- const clearPathDetectionCache = () => {
272
- pathDetectionCache.clear();
273
- };
274
-
275
- /**
276
- * OPTIMIZED: Get detection results with caching
277
- */
278
- const getCachedPathDetection = (filePath, basePath) => {
279
- const cacheKey = `${filePath}|${basePath}`;
280
-
281
- if (pathDetectionCache.has(cacheKey)) {
282
- return pathDetectionCache.get(cacheKey);
283
- }
284
-
285
- const detection = extractYearAndPedimentoFromPath(filePath, basePath);
286
- pathDetectionCache.set(cacheKey, detection);
287
-
288
- return detection;
289
- };
4
+ import appConfig from './config/config.js';
5
+ import UploadCommand from './commands/UploadCommand.js';
6
+ import ErrorHandler from './errors/ErrorHandler.js';
7
+ import logger from './services/LoggingService.js';
290
8
 
291
9
  /**
292
- * Extracts year and pedimento number from file path
293
- * Supports patterns like:
294
- * - /path/to/2024/4023260/file.pdf
295
- * - /path/to/pedimentos/2024/4023260/file.pdf
296
- * - /path/to/docs/año2024/ped4023260/file.pdf
10
+ * Arela Uploader CLI
11
+ * Professional file uploader with document detection and organization
297
12
  */
298
- const extractYearAndPedimentoFromPath = (filePath, basePath) => {
299
- try {
300
- const relativePath = path.relative(basePath, filePath);
301
- const pathParts = relativePath.split(path.sep);
302
-
303
- let year = null;
304
- let pedimento = null;
305
-
306
- // Pattern 1: Direct year/pedimento structure (2024/4023260)
307
- for (let i = 0; i < pathParts.length - 1; i++) {
308
- const part = pathParts[i];
309
- const nextPart = pathParts[i + 1];
310
-
311
- // Check if current part looks like a year (2020-2030)
312
- const yearMatch = part.match(/^(202[0-9])$/);
313
- if (yearMatch && nextPart) {
314
- year = yearMatch[1];
315
-
316
- // Check if next part looks like a pedimento (4-8 digits)
317
- const pedimentoMatch = nextPart.match(/^(\d{4,8})$/);
318
- if (pedimentoMatch) {
319
- pedimento = pedimentoMatch[1];
320
- break;
13
+ class ArelaUploaderCLI {
14
+ constructor() {
15
+ this.program = new Command();
16
+ this.errorHandler = new ErrorHandler(logger);
17
+ this.uploadCommand = new UploadCommand();
18
+
19
+ this.#setupProgram();
20
+ this.#setupCommands();
21
+ this.#setupErrorHandling();
22
+ }
23
+
24
+ /**
25
+ * Setup the main program configuration
26
+ * @private
27
+ */
28
+ #setupProgram() {
29
+ this.program
30
+ .name('arela')
31
+ .description('CLI to upload files/directories to Arela with automatic processing')
32
+ .version(appConfig.packageVersion)
33
+ .option('-v, --verbose', 'Enable verbose logging')
34
+ .option('--clear-log', 'Clear the log file before starting');
35
+ }
36
+
37
+ /**
38
+ * Setup CLI commands
39
+ * @private
40
+ */
41
+ #setupCommands() {
42
+ // Main upload command
43
+ this.program
44
+ .command('upload')
45
+ .description('Upload files to Arela with automatic processing')
46
+ .option('-b, --batch-size <size>', 'Number of files to process in each batch', '10')
47
+ .option('-p, --prefix <prefix>', 'Prefix for uploaded files')
48
+ .option('--folder-structure <structure>', 'Custom folder structure for organization')
49
+ .option('--client-path <path>', 'Override client path for metadata')
50
+ .option('--auto-detect-structure', 'Automatically detect folder structure from file paths')
51
+ .option('--auto-detect', 'Enable automatic document type detection')
52
+ .option('--auto-organize', 'Enable automatic file organization')
53
+ .option('--force-supabase', 'Force direct Supabase upload mode')
54
+ .option('--skip-processed', 'Skip files that have already been processed')
55
+ .option('--show-stats', 'Show performance statistics')
56
+ .option('--upload-by-rfc', 'Upload files based on specific RFC values from UPLOAD_RFCS')
57
+ .option('--run-all-phases', 'Run all processing phases (stats, detection, organization)')
58
+ .action(async (options) => {
59
+ try {
60
+ // Handle --upload-by-rfc as a specific operation
61
+ if (options.uploadByRfc) {
62
+ const databaseService = await import('./services/DatabaseService.js');
63
+ console.log('🎯 Running RFC-based upload...');
64
+ const result = await databaseService.default.uploadFilesByRfc({
65
+ batchSize: parseInt(options.batchSize) || 10,
66
+ showProgress: true,
67
+ folderStructure: options.folderStructure,
68
+ });
69
+ console.log(`✅ RFC upload completed: ${result.processedCount} processed, ${result.uploadedCount} uploaded, ${result.errorCount} errors`);
70
+ return;
71
+ }
72
+
73
+ await this.uploadCommand.execute(options);
74
+ } catch (error) {
75
+ this.errorHandler.handleFatalError(error, { command: 'upload' });
321
76
  }
322
- }
323
- }
77
+ });
324
78
 
325
- // Pattern 2: Named patterns (año2024, ped4023260)
326
- if (!year || !pedimento) {
327
- for (const part of pathParts) {
328
- if (!year) {
329
- const namedYearMatch = part.match(/(?:año|year|anio)(\d{4})/i);
330
- if (namedYearMatch) {
331
- year = namedYearMatch[1];
332
- }
79
+ // Stats-only command
80
+ this.program
81
+ .command('stats')
82
+ .description('Collect file statistics without uploading')
83
+ .option('-b, --batch-size <size>', 'Number of files to process in each batch', '10')
84
+ .option('--client-path <path>', 'Override client path for metadata')
85
+ .option('--stats-only', 'Collect file statistics without uploading (backward compatibility)')
86
+ .option('--run-all-phases', 'Run all processing phases (stats, detection, organization)')
87
+ .option('--show-stats', 'Show performance statistics')
88
+ .action(async (options) => {
89
+ try {
90
+ const statsOptions = { ...options, statsOnly: true };
91
+ await this.uploadCommand.execute(statsOptions);
92
+ } catch (error) {
93
+ this.errorHandler.handleFatalError(error, { command: 'stats' });
333
94
  }
95
+ });
334
96
 
335
- if (!pedimento) {
336
- const namedPedimentoMatch = part.match(
337
- /(?:ped|pedimento|pedi)(\d{4,8})/i,
338
- );
339
- if (namedPedimentoMatch) {
340
- pedimento = namedPedimentoMatch[1];
97
+ // Detection command
98
+ this.program
99
+ .command('detect')
100
+ .description('Run document detection on existing file records')
101
+ .option('-b, --batch-size <size>', 'Number of files to process in each batch', '10')
102
+ .option('--detect-pdfs', 'Run PDF detection on existing database records (backward compatibility)')
103
+ .option('--propagate-arela-path', 'Propagate arela_path from pedimento records to related files')
104
+ .action(async (options) => {
105
+ try {
106
+ const databaseService = await import('./services/DatabaseService.js');
107
+
108
+ // Handle --propagate-arela-path as a specific operation
109
+ if (options.propagateArelaPath) {
110
+ console.log('🔄 Running arela_path propagation...');
111
+ const result = await databaseService.default.propagateArelaPath({
112
+ showProgress: true,
113
+ });
114
+ console.log(`✅ Propagation completed: ${result.processedCount} processed, ${result.updatedCount} updated, ${result.errorCount} errors`);
115
+ return;
341
116
  }
117
+
118
+ // Default behavior: run PDF detection
119
+ console.log('🔍 Running PDF detection on existing database records...');
120
+ const result = await databaseService.default.detectPedimentosInDatabase({
121
+ batchSize: parseInt(options.batchSize) || 10,
122
+ });
123
+ console.log(`✅ Detection completed: ${result.detectedCount} detected, ${result.processedCount} processed, ${result.errorCount} errors`);
124
+ } catch (error) {
125
+ this.errorHandler.handleFatalError(error, { command: 'detect' });
342
126
  }
343
- }
344
- }
127
+ });
345
128
 
346
- // Pattern 3: Loose year detection in any part
347
- if (!year) {
348
- for (const part of pathParts) {
349
- const yearMatch = part.match(/(202[0-9])/);
350
- if (yearMatch) {
351
- year = yearMatch[1];
352
- break;
129
+ // Configuration command
130
+ this.program
131
+ .command('config')
132
+ .description('Show current configuration')
133
+ .action(() => {
134
+ try {
135
+ this.#showConfiguration();
136
+ } catch (error) {
137
+ this.errorHandler.handleFatalError(error, { command: 'config' });
353
138
  }
354
- }
355
- }
139
+ });
356
140
 
357
- // Pattern 4: Loose pedimento detection (4-8 consecutive digits)
358
- if (!pedimento) {
359
- for (const part of pathParts) {
360
- const pedimentoMatch = part.match(/(\d{4,8})/);
361
- if (pedimentoMatch && pedimentoMatch[1].length >= 4) {
362
- pedimento = pedimentoMatch[1];
363
- break;
141
+ // Query command for inspection
142
+ this.program
143
+ .command('query')
144
+ .description('Query database for file status and information')
145
+ .option('--ready-files', 'Show files that are ready for upload (detected but not uploaded)')
146
+ .action(async (options) => {
147
+ try {
148
+ const databaseService = await import('./services/DatabaseService.js');
149
+
150
+ if (options.readyFiles) {
151
+ console.log('🔍 Querying files ready for upload...');
152
+
153
+ const readyFiles = await databaseService.default.getFilesReadyForUpload();
154
+
155
+ if (readyFiles.length === 0) {
156
+ console.log('ℹ️ No files are currently ready for upload');
157
+ console.log(' Tip: Run "arela detect" and "arela detect --propagate-arela-path" first to prepare files for upload');
158
+ } else {
159
+ console.log(`\n📋 ${readyFiles.length} files are ready for upload!`);
160
+ console.log(' Use "arela upload --upload-by-rfc" to upload them to Arela API');
161
+ }
162
+ return;
163
+ }
164
+
165
+ // Default behavior: show help for query command
166
+ console.log('Available query options:');
167
+ console.log(' --ready-files Show files ready for upload');
168
+ } catch (error) {
169
+ this.errorHandler.handleFatalError(error, { command: 'query' });
364
170
  }
365
- }
366
- }
367
-
368
- return { year, pedimento, detected: !!(year && pedimento) };
369
- } catch (error) {
370
- return {
371
- year: null,
372
- pedimento: null,
373
- detected: false,
374
- error: error.message,
375
- };
376
- }
377
- };
378
-
379
- /**
380
- * OPTIMIZED: Get processed paths with caching and buffered log reading
381
- */
382
- let processedPathsCache = null;
383
- let lastLogModTime = 0;
384
-
385
- const getProcessedPaths = () => {
386
- try {
387
- // Check if log file exists
388
- if (!fs.existsSync(logFilePath)) {
389
- return new Set();
390
- }
391
-
392
- // Check if cache is still valid
393
- const logStats = fs.statSync(logFilePath);
394
- if (processedPathsCache && logStats.mtime.getTime() === lastLogModTime) {
395
- return processedPathsCache;
396
- }
397
-
398
- // Read and parse log file
399
- const processed = new Set();
400
- const content = fs.readFileSync(logFilePath, 'utf-8');
401
-
402
- // Use more efficient regex with global flag
403
- const regex = /(SUCCESS|SKIPPED): .*? -> (.+)/g;
404
- let match;
405
-
406
- while ((match = regex.exec(content)) !== null) {
407
- const path = match[2];
408
- if (path) {
409
- processed.add(path.trim());
410
- }
411
- }
412
-
413
- // Update cache
414
- processedPathsCache = processed;
415
- lastLogModTime = logStats.mtime.getTime();
171
+ });
416
172
 
417
- return processed;
418
- } catch (error) {
419
- console.error(`⚠️ Error reading processed paths: ${error.message}`);
420
- return new Set();
173
+ // Version command (already handled by program.version())
174
+
175
+ // Help command
176
+ this.program
177
+ .command('help')
178
+ .description('Show help information')
179
+ .action(() => {
180
+ this.program.help();
181
+ });
421
182
  }
422
- };
423
-
424
- /**
425
- * Upload files to Arela API with automatic detection and organization
426
- */
427
- const uploadToApi = async (files, options) => {
428
- const formData = new FormData();
429
183
 
430
- files.forEach((file) => {
431
- const fileBuffer = fs.readFileSync(file.path);
432
- formData.append('files', fileBuffer, {
433
- filename: file.name,
434
- contentType: file.contentType,
184
+ /**
185
+ * Setup global error handling
186
+ * @private
187
+ */
188
+ #setupErrorHandling() {
189
+ // Handle uncaught exceptions
190
+ process.on('uncaughtException', (error) => {
191
+ this.errorHandler.handleFatalError(error, { context: 'uncaughtException' });
435
192
  });
436
- });
437
-
438
- if (bucket) formData.append('bucket', bucket);
439
- if (options.prefix) formData.append('prefix', options.prefix);
440
-
441
- // New feature: custom folder structure
442
- let combinedStructure = null;
443
- let cachedDetection = null; // Cache detection result to avoid redundant calls
444
-
445
- if (
446
- options.folderStructure &&
447
- options.autoDetectStructure &&
448
- files.length > 0
449
- ) {
450
- // Combine custom folder structure with auto-detection
451
- const firstFile = files[0];
452
- cachedDetection = getCachedPathDetection(firstFile.path, process.cwd());
453
-
454
- if (cachedDetection.detected) {
455
- const autoStructure = `${cachedDetection.year}/${cachedDetection.pedimento}`;
456
- combinedStructure = `${options.folderStructure}/${autoStructure}`;
457
- formData.append('folderStructure', combinedStructure);
458
- console.log(
459
- `📁 Combined folder structure: ${options.folderStructure} + ${autoStructure} = ${combinedStructure}`,
460
- );
461
- } else {
462
- // Fallback to just custom structure if auto-detection fails
463
- formData.append('folderStructure', options.folderStructure);
464
- console.log(
465
- `📁 Using custom folder structure (auto-detection failed): ${options.folderStructure}`,
466
- );
467
- }
468
- } else if (options.folderStructure) {
469
- formData.append('folderStructure', options.folderStructure);
470
- console.log(`📁 Using custom folder structure: ${options.folderStructure}`);
471
- } else if (options.autoDetectStructure && files.length > 0) {
472
- // Try to auto-detect from the first file if no explicit structure is provided
473
- const firstFile = files[0];
474
- cachedDetection = getCachedPathDetection(firstFile.path, process.cwd());
475
-
476
- if (cachedDetection.detected) {
477
- const autoStructure = `${cachedDetection.year}/${cachedDetection.pedimento}`;
478
- formData.append('folderStructure', autoStructure);
479
- }
480
- }
481
-
482
- // If clientPath is specified for user_metadata
483
- if (options.clientPath) {
484
- formData.append('clientPath', options.clientPath);
485
- }
486
-
487
- formData.append('autoDetect', String(options.autoDetect ?? true));
488
- formData.append('autoOrganize', String(options.autoOrganize ?? true));
489
- formData.append('batchSize', String(options.batchSize || 10));
490
- formData.append('clientVersion', packageVersion);
491
-
492
- const response = await fetch(
493
- `${API_BASE_URL}/api/storage/batch-upload-and-process`,
494
- {
495
- method: 'POST',
496
- headers: {
497
- 'x-api-key': API_TOKEN,
498
- },
499
- body: formData,
500
- },
501
- );
502
-
503
- if (!response.ok) {
504
- const errorText = await response.text();
505
- throw new Error(
506
- `API request failed: ${response.status} ${response.statusText} - ${errorText}`,
507
- );
508
- }
509
193
 
510
- return response.json();
511
- };
512
-
513
- /**
514
- * Upload file directly to Supabase (fallback method)
515
- */
516
- const uploadToSupabase = async (file, uploadPath) => {
517
- const content = fs.readFileSync(file);
518
- const contentType = mime.lookup(file) || 'application/octet-stream';
519
-
520
- const { data, error } = await supabase.storage
521
- .from(bucket)
522
- .upload(uploadPath.replace(/\\/g, '/'), content, {
523
- upsert: true,
524
- contentType,
194
+ // Handle unhandled promise rejections
195
+ process.on('unhandledRejection', (reason, promise) => {
196
+ const error = reason instanceof Error ? reason : new Error(String(reason));
197
+ this.errorHandler.handleFatalError(error, {
198
+ context: 'unhandledRejection',
199
+ promise: promise.toString()
200
+ });
525
201
  });
526
202
 
527
- if (error) {
528
- throw new Error(error.message);
529
- }
530
-
531
- return data;
532
- };
533
-
534
- /**
535
- * Insert file stats into uploader table with document detection
536
- */
537
- const insertStatsToUploaderTable = async (files, options) => {
538
- if (!supabase) {
539
- throw new Error(
540
- 'Supabase client not initialized. Stats mode requires Supabase connection.',
541
- );
542
- }
543
-
544
- const detectionService = new FileDetectionService();
545
- const records = [];
546
-
547
- for (const file of files) {
548
- // OPTIMIZED: Use pre-computed stats if available, otherwise call fs.statSync
549
- const stats = file.stats || fs.statSync(file.path);
550
- const originalPath = options.clientPath || file.path;
551
-
552
- // Check if record already exists
553
- const { data: existingRecords, error: checkError } = await supabase
554
- .from('uploader')
555
- .select('id, original_path')
556
- .eq('original_path', originalPath)
557
- .limit(1);
558
-
559
- if (checkError) {
560
- console.error(
561
- `❌ Error checking for existing record: ${checkError.message}`,
562
- );
563
- continue;
564
- }
565
-
566
- if (existingRecords && existingRecords.length > 0) {
567
- console.log(`⏭️ Skipping duplicate: ${path.basename(file.path)}`);
568
- continue;
569
- }
570
-
571
- // Initialize record with basic file stats
572
- const record = {
573
- document_type: null,
574
- size: stats.size,
575
- num_pedimento: null,
576
- filename: file.originalName || path.basename(file.path),
577
- original_path: originalPath,
578
- arela_path: null,
579
- status: 'stats',
580
- rfc: null,
581
- message: null,
582
- };
583
-
584
- // Try to detect document type for supported files
585
- if (detectionService.isSupportedFileType(file.path)) {
586
- try {
587
- const detection = await detectionService.detectFile(file.path);
588
-
589
- if (detection.detectedType) {
590
- record.document_type = detection.detectedType;
591
- record.num_pedimento = detection.detectedPedimento;
592
- record.status = 'detected';
593
-
594
- // Set arela_path for pedimento_simplificado documents
595
- if (detection.arelaPath) {
596
- record.arela_path = detection.arelaPath;
597
- }
598
-
599
- // Extract RFC from fields if available
600
- const rfcField = detection.fields.find(
601
- (f) => f.name === 'rfc' && f.found,
602
- );
603
- if (rfcField) {
604
- record.rfc = rfcField.value;
605
- }
606
- } else {
607
- record.status = 'not-detected';
608
- if (detection.error) {
609
- record.message = detection.error;
610
- }
611
- }
612
- } catch (error) {
613
- console.error(`❌ Error detecting ${record.filename}:`, error.message);
614
- record.status = 'detection-error';
615
- record.message = error.message;
616
- }
617
- } else {
618
- record.status = 'unsupported';
619
- record.message = 'File type not supported for detection';
620
- }
621
-
622
- records.push(record);
623
- }
624
-
625
- if (records.length === 0) {
626
- console.log('📝 No new records to insert (all were duplicates or errors)');
627
- return [];
628
- }
629
-
630
- console.log(
631
- `💾 Inserting ${records.length} new records into uploader table...`,
632
- );
633
-
634
- const { data, error } = await supabase
635
- .from('uploader')
636
- .insert(records)
637
- .select();
638
-
639
- if (error) {
640
- throw new Error(`Failed to insert stats records: ${error.message}`);
641
- }
642
-
643
- return data;
644
- };
645
-
646
- /**
647
- * OPTIMIZED: Insert ONLY file stats into uploader table (Phase 1)
648
- * No file reading, no detection - just filesystem metadata
649
- * Returns summary statistics instead of full records for better performance
650
- */
651
- const insertStatsOnlyToUploaderTable = async (files, options) => {
652
- if (!supabase) {
653
- throw new Error(
654
- 'Supabase client not initialized. Stats mode requires Supabase connection.',
655
- );
656
- }
657
-
658
- const batchSize = 1000; // Large batch size for performance
659
- const allRecords = [];
660
-
661
- // Prepare all file stats data first - OPTIMIZED to use pre-computed stats
662
- console.log('📊 Collecting filesystem stats...');
663
- for (const file of files) {
664
- try {
665
- // Use pre-computed stats if available, otherwise call fs.statSync
666
- const stats = file.stats || fs.statSync(file.path);
667
- const originalPath = options.clientPath || file.path;
668
- const fileExtension = path
669
- .extname(file.path)
670
- .toLowerCase()
671
- .replace('.', '');
672
-
673
- const record = {
674
- document_type: null,
675
- size: stats.size,
676
- num_pedimento: null,
677
- filename: file.originalName || path.basename(file.path),
678
- original_path: originalPath,
679
- arela_path: null,
680
- status: 'fs-stats',
681
- rfc: null,
682
- message: null,
683
- file_extension: fileExtension,
684
- created_at: new Date().toISOString(),
685
- modified_at: stats.mtime.toISOString(),
686
- };
687
-
688
- allRecords.push(record);
689
- } catch (error) {
690
- console.error(`❌ Error reading stats for ${file.path}:`, error.message);
691
- }
692
- }
203
+ // Handle SIGINT (Ctrl+C)
204
+ process.on('SIGINT', () => {
205
+ console.log('\n👋 Received SIGINT. Gracefully shutting down...');
206
+ logger.info('Application interrupted by user (SIGINT)');
207
+ logger.flush();
208
+ process.exit(0);
209
+ });
693
210
 
694
- if (allRecords.length === 0) {
695
- console.log('📝 No file stats to insert');
696
- return { totalInserted: 0, totalSkipped: 0, totalProcessed: 0 };
211
+ // Handle SIGTERM
212
+ process.on('SIGTERM', () => {
213
+ console.log('\n👋 Received SIGTERM. Gracefully shutting down...');
214
+ logger.info('Application terminated by system (SIGTERM)');
215
+ logger.flush();
216
+ process.exit(0);
217
+ });
697
218
  }
698
219
 
699
- console.log(
700
- `💾 Bulk inserting ${allRecords.length} file stats in batches of ${batchSize}...`,
701
- );
702
-
703
- let totalInserted = 0;
704
- let totalSkipped = 0;
705
-
706
- // Process in batches for optimal performance
707
- for (let i = 0; i < allRecords.length; i += batchSize) {
708
- const batch = allRecords.slice(i, i + batchSize);
709
-
220
+ /**
221
+ * Show current configuration
222
+ * @private
223
+ */
224
+ #showConfiguration() {
225
+ console.log('🔧 Current Configuration:');
226
+ console.log(` Version: ${appConfig.packageVersion}`);
227
+ console.log('\n📡 API Configuration:');
228
+ console.log(` Base URL: ${appConfig.api.baseUrl || 'Not configured'}`);
229
+ console.log(` Token: ${appConfig.api.token ? '✅ Set' : '❌ Not set'}`);
230
+ console.log('\n🗄️ Supabase Configuration:');
231
+ console.log(` URL: ${appConfig.supabase.url || 'Not configured'}`);
232
+ console.log(` Key: ${appConfig.supabase.key ? '✅ Set' : '❌ Not set'}`);
233
+ console.log(` Bucket: ${appConfig.supabase.bucket || 'Not configured'}`);
234
+ console.log('\n📁 Upload Configuration:');
235
+ console.log(` Base Path: ${appConfig.upload.basePath || 'Not configured'}`);
236
+ console.log(` Sources: ${appConfig.upload.sources?.join(', ') || 'Not configured'}`);
237
+ console.log(` RFCs: ${appConfig.upload.rfcs?.join(', ') || 'Not configured'}`);
238
+ console.log('\n⚡ Performance Configuration:');
239
+ console.log(` Batch Delay: ${appConfig.performance.batchDelay}ms`);
240
+ console.log(` Progress Update Interval: ${appConfig.performance.progressUpdateInterval}`);
241
+ console.log(` Log Buffer Size: ${appConfig.performance.logBufferSize}`);
242
+ console.log('\n📝 Logging Configuration:');
243
+ console.log(` Verbose: ${appConfig.logging.verbose ? '✅ Enabled' : '❌ Disabled'}`);
244
+ console.log(` Log File: ${appConfig.logging.logFilePath}`);
245
+ console.log('\n🎯 Service Availability:');
246
+ console.log(` API Mode: ${appConfig.isApiModeAvailable() ? '✅ Available' : '❌ Not available'}`);
247
+ console.log(` Supabase Mode: ${appConfig.isSupabaseModeAvailable() ? '✅ Available' : '❌ Not available'}`);
248
+ }
249
+
250
+ /**
251
+ * Parse command line arguments and execute
252
+ */
253
+ async run() {
710
254
  try {
711
- // OPTIMIZED: Use upsert without select to avoid unnecessary data transfer
712
- const { error, count } = await supabase.from('uploader').upsert(batch, {
713
- onConflict: 'original_path',
714
- ignoreDuplicates: false,
715
- count: 'exact',
716
- });
717
-
718
- if (error) {
719
- console.error(
720
- `❌ Error inserting batch ${Math.floor(i / batchSize) + 1}:`,
721
- error.message,
722
- );
723
- continue;
724
- }
725
-
726
- // For upsert operations, we can't easily distinguish between inserts and updates
727
- // from the count alone, but we can estimate based on the assumption that most
728
- // operations in --stats-only mode are likely new inserts
729
- const batchProcessed = batch.length;
730
-
731
- // Since we're using upsert with ignoreDuplicates: false, the count represents
732
- // the actual number of rows affected (both inserts and updates)
733
- const affected = count || batchProcessed;
734
-
735
- // For simplicity and performance, we'll assume most are new inserts in stats-only mode
736
- // This is reasonable since stats-only is typically run on new file sets
737
- totalInserted += affected;
738
-
739
- console.log(
740
- `✅ Batch ${Math.floor(i / batchSize) + 1}: ${affected} rows processed`,
741
- );
742
- } catch (error) {
743
- console.error(
744
- `❌ Unexpected error in batch ${Math.floor(i / batchSize) + 1}:`,
745
- error.message,
746
- );
747
- }
748
- }
749
-
750
- // Calculate skipped as difference between total records and inserted
751
- totalSkipped = allRecords.length - totalInserted;
752
-
753
- console.log(
754
- `📊 Phase 1 Summary: ${totalInserted} records processed, estimated ${totalSkipped} were updates`,
755
- );
756
-
757
- return {
758
- totalInserted,
759
- totalSkipped,
760
- totalProcessed: allRecords.length,
761
- };
762
- };
763
-
764
- /**
765
- * PHASE 2: Process PDF files for pedimento-simplificado detection
766
- * Only processes files with status 'fs-stats' and file_extension 'pdf'
767
- * Processes records in chunks of 1000 to avoid loading all records into memory
768
- */
769
- const detectPedimentosInDatabase = async (options = {}) => {
770
- if (!supabase) {
771
- throw new Error('Supabase client not initialized.');
772
- }
773
-
774
- console.log(
775
- '🔍 Phase 2: Starting PDF detection for pedimento-simplificado documents...',
776
- );
777
- writeLog(
778
- '🔍 Phase 2: Starting PDF detection for pedimento-simplificado documents',
779
- );
780
-
781
- const detectionService = new FileDetectionService();
782
- const processingBatchSize = parseInt(options.batchSize) || 10; // Smaller batches for file I/O
783
- const queryBatchSize = 1000; // Process 1000 records at a time
784
-
785
- let totalDetected = 0;
786
- let totalProcessed = 0;
787
- let totalErrors = 0;
788
- let offset = 0;
789
- let chunkNumber = 1;
790
-
791
- console.log('📝 Processing PDF files in chunks of 1000 records...');
792
- writeLog('📝 Starting PDF detection processing in chunks of 1000 records');
793
-
794
- // Process records in chunks of 1000
795
- while (true) {
796
- console.log(
797
- `\n📥 Fetching chunk ${chunkNumber} (records ${offset + 1} to ${offset + queryBatchSize})...`,
798
- );
799
- writeLog(
800
- `📥 Fetching chunk ${chunkNumber} (records ${offset + 1} to ${offset + queryBatchSize})`,
801
- );
802
-
803
- // Fetch next chunk of PDF records
804
- const { data: pdfRecords, error: queryError } = await supabase
805
- .from('uploader')
806
- .select('id, original_path, filename, file_extension, status')
807
- .eq('status', 'fs-stats')
808
- .eq('file_extension', 'pdf')
809
- .ilike('filename', '%simp%')
810
- .range(offset, offset + queryBatchSize - 1);
811
-
812
- if (queryError) {
813
- throw new Error(
814
- `Failed to fetch PDF records chunk ${chunkNumber}: ${queryError.message}`,
815
- );
816
- }
817
-
818
- // If no records found, we're done
819
- if (!pdfRecords || pdfRecords.length === 0) {
820
- console.log(`📝 No more PDF files found. Processing completed.`);
821
- writeLog(
822
- `📝 No more PDF files found. Processing completed at chunk ${chunkNumber}`,
823
- );
824
- break;
825
- }
826
-
827
- console.log(
828
- `� Processing chunk ${chunkNumber}: ${pdfRecords.length} PDF records`,
829
- );
830
- writeLog(
831
- `📊 Processing chunk ${chunkNumber}: ${pdfRecords.length} PDF records`,
832
- );
833
-
834
- // Create progress bar for this chunk
835
- const progressBar = new cliProgress.SingleBar({
836
- format: `🔍 Chunk ${chunkNumber} |{bar}| {percentage}% | {value}/{total} | Detected: {detected} | Errors: {errors}`,
837
- barCompleteChar: '█',
838
- barIncompleteChar: '░',
839
- hideCursor: true,
840
- });
841
-
842
- progressBar.start(pdfRecords.length, 0, { detected: 0, errors: 0 });
843
-
844
- let chunkDetected = 0;
845
- let chunkProcessed = 0;
846
- let chunkErrors = 0;
847
-
848
- // Process files in smaller batches within this chunk
849
- for (let i = 0; i < pdfRecords.length; i += processingBatchSize) {
850
- const batch = pdfRecords.slice(i, i + processingBatchSize);
851
- const updatePromises = [];
852
-
853
- for (const record of batch) {
854
- try {
855
- // Check if file still exists
856
- if (!fs.existsSync(record.original_path)) {
857
- writeLog(
858
- `⚠️ FILE NOT FOUND: ${record.filename} at ${record.original_path}`,
859
- );
860
- updatePromises.push(
861
- supabase
862
- .from('uploader')
863
- .update({
864
- status: 'file-not-found',
865
- message: 'File no longer exists at original path',
866
- })
867
- .eq('id', record.id),
868
- );
869
- chunkErrors++;
870
- totalErrors++;
871
- continue;
872
- }
873
-
874
- // Perform detection
875
- const detection = await detectionService.detectFile(
876
- record.original_path,
877
- );
878
- chunkProcessed++;
879
- totalProcessed++;
880
-
881
- const updateData = {
882
- status: detection.detectedType ? 'detected' : 'not-detected',
883
- document_type: detection.detectedType,
884
- num_pedimento: detection.detectedPedimento,
885
- arela_path: detection.arelaPath,
886
- message: detection.error || null,
887
- };
888
-
889
- // Extract RFC from fields if available
890
- if (detection.fields) {
891
- const rfcField = detection.fields.find(
892
- (f) => f.name === 'rfc' && f.found,
893
- );
894
- if (rfcField) {
895
- updateData.rfc = rfcField.value;
896
- }
897
- }
898
-
899
- if (detection.detectedType) {
900
- chunkDetected++;
901
- totalDetected++;
902
- writeLog(
903
- `✅ DETECTED: ${record.filename} -> ${detection.detectedType} | Pedimento: ${detection.detectedPedimento || 'N/A'} | RFC: ${detection.fields?.rfc || 'N/A'}`,
904
- );
905
- } else {
906
- writeLog(
907
- `⏭️ NOT DETECTED: ${record.filename} - No pedimento-simplificado pattern found`,
908
- );
909
- }
910
-
911
- updatePromises.push(
912
- supabase.from('uploader').update(updateData).eq('id', record.id),
913
- );
914
- } catch (error) {
915
- console.error(
916
- `❌ Error detecting ${record.filename}:`,
917
- error.message,
918
- );
919
- writeLog(`❌ ERROR detecting ${record.filename}: ${error.message}`);
920
- chunkErrors++;
921
- totalErrors++;
922
-
923
- updatePromises.push(
924
- supabase
925
- .from('uploader')
926
- .update({
927
- status: 'detection-error',
928
- message: error.message,
929
- })
930
- .eq('id', record.id),
931
- );
932
- }
255
+ // Set verbose mode if requested globally
256
+ const args = process.argv;
257
+ if (args.includes('-v') || args.includes('--verbose')) {
258
+ logger.setVerbose(true);
933
259
  }
934
-
935
- // Execute all updates in parallel for this batch
936
- try {
937
- await Promise.all(updatePromises);
938
- } catch (error) {
939
- console.error(
940
- `❌ Error updating batch in chunk ${chunkNumber}:`,
941
- error.message,
942
- );
260
+
261
+ // Clear log if requested globally
262
+ if (args.includes('--clear-log')) {
263
+ logger.clearLogFile();
264
+ logger.info('Log file cleared');
943
265
  }
944
266
 
945
- // Update progress for this chunk
946
- progressBar.update(Math.min(i + processingBatchSize, pdfRecords.length), {
947
- detected: chunkDetected,
948
- errors: chunkErrors,
949
- });
950
- }
951
-
952
- progressBar.stop();
267
+ // Log application start
268
+ logger.info(`Arela Uploader v${appConfig.packageVersion} started`);
269
+ logger.info(`Command: ${args.slice(2).join(' ')}`);
953
270
 
954
- console.log(
955
- `✅ Chunk ${chunkNumber} completed: ${chunkDetected} detected, ${chunkProcessed} processed, ${chunkErrors} errors`,
956
- );
957
- writeLog(
958
- `✅ Chunk ${chunkNumber} completed: ${chunkDetected} detected, ${chunkProcessed} processed, ${chunkErrors} errors`,
959
- );
960
-
961
- // Move to next chunk
962
- offset += queryBatchSize;
963
- chunkNumber++;
964
-
965
- // If we got fewer records than queryBatchSize, we've reached the end
966
- if (pdfRecords.length < queryBatchSize) {
967
- console.log(
968
- `📝 Reached end of records (chunk had ${pdfRecords.length} records).`,
969
- );
970
- writeLog(
971
- `📝 Reached end of records (chunk had ${pdfRecords.length} records)`,
972
- );
973
- break;
271
+ // Parse and execute commands
272
+ await this.program.parseAsync();
273
+
274
+ } catch (error) {
275
+ this.errorHandler.handleFatalError(error, { context: 'cli-execution' });
974
276
  }
975
-
976
- // Small delay between chunks to avoid overwhelming the database
977
- await new Promise((resolve) => setTimeout(resolve, 500));
978
277
  }
278
+ }
979
279
 
980
- console.log(
981
- `📊 Phase 2 Summary: ${totalDetected} detected, ${totalProcessed} processed, ${totalErrors} errors`,
982
- );
983
-
984
- // Write comprehensive log summary
985
- writeLog(
986
- `📊 PHASE 2 PDF DETECTION COMPLETED - Summary: Detected: ${totalDetected} pedimento-simplificado documents, Processed: ${totalProcessed} PDF files, Errors: ${totalErrors}`,
987
- );
988
-
989
- // Ensure logs are flushed
990
- flushLogBuffer();
991
-
992
- return {
993
- detectedCount: totalDetected,
994
- processedCount: totalProcessed,
995
- errorCount: totalErrors,
996
- };
997
- };
998
-
999
- const processFilesInBatches = async (
1000
- files,
1001
- batchSize,
1002
- options,
1003
- basePath,
1004
- folder,
1005
- sourcePath,
1006
- processedPaths,
1007
- ) => {
1008
- let totalUploaded = 0;
1009
- let totalDetected = 0;
1010
- let totalOrganized = 0;
1011
- let totalErrors = 0;
1012
- let totalSkipped = 0;
1013
-
1014
- const messageBuffer = [];
1015
-
1016
- const progressBarFormat = options.statsOnly
1017
- ? '📊 Processing [{bar}] {percentage}% | {value}/{total} files | Stats: {successCount} | Errors: {failureCount} | Duplicates: {skippedCount}'
1018
- : '📂 Processing [{bar}] {percentage}% | {value}/{total} files | Success: {successCount} | Errors: {failureCount} | Skipped: {skippedCount}';
1019
-
1020
- const progressBar = new cliProgress.SingleBar({
1021
- format: progressBarFormat,
1022
- barCompleteChar: '█',
1023
- barIncompleteChar: '░',
1024
- hideCursor: true,
1025
- });
1026
-
1027
- progressBar.start(files.length, 0, {
1028
- successCount: 0,
1029
- failureCount: 0,
1030
- skippedCount: 0,
1031
- });
1032
-
1033
- if (options.statsOnly) {
1034
- // OPTIMIZED Stats-only mode - Only read filesystem stats, no file detection
1035
- console.log(
1036
- '📊 Phase 1: Processing files in optimized stats-only mode (no detection)...',
1037
- );
1038
-
1039
- for (let i = 0; i < files.length; i += batchSize) {
1040
- const batch = files.slice(i, i + batchSize);
1041
-
1042
- // OPTIMIZED: Batch read file stats to reduce I/O overhead
1043
- const fileStatsResults = batchReadFileStats(batch);
1044
- const statsFiles = fileStatsResults
1045
- .filter((result) => result.stats !== null) // Only include files with valid stats
1046
- .map((result) => {
1047
- const originalFileName = path.basename(result.path);
1048
-
1049
- return {
1050
- path: result.path,
1051
- originalName: originalFileName,
1052
- stats: result.stats, // Pass pre-computed stats to avoid redundant calls
1053
- };
1054
- });
1055
-
1056
- // Log any files that couldn't be read
1057
- const failedFiles = fileStatsResults.filter(
1058
- (result) => result.error !== null,
1059
- );
1060
- if (failedFiles.length > 0) {
1061
- console.log(
1062
- `⚠️ Could not read stats for ${failedFiles.length} files in batch`,
1063
- );
1064
- failedFiles.forEach((failed) => {
1065
- console.error(` ❌ ${failed.path}: ${failed.error}`);
1066
- });
1067
- }
1068
-
1069
- try {
1070
- const result = await insertStatsOnlyToUploaderTable(
1071
- statsFiles,
1072
- options,
1073
- );
1074
-
1075
- totalUploaded += result.totalInserted;
1076
- totalSkipped += result.totalSkipped;
1077
- totalErrors += failedFiles.length; // Count failed file reads as errors
1078
-
1079
- progressBar.update(Math.min(i + batch.length, files.length), {
1080
- successCount: totalUploaded,
1081
- failureCount: totalErrors,
1082
- skippedCount: totalSkipped,
1083
- });
1084
- } catch (error) {
1085
- console.error(`❌ Error processing stats batch:`, error.message);
1086
- totalErrors += batch.length;
1087
-
1088
- progressBar.update(Math.min(i + batch.length, files.length), {
1089
- successCount: totalUploaded,
1090
- failureCount: totalErrors,
1091
- skippedCount: totalSkipped,
1092
- });
1093
- }
1094
- }
1095
- } else if (apiMode && !options.forceSupabase) {
1096
- // API Mode - Process in batches
1097
- for (let i = 0; i < files.length; i += batchSize) {
1098
- const batch = files.slice(i, i + batchSize);
1099
- let sanitizedRelativePath;
1100
-
1101
- const apiFiles = batch
1102
- .map((file) => {
1103
- const relativePathRaw = path
1104
- .relative(basePath, file)
1105
- .replace(/^[\\/]+/, '')
1106
- .replace(/\\/g, '/');
1107
-
1108
- const pathParts = relativePathRaw.split('/');
1109
- const originalFileName = pathParts[pathParts.length - 1];
1110
- const sanitizedFileName = sanitizeFileName(originalFileName);
1111
- pathParts[pathParts.length - 1] = sanitizedFileName;
1112
- sanitizedRelativePath = pathParts.join('/');
1113
-
1114
- let uploadPath;
1115
-
1116
- // Handle combined folder structure + auto-detection
1117
- if (options.folderStructure && options.autoDetectStructure) {
1118
- // OPTIMIZED: Use cached detection to avoid redundant parsing
1119
- const detection = getCachedPathDetection(file, basePath);
1120
- if (detection.detected) {
1121
- const autoStructure = `${detection.year}/${detection.pedimento}`;
1122
- const combinedStructure = `${options.folderStructure}/${autoStructure}`;
1123
- uploadPath = path.posix.join(
1124
- combinedStructure,
1125
- sanitizedFileName,
1126
- );
1127
- logVerbose(
1128
- `📁 Combined structure: ${options.folderStructure}/${autoStructure} for ${originalFileName} -> ${uploadPath}`,
1129
- );
1130
- } else {
1131
- // Fallback to just custom structure if auto-detection fails
1132
- uploadPath = path.posix.join(
1133
- options.folderStructure,
1134
- sanitizedFileName,
1135
- );
1136
- logVerbose(
1137
- `📁 Custom structure (auto-detection failed): ${uploadPath}`,
1138
- );
1139
- }
1140
- } else if (options.folderStructure) {
1141
- // Use custom folder structure only
1142
- uploadPath = path.posix.join(
1143
- options.folderStructure,
1144
- sanitizedFileName,
1145
- );
1146
- logVerbose(`📁 Custom structure: ${uploadPath}`);
1147
- } else if (options.autoDetectStructure) {
1148
- // Auto-detect structure from path if enabled - OPTIMIZED: Use cached detection
1149
- const detection = getCachedPathDetection(file, basePath);
1150
- if (detection.detected) {
1151
- const autoStructure = `${detection.year}/${detection.pedimento}`;
1152
- uploadPath = path.posix.join(autoStructure, sanitizedFileName);
1153
- console.log(
1154
- `🔍 Auto-detected: ${autoStructure} for ${originalFileName} -> ${uploadPath}`,
1155
- );
1156
- } else {
1157
- uploadPath = options.prefix
1158
- ? path.posix.join(options.prefix, sanitizedRelativePath)
1159
- : sanitizedRelativePath;
1160
- console.log(`📁 Using relative path: ${uploadPath}`);
1161
- }
1162
- } else {
1163
- uploadPath = options.prefix
1164
- ? path.posix.join(options.prefix, sanitizedRelativePath)
1165
- : sanitizedRelativePath;
1166
- console.log(`📁 Using standard path: ${uploadPath}`);
1167
- }
1168
-
1169
- if (processedPaths.has(uploadPath)) {
1170
- totalSkipped++;
1171
- writeLog(`SKIPPED: ${file} -> ${uploadPath}`);
1172
- return null;
1173
- }
1174
-
1175
- return {
1176
- path: file,
1177
- name: sanitizedFileName,
1178
- originalName: originalFileName,
1179
- uploadPath: uploadPath.replace(/\\/g, '/'), // Ensure forward slashes
1180
- contentType: mime.lookup(file) || 'application/octet-stream',
1181
- };
1182
- })
1183
- .filter(Boolean);
1184
-
1185
- if (apiFiles.length > 0) {
1186
- // console.log(`🔄 Processing batch of ${apiFiles.length} files`);
1187
- // apiFiles.forEach(f => console.log(` 📄 ${f.name} -> ${f.uploadPath}`));
1188
-
1189
- try {
1190
- // Use clientPath from options if specified, otherwise construct from detection or folder
1191
- let clientPath = options.clientPath;
1192
-
1193
- if (!clientPath && apiFiles.length > 0) {
1194
- const firstFile = apiFiles[0];
1195
- // OPTIMIZED: Use cached detection to avoid redundant parsing
1196
- const detection = getCachedPathDetection(firstFile.path, basePath);
1197
- if (detection.detected) {
1198
- // clientPath = `${detection.year}/${detection.pedimento}/`;
1199
- clientPath = path
1200
- .resolve(basePath, sanitizedRelativePath)
1201
- .replace(/\\/g, '/');
1202
- } else {
1203
- // Fallback to folder structure if no year/pedimento detected
1204
- clientPath = path.resolve(basePath, folder).replace(/\\/g, '/');
1205
- }
1206
- }
1207
-
1208
- const result = await uploadToApi(apiFiles, {
1209
- ...options,
1210
- clientPath: clientPath,
1211
- });
1212
-
1213
- totalUploaded += result.stats.uploadedCount;
1214
- totalDetected += result.stats.detectedCount;
1215
- totalOrganized += result.stats.organizedCount;
1216
- totalErrors += result.stats.errorCount;
1217
-
1218
- result.uploaded.forEach((upload) => {
1219
- const apiFile = apiFiles.find(
1220
- (f) => f.name === upload.originalName,
1221
- );
1222
- if (apiFile) {
1223
- writeLog(`SUCCESS: ${apiFile.path} -> ${apiFile.uploadPath}`);
1224
- processedPaths.add(apiFile.uploadPath);
1225
- }
1226
- });
1227
-
1228
- // Update status to "file-uploaded" for successfully uploaded files
1229
- if (result.uploaded && result.uploaded.length > 0 && supabase) {
1230
- try {
1231
- const uploadedFilePaths = result.uploaded
1232
- .map((upload) => {
1233
- const apiFile = apiFiles.find(
1234
- (f) =>
1235
- f.name === upload.originalName ||
1236
- f.originalName === upload.originalName,
1237
- );
1238
- return apiFile ? apiFile.path : null;
1239
- })
1240
- .filter(Boolean);
1241
-
1242
- if (uploadedFilePaths.length > 0) {
1243
- await supabase
1244
- .from('uploader')
1245
- .update({ status: 'file-uploaded' })
1246
- .in('original_path', uploadedFilePaths);
1247
-
1248
- console.log(
1249
- ` 📝 Updated status to "file-uploaded" for ${uploadedFilePaths.length} files`,
1250
- );
1251
- }
1252
- } catch (error) {
1253
- console.error(
1254
- ` ⚠️ Error updating status for uploaded files: ${error.message}`,
1255
- );
1256
- }
1257
- }
1258
-
1259
- result.errors.forEach((error) => {
1260
- writeLog(
1261
- `ERROR: ${error.fileName}: ${error.error} (${error.step})`,
1262
- );
1263
- messageBuffer.push(
1264
- `❌ ${error.fileName}: ${error.error} (${error.step})`,
1265
- );
1266
- });
1267
- } catch (error) {
1268
- totalErrors += apiFiles.length;
1269
- apiFiles.forEach((file) => {
1270
- writeLog(`ERROR: ${file.path}: ${error.message}`);
1271
- messageBuffer.push(`❌ ${file.name}: ${error.message}`);
1272
- });
1273
- }
1274
- }
1275
-
1276
- progressBar.update(i + batch.length, {
1277
- successCount: totalUploaded,
1278
- failureCount: totalErrors,
1279
- skippedCount: totalSkipped,
1280
- });
1281
-
1282
- if (i + batchSize < files.length) {
1283
- await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY));
1284
- }
1285
- }
1286
- } else {
1287
- // Direct Supabase mode
1288
- for (let i = 0; i < files.length; i++) {
1289
- const file = files[i];
1290
- try {
1291
- const relativePath = path.relative(basePath, file);
1292
- let uploadPath;
1293
-
1294
- // Handle combined folder structure + auto-detection
1295
- if (options.folderStructure && options.autoDetectStructure) {
1296
- const detection = getCachedPathDetection(file, basePath);
1297
- if (detection.detected) {
1298
- const autoStructure = `${detection.year}/${detection.pedimento}`;
1299
- const combinedStructure = `${options.folderStructure}/${autoStructure}`;
1300
- const fileName = path.basename(file);
1301
- uploadPath = path.join(combinedStructure, fileName);
1302
- console.log(
1303
- `📁 Combined structure: ${options.folderStructure}/${autoStructure} for ${fileName}`,
1304
- );
1305
- } else {
1306
- // Fallback to just custom structure if auto-detection fails
1307
- const fileName = path.basename(file);
1308
- uploadPath = path.join(options.folderStructure, fileName);
1309
- console.log(
1310
- `📁 Custom structure (auto-detection failed): ${uploadPath}`,
1311
- );
1312
- }
1313
- } else if (options.folderStructure) {
1314
- // Use custom folder structure only
1315
- const fileName = path.basename(file);
1316
- uploadPath = path.join(options.folderStructure, fileName);
1317
- console.log(`📁 Custom structure: ${uploadPath}`);
1318
- } else if (options.autoDetectStructure) {
1319
- // Auto-detect structure from path if enabled - OPTIMIZED: Use cached detection
1320
- const detection = getCachedPathDetection(file, basePath);
1321
- if (detection.detected) {
1322
- const autoStructure = `${detection.year}/${detection.pedimento}`;
1323
- const fileName = path.basename(file);
1324
- uploadPath = path.join(autoStructure, fileName);
1325
- } else {
1326
- uploadPath = options.prefix
1327
- ? path.join(options.prefix, relativePath)
1328
- : relativePath;
1329
- }
1330
- } else {
1331
- uploadPath = options.prefix
1332
- ? path.join(options.prefix, relativePath)
1333
- : relativePath;
1334
- }
1335
-
1336
- if (processedPaths.has(uploadPath)) {
1337
- totalSkipped++;
1338
- writeLog(`SKIPPED: ${file} -> ${uploadPath}`);
1339
-
1340
- // Update status to "file-uploaded" for skipped files (they already exist)
1341
- if (supabase) {
1342
- try {
1343
- await supabase
1344
- .from('uploader')
1345
- .update({ status: 'file-uploaded' })
1346
- .eq('original_path', file);
1347
- } catch (error) {
1348
- console.error(
1349
- ` ⚠️ Error updating status for skipped file: ${error.message}`,
1350
- );
1351
- }
1352
- }
1353
- } else {
1354
- await uploadToSupabase(file, uploadPath);
1355
- totalUploaded++;
1356
- writeLog(`SUCCESS: ${file} -> ${uploadPath}`);
1357
- processedPaths.add(uploadPath);
1358
-
1359
- // Update status to "file-uploaded" for successfully uploaded files
1360
- if (supabase) {
1361
- try {
1362
- await supabase
1363
- .from('uploader')
1364
- .update({ status: 'file-uploaded' })
1365
- .eq('original_path', file);
1366
- } catch (error) {
1367
- console.error(
1368
- ` ⚠️ Error updating status for uploaded file: ${error.message}`,
1369
- );
1370
- }
1371
- }
1372
- }
1373
- } catch (error) {
1374
- totalErrors++;
1375
- writeLog(`ERROR: ${file}: ${error.message}`);
1376
- messageBuffer.push(`❌ ${path.basename(file)}: ${error.message}`);
1377
- }
1378
-
1379
- progressBar.update(i + 1, {
1380
- successCount: totalUploaded,
1381
- failureCount: totalErrors,
1382
- skippedCount: totalSkipped,
1383
- });
1384
- }
1385
- }
1386
-
1387
- progressBar.stop();
1388
-
1389
- const errorMessages = messageBuffer.filter((msg) => msg.startsWith('❌'));
1390
- if (errorMessages.length > 0) {
1391
- console.log('\n🚨 Errors encountered during processing:');
1392
- errorMessages.forEach((msg) => console.error(msg));
1393
- }
1394
-
1395
- return {
1396
- successCount: totalUploaded,
1397
- detectedCount: totalDetected,
1398
- organizedCount: totalOrganized,
1399
- failureCount: totalErrors,
1400
- skippedCount: totalSkipped,
1401
- };
1402
- };
1403
-
1404
- /**
1405
- * Upload files to Arela API based on specific RFC values
1406
- */
1407
- const uploadFilesByRfc = async (options = {}) => {
1408
- if (!supabase) {
1409
- console.error('❌ Supabase client not initialized');
1410
- process.exit(1);
1411
- }
1412
-
1413
- if (!API_BASE_URL || !API_TOKEN) {
1414
- console.error(
1415
- '❌ Arela API configuration missing. Please set ARELA_API_URL and ARELA_API_TOKEN environment variables.',
1416
- );
1417
- process.exit(1);
1418
- }
1419
-
1420
- if (!uploadRfcs || uploadRfcs.length === 0) {
1421
- console.error(
1422
- '❌ No RFCs specified. Please set UPLOAD_RFCS environment variable with pipe-separated RFC values.',
1423
- );
1424
- console.error(
1425
- ' Example: UPLOAD_RFCS="RFC123456789|RFC987654321|RFC555444333"',
1426
- );
1427
- process.exit(1);
1428
- }
1429
-
1430
- console.log('🎯 RFC-based Upload Mode');
1431
- console.log(`📋 Target RFCs: ${uploadRfcs.join(', ')}`);
1432
- console.log('🔍 Searching for files to upload...');
1433
-
1434
- // First, count total files for the RFCs to show filtering effect
1435
- const { count: totalRfcFiles, error: countError } = await supabase
1436
- .from('uploader')
1437
- .select('*', { count: 'exact', head: true })
1438
- .in('rfc', uploadRfcs)
1439
- .not('arela_path', 'is', null);
1440
-
1441
- if (countError) {
1442
- console.warn('⚠️ Could not count total RFC files:', countError.message);
1443
- } else {
1444
- console.log(`📊 Total files for specified RFCs: ${totalRfcFiles || 0}`);
1445
- }
1446
-
1447
- // Step 1: Get all pedimento_simplificado records that match the specified RFCs and have arela_path
1448
- console.log(
1449
- '🎯 Finding pedimento_simplificado records for specified RFCs...',
1450
- );
1451
- const { data: pedimentoRfcRecords, error: pedimentoRfcError } = await supabase
1452
- .from('uploader')
1453
- .select('arela_path')
1454
- .eq('document_type', 'pedimento_simplificado')
1455
- .in('rfc', uploadRfcs)
1456
- .not('arela_path', 'is', null);
1457
-
1458
- if (pedimentoRfcError) {
1459
- console.error(
1460
- '❌ Error fetching pedimento RFC records:',
1461
- pedimentoRfcError.message,
1462
- );
1463
- return { processedCount: 0, uploadedCount: 0, errorCount: 1 };
1464
- }
1465
-
1466
- if (!pedimentoRfcRecords || pedimentoRfcRecords.length === 0) {
1467
- console.log(
1468
- 'ℹ️ No pedimento_simplificado records found for the specified RFCs with arela_path',
1469
- );
1470
- return { processedCount: 0, uploadedCount: 0, errorCount: 0 };
1471
- }
1472
-
1473
- // Get unique arela_paths from pedimento records
1474
- const uniqueArelaPaths = [
1475
- ...new Set(pedimentoRfcRecords.map((r) => r.arela_path)),
1476
- ];
1477
- console.log(
1478
- `📋 Found ${pedimentoRfcRecords.length} pedimento records with ${uniqueArelaPaths.length} unique arela_paths for specified RFCs`,
1479
- );
1480
-
1481
- // Step 2: Get all files with these arela_paths that haven't been uploaded yet
1482
- let rfcRecords = [];
1483
- const chunkSize = 50;
1484
-
1485
- for (let i = 0; i < uniqueArelaPaths.length; i += chunkSize) {
1486
- const pathChunk = uniqueArelaPaths.slice(i, i + chunkSize);
1487
-
1488
- const { data: chunkFiles, error: chunkError } = await supabase
1489
- .from('uploader')
1490
- .select('arela_path')
1491
- .in('arela_path', pathChunk)
1492
- .neq('status', 'file-uploaded')
1493
- .not('arela_path', 'is', null);
1494
-
1495
- if (chunkError) {
1496
- console.error(
1497
- '❌ Error fetching files for arela_paths chunk:',
1498
- chunkError.message,
1499
- );
1500
- return { processedCount: 0, uploadedCount: 0, errorCount: 1 };
1501
- }
1502
-
1503
- if (chunkFiles && chunkFiles.length > 0) {
1504
- rfcRecords = rfcRecords.concat(chunkFiles);
1505
- }
1506
- }
1507
-
1508
- if (!rfcRecords || rfcRecords.length === 0) {
1509
- if (totalRfcFiles && totalRfcFiles > 0) {
1510
- console.log(
1511
- `ℹ️ All ${totalRfcFiles} files for the specified RFCs are already uploaded (status: file-uploaded)`,
1512
- );
1513
- console.log(' No new files to upload.');
1514
- } else {
1515
- console.log('ℹ️ No files found for the specified RFCs with arela_path');
1516
- console.log(
1517
- ` Make sure files for RFCs [${uploadRfcs.join(', ')}] have been processed and have arela_path values`,
1518
- );
1519
- }
1520
- return { processedCount: 0, uploadedCount: 0, errorCount: 0 };
1521
- }
1522
-
1523
- // Show filtering effect
1524
- const uploadableArelaPaths = [
1525
- ...new Set(rfcRecords.map((r) => r.arela_path)),
1526
- ];
1527
- const skipped = (totalRfcFiles || 0) - rfcRecords.length;
1528
- if (skipped > 0) {
1529
- console.log(
1530
- `📊 Found ${rfcRecords.length} files ready for upload (${skipped} already uploaded, skipped)`,
1531
- );
1532
- } else {
1533
- console.log(`📊 Found ${rfcRecords.length} files ready for upload`);
1534
- }
1535
-
1536
- console.log(
1537
- `🎯 Found ${uploadableArelaPaths.length} unique arela_path(s) with files ready for upload`,
1538
- );
1539
-
1540
- // Step 3: Get ALL files that have these arela_paths (including supporting documents)
1541
- // Process arela_paths in smaller chunks to avoid URI length limits
1542
- let allRelatedFiles = [];
1543
- const arelaPathChunkSize = 50; // Process 50 arela_paths at a time to avoid URI limits
1544
- const queryBatchSize = 1000;
1545
-
1546
- console.log(
1547
- '📥 Fetching all related files (processing arela_paths in chunks to avoid URI limits)...',
1548
- );
1549
-
1550
- // Process arela_paths in chunks
1551
- for (let i = 0; i < uploadableArelaPaths.length; i += arelaPathChunkSize) {
1552
- const arelaPathChunk = uploadableArelaPaths.slice(
1553
- i,
1554
- i + arelaPathChunkSize,
1555
- );
1556
- console.log(
1557
- ` Processing arela_path chunk ${Math.floor(i / arelaPathChunkSize) + 1}/${Math.ceil(uploadableArelaPaths.length / arelaPathChunkSize)} (${arelaPathChunk.length} paths)`,
1558
- );
1559
-
1560
- // For each chunk of arela_paths, use pagination to get all related files
1561
- let hasMore = true;
1562
- let offset = 0;
1563
-
1564
- while (hasMore) {
1565
- const { data: batch, error: queryError } = await supabase
1566
- .from('uploader')
1567
- .select('id, original_path, arela_path, filename, rfc, document_type')
1568
- .in('arela_path', arelaPathChunk)
1569
- .not('original_path', 'is', null)
1570
- .neq('status', 'file-uploaded')
1571
- .range(offset, offset + queryBatchSize - 1);
1572
-
1573
- if (queryError) {
1574
- console.error(
1575
- `❌ Error fetching related files for chunk ${Math.floor(i / arelaPathChunkSize) + 1}:`,
1576
- queryError.message,
1577
- );
1578
- return { processedCount: 0, uploadedCount: 0, errorCount: 1 };
1579
- }
1580
-
1581
- if (!batch || batch.length === 0) {
1582
- hasMore = false;
1583
- } else {
1584
- allRelatedFiles = allRelatedFiles.concat(batch);
1585
- offset += queryBatchSize;
1586
-
1587
- // If we got less than queryBatchSize, we've reached the end for this chunk
1588
- if (batch.length < queryBatchSize) {
1589
- hasMore = false;
1590
- }
1591
- }
1592
- }
1593
-
1594
- // Small delay between chunks to avoid overwhelming the database
1595
- if (i + arelaPathChunkSize < uploadableArelaPaths.length) {
1596
- await new Promise((resolve) => setTimeout(resolve, 100));
1597
- }
1598
- }
1599
-
1600
- if (!allRelatedFiles || allRelatedFiles.length === 0) {
1601
- console.log('ℹ️ No related files found for the arela_paths');
1602
- return { processedCount: 0, uploadedCount: 0, errorCount: 0 };
1603
- }
1604
-
1605
- console.log(
1606
- `📁 Found ${allRelatedFiles.length} total files to upload (including supporting documents, excluding already uploaded)`,
1607
- );
1608
-
1609
- // Group by RFC and arela_path for better organization
1610
- const filesByRfc = allRelatedFiles.reduce((acc, record) => {
1611
- const rfc = record.rfc || 'No RFC';
1612
- if (!acc[rfc]) {
1613
- acc[rfc] = [];
1614
- }
1615
- acc[rfc].push(record);
1616
- return acc;
1617
- }, {});
1618
-
1619
- console.log('📊 Files by RFC (including supporting documents):');
1620
- for (const [rfc, files] of Object.entries(filesByRfc)) {
1621
- const documentTypes = [
1622
- ...new Set(files.map((f) => f.document_type || 'Unknown')),
1623
- ];
1624
- console.log(
1625
- ` ${rfc}: ${files.length} files (${documentTypes.join(', ')})`,
1626
- );
1627
- }
1628
-
1629
- // Group by arela_path for upload organization
1630
- const filesByPath = allRelatedFiles.reduce((acc, record) => {
1631
- const path = record.arela_path;
1632
- if (!acc[path]) {
1633
- acc[path] = [];
1634
- }
1635
- acc[path].push(record);
1636
- return acc;
1637
- }, {});
1638
-
1639
-   console.log('📂 Files grouped by arela_path:');
1640
- for (const [path, files] of Object.entries(filesByPath)) {
1641
- console.log(` ${path}: ${files.length} files`);
1642
- }
1643
-
1644
- let totalProcessed = 0;
1645
- let totalUploaded = 0;
1646
- let totalErrors = 0;
1647
- let totalSkipped = 0;
1648
-
1649
- // Create progress bar
1650
- const progressBar = new cliProgress.SingleBar({
1651
- format:
1652
- '🚀 Uploading files |{bar}| {percentage}% | {value}/{total} | Uploaded: {uploaded} | Errors: {errors} | Skipped: {skipped}',
1653
- barCompleteChar: '█',
1654
- barIncompleteChar: '░',
1655
- hideCursor: true,
1656
- });
1657
-
1658
- if (options.showProgress !== false) {
1659
- progressBar.start(allRelatedFiles.length, 0, {
1660
- uploaded: 0,
1661
- errors: 0,
1662
- skipped: 0,
1663
- });
1664
- }
1665
-
1666
- const batchSize = parseInt(options.batchSize) || 10;
1667
- console.log(`📦 Processing in batches of ${batchSize} files`);
1668
-
1669
- // Process files in batches
1670
- for (let i = 0; i < allRelatedFiles.length; i += batchSize) {
1671
- const batch = allRelatedFiles.slice(i, i + batchSize);
1672
- const batchNumber = Math.floor(i / batchSize) + 1;
1673
- const totalBatches = Math.ceil(allRelatedFiles.length / batchSize);
1674
-
1675
- console.log(
1676
- `\n📦 Processing batch ${batchNumber}/${totalBatches} (${batch.length} files)`,
1677
- );
1678
-
1679
- // Prepare files for upload
1680
- const filesToUpload = [];
1681
-
1682
- for (const record of batch) {
1683
- totalProcessed++;
1684
-
1685
- try {
1686
- const originalPath = record.original_path;
1687
-
1688
- // Check if file exists
1689
- if (!fs.existsSync(originalPath)) {
1690
- console.log(` ⚠️ File not found: ${originalPath}`);
1691
- totalSkipped++;
1692
- continue;
1693
- }
1694
-
1695
- // OPTIMIZED: Read file and get size from buffer instead of separate fs.statSync call
1696
- const fileBuffer = fs.readFileSync(originalPath);
1697
-
1698
- filesToUpload.push({
1699
- path: originalPath,
1700
- buffer: fileBuffer,
1701
- size: fileBuffer.length, // Get size from buffer instead of fs.statSync
1702
- name: record.filename,
1703
- arelaPath: record.arela_path,
1704
- rfc: record.rfc,
1705
- documentType: record.document_type,
1706
- });
1707
- } catch (error) {
1708
- console.error(
1709
- ` ❌ Error reading file ${record.original_path}:`,
1710
- error.message,
1711
- );
1712
- totalErrors++;
1713
- }
1714
-
1715
- if (options.showProgress !== false) {
1716
- progressBar.update(totalProcessed, {
1717
- uploaded: totalUploaded,
1718
- errors: totalErrors,
1719
- skipped: totalSkipped,
1720
- });
1721
- }
1722
- }
1723
-
1724
- // Upload the batch if we have files
1725
- if (filesToUpload.length > 0) {
1726
- try {
1727
- console.log(
1728
- ` 🚀 Uploading ${filesToUpload.length} files to Arela API...`,
1729
- );
1730
-
1731
- const formData = new FormData();
1732
-
1733
- // Add files to form data
1734
- filesToUpload.forEach((file, index) => {
1735
- formData.append(`files`, file.buffer, {
1736
- filename: file.name,
1737
- contentType: mime.lookup(file.name) || 'application/octet-stream',
1738
- });
1739
- });
1740
-
1741
- // Instead of using per-file folder structures, we'll group by arela_path and upload separately
1742
- // Group files by their arela_path to upload them in correct structure
1743
- const filesByPath = filesToUpload.reduce((acc, file) => {
1744
- const path = file.arelaPath.replace(/\/$/, '');
1745
- if (!acc[path]) {
1746
- acc[path] = [];
1747
- }
1748
- acc[path].push(file);
1749
- return acc;
1750
- }, {});
1751
-
1752
- // Upload each group separately with its folder structure
1753
- for (const [arelaPath, pathFiles] of Object.entries(filesByPath)) {
1754
- const pathFormData = new FormData();
1755
-
1756
- pathFiles.forEach((file) => {
1757
- pathFormData.append('files', file.buffer, {
1758
- filename: file.name,
1759
- contentType: mime.lookup(file.name) || 'application/octet-stream',
1760
- });
1761
- });
1762
-
1763
- // Set folder structure for this group - concatenate custom prefix with arela_path
1764
- const folderStructure = options.folderStructure
1765
- ? `${options.folderStructure}/${arelaPath}`
1766
- .replace(/\/+/g, '/')
1767
- .replace(/\/$/, '')
1768
- : arelaPath;
1769
- pathFormData.append('folderStructure', folderStructure);
1770
- pathFormData.append('autoDetect', 'true');
1771
- pathFormData.append('autoOrganize', 'false');
1772
- pathFormData.append('batchSize', String(pathFiles.length));
1773
- pathFormData.append('clientVersion', packageVersion);
1774
- if (bucket) {
1775
- pathFormData.append('bucket', bucket);
1776
- }
1777
-
1778
- console.log(
1779
- ` 📁 Uploading ${pathFiles.length} files to: ${folderStructure}`,
1780
- );
1781
-
1782
- const response = await fetch(
1783
- `${API_BASE_URL}/api/storage/batch-upload-and-process`,
1784
- {
1785
- method: 'POST',
1786
- headers: {
1787
- 'x-api-key': API_TOKEN,
1788
- },
1789
- body: pathFormData,
1790
- },
1791
- );
1792
-
1793
- if (!response.ok) {
1794
- const errorText = await response.text();
1795
- throw new Error(`HTTP ${response.status}: ${errorText}`);
1796
- }
1797
-
1798
- const result = await response.json();
1799
-
1800
- // Check if upload was successful based on stats rather than success field
1801
- const isSuccessful =
1802
- result.stats &&
1803
- result.stats.uploadedCount > 0 &&
1804
- result.stats.errorCount === 0;
1805
-
1806
- if (isSuccessful) {
1807
- console.log(
1808
- ` ✅ Group uploaded: ${result.stats.uploadedCount} files to ${folderStructure}`,
1809
- );
1810
- totalUploaded += result.stats.uploadedCount;
1811
-
1812
- if (result.stats.detectedCount > 0) {
1813
- console.log(
1814
- ` 🔍 Files detected: ${result.stats.detectedCount}`,
1815
- );
1816
- }
1817
- if (result.stats.organizedCount > 0) {
1818
- console.log(
1819
- ` 📁 Files organized: ${result.stats.organizedCount}`,
1820
- );
1821
- }
1822
-
1823
- // Update status to "file-uploaded" for successfully uploaded files
1824
- try {
1825
- const uploadedFilePaths = pathFiles.map((file) => file.path);
1826
- await supabase
1827
- .from('uploader')
1828
- .update({ status: 'file-uploaded' })
1829
- .in('original_path', uploadedFilePaths);
1830
-
1831
- console.log(
1832
- ` 📝 Updated status to "file-uploaded" for ${uploadedFilePaths.length} files`,
1833
- );
1834
- } catch (error) {
1835
- console.error(
1836
- ` ⚠️ Error updating status for uploaded files: ${error.message}`,
1837
- );
1838
- }
1839
- } else {
1840
- console.error(` ❌ Upload failed for ${folderStructure}:`);
1841
- if (result.errors && result.errors.length > 0) {
1842
- result.errors.forEach((error) => {
1843
- console.error(` - ${error.fileName}: ${error.error}`);
1844
- });
1845
- }
1846
- totalErrors += pathFiles.length;
1847
- }
1848
-
1849
- // Handle files that already exist (usually indicated in result.uploaded or result.skipped)
1850
- if (result.uploaded && result.uploaded.length > 0) {
1851
- try {
1852
- const alreadyUploadedPaths = result.uploaded
1853
- .filter(
1854
- (upload) =>
1855
- upload.status === 'already_exists' || upload.alreadyExists,
1856
- )
1857
- .map((upload) => {
1858
- // Find the corresponding file path from pathFiles
1859
- const matchingFile = pathFiles.find(
1860
- (f) =>
1861
- f.name === upload.fileName ||
1862
- f.name === upload.originalName,
1863
- );
1864
- return matchingFile ? matchingFile.path : null;
1865
- })
1866
- .filter(Boolean);
1867
-
1868
- if (alreadyUploadedPaths.length > 0) {
1869
- await supabase
1870
- .from('uploader')
1871
- .update({ status: 'file-uploaded' })
1872
- .in('original_path', alreadyUploadedPaths);
1873
-
1874
- console.log(
1875
- ` 📝 Updated status to "file-uploaded" for ${alreadyUploadedPaths.length} already existing files`,
1876
- );
1877
- }
1878
- } catch (error) {
1879
- console.error(
1880
- ` ⚠️ Error updating status for already existing files: ${error.message}`,
1881
- );
1882
- }
1883
- }
1884
-
1885
- // Small delay between path groups
1886
- await new Promise((resolve) => setTimeout(resolve, 100));
1887
- }
1888
- } catch (error) {
1889
- console.error(
1890
- ` ❌ Error uploading batch ${batchNumber}:`,
1891
- error.message,
1892
- );
1893
- totalErrors += filesToUpload.length;
1894
- }
1895
- }
1896
-
1897
- // Small delay between batches
1898
- if (i + batchSize < allRelatedFiles.length) {
1899
- await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY));
1900
- }
1901
- }
1902
-
1903
- if (options.showProgress !== false) {
1904
- progressBar.stop();
1905
- }
1906
-
1907
- console.log(`\n${'='.repeat(60)}`);
1908
- console.log(`🎯 RFC-BASED UPLOAD COMPLETED`);
1909
- console.log(`${'='.repeat(60)}`);
1910
- console.log(` 📋 Files processed: ${totalProcessed}`);
1911
- console.log(` ✅ Files uploaded: ${totalUploaded}`);
1912
- console.log(` ⏭️ Files skipped: ${totalSkipped}`);
1913
- console.log(` ❌ Errors: ${totalErrors}`);
1914
- console.log(`${'='.repeat(60)}\n`);
1915
-
1916
- return {
1917
- processedCount: totalProcessed,
1918
- uploadedCount: totalUploaded,
1919
- skippedCount: totalSkipped,
1920
- errorCount: totalErrors,
1921
- };
1922
- };
1923
-
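uploadFilesByRfc groups files by arela_path and sends each group as one multipart request to the batch endpoint. A minimal sketch of a single group upload is shown below, using the same endpoint, header, and form fields that appear above; uploadGroup itself is a hypothetical helper, not part of the package.

import FormData from 'form-data';
import fetch from 'node-fetch';
import mime from 'mime-types';

// Sketch only: one per-arela_path upload. `files` is an array of { name, buffer };
// other form fields used above (autoDetect, autoOrganize, clientVersion, bucket) are omitted here.
const uploadGroup = async (apiBaseUrl, apiToken, folderStructure, files) => {
  const form = new FormData();
  for (const file of files) {
    form.append('files', file.buffer, {
      filename: file.name,
      contentType: mime.lookup(file.name) || 'application/octet-stream',
    });
  }
  form.append('folderStructure', folderStructure);
  form.append('batchSize', String(files.length));

  const response = await fetch(`${apiBaseUrl}/api/storage/batch-upload-and-process`, {
    method: 'POST',
    headers: { 'x-api-key': apiToken },
    body: form,
  });
  if (!response.ok) {
    throw new Error(`HTTP ${response.status}: ${await response.text()}`);
  }
  return response.json();
};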
1924
- /**
1925
- * Propagate arela_path from pedimento_simplificado records to related files with same base path
1926
- */
1927
- const propagateArelaPath = async (options = {}) => {
1928
- if (!supabase) {
1929
- console.error('❌ Supabase client not initialized');
1930
- process.exit(1);
1931
- }
1932
-
1933
- console.log('🔍 Finding pedimento_simplificado records with arela_path...');
1934
- writeLog('🔍 Starting arela_path propagation process');
1935
-
1936
- // Get all pedimento_simplificado records that have arela_path
1937
- const { data: pedimentoRecords, error: pedimentoError } = await supabase
1938
- .from('uploader')
1939
- .select('id, original_path, arela_path, filename')
1940
- .eq('document_type', 'pedimento_simplificado')
1941
- .not('arela_path', 'is', null);
1942
-
1943
- if (pedimentoError) {
1944
- console.error(
1945
- '❌ Error fetching pedimento records:',
1946
- pedimentoError.message,
1947
- );
1948
- return { processedCount: 0, updatedCount: 0, errorCount: 1 };
1949
- }
1950
-
1951
- if (!pedimentoRecords || pedimentoRecords.length === 0) {
1952
- console.log('ℹ️ No pedimento_simplificado records with arela_path found');
1953
- writeLog('ℹ️ No pedimento_simplificado records with arela_path found');
1954
- return { processedCount: 0, updatedCount: 0, errorCount: 0 };
1955
- }
1956
-
1957
- console.log(
1958
- `📋 Found ${pedimentoRecords.length} pedimento records with arela_path`,
1959
- );
1960
- writeLog(
1961
- `📋 Found ${pedimentoRecords.length} pedimento records with arela_path to process`,
1962
- );
1963
-
1964
- let totalProcessed = 0;
1965
- let totalUpdated = 0;
1966
- let totalErrors = 0;
1967
-
1968
- // Create progress bar
1969
- const progressBar = new cliProgress.SingleBar({
1970
- format:
1971
- '🔄 Propagating paths |{bar}| {percentage}% | {value}/{total} | Updated: {updated} | Errors: {errors}',
1972
- barCompleteChar: '█',
1973
- barIncompleteChar: '░',
1974
- hideCursor: true,
1975
- });
1976
-
1977
- if (options.showProgress !== false) {
1978
- progressBar.start(pedimentoRecords.length, 0, {
1979
- updated: 0,
1980
- errors: 0,
1981
- });
1982
- }
1983
-
1984
- // Process each pedimento record
1985
- for (const pedimento of pedimentoRecords) {
1986
- try {
1987
- totalProcessed++;
1988
-
1989
- // Extract base path from original_path (remove filename)
1990
- const basePath = path.dirname(pedimento.original_path);
1991
-
1992
- console.log(`\n🔍 Processing: ${pedimento.filename}`);
1993
- console.log(` 📁 Base path: ${basePath}`);
1994
- writeLog(
1995
- `🔍 Processing pedimento: ${pedimento.filename} | Base path: ${basePath}`,
1996
- );
1997
-
1998
- // Extract folder part from existing arela_path by removing the filename
1999
- const existingPath = pedimento.arela_path;
2000
- const folderArelaPath = existingPath.includes('/')
2001
- ? existingPath.substring(0, existingPath.lastIndexOf('/')) + '/'
2002
- : existingPath.endsWith('/')
2003
- ? existingPath
2004
- : existingPath + '/';
2005
-
2006
- console.log(` 🎯 Original arela path: ${existingPath}`);
2007
- console.log(` 📁 Folder arela path: ${folderArelaPath}`);
2008
-
2009
- // Find all files with the same base path that don't have arela_path yet
2010
- const { data: relatedFiles, error: relatedError } = await supabase
2011
- .from('uploader')
2012
- .select('id, filename, original_path')
2013
- .like('original_path', `${basePath}%`)
2014
- .is('arela_path', null)
2015
- .neq('id', pedimento.id); // Exclude the pedimento itself
2016
-
2017
- if (relatedError) {
2018
- console.error(
2019
- `❌ Error finding related files for ${pedimento.filename}:`,
2020
- relatedError.message,
2021
- );
2022
- totalErrors++;
2023
- continue;
2024
- }
2025
-
2026
- if (!relatedFiles || relatedFiles.length === 0) {
2027
- console.log(` ℹ️ No related files found needing arela_path update`);
2028
- writeLog(`ℹ️ No related files found for ${pedimento.filename}`);
2029
- continue;
2030
- }
2031
-
2032
- console.log(
2033
- ` 📄 Found ${relatedFiles.length} related files to update:`,
2034
- );
2035
- writeLog(
2036
- `📄 Found ${relatedFiles.length} related files to update for ${pedimento.filename}`,
2037
- );
2038
-
2039
- // Show first 10 files, then indicate if there are more
2040
- const filesToShow = relatedFiles.slice(0, 10);
2041
- filesToShow.forEach((file) => {
2042
- console.log(` - ${file.filename}`);
2043
- });
2044
-
2045
- if (relatedFiles.length > 10) {
2046
- console.log(` ... and ${relatedFiles.length - 10} more files`);
2047
- }
2048
-
2049
- // Process files in batches to avoid URI length limitations
2050
- const BATCH_SIZE = 50; // Process 50 files at a time
2051
- const fileIds = relatedFiles.map((f) => f.id);
2052
- let batchErrors = 0;
2053
- let batchUpdated = 0;
2054
-
2055
- console.log(
2056
- ` 🔄 Processing ${relatedFiles.length} files in batches of ${BATCH_SIZE}...`,
2057
- );
2058
-
2059
- for (let i = 0; i < fileIds.length; i += BATCH_SIZE) {
2060
- const batchIds = fileIds.slice(i, i + BATCH_SIZE);
2061
- const batchNumber = Math.floor(i / BATCH_SIZE) + 1;
2062
- const totalBatches = Math.ceil(fileIds.length / BATCH_SIZE);
2063
-
2064
- console.log(
2065
- ` 📦 Batch ${batchNumber}/${totalBatches}: Updating ${batchIds.length} files...`,
2066
- );
2067
-
2068
- try {
2069
- const { error: updateError } = await supabase
2070
- .from('uploader')
2071
- .update({ arela_path: folderArelaPath })
2072
- .in('id', batchIds);
2073
-
2074
- if (updateError) {
2075
- console.error(
2076
- ` ❌ Error in batch ${batchNumber}:`,
2077
- updateError.message,
2078
- );
2079
- batchErrors++;
2080
- } else {
2081
- console.log(
2082
- ` ✅ Batch ${batchNumber} completed: ${batchIds.length} files updated`,
2083
- );
2084
- batchUpdated += batchIds.length;
2085
- }
2086
- } catch (error) {
2087
- console.error(
2088
- ` ❌ Exception in batch ${batchNumber}:`,
2089
- error.message,
2090
- );
2091
- batchErrors++;
2092
- }
2093
-
2094
- // Small delay between batches to avoid overwhelming the database
2095
- if (i + BATCH_SIZE < fileIds.length) {
2096
- await new Promise((resolve) => setTimeout(resolve, 100));
2097
- }
2098
- }
2099
-
2100
- if (batchErrors > 0) {
2101
- console.error(
2102
- `❌ ${batchErrors} batch(es) failed for ${pedimento.filename}`,
2103
- );
2104
- writeLog(
2105
- `❌ ${batchErrors} batch(es) failed for ${pedimento.filename}`,
2106
- );
2107
- totalErrors++;
2108
- } else {
2109
- console.log(` 🎯 Successfully updated ${batchUpdated} related files`);
2110
- writeLog(
2111
- `✅ Successfully updated ${batchUpdated} related files for ${pedimento.filename} -> ${folderArelaPath}`,
2112
- );
2113
- totalUpdated += batchUpdated;
2114
- }
2115
- } catch (error) {
2116
- console.error(
2117
- `❌ Error processing ${pedimento.filename}:`,
2118
- error.message,
2119
- );
2120
- writeLog(`❌ Error processing ${pedimento.filename}: ${error.message}`);
2121
- totalErrors++;
2122
- }
2123
-
2124
- if (options.showProgress !== false) {
2125
- progressBar.update(totalProcessed, {
2126
- updated: totalUpdated,
2127
- errors: totalErrors,
2128
- });
2129
- }
2130
- }
2131
-
2132
- if (options.showProgress !== false) {
2133
- progressBar.stop();
2134
- }
2135
-
2136
- console.log(`\n${'='.repeat(60)}`);
2137
- console.log(`🎯 ARELA PATH PROPAGATION COMPLETED`);
2138
- console.log(`${'='.repeat(60)}`);
2139
- console.log(` 📋 Pedimento records processed: ${totalProcessed}`);
2140
- console.log(` ✅ Related files updated: ${totalUpdated}`);
2141
- console.log(` ❌ Errors: ${totalErrors}`);
2142
- console.log(`${'='.repeat(60)}\n`);
2143
-
2144
- // Write comprehensive log summary
2145
- writeLog(
2146
- `🎯 ARELA PATH PROPAGATION COMPLETED - Summary: Processed: ${totalProcessed} pedimento records, Updated: ${totalUpdated} related files, Errors: ${totalErrors}`,
2147
- );
2148
-
2149
- // Ensure logs are flushed
2150
- flushLogBuffer();
2151
-
2152
- return {
2153
- processedCount: totalProcessed,
2154
- updatedCount: totalUpdated,
2155
- errorCount: totalErrors,
2156
- };
2157
- };
2158
-
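The folder portion of the pedimento's arela_path drives the propagation above; the nested ternary that strips the filename can be read as the small helper below (folderFromArelaPath is a hypothetical name used only for illustration).

// Sketch only: derive the folder portion of an arela_path by dropping the filename,
// keeping a trailing slash, equivalent to the ternary used in propagateArelaPath above.
const folderFromArelaPath = (arelaPath) => {
  if (arelaPath.endsWith('/')) return arelaPath;
  const lastSlash = arelaPath.lastIndexOf('/');
  return lastSlash === -1 ? `${arelaPath}/` : `${arelaPath.slice(0, lastSlash)}/`;
};

// e.g. '2024/4023260/pedimento.pdf' -> '2024/4023260/'
//      '2024/4023260/'              -> '2024/4023260/'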
2159
- /**
2160
- * Helper function to query files that need to be uploaded
2161
- * These are files that have been detected but not yet uploaded
2162
- * Uses the same RFC filtering logic as uploadFilesByRfc for consistency
2163
- */
2164
- const getFilesReadyForUpload = async (options = {}) => {
2165
- if (!supabase) {
2166
- throw new Error('Supabase client not initialized');
2167
- }
2168
-
2169
- console.log('🔍 Querying files ready for upload...');
2170
-
2171
- // Check if UPLOAD_RFCS is configured
2172
- if (!uploadRfcs || uploadRfcs.length === 0) {
2173
- console.log(
2174
- 'ℹ️ No UPLOAD_RFCS configured. Please set UPLOAD_RFCS environment variable to see files ready for upload.',
2175
- );
2176
- console.log(
2177
- ' Example: UPLOAD_RFCS="RFC123456789|RFC987654321|RFC555444333"',
2178
- );
2179
- return [];
2180
- }
2181
-
2182
- console.log(`🎯 Using RFC filter: ${uploadRfcs.join(', ')}`);
2183
-
2184
- // Step 1: Find pedimento_simplificado documents for the specified RFCs that have arela_path
2185
- console.log(
2186
- '🎯 Finding pedimento_simplificado documents for specified RFCs with arela_path...',
2187
- );
2188
- const { data: pedimentoRecords, error: pedimentoError } = await supabase
2189
- .from('uploader')
2190
- .select('arela_path')
2191
- .eq('document_type', 'pedimento_simplificado')
2192
- .in('rfc', uploadRfcs)
2193
- .not('arela_path', 'is', null);
2194
-
2195
- if (pedimentoError) {
2196
- throw new Error(
2197
- `Error querying pedimento_simplificado records: ${pedimentoError.message}`,
2198
- );
2199
- }
2200
-
2201
- if (!pedimentoRecords || pedimentoRecords.length === 0) {
2202
- console.log('ℹ️ No pedimento_simplificado records with arela_path found');
2203
- return [];
2204
- }
2205
-
2206
- // Get unique arela_paths
2207
- const uniqueArelaPaths = [
2208
- ...new Set(pedimentoRecords.map((r) => r.arela_path)),
2209
- ];
2210
- console.log(
2211
- `📋 Found ${pedimentoRecords.length} pedimento records with ${uniqueArelaPaths.length} unique arela_paths`,
2212
- );
2213
-
2214
- // Step 2: Find all related files with these arela_paths that haven't been uploaded yet
2215
- console.log('🔍 Finding all related files that need to be uploaded...');
2216
-
2217
- // Process arela_paths in chunks to avoid URI length limits
2218
- let allReadyFiles = [];
2219
- const chunkSize = 50;
2220
-
2221
- for (let i = 0; i < uniqueArelaPaths.length; i += chunkSize) {
2222
- const pathChunk = uniqueArelaPaths.slice(i, i + chunkSize);
2223
-
2224
- const { data: chunkFiles, error: chunkError } = await supabase
2225
- .from('uploader')
2226
- .select(
2227
- 'id, original_path, arela_path, filename, rfc, document_type, status',
2228
- )
2229
- .in('arela_path', pathChunk)
2230
- .neq('status', 'file-uploaded')
2231
- .not('original_path', 'is', null);
2232
-
2233
- if (chunkError) {
2234
- throw new Error(
2235
- `Error querying files for arela_paths chunk: ${chunkError.message}`,
2236
- );
2237
- }
2238
-
2239
- if (chunkFiles && chunkFiles.length > 0) {
2240
- allReadyFiles = allReadyFiles.concat(chunkFiles);
2241
- }
2242
- }
2243
-
2244
- const readyFiles = allReadyFiles;
2245
-
2246
- console.log(`📋 Found ${readyFiles?.length || 0} files ready for upload`);
2247
-
2248
- if (readyFiles && readyFiles.length > 0) {
2249
- // Group by document type for summary
2250
- const byDocType = readyFiles.reduce((acc, file) => {
2251
- const docType = file.document_type || 'Unknown';
2252
- acc[docType] = (acc[docType] || 0) + 1;
2253
- return acc;
2254
- }, {});
2255
-
2256
- console.log('📊 Files by document type:');
2257
- for (const [docType, count] of Object.entries(byDocType)) {
2258
- console.log(` ${docType}: ${count} files`);
2259
- }
2260
-
2261
- // Group by RFC
2262
- const byRfc = readyFiles.reduce((acc, file) => {
2263
- const rfc = file.rfc || 'No RFC';
2264
- acc[rfc] = (acc[rfc] || 0) + 1;
2265
- return acc;
2266
- }, {});
2267
-
2268
- console.log('📊 Files by RFC:');
2269
- for (const [rfc, count] of Object.entries(byRfc)) {
2270
- console.log(` ${rfc}: ${count} files`);
2271
- }
2272
- }
2273
-
2274
- return readyFiles || [];
2275
- };
2276
-
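Both uploadFilesByRfc and getFilesReadyForUpload avoid PostgREST URI-length limits by splitting long .in() filters into chunks of 50 and, where needed, paginating each chunk with .range(). A sketch of that query pattern as a reusable helper, assuming an initialized supabase client; fetchPendingByArelaPaths is a hypothetical name.

// Sketch only: chunked .in() filter plus .range() pagination over the uploader table,
// matching the query pattern used in the functions above.
const fetchPendingByArelaPaths = async (
  supabase,
  arelaPaths,
  columns,
  chunkSize = 50,
  pageSize = 1000,
) => {
  const rows = [];
  for (let i = 0; i < arelaPaths.length; i += chunkSize) {
    const chunk = arelaPaths.slice(i, i + chunkSize);
    let offset = 0;
    let hasMore = true;
    while (hasMore) {
      const { data, error } = await supabase
        .from('uploader')
        .select(columns)
        .in('arela_path', chunk)
        .neq('status', 'file-uploaded')
        .not('original_path', 'is', null)
        .range(offset, offset + pageSize - 1);
      if (error) throw new Error(error.message);
      if (data && data.length > 0) rows.push(...data);
      hasMore = Boolean(data) && data.length === pageSize;
      offset += pageSize;
    }
  }
  return rows;
};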
2277
- program
2278
- .name('arela-uploader')
2279
- .description(
2280
- 'CLI to upload folders to Arela API or Supabase Storage with automatic processing\n\n' +
2281
- 'Status workflow:\n' +
2282
- ' fs-stats → detected → file-uploaded\n' +
2283
- ' ├─ Phase 1: --stats-only (collects filesystem stats, status: fs-stats)\n' +
2284
- ' ├─ Phase 2: --detect-pdfs (detects document types, status: detected)\n' +
2285
- ' ├─ Phase 3: --propagate-arela-path (organizes files by pedimento)\n' +
2286
- ' └─ Phase 4: --upload-by-rfc (uploads files, status: file-uploaded)\n\n' +
2287
- 'Use --query-ready-files to see files ready for upload (status: detected with arela_path)',
2288
- )
2289
- .option('-v, --version', 'output the version number')
2290
- .option('-p, --prefix <prefix>', 'Prefix path in bucket', '')
2291
- .option('-b, --bucket <bucket>', 'Bucket name override')
2292
- .option('--force-supabase', 'Force direct Supabase upload (skip API)')
2293
- .option(
2294
- '--no-auto-detect',
2295
- 'Disable automatic file detection (API mode only)',
2296
- )
2297
- .option(
2298
- '--no-auto-organize',
2299
- 'Disable automatic file organization (API mode only)',
2300
- )
2301
- .option(
2302
- '-c, --concurrency <number>',
2303
- 'Files per batch for processing (default: 10)',
2304
- '10',
2305
- )
2306
- .option('--batch-size <number>', 'API batch size (default: 10)', '10')
2307
- .option('--show-stats', 'Show detailed processing statistics')
2308
- .option(
2309
- '--folder-structure <structure>',
2310
- 'Custom folder structure (e.g., "2024/4023260" or "cliente1/pedimentos")',
2311
- )
2312
- .option(
2313
- '--auto-detect-structure',
2314
- 'Automatically detect year/pedimento from file paths',
2315
- )
2316
- .option('--client-path <path>', 'Client path for metadata tracking')
2317
- .option(
2318
- '--stats-only',
2319
- 'Phase 1: Only read filesystem stats and insert to database (no file reading or detection)',
2320
- )
2321
- .option('--no-detect', 'Disable document type detection in stats-only mode')
2322
- .option(
2323
- '--detect-pdfs',
2324
- 'Phase 2: Process PDF files in database for pedimento-simplificado detection',
2325
- )
2326
- .option(
2327
- '--propagate-arela-path',
2328
- 'Phase 3: Propagate arela_path from pedimento_simplificado records to related files with same base path',
2329
- )
2330
- .option(
2331
- '--upload-by-rfc',
2332
- 'Phase 4: Upload files to Arela API based on RFC values from UPLOAD_RFCS environment variable',
2333
- )
2334
- .option(
2335
- '--run-all-phases',
2336
- 'Run all 4 phases in sequence: stats → detect → propagate → upload',
2337
- )
2338
- .option(
2339
- '--query-ready-files',
2340
- 'Query and display files that are ready for upload (have been detected but not uploaded)',
2341
- )
2342
- .action(async (options) => {
2343
- if (options.version) {
2344
- console.log(packageVersion);
2345
- process.exit(0);
2346
- }
2347
-
2348
- // Handle detect-pdfs option (Phase 2)
2349
- if (options.detectPdfs) {
2350
- console.log('🔍 Starting Phase 2: PDF Detection');
2351
- await checkCredentials(true); // Force Supabase mode
2352
-
2353
- const result = await detectPedimentosInDatabase({
2354
- batchSize: parseInt(options.batchSize) || 10,
2355
- });
2356
-
2357
- console.log(
2358
- `✅ Phase 2 Complete: ${result.detectedCount} detected, ${result.errorCount} errors`,
2359
- );
2360
- return;
2361
- }
2362
-
2363
- // Handle query-ready-files option
2364
- if (options.queryReadyFiles) {
2365
- await checkCredentials(true); // Force Supabase mode
2366
-
2367
- const readyFiles = await getFilesReadyForUpload();
2368
-
2369
- if (readyFiles.length === 0) {
2370
- console.log('ℹ️ No files are currently ready for upload');
2371
- console.log(
2372
- ' Tip: Run --detect-pdfs and --propagate-arela-path first to prepare files for upload',
2373
- );
2374
- } else {
2375
- console.log(`\n📋 ${readyFiles.length} files are ready for upload!`);
2376
- console.log(' Use --upload-by-rfc to upload them to Arela API');
2377
- }
2378
-
2379
- return;
2380
- }
2381
-
2382
- // Handle run-all-phases option
2383
- if (options.runAllPhases) {
2384
- console.log('🚀 Starting all 4 phases in sequence...');
2385
- await checkCredentials(true); // Force Supabase mode
2386
-
2387
- // Phase 1: Stats collection
2388
- console.log('\n📊 === PHASE 1: Filesystem Stats ===');
2389
- options.statsOnly = true;
2390
- // Continue with normal processing to run Phase 1
2391
-
2392
- // The rest will be handled after Phase 1 completes
2393
- }
2394
-
2395
- // Handle propagate-arela-path option
2396
- if (options.propagateArelaPath) {
2397
- // Initialize Supabase credentials for propagation
2398
- await checkCredentials(true); // Force Supabase mode
2399
-
2400
- const result = await propagateArelaPath({
2401
- showProgress: options.showStats || true,
2402
- });
2403
-
2404
- if (result.errorCount > 0) {
2405
- process.exit(1);
2406
- }
2407
- return;
2408
- }
2409
-
2410
- // Handle upload-by-rfc option
2411
- if (options.uploadByRfc) {
2412
- // RFC upload needs both Supabase (for database queries) and API (for uploads)
2413
- await checkCredentials(false); // Initialize API mode
2414
-
2415
- // Also initialize Supabase for database queries
2416
- if (!supabase) {
2417
- if (!supabaseUrl || !supabaseKey) {
2418
- console.error(
2419
- '❌ RFC upload requires Supabase credentials for database queries.',
2420
- );
2421
- console.error(
2422
- ' Please set SUPABASE_URL and SUPABASE_KEY environment variables.',
2423
- );
2424
- process.exit(1);
2425
- }
2426
-
2427
- supabase = createClient(supabaseUrl, supabaseKey);
2428
- console.log('✅ Connected to Supabase for database queries');
2429
- }
2430
-
2431
- const result = await uploadFilesByRfc({
2432
- showProgress: options.showStats || true,
2433
- batchSize: parseInt(options.batchSize) || 10,
2434
- folderStructure: options.folderStructure,
2435
- });
2436
-
2437
- if (result.errorCount > 0) {
2438
- process.exit(1);
2439
- }
2440
- return;
2441
- }
2442
-
2443
- // Initialize credentials with force supabase flag (for stats mode, always need Supabase)
2444
- await checkCredentials(options.forceSupabase || options.statsOnly);
2445
-
2446
- if (!basePath || !sources || sources.length === 0) {
2447
- console.error(
2448
- '⚠️ UPLOAD_BASE_PATH or UPLOAD_SOURCES not defined in environment variables.',
2449
- );
2450
- process.exit(1);
2451
- }
2452
-
2453
- const batchSize = parseInt(options.batchSize) || 10;
2454
- const concurrency = parseInt(options.concurrency) || 10;
2455
-
2456
- if (options.statsOnly) {
2457
- console.log(
2458
- '📊 Mode: Stats Only - Reading file stats and inserting to uploader table',
2459
- );
2460
- console.log('🚫 Files will NOT be uploaded');
2461
- if (options.detect !== false) {
2462
- console.log('🔍 Document type detection ENABLED for supported files');
2463
- } else {
2464
- console.log('🔍 Document type detection DISABLED');
2465
- }
2466
- } else {
2467
- console.log(
2468
- `🚀 Mode: ${apiMode ? 'Arela API with auto-processing' : 'Direct Supabase'}`,
2469
- );
2470
- }
2471
- console.log(`📦 Batch size: ${batchSize}`);
2472
- console.log(`⚡ Concurrency: ${concurrency}`);
2473
-
2474
- const processedPaths = getProcessedPaths();
2475
- let globalSuccess = 0;
2476
- let globalDetected = 0;
2477
- let globalOrganized = 0;
2478
- let globalFailure = 0;
2479
- let globalSkipped = 0;
2480
-
2481
- for (const folder of sources) {
2482
- const sourcePath = path.resolve(basePath, folder).replace(/\\/g, '/');
2483
- console.log(`📂 Processing folder: ${sourcePath}`);
2484
-
2485
- try {
2486
- const stats = fs.statSync(sourcePath);
2487
- const files = stats.isDirectory()
2488
- ? await globby([`${sourcePath}/**/*`], { onlyFiles: true })
2489
- : [sourcePath];
2490
-
2491
- console.log(`📊 Found ${files.length} files to process`);
2492
-
2493
- const result = await processFilesInBatches(
2494
- files,
2495
- batchSize,
2496
- options,
2497
- basePath,
2498
- folder,
2499
- sourcePath,
2500
- processedPaths,
2501
- );
2502
-
2503
- globalSuccess += result.successCount;
2504
- globalDetected += result.detectedCount || 0;
2505
- globalOrganized += result.organizedCount || 0;
2506
- globalFailure += result.failureCount;
2507
- globalSkipped += result.skippedCount;
2508
-
2509
- console.log(`\n📦 Summary for ${folder}:`);
2510
- if (options.statsOnly) {
2511
- console.log(` 📊 Stats recorded: ${result.successCount}`);
2512
- } else {
2513
- console.log(` ✅ Uploaded: ${result.successCount}`);
2514
- if (apiMode) {
2515
- console.log(` 🔍 Detected: ${result.detectedCount || 0}`);
2516
- console.log(` 📁 Organized: ${result.organizedCount || 0}`);
2517
- }
2518
- }
2519
- console.log(` ❌ Errors: ${result.failureCount}`);
2520
- if (options.statsOnly) {
2521
- console.log(` ⏭️ Duplicates: ${result.skippedCount}`);
2522
- } else {
2523
- console.log(` ⏭️ Skipped: ${result.skippedCount}`);
2524
- }
2525
-
2526
- writeLog(
2527
- `📦 Summary for ${folder}: Success: ${result.successCount}, Detected: ${result.detectedCount || 0}, Organized: ${result.organizedCount || 0}, Errors: ${result.failureCount}, ${options.statsOnly ? 'Duplicates' : 'Skipped'}: ${result.skippedCount}`,
2528
- );
2529
- } catch (err) {
2530
- console.error(`⚠️ Error processing folder ${folder}:`, err.message);
2531
- writeLog(`⚠️ Error processing folder ${folder}: ${err.message}`);
2532
- globalFailure++;
2533
- }
2534
- }
2535
-
2536
- console.log(`\n${'='.repeat(60)}`);
2537
- if (options.statsOnly) {
2538
- console.log(`📊 STATS COLLECTION COMPLETED`);
2539
- console.log(`${'='.repeat(60)}`);
2540
- console.log(` 📊 Total stats recorded: ${globalSuccess}`);
2541
- } else {
2542
- console.log(`🎯 ${apiMode ? 'ARELA API' : 'SUPABASE'} UPLOAD COMPLETED`);
2543
- console.log(`${'='.repeat(60)}`);
2544
- console.log(` ✅ Total uploaded: ${globalSuccess}`);
2545
- if (apiMode) {
2546
- console.log(` 🔍 Total detected: ${globalDetected}`);
2547
- console.log(` 📁 Total organized: ${globalOrganized}`);
2548
- }
2549
- }
2550
- if (options.statsOnly) {
2551
- console.log(` ⏭️ Total duplicates: ${globalSkipped}`);
2552
- } else {
2553
- console.log(` ⏭️ Total skipped: ${globalSkipped}`);
2554
- }
2555
- console.log(` ❌ Total errors: ${globalFailure}`);
2556
- console.log(` 📜 Log file: ${logFilePath}`);
2557
- console.log(`${'='.repeat(60)}\n`);
2558
-
2559
- // Continue with remaining phases if running all phases
2560
- if (options.runAllPhases && options.statsOnly) {
2561
- try {
2562
- // Phase 2: PDF Detection
2563
- console.log('\n🔍 === PHASE 2: PDF Detection ===');
2564
- const detectionResult = await detectPedimentosInDatabase({
2565
- batchSize: parseInt(options.batchSize) || 10,
2566
- });
2567
- console.log(
2568
- `✅ Phase 2 Complete: ${detectionResult.detectedCount} detected, ${detectionResult.errorCount} errors`,
2569
- );
2570
-
2571
- // Phase 3: Propagate arela_path
2572
- console.log('\n📁 === PHASE 3: Propagate Arela Paths ===');
2573
- const propagateResult = await propagateArelaPath({
2574
- showProgress: options.showStats || true,
2575
- });
2576
- console.log(
2577
- `✅ Phase 3 Complete: ${propagateResult.updatedCount || 0} paths propagated`,
2578
- );
2579
-
2580
- // Phase 4: Upload by RFC
2581
- if (uploadRfcs && uploadRfcs.length > 0) {
2582
- console.log('\n🚀 === PHASE 4: Upload by RFC ===');
2583
-
2584
- // Initialize API mode for uploads
2585
- await checkCredentials(false);
2586
-
2587
- const uploadResult = await uploadFilesByRfc({
2588
- showProgress: options.showStats || true,
2589
- batchSize: parseInt(options.batchSize) || 10,
2590
- folderStructure: options.folderStructure,
2591
- });
2592
- console.log(`✅ Phase 4 Complete: Upload finished`);
2593
- } else {
2594
- console.log('\n⚠️ === PHASE 4: Upload by RFC ===');
2595
- console.log(
2596
- '⚠️ UPLOAD_RFCS environment variable not configured, skipping Phase 4',
2597
- );
2598
- }
2599
-
2600
- console.log('\n🎉 All 4 phases completed successfully!');
2601
- } catch (error) {
2602
- console.error(`❌ Error in multi-phase execution:`, error.message);
2603
- process.exit(1);
2604
- }
2605
- }
2606
-
2607
- if (
2608
- options.showStats &&
2609
- (sanitizationCache.size > 0 || pathDetectionCache.size > 0)
2610
- ) {
2611
- console.log(`📊 Performance Statistics:`);
2612
- if (sanitizationCache.size > 0) {
2613
- console.log(
2614
- ` 🗂️ Sanitization cache entries: ${sanitizationCache.size}`,
2615
- );
2616
- }
2617
- if (pathDetectionCache.size > 0) {
2618
- console.log(
2619
- ` 📁 Path detection cache entries: ${pathDetectionCache.size}`,
2620
- );
2621
- }
2622
- }
2623
-
2624
- // OPTIMIZED: Ensure log buffer is flushed before exit
2625
- flushLogBuffer();
2626
- });
2627
-
2628
- program.parse();
280
+ // Create and run the CLI application
281
+ const cli = new ArelaUploaderCLI();
282
+ await cli.run();
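The 0.2.5 entry point shown here delegates to an ArelaUploaderCLI class whose definition is not part of this hunk. A minimal sketch of the shape these three added lines imply follows; everything inside the class beyond its name and run() is an assumption about the refactor, not the package's actual code.

import { Command } from 'commander';

// Sketch only: a class satisfying `new ArelaUploaderCLI()` followed by `await cli.run()`.
// The real 0.2.5 implementation lives elsewhere in the package sources.
class ArelaUploaderCLI {
  constructor() {
    this.program = new Command();
  }

  async run() {
    // Option and command registration would happen here before parsing argv.
    await this.program.parseAsync(process.argv);
  }
}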