@arela/uploader 1.0.7 → 1.0.9

This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/.env.template CHANGED
@@ -9,6 +9,15 @@
  ARELA_API_URL=https://your-arela-api-url.com
  ARELA_API_TOKEN=your-api-token-here
 
+ # API Agencia - Configure the URL and token of the active agencia here
+ ARELA_API_AGENCIA_URL=https://agencia-api-example.com
+ ARELA_API_AGENCIA_TOKEN=your-agencia-api-token-here
+
+ # API Cliente - Configure the URL and token of the active cliente here
+ ARELA_API_CLIENTE_URL=https://cliente-api-example.com
+ ARELA_API_CLIENTE_TOKEN=your-cliente-api-token-here
+
+
  # Supabase Configuration (fallback)
  SUPABASE_URL=https://your-supabase-url.supabase.co
  SUPABASE_KEY=your-supabase-key-here
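
The two new pairs mirror the existing ARELA_API_URL / ARELA_API_TOKEN pair, one per tenant API. As a rough, hypothetical sketch of how a target-keyed lookup over these variables could work (this is not the package's actual config.js; the helper name getApiConfig and the prefix map are assumptions inferred from the appConfig.getApiConfig(target) calls that appear later in this diff):

    // Hypothetical helper, not the package's real config.js.
    const API_ENV_PREFIXES = {
      default: 'ARELA_API',
      agencia: 'ARELA_API_AGENCIA',
      cliente: 'ARELA_API_CLIENTE',
    };

    export function getApiConfig(target = 'default') {
      const prefix = API_ENV_PREFIXES[target];
      if (!prefix) {
        throw new Error(`Unknown API target: ${target}`);
      }
      // e.g. target 'agencia' reads ARELA_API_AGENCIA_URL / ARELA_API_AGENCIA_TOKEN
      return {
        baseUrl: process.env[`${prefix}_URL`],
        token: process.env[`${prefix}_TOKEN`],
      };
    }
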
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@arela/uploader",
- "version": "1.0.7",
+ "version": "1.0.9",
  "description": "CLI to upload files/directories to Arela",
  "bin": {
  "arela": "./src/index.js"
@@ -42,23 +42,20 @@ export class IdentifyCommand {
  // Validate scan configuration (need same config as scan command)
  appConfig.validateScanConfig();
 
- // Import ScanApiService dynamically
+ // Determine API target
+ const apiTarget = options.api || 'default';
+
+ // Import ScanApiService dynamically and initialize with target
  const { default: ScanApiService } = await import(
  '../services/ScanApiService.js'
  );
- this.scanApiService = new ScanApiService();
-
- // Set API target if specified
- if (options.api) {
- appConfig.setApiTarget(options.api);
- this.scanApiService = new ScanApiService();
- }
+ this.scanApiService = new ScanApiService(apiTarget);
 
  const scanConfig = appConfig.getScanConfig();
  const batchSize = parseInt(options.batchSize) || 100;
 
  logger.info('🔍 Starting arela identify command');
- logger.info(`🎯 API Target: ${options.api || 'default'}`);
+ logger.info(`🎯 API Target: ${apiTarget}`);
  logger.info(`📦 Batch Size: ${batchSize}`);
 
  // Fetch all tables for this instance
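
The old flow mutated shared state via appConfig.setApiTarget and then rebuilt the service; the new flow passes the target straight into the constructor, so services aimed at different tenants can coexist in one run. A minimal sketch assuming only what this diff shows (the ScanApiService default export and the 'agencia'/'cliente' target names); the surrounding module context is invented:

    const { default: ScanApiService } = await import(
      '../services/ScanApiService.js'
    );

    // Each instance resolves its own baseUrl/token at construction time,
    // so nothing global changes and both can be used in the same run.
    const agenciaApi = new ScanApiService('agencia');
    const clienteApi = new ScanApiService('cliente');
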
@@ -273,7 +270,7 @@ export class IdentifyCommand {
  arelaPath: null,
  detectionError:
  'FILE_NOT_FOUND: File does not exist on filesystem. May have been moved or deleted after scan.',
- isPedimento: null, // Unknown - can't determine
+ isPedimento: null, // Unknown - can't determine
  };
  }
 
@@ -289,7 +286,7 @@ export class IdentifyCommand {
  rfc: null,
  arelaPath: null,
  detectionError: `FILE_TOO_LARGE: File size ${(stats.size / 1024 / 1024).toFixed(2)}MB exceeds ${maxSizeBytes / 1024 / 1024}MB limit.`,
- isPedimento: null, // Unknown - can't determine
+ isPedimento: null, // Unknown - can't determine
  };
  }
 
@@ -306,13 +303,16 @@ export class IdentifyCommand {
  rfc: result.rfc,
  arelaPath: result.arelaPath,
  detectionError: result.error,
- isPedimento: true, // Confirmed pedimento
+ isPedimento: true, // Confirmed pedimento
  };
  }
 
  // If no detection, determine if it's definitely not a pedimento
  // This helps avoid re-processing files we know aren't pedimentos
- const isDefinitelyNotPedimento = this.#isDefinitelyNotPedimento(result, file);
+ const isDefinitelyNotPedimento = this.#isDefinitelyNotPedimento(
+ result,
+ file,
+ );
 
  // Build descriptive error message
  let detectionError = null;
@@ -340,7 +340,7 @@ export class IdentifyCommand {
  rfc: result.rfc,
  arelaPath: result.arelaPath,
  detectionError,
- isPedimento: isDefinitelyNotPedimento ? false : null, // false = not pedimento, null = unknown
+ isPedimento: isDefinitelyNotPedimento ? false : null, // false = not pedimento, null = unknown
  };
  } catch (error) {
  logger.warn(
@@ -367,7 +367,7 @@ export class IdentifyCommand {
  rfc: null,
  arelaPath: null,
  detectionError: `${errorCategory}: ${error.message}`,
- isPedimento: null, // Unknown - error occurred
+ isPedimento: null, // Unknown - error occurred
  };
  }
  }),
@@ -39,8 +39,10 @@ export class PropagateCommand {
  // Step 1: Validate configuration
  await this.#validateConfiguration();
 
- // Step 2: Initialize API service
- this.scanApiService = new ScanApiService();
+ // Step 2: Initialize API service with configured target
+ const apiTarget = this.options.api || 'default';
+ this.scanApiService = new ScanApiService(apiTarget);
+ console.log(`🎯 API Target: ${apiTarget}`);
 
  // Step 3: Fetch all tables for this instance
  const scanConfig = appConfig.getScanConfig();
@@ -15,7 +15,8 @@ import appConfig from '../config/config.js';
  */
  export class PushCommand {
  constructor() {
- this.scanApiService = new ScanApiService();
+ // ScanApiService will be initialized in execute() with proper API target
+ this.scanApiService = null;
  }
 
  /**
@@ -37,7 +38,6 @@ export class PushCommand {
  // Get configuration
  const scanConfig = appConfig.getScanConfig();
  const pushConfig = appConfig.getPushConfig();
- const tableName = scanConfig.tableName;
 
  // Override folderStructure from command option if provided
  if (options.folderStructure) {
@@ -48,26 +48,40 @@ export class PushCommand {
  .replace(/\/+$/, '');
  }
 
- // Set API target for scan/push operations
- const scanApiTarget = options.api || options.scanApi || 'default';
- const pushApiTarget = options.pushApi || scanApiTarget;
+ // Determine API targets for scan (read) and push (upload) operations
+ // Priority: explicit scan-api/push-api > source-api/target-api > api > default
+ const scanApiTarget =
+ options.scanApi || options.sourceApi || options.api || 'default';
+ const pushApiTarget =
+ options.pushApi || options.targetApi || options.api || scanApiTarget;
 
- if (scanApiTarget !== 'default') {
- appConfig.setApiTarget(scanApiTarget);
- this.scanApiService = new ScanApiService(); // Reinitialize with new target
- }
+ // Initialize ScanApiService with the scan API target
+ this.scanApiService = new ScanApiService(scanApiTarget);
 
- // Get upload API configuration
+ // Get upload API configuration for the push target
  const uploadApiConfig = appConfig.getApiConfig(pushApiTarget);
 
- console.log(`🎯 Scan API Target: ${scanApiTarget}`);
- console.log(
- `🎯 Upload API Target: ${pushApiTarget} → ${uploadApiConfig.baseUrl}`,
- );
+ // Display API configuration
+ const isCrossTenant = scanApiTarget !== pushApiTarget;
+ if (isCrossTenant) {
+ console.log('🔗 Cross-tenant mode enabled:');
+ console.log(
+ ` 📖 Scan API (read): ${scanApiTarget} → ${appConfig.getApiConfig(scanApiTarget).baseUrl}`,
+ );
+ console.log(
+ ` 📝 Push API (upload): ${pushApiTarget} → ${uploadApiConfig.baseUrl}`,
+ );
+ } else {
+ console.log(
+ `🎯 API Target: ${scanApiTarget} → ${uploadApiConfig.baseUrl}`,
+ );
+ }
  console.log(`📦 Fetch Batch Size: ${options.batchSize}`);
  console.log(`📤 Upload Batch Size: ${options.uploadBatchSize}`);
  if (pushConfig.folderStructure) {
- console.log(`📁 Folder Structure Prefix: ${pushConfig.folderStructure}`);
+ console.log(
+ `📁 Folder Structure Prefix: ${pushConfig.folderStructure}`,
+ );
  }
 
  // Apply filters
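
Distilled from the hunk above, the target resolution is a plain fallback chain. Only option names defined in this diff are used; the wrapper function itself is illustrative:

    function resolveApiTargets(options) {
      // Priority: explicit scan-api/push-api > source-api/target-api > api > default
      const scanApiTarget =
        options.scanApi || options.sourceApi || options.api || 'default';
      const pushApiTarget =
        options.pushApi || options.targetApi || options.api || scanApiTarget;
      return {
        scanApiTarget,
        pushApiTarget,
        isCrossTenant: scanApiTarget !== pushApiTarget,
      };
    }

    // resolveApiTargets({ sourceApi: 'agencia', targetApi: 'cliente' })
    //   → { scanApiTarget: 'agencia', pushApiTarget: 'cliente', isCrossTenant: true }
    // resolveApiTargets({ api: 'agencia' })
    //   → { scanApiTarget: 'agencia', pushApiTarget: 'agencia', isCrossTenant: false }
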
@@ -294,7 +308,6 @@ export class PushCommand {
  );
 
  // Update counters from API response
- // Note: The CLI endpoint now handles updating the scan table directly
  batchResults.forEach((result) => {
  results.processed++;
  if (result.uploaded) {
@@ -304,6 +317,17 @@ export class PushCommand {
  }
  });
 
+ // Update scan table via scanApi (supports cross-tenant mode)
+ // This is called on scanApi, not pushApi, because scan tables live on scanApi
+ try {
+ await this.scanApiService.batchUpdateUpload(tableName, batchResults);
+ } catch (updateError) {
+ logger.error(
+ `Failed to update scan table for batch: ${updateError.message}`,
+ );
+ // Don't fail the entire process, just log the error
+ }
+
  // Update progress bar
  const elapsed = (Date.now() - results.startTime) / 1000;
  const speed =
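
The important detail in this hunk is where the bookkeeping happens in cross-tenant mode: files go out through the push-target API, but the scan table that tracks upload state lives on the scan-target API and is updated there. A condensed sketch of that split; the wrapper function and the uploadBatchViaCli parameter (standing in for the private #uploadBatchViaCli helper) are illustrative, while batchUpdateUpload is the method this diff calls:

    async function pushOneBatch(
      scanApiService,      // ScanApiService bound to the scan (read) target
      uploadBatchViaCli,   // stand-in for the private #uploadBatchViaCli helper
      tableName,
      files,
      uploadApiConfig,     // config for the push (upload) target
      logger,
    ) {
      // 1. Upload through the push-target API
      const batchResults = await uploadBatchViaCli(tableName, files, uploadApiConfig);

      // 2. Record results where the scan table actually lives
      try {
        await scanApiService.batchUpdateUpload(tableName, batchResults);
      } catch (updateError) {
        // Bookkeeping failure should not abort the whole push run
        logger.error(`Failed to update scan table for batch: ${updateError.message}`);
      }

      return batchResults;
    }
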
@@ -329,7 +353,7 @@ export class PushCommand {
 
  /**
  * Upload a batch of files using the new CLI upload endpoint
- * The endpoint updates the CLI scan table directly
+ * Note: Scan table is updated separately via scanApi after this batch completes
  * @private
  */
  async #uploadBatchViaCli(tableName, files, uploadApiConfig) {
@@ -368,8 +392,6 @@ export class PushCommand {
  if (!fs.existsSync(file.absolute_path)) {
  result.uploadError =
  'FILE_NOT_FOUND: File does not exist on filesystem';
- // Update the scan table with the error
- await this.scanApiService.batchUpdateUpload(tableName, [result]);
  return result;
  }
 
@@ -377,7 +399,6 @@ export class PushCommand {
  const stats = fs.statSync(file.absolute_path);
  if (!stats.isFile()) {
  result.uploadError = 'NOT_A_FILE: Path is not a regular file';
- await this.scanApiService.batchUpdateUpload(tableName, [result]);
  return result;
  }
 
@@ -1,5 +1,5 @@
  import cliProgress from 'cli-progress';
- import { globbyStream } from 'globby';
+ import { globby, globbyStream } from 'globby';
  import path from 'path';
  import { Transform } from 'stream';
  import { pipeline } from 'stream/promises';
@@ -26,6 +26,8 @@ export class ScanCommand {
  * Execute the scan command
  * @param {Object} options - Command options
  * @param {boolean} options.countFirst - Count files first for percentage-based progress
+ * @param {string} options.api - API target: 'default', 'agencia', or 'cliente'
+ * @param {boolean} options.stream - Use streaming file discovery (default: true, use --no-stream to disable)
  */
  async execute(options = {}) {
  const startTime = Date.now();
@@ -34,21 +36,28 @@ export class ScanCommand {
  // Validate scan configuration
  appConfig.validateScanConfig();
 
- // Import ScanApiService dynamically
+ // Determine API target
+ const apiTarget = options.api || 'default';
+
+ // Import ScanApiService dynamically and initialize with target
  const { default: ScanApiService } = await import(
  '../services/ScanApiService.js'
  );
- this.scanApiService = new ScanApiService();
+ this.scanApiService = new ScanApiService(apiTarget);
 
  const scanConfig = appConfig.getScanConfig();
  // Ensure basePath is absolute for scan operations
  const basePath = PathNormalizer.toAbsolutePath(appConfig.getBasePath());
 
  logger.info('🔍 Starting arela scan command');
+ logger.info(`🎯 API Target: ${apiTarget}`);
  logger.info(`📦 Company: ${scanConfig.companySlug}`);
  logger.info(`🖥️ Server: ${scanConfig.serverId}`);
  logger.info(`📂 Base Path: ${basePath}`);
  logger.info(`📊 Directory Level: ${scanConfig.directoryLevel}`);
+ logger.info(
+ `🔄 File Discovery: ${options.stream !== false ? 'Streaming (globbyStream)' : 'Synchronous (globby)'}`,
+ );
 
  // Step 1: Discover directories at specified level
  logger.info('\n🔍 Discovering directories...');
@@ -89,7 +98,11 @@ export class ScanCommand {
  let totalFiles = null;
  if (options.countFirst) {
  logger.info('\n🔢 Counting files...');
- totalFiles = await this.#countFiles(basePath, scanConfig);
+ totalFiles = await this.#countFiles(
+ basePath,
+ scanConfig,
+ options.stream !== false,
+ );
  logger.info(`📊 Found ${totalFiles.toLocaleString()} files to scan`);
  }
 
@@ -109,6 +122,7 @@ export class ScanCommand {
  scanConfig,
  reg.tableName,
  null, // Don't use percentage for individual directories
+ options.stream !== false, // Use streaming by default, --no-stream sets stream=false
  );
 
  // Step 4: Complete scan for this directory
@@ -276,22 +290,37 @@ export class ScanCommand {
  /**
  * Count files for percentage-based progress
  * @private
+ * @param {string} basePath - Base path to count from
+ * @param {Object} scanConfig - Scan configuration
+ * @param {boolean} useStream - Use streaming (globbyStream) or sync (globby) approach
  */
- async #countFiles(basePath, scanConfig) {
+ async #countFiles(basePath, scanConfig, useStream = true) {
  const sources = appConfig.getUploadSources();
  let totalCount = 0;
 
  for (const source of sources) {
  const sourcePath = path.resolve(basePath, source);
- const files = await globbyStream('**/*', {
- cwd: sourcePath,
- onlyFiles: true,
- absolute: true,
- });
 
- for await (const file of files) {
- if (!this.#shouldExcludeFile(file, scanConfig.excludePatterns)) {
- totalCount++;
+ if (useStream) {
+ // Streaming approach
+ const files = await globbyStream('**/*', {
+ cwd: sourcePath,
+ onlyFiles: true,
+ absolute: true,
+ });
+
+ for await (const file of files) {
+ if (!this.#shouldExcludeFile(file, scanConfig.excludePatterns)) {
+ totalCount++;
+ }
+ }
+ } else {
+ // Synchronous approach (original method)
+ const files = await globby([`${sourcePath}/**/*`], { onlyFiles: true });
+ for (const file of files) {
+ if (!this.#shouldExcludeFile(file, scanConfig.excludePatterns)) {
+ totalCount++;
+ }
  }
  }
  }
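
Both counting modes walk the same directory tree; the difference is whether paths arrive one at a time (globbyStream, flat memory use on very large trees) or as one fully materialized array (globby). A standalone sketch of the same contrast, with shouldExclude standing in for the command's private #shouldExcludeFile check:

    import { globby, globbyStream } from 'globby';

    async function countFiles(sourcePath, shouldExclude, useStream = true) {
      let total = 0;

      if (useStream) {
        // Streaming: the async iterator yields paths as they are discovered.
        for await (const file of globbyStream('**/*', {
          cwd: sourcePath,
          onlyFiles: true,
          absolute: true,
        })) {
          if (!shouldExclude(file)) total++;
        }
      } else {
        // Array-based: globby resolves the complete list before counting starts.
        const files = await globby([`${sourcePath}/**/*`], { onlyFiles: true });
        for (const file of files) {
          if (!shouldExclude(file)) total++;
        }
      }

      return total;
    }
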
@@ -302,12 +331,18 @@ export class ScanCommand {
  /**
  * Stream files from a single directory and upload stats in batches
  * @private
+ * @param {string} dirPath - Directory path to scan
+ * @param {Object} scanConfig - Scan configuration
+ * @param {string} tableName - Target table name
+ * @param {number|null} totalFiles - Total files count for progress (optional)
+ * @param {boolean} useStream - Use streaming (globbyStream) or sync (globby) approach
  */
  async #streamScanDirectory(
  dirPath,
  scanConfig,
  tableName,
  totalFiles = null,
+ useStream = true,
  ) {
  // For directory-level scanning, we scan the directory directly
  const batchSize = scanConfig.batchSize || 2000;
@@ -323,61 +358,67 @@ export class ScanCommand {
  const progressBar = this.#createProgressBar(totalFiles);
 
  try {
- // Create stream with stats option
- const fileStream = globbyStream('**/*', {
- cwd: dirPath,
- onlyFiles: true,
- absolute: true,
- stats: true, // Get file stats during discovery
- });
-
- // Process each file from stream
- for await (const entry of fileStream) {
- // globby with stats:true returns {path, stats} objects
- const filePath = typeof entry === 'string' ? entry : entry.path;
- const stats = typeof entry === 'object' ? entry.stats : null;
-
- // Check if file should be excluded
- if (this.#shouldExcludeFile(filePath, scanConfig.excludePatterns)) {
- filesSkipped++;
- continue;
- }
-
- // Get file stats (use from globby or fetch manually)
- const fileStats = stats || FileOperations.getFileStats(filePath);
- if (!fileStats) {
- logger.debug(`⚠️ Could not read stats: ${filePath}`);
- filesSkipped++;
- continue;
- }
-
- // Normalize file record
- const record = this.#normalizeFileRecord(
- filePath,
- fileStats,
- dirPath,
- scanTimestamp,
- );
+ if (useStream) {
+ // Streaming approach: use globbyStream with stats option
+ const fileStream = globbyStream('**/*', {
+ cwd: dirPath,
+ onlyFiles: true,
+ absolute: true,
+ stats: true, // Get file stats during discovery
+ });
 
- currentBatch.push(record);
- filesScanned++;
- totalSize += record.sizeBytes;
+ // Process each file from stream
+ for await (const entry of fileStream) {
+ // globby with stats:true returns {path, stats} objects
+ const filePath = typeof entry === 'string' ? entry : entry.path;
+ const stats = typeof entry === 'object' ? entry.stats : null;
+
+ const result = await this.#processFileEntry(
+ filePath,
+ stats,
+ dirPath,
+ scanConfig,
+ scanTimestamp,
+ tableName,
+ currentBatch,
+ batchSize,
+ progressBar,
+ totalFiles,
+ { filesScanned, filesInserted, filesSkipped, totalSize },
+ );
 
- // Update progress
- if (totalFiles) {
- progressBar.update(filesScanned);
- } else {
- // Show throughput instead of percentage
- const elapsed = (Date.now() - progressBar.startTime) / 1000;
- const rate = (filesScanned / elapsed).toFixed(1);
- progressBar.update(filesScanned, { rate });
+ filesScanned = result.filesScanned;
+ filesInserted = result.filesInserted;
+ filesSkipped = result.filesSkipped;
+ totalSize = result.totalSize;
+ currentBatch = result.currentBatch;
  }
+ } else {
+ // Synchronous approach: use globby (original method from UploadCommand)
+ logger.debug('Using synchronous file discovery (globby)...');
+ const files = await globby([`${dirPath}/**/*`], { onlyFiles: true });
+ logger.debug(`Found ${files.length} files to process`);
+
+ for (const filePath of files) {
+ const result = await this.#processFileEntry(
+ filePath,
+ null, // Stats will be fetched manually
+ dirPath,
+ scanConfig,
+ scanTimestamp,
+ tableName,
+ currentBatch,
+ batchSize,
+ progressBar,
+ totalFiles,
+ { filesScanned, filesInserted, filesSkipped, totalSize },
+ );
 
- // Upload batch when full
- if (currentBatch.length >= batchSize) {
- const inserted = await this.#uploadBatch(tableName, currentBatch);
- filesInserted += inserted;
- currentBatch = [];
+ filesScanned = result.filesScanned;
+ filesInserted = result.filesInserted;
+ filesSkipped = result.filesSkipped;
+ totalSize = result.totalSize;
+ currentBatch = result.currentBatch;
  }
  }
  } catch (error) {
@@ -400,6 +441,89 @@ export class ScanCommand {
  };
  }
 
+ /**
+ * Process a single file entry (used by both streaming and sync approaches)
+ * @private
+ */
+ async #processFileEntry(
+ filePath,
+ stats,
+ dirPath,
+ scanConfig,
+ scanTimestamp,
+ tableName,
+ currentBatch,
+ batchSize,
+ progressBar,
+ totalFiles,
+ counters,
+ ) {
+ let { filesScanned, filesInserted, filesSkipped, totalSize } = counters;
+
+ // Check if file should be excluded
+ if (this.#shouldExcludeFile(filePath, scanConfig.excludePatterns)) {
+ filesSkipped++;
+ return {
+ filesScanned,
+ filesInserted,
+ filesSkipped,
+ totalSize,
+ currentBatch,
+ };
+ }
+
+ // Get file stats (use from globby or fetch manually)
+ const fileStats = stats || FileOperations.getFileStats(filePath);
+ if (!fileStats) {
+ logger.debug(`⚠️ Could not read stats: ${filePath}`);
+ filesSkipped++;
+ return {
+ filesScanned,
+ filesInserted,
+ filesSkipped,
+ totalSize,
+ currentBatch,
+ };
+ }
+
+ // Normalize file record
+ const record = this.#normalizeFileRecord(
+ filePath,
+ fileStats,
+ dirPath,
+ scanTimestamp,
+ );
+
+ currentBatch.push(record);
+ filesScanned++;
+ totalSize += record.sizeBytes;
+
+ // Update progress
+ if (totalFiles) {
+ progressBar.update(filesScanned);
+ } else {
+ // Show throughput instead of percentage
+ const elapsed = (Date.now() - progressBar.startTime) / 1000;
+ const rate = (filesScanned / elapsed).toFixed(1);
+ progressBar.update(filesScanned, { rate });
+ }
+
+ // Upload batch when full
+ if (currentBatch.length >= batchSize) {
+ const inserted = await this.#uploadBatch(tableName, currentBatch);
+ filesInserted += inserted;
+ currentBatch = [];
+ }
+
+ return {
+ filesScanned,
+ filesInserted,
+ filesSkipped,
+ totalSize,
+ currentBatch,
+ };
+ }
+
  /**
  * Upload a batch of file records
  * @private
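
The new #processFileEntry helper lets the streaming and synchronous loops share a single per-file code path; because the counters are plain numbers, it takes them in an object and returns updated values that the caller reassigns. A toy illustration of that "counters in, counters out" pattern (all names here are invented, not the package's):

    // Toy version of the pattern: excluded files are counted as skipped,
    // everything else is batched, and a full batch is flushed.
    function processEntry(filePath, counters, batch, batchSize, flush) {
      let { scanned, skipped } = counters;

      if (filePath.endsWith('.tmp')) {
        skipped++;                      // excluded: count it and stop here
        return { scanned, skipped, batch };
      }

      batch.push(filePath);
      scanned++;

      if (batch.length >= batchSize) {  // batch full: flush and start a new one
        flush(batch);
        batch = [];
      }

      return { scanned, skipped, batch };
    }

    // The caller re-syncs its local state from the returned object:
    let state = { scanned: 0, skipped: 0 };
    let batch = [];
    for (const file of ['a.pdf', 'b.tmp', 'c.pdf']) {
      const result = processEntry(file, state, batch, 2, (b) => console.log('flush', b));
      state = { scanned: result.scanned, skipped: result.skipped };
      batch = result.batch;
    }
    console.log(state); // { scanned: 2, skipped: 1 }
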
@@ -34,10 +34,10 @@ class Config {
  const __dirname = path.dirname(__filename);
  const packageJsonPath = path.resolve(__dirname, '../../package.json');
  const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
- return packageJson.version || '1.0.7';
+ return packageJson.version || '1.0.9';
  } catch (error) {
  console.warn('⚠️ Could not read package.json version, using fallback');
- return '1.0.7';
+ return '1.0.9';
  }
  }
 
package/src/index.js CHANGED
@@ -185,6 +185,10 @@ class ArelaUploaderCLI {
  '--count-first',
  'Count files first for percentage-based progress (slower start)',
  )
+ .option(
+ '--no-stream',
+ 'Use synchronous file discovery instead of streaming (original approach)',
+ )
  .action(async (options) => {
  try {
  // Set API target if specified
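
The flag is a negated boolean: Commander-style parsers expose it as options.stream, defaulting to true and flipping to false when --no-stream is passed, which is why the scan code checks options.stream !== false. A self-contained illustration, assuming the commander package (which the .option()/.action() chains in this file suggest, but the diff does not show directly):

    import { Command } from 'commander';

    const program = new Command();
    program.option(
      '--no-stream',
      'Use synchronous file discovery instead of streaming',
    );

    // A '--no-<name>' option defines a boolean that defaults to true;
    // passing the flag flips it to false.
    program.parse(['--no-stream'], { from: 'user' });
    console.log(program.opts().stream); // false (true when the flag is omitted)
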
@@ -381,6 +385,14 @@ class ArelaUploaderCLI {
  '--push-api <target>',
  'API for uploading files: default|agencia|cliente',
  )
+ .option(
+ '--source-api <target>',
+ 'Source API for reading data (cross-tenant mode): agencia|cliente',
+ )
+ .option(
+ '--target-api <target>',
+ 'Target API for uploading files (cross-tenant mode): agencia|cliente',
+ )
  .option(
  '-b, --batch-size <size>',
  'Number of files to fetch per batch',
@@ -406,6 +418,18 @@ class ArelaUploaderCLI {
  .option('--show-stats', 'Show performance statistics')
  .action(async (options) => {
  try {
+ // Handle cross-tenant mode (source and target APIs)
+ // Map source-api/target-api to scan-api/push-api for consistency
+ if (options.sourceApi && options.targetApi) {
+ appConfig.setCrossTenantTargets(
+ options.sourceApi,
+ options.targetApi,
+ );
+ // Also set scan-api and push-api for PushCommand compatibility
+ options.scanApi = options.sourceApi;
+ options.pushApi = options.targetApi;
+ }
+
  // Parse comma-separated values
  if (options.rfcs) {
  options.rfcs = options.rfcs
@@ -10,8 +10,12 @@ import logger from './LoggingService.js';
  * Handles API communication for the arela scan command
  */
  export class ScanApiService {
- constructor() {
- const apiConfig = appConfig.getApiConfig();
+ /**
+ * @param {string|null} apiTarget - API target: 'default', 'agencia', 'cliente', or null (uses active target)
+ */
+ constructor(apiTarget = null) {
+ this.apiTarget = apiTarget;
+ const apiConfig = appConfig.getApiConfig(apiTarget);
  this.baseUrl = apiConfig.baseUrl;
  this.token = apiConfig.token;
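
With this change, callers that care about a specific tenant pass the target explicitly, while omitting it keeps the old behaviour of deferring to whatever appConfig treats as active. A short usage sketch; the import path follows this package's layout, and the env-variable mapping is the assumption sketched near the top of this diff:

    import { ScanApiService } from './src/services/ScanApiService.js';

    const clienteApi = new ScanApiService('cliente'); // explicit target
    const activeApi = new ScanApiService();           // null → falls back to the active/default target

    console.log(clienteApi.baseUrl, activeApi.baseUrl);
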