@arela/uploader 0.3.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +130 -5
- package/docs/API_ENDPOINTS_FOR_DETECTION.md +647 -0
- package/docs/QUICK_REFERENCE_API_DETECTION.md +264 -0
- package/docs/REFACTORING_SUMMARY_DETECT_PEDIMENTOS.md +200 -0
- package/package.json +1 -1
- package/src/commands/WatchCommand.js +47 -10
- package/src/config/config.js +157 -2
- package/src/document-types/support-document.js +4 -5
- package/src/file-detection.js +7 -0
- package/src/index.js +119 -4
- package/src/services/AutoProcessingService.js +146 -36
- package/src/services/DatabaseService.js +341 -517
- package/src/services/upload/ApiUploadService.js +426 -4
- package/src/services/upload/MultiApiUploadService.js +233 -0
- package/src/services/upload/UploadServiceFactory.js +24 -0
- package/src/utils/FileOperations.js +6 -3
- package/src/utils/WatchEventHandler.js +14 -9
- package/.envbackup +0 -37
- package/SUPABASE_UPLOAD_FIX.md +0 -157
package/src/services/upload/ApiUploadService.js
@@ -13,12 +13,15 @@ import { BaseUploadService } from './BaseUploadService.js';
 /**
  * API Upload Service
  * Handles uploads to the Arela API with automatic processing
+ * Supports multiple API targets (agencia, cliente, default)
  */
 export class ApiUploadService extends BaseUploadService {
   constructor() {
     super();
-
-    this.
+    // Flag to indicate if config is externally set (cross-tenant mode)
+    this._isExternalConfig = false;
+    // Get initial API config (can be overridden at runtime via setApiTarget)
+    this.#updateApiConfig();

     // Get API connection settings from config/environment
     const maxApiConnections = parseInt(process.env.MAX_API_CONNECTIONS) || 10;
@@ -51,6 +54,47 @@ export class ApiUploadService extends BaseUploadService {
     );
   }

+  /**
+   * Update API configuration from appConfig
+   * Called on initialization and when API target changes
+   * Skip if externally configured (cross-tenant mode)
+   * @private
+   */
+  #updateApiConfig() {
+    // Skip update if config is externally set
+    if (this._isExternalConfig) {
+      return;
+    }
+    const apiConfig = appConfig.getApiConfig();
+    this.baseUrl = apiConfig.baseUrl;
+    this.token = apiConfig.token;
+  }
+
+  /**
+   * Set external configuration (for cross-tenant mode)
+   * Prevents automatic config refresh from overwriting these values
+   * @param {string} baseUrl - API base URL
+   * @param {string} token - API token
+   */
+  setExternalConfig(baseUrl, token) {
+    this._isExternalConfig = true;
+    this.baseUrl = baseUrl;
+    this.token = token;
+  }
+
+  /**
+   * Get current API configuration (refreshes from appConfig)
+   * This ensures we always use the latest target configuration
+   * @returns {Object} Current API config with baseUrl and token
+   */
+  getApiConfig() {
+    this.#updateApiConfig();
+    return {
+      baseUrl: this.baseUrl,
+      token: this.token,
+    };
+  }
+
   /**
    * Upload files to Arela API with automatic detection and organization
    * @param {Array} files - Array of file objects
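The three methods added above split configuration into two modes: by default every call path re-reads the active target from `appConfig` through `#updateApiConfig()`, while `setExternalConfig()` pins a base URL and token and turns later refreshes into no-ops (cross-tenant mode). A minimal usage sketch of that behavior; the import path and the endpoint/token values are illustrative assumptions, only the method names come from the diff:

```js
// Sketch only: the path and values here are assumptions, not taken from the package.
import { ApiUploadService } from './src/services/upload/ApiUploadService.js';

const service = new ApiUploadService();

// Default mode: follows whatever target appConfig currently resolves.
console.log(service.getApiConfig()); // { baseUrl, token } read from appConfig

// Cross-tenant mode: pin an explicit endpoint; #updateApiConfig() now returns early.
service.setExternalConfig('https://agencia.example.test/api', 'tenant-token');
console.log(service.getApiConfig()); // stays pinned to the values set above
```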
@@ -58,6 +102,8 @@ export class ApiUploadService extends BaseUploadService {
    * @returns {Promise<Object>} API response
    */
   async upload(files, options) {
+    // Refresh config to get current API target
+    this.#updateApiConfig();
     // Validate files parameter
     if (!files || !Array.isArray(files)) {
       logger.warn(`Invalid files parameter: ${typeof files}`);
@@ -67,8 +113,9 @@ export class ApiUploadService extends BaseUploadService {
     const formData = new FormData();

     // Filter out system files (macOS, Windows, etc.)
-    const systemFilePattern =
-
+    const systemFilePattern =
+      /^\.|__pycache__|\.pyc|\.swp|\.swo|Thumbs\.db|desktop\.ini|DS_Store|\$RECYCLE\.BIN|System Volume Information|~\$|\.tmp/i;
+    const filteredFiles = files.filter((file) => {
       const fileName = file.name || path.basename(file.path);
       if (systemFilePattern.test(fileName)) {
         logger.warn(`Skipping system file from upload: ${fileName}`);
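The rewritten filter above drops OS metadata before anything reaches the form data: any name that starts with a dot or contains one of the listed markers is logged and skipped, case-insensitively. A quick sketch of how the pattern classifies a few made-up file names (the pattern itself is copied from the diff):

```js
// Same regex as in the diff; the file names below are invented for illustration.
const systemFilePattern =
  /^\.|__pycache__|\.pyc|\.swp|\.swo|Thumbs\.db|desktop\.ini|DS_Store|\$RECYCLE\.BIN|System Volume Information|~\$|\.tmp/i;

const names = ['.DS_Store', 'Thumbs.db', '~$reporte.xlsx', 'pedimento_2024.pdf'];
for (const name of names) {
  console.log(name, systemFilePattern.test(name) ? '-> skipped' : '-> uploaded');
}
// .DS_Store, Thumbs.db and ~$reporte.xlsx are skipped; pedimento_2024.pdf goes through.
```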
@@ -131,6 +178,11 @@ export class ApiUploadService extends BaseUploadService {
       formData.append('clientPath', options.clientPath);
     }

+    // Add RFC for multi-database routing (required for cross-tenant uploads)
+    if (options.rfc) {
+      formData.append('rfc', options.rfc);
+    }
+
     // Add processing options
     formData.append('autoDetect', String(options.autoDetect ?? true));
     formData.append('autoOrganize', String(options.autoOrganize ?? false));
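With `rfc` appended to the form data, a single upload can be tagged for multi-database routing on the server. A hedged sketch of a call that exercises the new option; the service instance, file shape and RFC value are placeholders, only the `rfc`, `autoDetect` and `autoOrganize` option names come from the diff:

```js
// Inside an async function; `service` is an ApiUploadService instance (placeholder setup).
const files = [{ name: 'pedimento_2024.pdf', path: '/watched/pedimento_2024.pdf' }];

const result = await service.upload(files, {
  rfc: 'XAXX010101000', // forwarded as a form field for cross-tenant routing
  autoDetect: true,     // String(options.autoDetect ?? true) -> 'true' when omitted
  autoOrganize: false,  // String(options.autoOrganize ?? false) -> 'false' when omitted
});
```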
@@ -181,6 +233,9 @@ export class ApiUploadService extends BaseUploadService {
    * @returns {Promise<boolean>} True if available
    */
   async isAvailable() {
+    // Refresh config to get current API target
+    this.#updateApiConfig();
+
     if (!this.baseUrl || !this.token) {
       return false;
     }
@@ -200,6 +255,373 @@ export class ApiUploadService extends BaseUploadService {
     }
   }

+  /**
+   * Batch upsert file stats to uploader table
+   * @param {Array} records - Array of record objects to upsert
+   * @returns {Promise<Object>} Upsert results { inserted, updated, total }
+   */
+  async batchUpsertStats(records) {
+    if (!records || !Array.isArray(records) || records.length === 0) {
+      return { inserted: 0, updated: 0, total: 0 };
+    }
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const response = await fetch(
+        `${this.baseUrl}/api/uploader/batch-upsert`,
+        {
+          method: 'POST',
+          headers: {
+            'x-api-key': this.token,
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify(records),
+          agent: isHttps ? this.httpsAgent : this.httpAgent,
+        },
+      );
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        throw new Error(
+          `Batch upsert failed: ${response.status} ${response.statusText} - ${errorText}`,
+        );
+      }
+
+      const result = await response.json();
+      return result;
+    } catch (error) {
+      logger.error(`Batch upsert API error: ${error.message}`);
+      throw error;
+    }
+  }
+
+  /**
+   * Fetch PDF records for pedimento detection
+   * @param {Object} options - Query options
+   * @param {number} options.offset - Pagination offset
+   * @param {number} options.limit - Number of records to fetch
+   * @returns {Promise<Object>} { data: Array, error: Error|null }
+   */
+  async fetchPdfRecordsForDetection(options = {}) {
+    const { offset = 0, limit = 100 } = options;
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const url = new URL(`${this.baseUrl}/api/uploader/pdf-records`);
+      url.searchParams.append('offset', offset);
+      url.searchParams.append('limit', limit);
+      url.searchParams.append('status', 'fs-stats');
+      url.searchParams.append('file_extension', 'pdf');
+      url.searchParams.append('is_like_simplificado', 'true');
+
+      const response = await fetch(url.toString(), {
+        method: 'GET',
+        headers: {
+          'x-api-key': this.token,
+          'Content-Type': 'application/json',
+        },
+        agent: isHttps ? this.httpsAgent : this.httpAgent,
+      });
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        return {
+          data: null,
+          error: new Error(
+            `Failed to fetch PDF records: ${response.status} ${response.statusText} - ${errorText}`,
+          ),
+        };
+      }
+
+      const data = await response.json();
+      return { data, error: null };
+    } catch (error) {
+      logger.error(`API fetch PDF records error: ${error.message}`);
+      return { data: null, error };
+    }
+  }
+
+  /**
+   * Batch update detection results
+   * @param {Array} updates - Array of update objects with { id, ...updateData }
+   * @returns {Promise<Object>} Update result { success: boolean, updated: number, errors: Array }
+   */
+  async batchUpdateDetectionResults(updates) {
+    if (!updates || !Array.isArray(updates) || updates.length === 0) {
+      return { success: true, updated: 0, errors: [] };
+    }
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const response = await fetch(
+        `${this.baseUrl}/api/uploader/batch-update-detection`,
+        {
+          method: 'PATCH',
+          headers: {
+            'x-api-key': this.token,
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify({ updates }),
+          agent: isHttps ? this.httpsAgent : this.httpAgent,
+        },
+      );
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        throw new Error(
+          `Batch update failed: ${response.status} ${response.statusText} - ${errorText}`,
+        );
+      }
+
+      const result = await response.json();
+      return result;
+    } catch (error) {
+      logger.error(`Batch update API error: ${error.message}`);
+      return {
+        success: false,
+        updated: 0,
+        errors: [{ message: error.message }],
+      };
+    }
+  }
+
+  /**
+   * Execute arela_path propagation on the backend
+   * This triggers a server-side process that propagates arela_path from pedimentos to related files
+   * @param {Object} options - Propagation options
+   * @param {Array} options.years - Optional year filter
+   * @returns {Promise<Object>} Propagation result { success: boolean, processedCount, updatedCount, errorCount }
+   */
+  async propagateArelaPath(options = {}) {
+    const { years = [] } = options;
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const response = await fetch(
+        `${this.baseUrl}/api/uploader/propagate-arela-path`,
+        {
+          method: 'POST',
+          headers: {
+            'x-api-key': this.token,
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify({ years }),
+          agent: isHttps ? this.httpsAgent : this.httpAgent,
+        },
+      );
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        throw new Error(
+          `Arela path propagation failed: ${response.status} ${response.statusText} - ${errorText}`,
+        );
+      }
+
+      const result = await response.json();
+      return result;
+    } catch (error) {
+      logger.error(`Propagate arela_path API error: ${error.message}`);
+      return {
+        success: false,
+        processedCount: 0,
+        updatedCount: 0,
+        errorCount: 1,
+        error: error.message,
+      };
+    }
+  }
+
+  /**
+   * Fetch RFC file count
+   * @param {Object} options - Query options
+   * @param {Array} options.rfcs - Array of RFC values to filter
+   * @returns {Promise<Object>} { count: number, error: Error|null }
+   */
+  async fetchRfcFileCount(options = {}) {
+    const { rfcs = [] } = options;
+
+    if (!rfcs || rfcs.length === 0) {
+      return { count: 0, error: null };
+    }
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const url = new URL(`${this.baseUrl}/api/uploader/rfc-file-count`);
+      url.searchParams.append('rfcs', rfcs.join(','));
+
+      const response = await fetch(url.toString(), {
+        method: 'GET',
+        headers: {
+          'x-api-key': this.token,
+          'Content-Type': 'application/json',
+        },
+        agent: isHttps ? this.httpsAgent : this.httpAgent,
+      });
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        return {
+          count: 0,
+          error: new Error(
+            `Failed to fetch RFC file count: ${response.status} ${response.statusText} - ${errorText}`,
+          ),
+        };
+      }
+
+      const data = await response.json();
+      return { count: data.count || 0, error: null };
+    } catch (error) {
+      logger.error(`API fetch RFC file count error: ${error.message}`);
+      return { count: 0, error };
+    }
+  }
+
+  /**
+   * Fetch pedimento records by RFC
+   * @param {Object} options - Query options
+   * @param {Array} options.rfcs - Array of RFC values to filter
+   * @param {Array} options.years - Optional year filter
+   * @param {number} options.offset - Pagination offset
+   * @param {number} options.limit - Number of records to fetch
+   * @returns {Promise<Object>} { data: Array, error: Error|null }
+   */
+  async fetchPedimentosByRfc(options = {}) {
+    const { rfcs = [], years = [], offset = 0, limit = 500 } = options;
+
+    if (!rfcs || rfcs.length === 0) {
+      return { data: [], error: null };
+    }
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const url = new URL(`${this.baseUrl}/api/uploader/pedimentos-by-rfc`);
+      url.searchParams.append('rfcs', rfcs.join(','));
+      if (years && years.length > 0) {
+        url.searchParams.append('years', years.join(','));
+      }
+      url.searchParams.append('offset', offset);
+      url.searchParams.append('limit', limit);
+
+      const response = await fetch(url.toString(), {
+        method: 'GET',
+        headers: {
+          'x-api-key': this.token,
+          'Content-Type': 'application/json',
+        },
+        agent: isHttps ? this.httpsAgent : this.httpAgent,
+      });
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        return {
+          data: null,
+          error: new Error(
+            `Failed to fetch pedimentos by RFC: ${response.status} ${response.statusText} - ${errorText}`,
+          ),
+        };
+      }
+
+      const data = await response.json();
+      return { data, error: null };
+    } catch (error) {
+      logger.error(`API fetch pedimentos by RFC error: ${error.message}`);
+      return { data: null, error };
+    }
+  }
+
+  /**
+   * Fetch files for upload by arela_path
+   * @param {Object} options - Query options
+   * @param {Array} options.arelaPaths - Array of arela_path values to filter
+   * @param {number} options.offset - Pagination offset
+   * @param {number} options.limit - Number of records to fetch
+   * @returns {Promise<Object>} { data: Array, error: Error|null }
+   */
+  async fetchFilesForUpload(options = {}) {
+    const { arelaPaths = [], offset = 0, limit = 1000 } = options;
+
+    if (!arelaPaths || arelaPaths.length === 0) {
+      return { data: [], error: null };
+    }
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const url = new URL(`${this.baseUrl}/api/uploader/files-for-upload`);
+      url.searchParams.append('arela_paths', arelaPaths.join('|'));
+      url.searchParams.append('offset', offset);
+      url.searchParams.append('limit', limit);
+
+      const response = await fetch(url.toString(), {
+        method: 'GET',
+        headers: {
+          'x-api-key': this.token,
+          'Content-Type': 'application/json',
+        },
+        agent: isHttps ? this.httpsAgent : this.httpAgent,
+      });
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        return {
+          data: null,
+          error: new Error(
+            `Failed to fetch files for upload: ${response.status} ${response.statusText} - ${errorText}`,
+          ),
+        };
+      }
+
+      const data = await response.json();
+      return { data, error: null };
+    } catch (error) {
+      logger.error(`API fetch files for upload error: ${error.message}`);
+      return { data: null, error };
+    }
+  }
+
+  /**
+   * Update file status after upload
+   * @param {Array} updates - Array of update objects with { id, status, message, processing_status }
+   * @returns {Promise<Object>} Update result { success: boolean, updated: number, errors: Array }
+   */
+  async updateFileStatus(updates) {
+    if (!updates || !Array.isArray(updates) || updates.length === 0) {
+      return { success: true, updated: 0, errors: [] };
+    }
+
+    try {
+      const isHttps = this.baseUrl.startsWith('https');
+      const response = await fetch(
+        `${this.baseUrl}/api/uploader/batch-update-status`,
+        {
+          method: 'PATCH',
+          headers: {
+            'x-api-key': this.token,
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify({ updates }),
+          agent: isHttps ? this.httpsAgent : this.httpAgent,
+        },
+      );
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        throw new Error(
+          `Batch status update failed: ${response.status} ${response.statusText} - ${errorText}`,
+        );
+      }
+
+      const result = await response.json();
+      return result;
+    } catch (error) {
+      logger.error(`Batch status update API error: ${error.message}`);
+      return {
+        success: false,
+        updated: 0,
+        errors: [{ message: error.message }],
+      };
+    }
+  }
+
   /**
    * Get service name
    * @returns {string} Service name
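Taken together, the new helpers turn the CLI into a paginated read-process-write client of the `/api/uploader/*` endpoints: fetch candidate PDF records, run detection locally, push the results back in batches, and optionally trigger server-side `arela_path` propagation. A sketch of that loop under stated assumptions: `detectPedimento()` is a placeholder for the local detection step and each record is assumed to carry an `id`; the two service methods and their `{ data, error }` / `{ success, updated, errors }` shapes come from the diff:

```js
// Sketch (inside an async function): `service` is an ApiUploadService instance,
// detectPedimento() is a placeholder for the local detection step.
const limit = 100; // page size is an arbitrary choice here
for (let offset = 0; ; offset += limit) {
  const { data, error } = await service.fetchPdfRecordsForDetection({ offset, limit });
  if (error) throw error;
  if (!data || data.length === 0) break;

  // One update per record; fields other than `id` depend on the backend schema.
  const updates = data.map((record) => ({ id: record.id, ...detectPedimento(record) }));

  const { success, updated, errors } = await service.batchUpdateDetectionResults(updates);
  if (!success) console.warn('Detection updates failed:', errors);
  else console.log('Updated', updated, 'records at offset', offset);
}
```

All of these helpers authenticate with the `x-api-key` header against whatever `baseUrl` and `token` the instance currently holds, so they respect an externally pinned configuration as well as the default `appConfig` target.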
package/src/services/upload/MultiApiUploadService.js (new file)
@@ -0,0 +1,233 @@
+import { Agent } from 'http';
+import { Agent as HttpsAgent } from 'https';
+import fetch from 'node-fetch';
+
+import appConfig from '../../config/config.js';
+import logger from '../LoggingService.js';
+import { ApiUploadService } from './ApiUploadService.js';
+
+/**
+ * Multi-API Upload Service
+ *
+ * Extends ApiUploadService to support uploading to multiple API instances
+ * based on RFC/client routing. Uses service discovery from Global API.
+ */
+export class MultiApiUploadService extends ApiUploadService {
+  constructor() {
+    super();
+
+    // Service discovery configuration
+    this.globalApiUrl = process.env.API_GLOBAL_URL || appConfig.api.baseUrl;
+    this.serviceCache = new Map(); // RFC -> { url, cachedAt }
+    this.cacheTtlMs = parseInt(process.env.SERVICE_CACHE_TTL) || 300000; // 5 minutes
+
+    logger.info('MultiApiUploadService initialized');
+    logger.info(` Global API: ${this.globalApiUrl}`);
+  }
+
+  /**
+   * Discover service URL for a specific RFC
+   * @param {string} rfc - The client RFC
+   * @returns {Promise<string|null>} Service URL or null
+   */
+  async discoverServiceByRfc(rfc) {
+    const rfcUpper = rfc.toUpperCase();
+
+    // Check cache first
+    const cached = this.serviceCache.get(rfcUpper);
+    if (cached && Date.now() - cached.cachedAt < this.cacheTtlMs) {
+      logger.debug(
+        `Using cached service URL for ${rfcUpper}: ${cached.url}`,
+      );
+      return cached.url;
+    }
+
+    // Fetch from Global API
+    try {
+      const response = await fetch(
+        `${this.globalApiUrl}/api/service-registry/by-rfc/${rfcUpper}`,
+        {
+          method: 'GET',
+          headers: {
+            Authorization: `Bearer ${this.token}`,
+            'Content-Type': 'application/json',
+          },
+          agent: this.globalApiUrl.startsWith('https')
+            ? this.httpsAgent
+            : this.httpAgent,
+        },
+      );
+
+      if (!response.ok) {
+        logger.warn(
+          `⚠️ Service discovery failed for ${rfcUpper}: ${response.status}`,
+        );
+        return null;
+      }
+
+      const data = await response.json();
+
+      if (data.url) {
+        // Cache the result
+        this.serviceCache.set(rfcUpper, {
+          url: data.url,
+          cachedAt: Date.now(),
+        });
+
+        logger.info(`Discovered service for ${rfcUpper}: ${data.url}`);
+        return data.url;
+      }
+
+      return null;
+    } catch (error) {
+      logger.error(
+        `❌ Service discovery error for ${rfcUpper}: ${error.message}`,
+      );
+      return null;
+    }
+  }
+
+  /**
+   * Clear service cache
+   */
+  clearCache() {
+    this.serviceCache.clear();
+    logger.info('🧹 Service cache cleared');
+  }
+
+  /**
+   * Clear cache for specific RFC
+   * @param {string} rfc - The RFC to clear
+   */
+  clearRfcCache(rfc) {
+    this.serviceCache.delete(rfc.toUpperCase());
+  }
+
+  /**
+   * Upload files to the appropriate API based on RFC
+   * @param {Array} files - Array of file objects
+   * @param {Object} options - Upload options including rfc
+   * @returns {Promise<Object>} API response
+   */
+  async upload(files, options) {
+    const { rfc } = options;
+
+    if (!rfc) {
+      // No RFC specified, use default API
+      return super.upload(files, options);
+    }
+
+    // Discover the service URL for this RFC
+    const serviceUrl = await this.discoverServiceByRfc(rfc);
+
+    if (serviceUrl) {
+      // Override baseUrl for this upload
+      const originalBaseUrl = this.baseUrl;
+      this.baseUrl = serviceUrl;
+
+      try {
+        logger.info(
+          `📤 Uploading ${files.length} files to ${serviceUrl} (RFC: ${rfc})`,
+        );
+        const result = await super.upload(files, options);
+        return result;
+      } finally {
+        // Restore original baseUrl
+        this.baseUrl = originalBaseUrl;
+      }
+    }
+
+    // Fallback to default API if service discovery fails
+    logger.warn(`⚠️ No specific service found for ${rfc}, using default API`);
+    return super.upload(files, options);
+  }
+
+  /**
+   * Upload files to multiple RFCs in parallel
+   * @param {Object} filesByRfc - Map of RFC -> files array
+   * @param {Object} options - Upload options
+   * @returns {Promise<Map<string, Object>>} Results by RFC
+   */
+  async uploadToMultipleRfcs(filesByRfc, options = {}) {
+    const results = new Map();
+    const rfcs = Object.keys(filesByRfc);
+
+    logger.info(`📤 Starting multi-RFC upload for ${rfcs.length} RFCs`);
+
+    // Process RFCs in parallel (with concurrency limit)
+    const concurrencyLimit = parseInt(process.env.MULTI_RFC_CONCURRENCY) || 3;
+    const chunks = [];
+
+    for (let i = 0; i < rfcs.length; i += concurrencyLimit) {
+      chunks.push(rfcs.slice(i, i + concurrencyLimit));
+    }
+
+    for (const chunk of chunks) {
+      await Promise.all(
+        chunk.map(async (rfc) => {
+          try {
+            const files = filesByRfc[rfc];
+            const result = await this.upload(files, { ...options, rfc });
+            results.set(rfc, { success: true, result });
+          } catch (error) {
+            logger.error(`❌ Upload failed for RFC ${rfc}: ${error.message}`);
+            results.set(rfc, { success: false, error: error.message });
+          }
+        }),
+      );
+    }
+
+    // Log summary
+    const successful = [...results.values()].filter((r) => r.success).length;
+    const failed = results.size - successful;
+    logger.info(
+      `✅ Multi-RFC upload complete: ${successful} successful, ${failed} failed`,
+    );
+
+    return results;
+  }
+
+  /**
+   * Get all available services from Global API
+   * @returns {Promise<Array>} List of services
+   */
+  async getAllServices() {
+    try {
+      const response = await fetch(
+        `${this.globalApiUrl}/api/service-registry/services`,
+        {
+          method: 'GET',
+          headers: {
+            Authorization: `Bearer ${this.token}`,
+            'Content-Type': 'application/json',
+          },
+          agent: this.globalApiUrl.startsWith('https')
+            ? this.httpsAgent
+            : this.httpAgent,
+        },
+      );
+
+      if (!response.ok) {
+        throw new Error(`Failed to fetch services: ${response.status}`);
+      }
+
+      const data = await response.json();
+      return data.services || [];
+    } catch (error) {
+      logger.error(`❌ Failed to get services: ${error.message}`);
+      return [];
+    }
+  }
+
+  /**
+   * Pre-warm cache for a list of RFCs
+   * @param {Array<string>} rfcs - List of RFCs to cache
+   */
+  async prewarmCache(rfcs) {
+    logger.info(`🔥 Pre-warming cache for ${rfcs.length} RFCs`);
+
+    await Promise.all(rfcs.map((rfc) => this.discoverServiceByRfc(rfc)));
+
+    logger.info('✅ Cache pre-warm complete');
+  }
+}
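The multi-RFC path groups files per client, resolves each client's API instance through the Global API service registry (caching the answer), and falls back to the default API when discovery returns nothing. A short usage sketch; the RFCs and file objects are invented and the import path is an assumption, while the class, method names and the `Map` result shape come from the new file:

```js
// Sketch only: RFCs, file lists and the import path are illustrative.
import { MultiApiUploadService } from './src/services/upload/MultiApiUploadService.js';

const uploader = new MultiApiUploadService();

const filesByRfc = {
  XAXX010101000: [{ name: 'a.pdf', path: '/inbox/a.pdf' }],
  XEXX010101000: [{ name: 'b.pdf', path: '/inbox/b.pdf' }],
};

// Optional: resolve service URLs up front so the uploads hit a warm cache.
await uploader.prewarmCache(Object.keys(filesByRfc));

const results = await uploader.uploadToMultipleRfcs(filesByRfc, { autoDetect: true });
for (const [rfc, outcome] of results) {
  console.log(rfc, outcome.success ? 'ok' : 'failed: ' + outcome.error);
}
```

Concurrency is bounded by `MULTI_RFC_CONCURRENCY` (default 3) and discovery results are cached for `SERVICE_CACHE_TTL` milliseconds (default 300000), so repeated runs against the same clients avoid redundant registry lookups.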